Repository: Luffy03/VoCo
Branch: main
Commit: ba7b7bd22e98
Files: 329
Total size: 2.7 MB
Directory structure:
gitextract_xwu1bqsv/
├── Finetune/
│ ├── AbdomenAtlas/
│ │ ├── Atlas_test.py
│ │ ├── Atlas_test.sh
│ │ ├── check.py
│ │ ├── dataset/
│ │ │ ├── __init__.py
│ │ │ ├── dataloader_bdmap.py
│ │ │ ├── dataloader_test.py
│ │ │ └── dataset_list/
│ │ │ └── AbdomenAtlas1.0.txt
│ │ ├── main.py
│ │ ├── optimizers/
│ │ │ ├── __init__.py
│ │ │ └── lr_scheduler.py
│ │ ├── preprocess/
│ │ │ └── try_load.py
│ │ ├── readme.md
│ │ ├── requirements.txt
│ │ ├── train.sh
│ │ ├── train.slurm
│ │ ├── trainer.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ ├── data_trans.py
│ │ ├── mixup.py
│ │ └── utils.py
│ ├── Amos/
│ │ ├── check_test.py
│ │ ├── dataset/
│ │ │ ├── __init__.py
│ │ │ ├── dataset.json
│ │ │ └── dataset_test50.json
│ │ ├── dataset_CT.json
│ │ ├── gen_json.py
│ │ ├── inferers.py
│ │ ├── main.py
│ │ ├── optimizers/
│ │ │ ├── __init__.py
│ │ │ └── lr_scheduler.py
│ │ ├── pre_cache.py
│ │ ├── test.py
│ │ ├── train.sh
│ │ ├── trainer.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── data_test.py
│ │ │ ├── data_utils.py
│ │ │ └── utils.py
│ │ └── val.py
│ ├── BTCV/
│ │ ├── dataset/
│ │ │ ├── __init__.py
│ │ │ └── dataset_0.json
│ │ ├── main.py
│ │ ├── optimizers/
│ │ │ ├── __init__.py
│ │ │ └── lr_scheduler.py
│ │ ├── trainer.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── data_test.py
│ │ │ ├── data_utils.py
│ │ │ └── utils.py
│ │ └── val.py
│ ├── CC-CCII/
│ │ ├── csv/
│ │ │ ├── CC_CCII_fold0_train.csv
│ │ │ ├── CC_CCII_fold0_valid.csv
│ │ │ ├── CC_CCII_fold1_train.csv
│ │ │ ├── CC_CCII_fold1_valid.csv
│ │ │ ├── CC_CCII_fold2_train.csv
│ │ │ ├── CC_CCII_fold2_valid.csv
│ │ │ └── CC_CCII_metadata.csv
│ │ ├── dataset/
│ │ │ └── __init__.py
│ │ ├── eval.py
│ │ ├── main.py
│ │ ├── model.py
│ │ ├── optimizers/
│ │ │ ├── __init__.py
│ │ │ └── lr_scheduler.py
│ │ ├── train.sh
│ │ ├── trainer.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ ├── data_utils.py
│ │ └── utils.py
│ ├── Flare22/
│ │ ├── __init__.py
│ │ ├── dataset/
│ │ │ ├── __init__.py
│ │ │ ├── dataset.json
│ │ │ └── dataset_test50.json
│ │ ├── inferers.py
│ │ ├── main.py
│ │ ├── optimizers/
│ │ │ ├── __init__.py
│ │ │ └── lr_scheduler.py
│ │ ├── train.sh
│ │ ├── trainer.py
│ │ ├── utils/
│ │ │ ├── __init__.py
│ │ │ ├── data_test.py
│ │ │ ├── data_utils.py
│ │ │ └── utils.py
│ │ └── val.py
│ ├── MM-WHS/
│ │ ├── dataset.json
│ │ ├── inferers.py
│ │ ├── main.py
│ │ ├── optimizers/
│ │ │ ├── __init__.py
│ │ │ └── lr_scheduler.py
│ │ ├── pretrained_models/
│ │ │ └── __init__.py
│ │ ├── test.py
│ │ ├── train.sh
│ │ ├── trainer.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ ├── data_utils.py
│ │ └── utils.py
│ ├── Word/
│ │ ├── dataset/
│ │ │ ├── __init__.py
│ │ │ └── dataset_word.json
│ │ ├── main.py
│ │ ├── optimizers/
│ │ │ ├── __init__.py
│ │ │ └── lr_scheduler.py
│ │ ├── train.sh
│ │ ├── train.slurm
│ │ ├── trainer.py
│ │ └── utils/
│ │ ├── __init__.py
│ │ ├── data_utils.py
│ │ └── utils.py
│ └── nnUNet/
│ ├── LICENSE
│ ├── documentation/
│ │ ├── __init__.py
│ │ ├── benchmarking.md
│ │ ├── changelog.md
│ │ ├── competitions/
│ │ │ └── AutoPETII.md
│ │ ├── convert_msd_dataset.md
│ │ ├── dataset_format.md
│ │ ├── dataset_format_inference.md
│ │ ├── explanation_normalization.md
│ │ ├── explanation_plans_files.md
│ │ ├── extending_nnunet.md
│ │ ├── how_to_use_nnunet.md
│ │ ├── installation_instructions.md
│ │ ├── manual_data_splits.md
│ │ ├── pretraining_and_finetuning.md
│ │ ├── region_based_training.md
│ │ ├── run_inference_with_pretrained_models.md
│ │ ├── set_environment_variables.md
│ │ ├── setting_up_paths.md
│ │ └── tldr_migration_guide_from_v1.md
│ ├── msd.txt
│ ├── nnunetv2/
│ │ ├── __init__.py
│ │ ├── batch_running/
│ │ │ ├── __init__.py
│ │ │ ├── benchmarking/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── generate_benchmarking_commands.py
│ │ │ │ └── summarize_benchmark_results.py
│ │ │ ├── collect_results_custom_Decathlon.py
│ │ │ ├── collect_results_custom_Decathlon_2d.py
│ │ │ ├── generate_lsf_runs_customDecathlon.py
│ │ │ └── release_trainings/
│ │ │ ├── __init__.py
│ │ │ └── nnunetv2_v1/
│ │ │ ├── __init__.py
│ │ │ ├── collect_results.py
│ │ │ └── generate_lsf_commands.py
│ │ ├── configuration.py
│ │ ├── dataset_conversion/
│ │ │ ├── Dataset017_BTCV.py
│ │ │ ├── Dataset027_ACDC.py
│ │ │ ├── Dataset073_Fluo_C3DH_A549_SIM.py
│ │ │ ├── Dataset114_MNMs.py
│ │ │ ├── Dataset115_EMIDEC.py
│ │ │ ├── Dataset120_RoadSegmentation.py
│ │ │ ├── Dataset137_BraTS21.py
│ │ │ ├── Dataset218_Amos2022_task1.py
│ │ │ ├── Dataset219_Amos2022_task2.py
│ │ │ ├── Dataset220_KiTS2023.py
│ │ │ ├── Dataset221_AutoPETII_2023.py
│ │ │ ├── Dataset988_dummyDataset4.py
│ │ │ ├── __init__.py
│ │ │ ├── convert_MSD_dataset.py
│ │ │ ├── convert_raw_dataset_from_old_nnunet_format.py
│ │ │ ├── datasets_for_integration_tests/
│ │ │ │ ├── Dataset996_IntegrationTest_Hippocampus_regions_ignore.py
│ │ │ │ ├── Dataset997_IntegrationTest_Hippocampus_regions.py
│ │ │ │ ├── Dataset998_IntegrationTest_Hippocampus_ignore.py
│ │ │ │ ├── Dataset999_IntegrationTest_Hippocampus.py
│ │ │ │ └── __init__.py
│ │ │ └── generate_dataset_json.py
│ │ ├── ensembling/
│ │ │ ├── __init__.py
│ │ │ └── ensemble.py
│ │ ├── evaluation/
│ │ │ ├── __init__.py
│ │ │ ├── accumulate_cv_results.py
│ │ │ ├── evaluate_predictions.py
│ │ │ └── find_best_configuration.py
│ │ ├── experiment_planning/
│ │ │ ├── __init__.py
│ │ │ ├── dataset_fingerprint/
│ │ │ │ ├── __init__.py
│ │ │ │ └── fingerprint_extractor.py
│ │ │ ├── experiment_planners/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── default_experiment_planner.py
│ │ │ │ ├── network_topology.py
│ │ │ │ ├── readme.md
│ │ │ │ └── resencUNet_planner.py
│ │ │ ├── plan_and_preprocess_api.py
│ │ │ ├── plan_and_preprocess_entrypoints.py
│ │ │ ├── plans_for_pretraining/
│ │ │ │ ├── __init__.py
│ │ │ │ └── move_plans_between_datasets.py
│ │ │ └── verify_dataset_integrity.py
│ │ ├── imageio/
│ │ │ ├── __init__.py
│ │ │ ├── base_reader_writer.py
│ │ │ ├── natural_image_reader_writer.py
│ │ │ ├── nibabel_reader_writer.py
│ │ │ ├── reader_writer_registry.py
│ │ │ ├── readme.md
│ │ │ ├── simpleitk_reader_writer.py
│ │ │ └── tif_reader_writer.py
│ │ ├── inference/
│ │ │ ├── __init__.py
│ │ │ ├── data_iterators.py
│ │ │ ├── examples.py
│ │ │ ├── export_prediction.py
│ │ │ ├── predict_from_raw_data.py
│ │ │ ├── readme.md
│ │ │ └── sliding_window_prediction.py
│ │ ├── model_sharing/
│ │ │ ├── __init__.py
│ │ │ ├── entry_points.py
│ │ │ ├── model_download.py
│ │ │ ├── model_export.py
│ │ │ └── model_import.py
│ │ ├── paths.py
│ │ ├── postprocessing/
│ │ │ ├── __init__.py
│ │ │ └── remove_connected_components.py
│ │ ├── preprocessing/
│ │ │ ├── __init__.py
│ │ │ ├── cropping/
│ │ │ │ ├── __init__.py
│ │ │ │ └── cropping.py
│ │ │ ├── normalization/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── default_normalization_schemes.py
│ │ │ │ ├── map_channel_name_to_normalization.py
│ │ │ │ └── readme.md
│ │ │ ├── preprocessors/
│ │ │ │ ├── __init__.py
│ │ │ │ └── default_preprocessor.py
│ │ │ └── resampling/
│ │ │ ├── __init__.py
│ │ │ ├── default_resampling.py
│ │ │ └── utils.py
│ │ ├── run/
│ │ │ ├── __init__.py
│ │ │ ├── load_pretrained_weights.py
│ │ │ └── run_training.py
│ │ ├── tests/
│ │ │ ├── __init__.py
│ │ │ └── integration_tests/
│ │ │ ├── __init__.py
│ │ │ ├── add_lowres_and_cascade.py
│ │ │ ├── cleanup_integration_test.py
│ │ │ ├── lsf_commands.sh
│ │ │ ├── prepare_integration_tests.sh
│ │ │ ├── readme.md
│ │ │ ├── run_integration_test.sh
│ │ │ ├── run_integration_test_bestconfig_inference.py
│ │ │ └── run_integration_test_trainingOnly_DDP.sh
│ │ ├── training/
│ │ │ ├── __init__.py
│ │ │ ├── data_augmentation/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── compute_initial_patch_size.py
│ │ │ │ └── custom_transforms/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── cascade_transforms.py
│ │ │ │ ├── deep_supervision_donwsampling.py
│ │ │ │ ├── limited_length_multithreaded_augmenter.py
│ │ │ │ ├── manipulating_data_dict.py
│ │ │ │ ├── masking.py
│ │ │ │ ├── region_based_training.py
│ │ │ │ └── transforms_for_dummy_2d.py
│ │ │ ├── dataloading/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── base_data_loader.py
│ │ │ │ ├── data_loader_2d.py
│ │ │ │ ├── data_loader_3d.py
│ │ │ │ ├── nnunet_dataset.py
│ │ │ │ └── utils.py
│ │ │ ├── logging/
│ │ │ │ ├── __init__.py
│ │ │ │ └── nnunet_logger.py
│ │ │ ├── loss/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── compound_losses.py
│ │ │ │ ├── deep_supervision.py
│ │ │ │ ├── dice.py
│ │ │ │ └── robust_ce_loss.py
│ │ │ ├── lr_scheduler/
│ │ │ │ ├── __init__.py
│ │ │ │ └── polylr.py
│ │ │ └── nnUNetTrainer/
│ │ │ ├── __init__.py
│ │ │ ├── nnUNetTrainer.py
│ │ │ ├── nnUNetTrainer_swin.py
│ │ │ ├── variants/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── benchmarking/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── nnUNetTrainerBenchmark_5epochs.py
│ │ │ │ │ └── nnUNetTrainerBenchmark_5epochs_noDataLoading.py
│ │ │ │ ├── data_augmentation/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── nnUNetTrainerDA5.py
│ │ │ │ │ ├── nnUNetTrainerDAOrd0.py
│ │ │ │ │ ├── nnUNetTrainerNoDA.py
│ │ │ │ │ └── nnUNetTrainerNoMirroring.py
│ │ │ │ ├── loss/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── nnUNetTrainerCELoss.py
│ │ │ │ │ ├── nnUNetTrainerDiceLoss.py
│ │ │ │ │ └── nnUNetTrainerTopkLoss.py
│ │ │ │ ├── lr_schedule/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── nnUNetTrainerCosAnneal.py
│ │ │ │ ├── network_architecture/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── nnUNetTrainerBN.py
│ │ │ │ │ └── nnUNetTrainerNoDeepSupervision.py
│ │ │ │ ├── optimizer/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── nnUNetTrainerAdam.py
│ │ │ │ │ └── nnUNetTrainerAdan.py
│ │ │ │ ├── sampling/
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── nnUNetTrainer_probabilisticOversampling.py
│ │ │ │ └── training_length/
│ │ │ │ ├── __init__.py
│ │ │ │ ├── nnUNetTrainer_Xepochs.py
│ │ │ │ └── nnUNetTrainer_Xepochs_NoMirroring.py
│ │ │ └── vit.py
│ │ └── utilities/
│ │ ├── __init__.py
│ │ ├── collate_outputs.py
│ │ ├── dataset_name_id_conversion.py
│ │ ├── ddp_allgather.py
│ │ ├── default_n_proc_DA.py
│ │ ├── file_path_utilities.py
│ │ ├── find_class_by_name.py
│ │ ├── get_network_from_plans.py
│ │ ├── helpers.py
│ │ ├── json_export.py
│ │ ├── label_handling/
│ │ │ ├── __init__.py
│ │ │ └── label_handling.py
│ │ ├── network_initialization.py
│ │ ├── overlay_plots.py
│ │ ├── plans_handling/
│ │ │ ├── __init__.py
│ │ │ └── plans_handler.py
│ │ └── utils.py
│ ├── nnunetv2.egg-info/
│ │ ├── PKG-INFO
│ │ ├── SOURCES.txt
│ │ ├── dependency_links.txt
│ │ ├── entry_points.txt
│ │ ├── requires.txt
│ │ └── top_level.txt
│ ├── pyproject.toml
│ └── setup.py
├── LICENSE
├── README.md
├── jsons/
│ ├── HNSCC.json
│ ├── Totalsegmentator_dataset.json
│ ├── __init__.py
│ ├── btcv.json
│ ├── dataset_LUNA16_0.json
│ ├── dataset_TCIAcovid19_0.json
│ ├── flare23.json
│ └── stoic21.json
├── models/
│ └── voco_head.py
├── optimizers/
│ ├── __init__.py
│ └── lr_scheduler.py
├── requirements.txt
├── train.sh
├── utils/
│ ├── __init__.py
│ ├── data_utils.py
│ ├── ops.py
│ └── utils.py
└── voco_train.py
================================================
FILE CONTENTS
================================================
================================================
FILE: Finetune/AbdomenAtlas/Atlas_test.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from dataset.dataloader_test import get_test_loader_Atlas
import SimpleITK as sitk
from monai.inferers import sliding_window_inference
# from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import *
from monai.utils.enums import MetricReduction
from monai.handlers import StatsHandler, from_engine
import matplotlib.pyplot as plt
from utils.utils import *
from PIL import Image
from monai import data, transforms
from monai.data import *
import resource
# Raise the soft open-file limit: many dataloader workers each hold file
# handles, which can exhaust the default per-process quota.
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# Rendezvous address for torch.distributed (used when launched via torchrun).
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
# Input / output locations and the checkpoint to evaluate.
parser.add_argument(
    "--test_data_path", default="./test_examples/AbdomenAtlasTest/", type=str, help="test_data_path")
parser.add_argument(
    "--save_prediction_path", default="./test_examples/AbdomenAtlasPredict/", type=str, help="test_prediction_path")
parser.add_argument(
    "--trained_pth", default="./runs/logs/model_val50_91.88.pt", type=str, help="trained checkpoint directory")
# Cubic sliding-window edge length shared by roi_x / roi_y / roi_z below.
roi = 96
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
# 10 outputs = background + the 9 AbdomenAtlas organs.
parser.add_argument("--out_channels", default=10, type=int, help="number of output channels")
# CT intensity window (HU) mapped to [b_min, b_max] by ScaleIntensityRanged.
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
# Target voxel spacing (mm) for resampling.
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=2.0, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=16, type=int, help="number of workers")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
def main():
    """Run sliding-window inference on the AbdomenAtlas test set and save one
    binary NIfTI mask per organ for every case.

    Pipeline: build SwinUNETR and load the trained checkpoint -> sliding-window
    inference under autocast -> argmax to a class-index map -> invert the
    test-time preprocessing -> one-hot split -> write one mask per organ under
    ``<save_prediction_path>/<case>/predictions``.
    """
    args = parser.parse_args()
    test_loader, test_transforms = get_test_loader_Atlas(args)
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=0.0,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # map_location keeps checkpoint loading robust when the weights were saved
    # on GPU but this process starts on a CPU-only host.
    model_dict = torch.load(args.trained_pth, map_location="cpu")["state_dict"]
    model.load_state_dict(model_dict, strict=True)
    model.eval()
    model.to(device)
    # enable cuDNN benchmark
    torch.backends.cudnn.benchmark = True
    post_transforms = Compose([
        EnsureTyped(keys=["pred"]),
        # undo the test-time preprocessing so predictions live in the
        # original image space
        Invertd(keys=["pred"],
                transform=test_transforms,
                orig_keys="image",
                meta_keys="pred_meta_dict",
                orig_meta_keys="image_meta_dict",
                meta_key_postfix="meta_dict",
                nearest_interp=True,
                to_tensor=True),
        # split the class-index map into one channel per class
        AsDiscreted(keys="pred", argmax=False, to_onehot=args.out_channels),
    ])
    # channel c + 1 of the one-hot prediction corresponds to organ_ls[c]
    # (channel 0 is background); hoisted out of the loop — it is constant.
    organ_ls = ["aorta", "gall_bladder", "kidney_left", "kidney_right", "liver", "pancreas", "postcava",
                "spleen", "stomach"]
    with torch.no_grad():
        for batch_data in test_loader:
            torch.cuda.empty_cache()
            data = batch_data["image"].to(device)
            with autocast(enabled=True):
                # model_inferer is always a partial of sliding_window_inference;
                # the previous `is not None` fallback branch was unreachable.
                logits = model_inferer(data)
            batch_data['pred'] = logits.argmax(1)
            batch_data = post_transforms(batch_data)
            save_pred_dir = os.path.join(args.save_prediction_path, batch_data['name'][0], 'predictions')
            check_dir(save_pred_dir)
            # NOTE: a distinct loop variable (organ_idx) avoids shadowing the
            # batch index the original code reused here.
            for organ_idx, organ_name in enumerate(organ_ls):
                batch_data['organ'] = batch_data['pred'][organ_idx + 1, :, :, :]
                save_transforms = Compose([SaveImaged(keys="organ", meta_keys="pred_meta_dict",
                                                      output_dir=save_pred_dir, separate_folder=False,
                                                      folder_layout=None, output_postfix=organ_name,
                                                      resample=False)])
                save_transforms(batch_data)
                # SaveImaged emits 'ct_<organ>.nii.gz'; strip the 'ct_' prefix.
                os.rename(os.path.join(save_pred_dir, 'ct_'+organ_name+'.nii.gz'),
                          os.path.join(save_pred_dir, organ_name+'.nii.gz'))


if __name__ == "__main__":
    main()
================================================
FILE: Finetune/AbdomenAtlas/Atlas_test.sh
================================================
# Run AbdomenAtlas test-set inference with a trained checkpoint.
# Input cases are read from test_data_path; per-organ masks are written
# under save_prediction_path/<case>/predictions.
test_data_path=./test_examples/AbdomenAtlasTest/
save_prediction_path=./test_examples/AbdomenAtlasPredict/
# torchrun launches Atlas_test.py; --master_port avoids rendezvous-port
# clashes with other jobs on the same node.
torchrun --master_port=21472 Atlas_test.py \
--test_data_path $test_data_path --save_prediction_path $save_prediction_path
================================================
FILE: Finetune/AbdomenAtlas/check.py
================================================
import torch
import os
from tqdm import tqdm
import numpy as np
from utils.utils import *
from PIL import Image
import matplotlib.pyplot as plt
def read(img, transpose=False):
    """Load a volume with SimpleITK and return (array, direction, origin, spacing).

    With ``transpose=True`` the array axes are reordered from (z, y, x) to
    (y, x, z) so slices can be indexed along the last axis.
    """
    itk_img = sitk.ReadImage(img)
    meta = (itk_img.GetDirection(), itk_img.GetOrigin(), itk_img.GetSpacing())
    arr = sitk.GetArrayFromImage(itk_img)
    if transpose:
        arr = arr.transpose(1, 2, 0)
    return (arr,) + meta
def vis():
    """Visually spot-check cached (image, label) training pairs.

    Iterates over torch-serialized samples in a local cache folder and, for
    every axial slice containing more than five distinct label values, shows
    the CT slice next to its palette-colored label map.
    """
    # Raw string: the Windows path contains backslashes ('\d', '\c', '\A')
    # that would otherwise be parsed as invalid escape sequences.
    path = r'D:\data\cache\Atlas'
    ls = os.listdir(path)
    num = 0  # count of slices shown so far
    for i in ls:
        data = torch.load(os.path.join(path, i))
        img, lab = data['image'], data['label']
        print(img.shape, lab.shape)
        img = img[0].data.cpu().numpy()
        lab = lab[0].data.cpu().numpy()
        cls_set = list(np.unique(lab))
        print(cls_set)
        h, w, c = img.shape
        cmap = color_map()  # palette from utils.utils
        for j in range(c):
            im = img[:, :, j]
            la = lab[:, :, j]
            # only show slices with enough labeled structures to be interesting
            if len(list(np.unique(la))) > 5:
                im = (255 * im).astype(np.uint8)
                la = Image.fromarray(la.astype(np.uint8), mode='P')
                la.putpalette(cmap)
                num += 1
                fig, axs = plt.subplots(1, 2, figsize=(16, 5))
                axs[0].imshow(im, cmap='gray')
                axs[0].axis("off")
                axs[1].imshow(la)
                axs[1].axis("off")
                plt.tight_layout()
                plt.show()
                plt.close()
def check_original():
    """Compare a case's shipped ``label.nii.gz`` against a label map rebuilt
    from its per-organ ``segmentations`` masks, shown slice by slice.
    """
    # Raw string: the Windows path contains backslashes that would otherwise
    # be parsed as invalid escape sequences (e.g. '\d', '\B').
    path = r'D:\data\cache\Atlas\BDMAP_00000870/'
    img = read(path + 'ct.nii.gz', True)[0]
    gt = read(path + 'label.nii.gz', True)[0]
    label_path = path+'segmentations'
    organ_ls = ["aorta", "gall_bladder", "kidney_left", "kidney_right", "liver", "pancreas", "postcava", "spleen",
                "stomach"]
    lab = []
    for i in organ_ls:
        la = read(label_path + '/' + i + '.nii.gz', True)[0]
        la = np.expand_dims(la, 0)
        lab.append(la)
    labs = np.concatenate(lab, 0)
    print(img.shape, labs.shape)
    lab_bg = labs.sum(0)
    print(np.unique(labs.sum(0)))
    # merge the stacked binary masks into a single label map:
    # organ k -> label k+1, voxels covered by no mask -> 0 (background)
    lab = labs.argmax(0)
    lab += 1
    lab[lab_bg == 0] = 0
    print(np.unique(lab))
    h, w, c = img.shape
    cmap = color_map()
    for j in range(c):
        im = img[:, :, j]
        la = lab[:, :, j]
        g = gt[:, :, j]
        # only show slices that contain at least one organ
        if len(list(np.unique(la))) > 1:
            im = (255 * im).astype(np.uint8)
            la = Image.fromarray(la.astype(np.uint8), mode='P')
            la.putpalette(cmap)
            g = Image.fromarray(g.astype(np.uint8), mode='P')
            g.putpalette(cmap)
            fig, axs = plt.subplots(1, 3, figsize=(16, 5))
            axs[0].imshow(im, cmap='gray')
            axs[0].axis("off")
            axs[1].imshow(la)
            axs[1].axis("off")
            axs[2].imshow(g)
            axs[2].axis("off")
            plt.tight_layout()
            plt.show()
            plt.close()
def exe(path):
    """Merge the per-organ binary masks of one AbdomenAtlas case into a single
    multi-class ``label.nii.gz`` written next to the case's ``ct.nii.gz``.

    ``path`` is the case folder name relative to the dataset root.
    """
    root = '/project/medimgfmod/CT/AbdomenAtlasMini1.0/'
    path = root + path
    label_path = path + '/segmentations'
    organ_ls = ["aorta", "gall_bladder", "kidney_left", "kidney_right", "liver", "pancreas", "postcava", "spleen", "stomach"]
    masks = []
    for organ in organ_ls:
        mask, direction, origin, Spacing = read(label_path + '/' + organ + '.nii.gz')
        masks.append(np.expand_dims(mask, 0))
    stacked = np.concatenate(masks, 0)
    # voxels covered by no organ mask stay background (0);
    # organ k occupies label k + 1
    foreground = stacked.sum(0)
    merged = stacked.argmax(0) + 1
    merged[foreground == 0] = 0
    out = sitk.GetImageFromArray(merged)
    out.SetDirection(direction)
    out.SetOrigin(origin)
    out.SetSpacing(Spacing)
    sitk.WriteImage(out, path + '/' + 'label.nii.gz')
    print('save:', path + '/' + 'label.nii.gz')
def trans_lab(path):
    """Load the nine per-organ masks found under *path* and fuse them into a
    single multi-class label volume (0 = background, 1..9 = organs in the
    fixed AbdomenAtlas order).
    """
    organ_ls = ["aorta", "gall_bladder", "kidney_left", "kidney_right", "liver", "pancreas", "postcava", "spleen",
                "stomach"]
    masks = [np.expand_dims(read(path + '/' + organ + '.nii.gz', True)[0], 0)
             for organ in organ_ls]
    stacked = np.concatenate(masks, 0)
    foreground = stacked.sum(0)  # zero where no organ mask is set
    fused = stacked.argmax(0) + 1
    fused[foreground == 0] = 0
    return fused
def check_pred_vis():
    """Show a side-by-side visual comparison of two prediction folders for the
    same case, one axial slice at a time."""
    path = 'test_examples/AbdomenAtlasPredict/BDMAP_A0000002/predictions'
    path_temp = 'test_examples/AbdomenAtlasPredict_temp/BDMAP_A0000002/predictions'
    pred, pred_temp = trans_lab(path), trans_lab(path_temp)
    print(np.unique(pred), np.unique(pred_temp))
    h, w, c = pred.shape
    cmap = color_map()
    for j in range(c):
        slice_a = pred[:, :, j]
        slice_b = pred_temp[:, :, j]
        # skip slices with few labeled structures
        if len(list(np.unique(slice_a))) <= 5:
            continue
        pal_a = Image.fromarray(slice_a.astype(np.uint8), mode='P')
        pal_a.putpalette(cmap)
        pal_b = Image.fromarray(slice_b.astype(np.uint8), mode='P')
        pal_b.putpalette(cmap)
        fig, axs = plt.subplots(1, 2, figsize=(16, 5))
        axs[0].imshow(pal_a)
        axs[0].axis("off")
        axs[1].imshow(pal_b)
        axs[1].axis("off")
        plt.tight_layout()
        plt.show()
        plt.close()
def check_pred_acc():
    """Accumulate per-organ Dice between merged ground-truth labels and merged
    predictions over the dataset and print the running per-organ averages.

    Fix: the inner class loop reused the outer case variable ``i`` (variable
    shadowing); the case name and the class index are now distinct variables.
    """
    root = '/project/medimgfmod/CT/AbdomenAtlasMini1.0/'
    ls = os.listdir(root)
    # per-organ count of cases in which the organ is actually present,
    # used as the denominator of the running average
    num = np.zeros(9)
    from utils.utils import dice
    all_dice = None
    for case in ls:
        path = root + case
        label_path = path + '/segmentations'
        lab = trans_lab(label_path)
        pred_path = os.path.join('./test_examples/AbdomenAtlasPredict_train/' + case, 'predictions')
        pred = trans_lab(pred_path)
        dice_list_sub = []
        for cls in range(1, 10):
            num[cls - 1] += (np.sum(lab == cls) > 0).astype(np.uint8)
            organ_Dice = dice(pred == cls, lab == cls)
            dice_list_sub.append(organ_Dice)
        if all_dice is None:
            all_dice = (np.asarray(dice_list_sub)).copy()
        else:
            all_dice = all_dice + np.asarray(dice_list_sub)
        # running average after each case
        print("Organ Dice accumulate:", (all_dice / num), (all_dice / num).mean())
if __name__=='__main__':
    # vis()
    # check_pred_acc()
    # the path to Atlas train
    path = '/project/medimgfmod/CT/AbdomenAtlasMini1.0/'
    ls = os.listdir(path)
    import multiprocessing
    # Build label.nii.gz for every case by merging its per-organ masks,
    # 20 worker processes in parallel.
    with multiprocessing.Pool(20) as pool:
        pool.map(exe, ls, 1)  # chunksize=1 keeps work balanced across workers
================================================
FILE: Finetune/AbdomenAtlas/dataset/__init__.py
================================================
================================================
FILE: Finetune/AbdomenAtlas/dataset/dataloader_bdmap.py
================================================
from monai.transforms import *
import sys
import nibabel as nib
import os
import torch
import numpy as np
from typing import Optional, Union
import math
import pickle
from monai.data import *
from monai.data import DataLoader, Dataset, list_data_collate, DistributedSampler, CacheDataset, SmartCacheDataset
from monai.config import DtypeLike, KeysCollection
from monai.transforms.transform import MapTransform
from monai.transforms.io.array import LoadImage
from monai.utils import ensure_tuple, ensure_tuple_rep
from monai.data.image_reader import ImageReader
from monai.utils.enums import PostFix
from utils.data_trans import *
# Default metadata-key suffix used by MONAI dictionary transforms ("meta_dict").
DEFAULT_POST_FIX = PostFix.meta()
# class map for the AbdomenAtlas 1.0 dataset
# NOTE(review): keys are 0-based channel positions of the per-organ masks;
# after merging into a single label map, organ k occupies label k + 1.
class_map_abdomenatlas_1_0 = {
    0: "aorta",
    1: "gall_bladder",
    2: "kidney_left",
    3: "kidney_right",
    4: "liver",
    5: "pancreas",
    6: "postcava",
    7: "spleen",
    8: "stomach",
}
# class map for the AbdomenAtlas 1.1 dataset (superset of 1.0: the first nine
# entries match class_map_abdomenatlas_1_0)
class_map_abdomenatlas_1_1 = {
    0: 'aorta',
    1: 'gall_bladder',
    2: 'kidney_left',
    3: 'kidney_right',
    4: 'liver',
    5: 'pancreas',
    6: 'postcava',
    7: 'spleen',
    8: 'stomach',
    9: 'adrenal_gland_left',
    10: 'adrenal_gland_right',
    11: 'bladder',
    12: 'celiac_truck',
    13: 'colon',
    14: 'duodenum',
    15: 'esophagus',
    16: 'femur_left',
    17: 'femur_right',
    18: 'hepatic_vessel',
    19: 'intestine',
    20: 'lung_left',
    21: 'lung_right',
    22: 'portal_vein_and_splenic_vein',
    23: 'prostate',
    24: 'rectum'
}
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler that shards a dataset across replicas.

    Each replica receives an evenly strided slice of the (optionally
    shuffled) index list. With ``make_even=True`` the index list is padded
    (by repeating indices) so every replica draws the same number of samples.
    Call :meth:`set_epoch` once per epoch so shuffling differs across epochs.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        # Fall back to the active torch.distributed process group when the
        # world size / rank are not given explicitly.
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        self.num_samples = math.ceil(len(dataset) / num_replicas)
        self.total_size = self.num_samples * num_replicas
        # number of genuine (non-padded) samples this rank would receive
        self.valid_length = len(list(range(len(dataset)))[rank: self.total_size: num_replicas])

    def __iter__(self):
        if self.shuffle:
            # seed on the epoch so every rank draws the same permutation
            gen = torch.Generator()
            gen.manual_seed(self.epoch)
            order = torch.randperm(len(self.dataset), generator=gen).tolist()
        else:
            order = list(range(len(self.dataset)))
        if self.make_even:
            shortfall = self.total_size - len(order)
            if shortfall > 0:
                if shortfall < len(order):
                    order += order[:shortfall]
                else:
                    picks = np.random.randint(low=0, high=len(order), size=shortfall)
                    order += [order[p] for p in picks]
            assert len(order) == self.total_size
        shard = order[self.rank: self.total_size: self.num_replicas]
        self.num_samples = len(shard)
        return iter(shard)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
class LoadSelectedImaged(MapTransform):
    """Dictionary transform that loads images (and optionally their metadata)
    for the selected keys via a configurable reader.

    Args:
        keys: keys in the data dict whose values are paths to load.
        reader: image reader instance or registered reader name.
        dtype: dtype the loaded array is converted to.
        meta_keys: explicit keys under which to store metadata; defaults to
            ``"{key}_{meta_key_postfix}"`` per image key.
        meta_key_postfix: suffix appended to each key for its metadata entry.
        overwriting: allow replacing an existing metadata entry.
        image_only: store only the image array (no metadata).
        ensure_channel_first: reshape loaded data to channel-first layout.
        simple_keys: use simplified, top-level metadata keys.
        allow_missing_keys: silently skip keys absent from the data dict.
    """

    def __init__(
        self,
        keys: KeysCollection,
        reader: Optional[Union[ImageReader, str]] = None,
        dtype: DtypeLike = np.float32,
        meta_keys: Optional[KeysCollection] = None,
        meta_key_postfix: str = DEFAULT_POST_FIX,
        overwriting: bool = False,
        image_only: bool = False,
        ensure_channel_first: bool = False,
        simple_keys: bool = False,
        allow_missing_keys: bool = False,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(keys, allow_missing_keys)
        self._loader = LoadImage(reader, image_only, dtype, ensure_channel_first, simple_keys, *args, **kwargs)
        if not isinstance(meta_key_postfix, str):
            raise TypeError(f"meta_key_postfix must be a str but is {type(meta_key_postfix).__name__}.")
        if meta_keys is None:
            self.meta_keys = ensure_tuple_rep(None, len(self.keys))
        else:
            self.meta_keys = ensure_tuple(meta_keys)
        if len(self.keys) != len(self.meta_keys):
            raise ValueError("meta_keys should have the same length as keys.")
        self.meta_key_postfix = ensure_tuple_rep(meta_key_postfix, len(self.keys))
        self.overwriting = overwriting

    def register(self, reader: ImageReader):
        # delegate reader registration to the underlying LoadImage
        self._loader.register(reader)

    def __call__(self, data, reader: Optional[ImageReader] = None):
        d = dict(data)
        for key, meta_key, meta_key_postfix in self.key_iterator(d, self.meta_keys, self.meta_key_postfix):
            loaded = self._loader(d[key], reader)
            if self._loader.image_only:
                d[key] = loaded
                continue
            if not isinstance(loaded, (tuple, list)):
                raise ValueError("loader must return a tuple or list (because image_only=False was used).")
            d[key] = loaded[0]
            if not isinstance(loaded[1], dict):
                raise ValueError("metadata must be a dict.")
            meta_key = meta_key or f"{key}_{meta_key_postfix}"
            if meta_key in d and not self.overwriting:
                raise KeyError(f"Metadata with key {meta_key} already exists and overwriting=False.")
            d[meta_key] = loaded[1]
        return d
def get_loader_Atlas(args):
    """
    Creates training transforms, constructs a dataset, and returns a dataloader.

    The case list is assembled from '<item>.txt' files (one case name per
    line, tab-separated fields); the last 50 cases are held out for
    validation and the rest are used for training.

    Args:
        args: Command line arguments containing dataset paths and hyperparameters.
            Uses: dataset_list, data_txt_path, data_dir, cache_dataset,
            cache_dir, distributed, batch_size, workers.

    Returns:
        [train_loader, val_loader]
    """
    base_trans, random_trans = get_trans(args)
    train_transforms = base_trans + random_trans  # deterministic ops + augmentations
    val_transforms = base_trans  # validation skips the random augmentations
    # constructing training dataset
    train_img = []
    label_img = []
    # train_lbl_parents = []
    train_name = []
    for item in args.dataset_list:
        # the first tab-separated field of each line is the case folder name
        for line in open(os.path.join(args.data_txt_path, item + '.txt')):
            name = line.strip().split('\t')[0]
            train_img_path = os.path.join(args.data_dir, name, 'ct.nii.gz')
            label_img_path = os.path.join(args.data_dir, name, 'label.nii.gz')
            train_img.append(train_img_path)
            label_img.append(label_img_path)
            train_name.append(name)
    data_dicts_train = [{'image': image, 'label': label, 'name': name}
                        for image, label, name in zip(train_img, label_img, train_name)]
    print('train len {}'.format(len(data_dicts_train)))
    if args.cache_dataset:
        # NOTE(review): raw transform lists (not Compose) are handed to
        # PersistentDataset here while the non-cache branch wraps them in
        # Compose — presumably MONAI wraps non-Compose transforms internally;
        # verify against the pinned MONAI version.
        train_ds = PersistentDataset(data=data_dicts_train[:-50],
                                     transform=train_transforms,
                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                     cache_dir=args.cache_dir)
        val_ds = PersistentDataset(data=data_dicts_train[-50:],
                                   transform=val_transforms,
                                   pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                   cache_dir=args.cache_dir)
    else:
        train_ds = Dataset(data=data_dicts_train[:-50], transform=Compose(train_transforms))
        val_ds = Dataset(data=data_dicts_train[-50:], transform=Compose(val_transforms))
    # distributed sampler settings
    train_sampler = Sampler(train_ds) if args.distributed else None
    train_loader = DataLoader(train_ds, batch_size=args.batch_size, shuffle=(train_sampler is None),
                              num_workers=args.workers, pin_memory=True,
                              collate_fn=list_data_collate, sampler=train_sampler)
    val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None
    val_loader = DataLoader(
        val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=True
    )
    loader = [train_loader, val_loader]
    return loader
class Filter_Atlas_Labels(MapTransform):
    """Fuse stacked per-organ (one-hot) masks into one multi-class label map.

    For every key, channel c of the input becomes label c + 1 in the output,
    while voxels covered by no organ channel stay 0 (background).
    """

    def __call__(self, data):
        d = dict(data)
        for key in self.keys:
            onehot = d[key]
            # voxels where every channel is zero belong to the background
            background = onehot.clone().sum(0).unsqueeze(0)
            merged = onehot.argmax(0).unsqueeze(0) + 1
            merged[background == 0] = 0
            d[key] = merged.float()
        return d
================================================
FILE: Finetune/AbdomenAtlas/dataset/dataloader_test.py
================================================
from monai.transforms import *
import sys
import nibabel as nib
import os
import torch
import numpy as np
from typing import Optional, Union
import math
import pickle
from monai.data import *
from monai.data import DataLoader, Dataset, list_data_collate, DistributedSampler, CacheDataset, SmartCacheDataset
from monai.config import DtypeLike, KeysCollection
from monai.transforms.transform import MapTransform
from monai.transforms.io.array import LoadImage
from monai.utils import ensure_tuple, ensure_tuple_rep
from monai.data.image_reader import ImageReader
from monai.utils.enums import PostFix
from utils.data_trans import *
# Default postfix for metadata dictionary keys (e.g. "image_meta_dict").
DEFAULT_POST_FIX = PostFix.meta()

# class map for the AbdomenAtlas 1.0 dataset
# Maps label-channel index -> organ name (9 foreground structures).
class_map_abdomenatlas_1_0 = {
    0: "aorta",
    1: "gall_bladder",
    2: "kidney_left",
    3: "kidney_right",
    4: "liver",
    5: "pancreas",
    6: "postcava",
    7: "spleen",
    8: "stomach",
}

# class map for the AbdomenAtlas 1.1 dataset
# Superset of the 1.0 map (indices 0-8 identical) with 16 additional structures.
class_map_abdomenatlas_1_1 = {
    0: 'aorta',
    1: 'gall_bladder',
    2: 'kidney_left',
    3: 'kidney_right',
    4: 'liver',
    5: 'pancreas',
    6: 'postcava',
    7: 'spleen',
    8: 'stomach',
    9: 'adrenal_gland_left',
    10: 'adrenal_gland_right',
    11: 'bladder',
    12: 'celiac_truck',
    13: 'colon',
    14: 'duodenum',
    15: 'esophagus',
    16: 'femur_left',
    17: 'femur_right',
    18: 'hepatic_vessel',
    19: 'intestine',
    20: 'lung_left',
    21: 'lung_right',
    22: 'portal_vein_and_splenic_vein',
    23: 'prostate',
    24: 'rectum'
}
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler that partitions a dataset across ranks.

    Each rank receives ``ceil(len(dataset) / num_replicas)`` indices per epoch.
    With ``make_even=True`` the global index list is padded (by repeating
    indices) so that every rank receives the same number of samples.

    Args:
        dataset: dataset to sample from (only ``len()`` is used).
        num_replicas: number of processes; defaults to the distributed world size.
        rank: rank of the current process; defaults to the distributed rank.
        shuffle: if True, shuffle indices each epoch (seeded by the epoch number).
        make_even: if True, pad indices so all ranks get equally many samples.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        indices = list(range(len(self.dataset)))
        self.valid_length = len(indices[self.rank: self.total_size: self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            # Epoch-seeded permutation so all ranks see the same global order.
            g = torch.Generator()
            g.manual_seed(self.epoch)
            indices = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            indices = list(range(len(self.dataset)))
        if self.make_even:
            if len(indices) < self.total_size:
                if self.total_size - len(indices) < len(indices):
                    # Small deficit: repeat a prefix of the index list.
                    indices += indices[: (self.total_size - len(indices))]
                else:
                    # BUGFIX: padding ids were previously drawn from np.random
                    # with no fixed seed, so padding was non-deterministic and
                    # could differ between ranks. Seed a torch generator with
                    # the epoch so padding is reproducible and identical on
                    # every rank (matching the shuffle branch above).
                    pad_g = torch.Generator()
                    pad_g.manual_seed(self.epoch)
                    extra_ids = torch.randint(
                        low=0, high=len(indices),
                        size=(self.total_size - len(indices),), generator=pad_g,
                    ).tolist()
                    indices += [indices[ids] for ids in extra_ids]
            assert len(indices) == self.total_size
        # Strided split: rank r takes indices r, r+R, r+2R, ...
        indices = indices[self.rank: self.total_size: self.num_replicas]
        self.num_samples = len(indices)
        return iter(indices)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        """Set the epoch number used to seed shuffling/padding (call once per epoch)."""
        self.epoch = epoch
class LoadSelectedImaged(MapTransform):
    """
    Custom transform to load a specific image and metadata using a flexible reader.
    Dictionary-based wrapper around :class:`LoadImage`: for each configured key,
    replaces the file path stored in the data dict with the loaded image array
    and (unless ``image_only``) stores the metadata under ``<key>_<postfix>``.

    Args:
        keys: Keys of the data dictionary to load selected images.
        reader: Image reader object or string reference.
        dtype: Data type for loaded images.
        meta_keys: Keys to store metadata along with image data.
        meta_key_postfix: Suffix for metadata keys.
        overwriting: Flag to allow overwriting existing metadata.
        image_only: Load only the image data (not metadata).
        ensure_channel_first: Reshape image into channel-first format if necessary.
        simple_keys: Use simplified, top-level data keys.
        allow_missing_keys: If True, missing data keys are ignored
    """

    def __init__(
        self,
        keys: KeysCollection,
        reader: Optional[Union[ImageReader, str]] = None,
        dtype: DtypeLike = np.float32,
        meta_keys: Optional[KeysCollection] = None,
        meta_key_postfix: str = DEFAULT_POST_FIX,
        overwriting: bool = False,
        image_only: bool = False,
        ensure_channel_first: bool = False,
        simple_keys: bool = False,
        allow_missing_keys: bool = False,
        *args,
        **kwargs,
    ) -> None:
        super().__init__(keys, allow_missing_keys)
        # Underlying array-level loader that does the actual file reading.
        self._loader = LoadImage(reader, image_only, dtype, ensure_channel_first, simple_keys, *args, **kwargs)
        if not isinstance(meta_key_postfix, str):
            raise TypeError(f"meta_key_postfix must be a str but is {type(meta_key_postfix).__name__}.")
        # One meta key (or None placeholder) per data key; lengths must match.
        self.meta_keys = ensure_tuple_rep(None, len(self.keys)) if meta_keys is None else ensure_tuple(meta_keys)
        if len(self.keys) != len(self.meta_keys):
            raise ValueError("meta_keys should have the same length as keys.")
        self.meta_key_postfix = ensure_tuple_rep(meta_key_postfix, len(self.keys))
        self.overwriting = overwriting

    def register(self, reader: ImageReader):
        # Expose the underlying loader's reader registration.
        self._loader.register(reader)

    def __call__(self, data, reader: Optional[ImageReader] = None):
        d = dict(data)
        for key, meta_key, meta_key_postfix in self.key_iterator(d, self.meta_keys, self.meta_key_postfix):
            # NOTE: rebinds the local name `data` to the loader output; the
            # original input dict was already copied into `d` above.
            data = self._loader(d[key], reader)
            if self._loader.image_only:
                # image_only=True: loader returns just the array.
                d[key] = data
            else:
                # Otherwise the loader returns (image, metadata).
                if not isinstance(data, (tuple, list)):
                    raise ValueError("loader must return a tuple or list (because image_only=False was used).")
                d[key] = data[0]
                if not isinstance(data[1], dict):
                    raise ValueError("metadata must be a dict.")
                # Default meta key is e.g. "image_meta_dict" when meta_key is None.
                meta_key = meta_key or f"{key}_{meta_key_postfix}"
                if meta_key in d and not self.overwriting:
                    raise KeyError(f"Metadata with key {meta_key} already exists and overwriting=False.")
                d[meta_key] = data[1]
        return d
def get_test_loader_Atlas(args):
    """Build the AbdomenAtlas test-time dataloader.

    Each case directory under ``args.test_data_path`` is expected to contain a
    ``ct.nii.gz`` volume.

    Args:
        args: Command line arguments containing dataset paths and hyperparameters
            (spacing, intensity window, ROI size, worker count).

    Returns:
        A ``(test_loader, test_transforms)`` tuple; the transforms are returned
        so the caller can invert the preprocessing on predictions.
    """
    # Deterministic preprocessing only (no augmentation) for inference.
    # BUGFIX: this file never binds the module name `transforms` (it only
    # star-imports from monai.transforms), so `transforms.Compose` relied on a
    # transitively imported name; use `Compose` from the star import directly.
    test_transforms = Compose([
        LoadImaged(keys=["image"]),
        EnsureChannelFirstd(keys=["image"]),
        Orientationd(keys=["image"], axcodes="RAS"),
        Spacingd(keys=["image"], pixdim=(args.space_x, args.space_y, args.space_z),
                 mode="bilinear"),
        ScaleIntensityRanged(
            keys=["image"],
            a_min=args.a_min,
            a_max=args.a_max,
            b_min=0.0,
            b_max=1.0,
            clip=True,
        ),
        CropForegroundd(keys=["image"], source_key="image"),
        SpatialPadd(keys=["image"], spatial_size=(args.roi_x, args.roi_y, args.roi_z),
                    mode='constant'),
    ])
    # Sort case directories for a deterministic order (os.listdir order is arbitrary).
    test_img = []
    test_name = []
    for name in sorted(os.listdir(args.test_data_path)):
        test_img.append(os.path.join(args.test_data_path, name, 'ct.nii.gz'))
        test_name.append(name)
    data_dicts_test = [{'image': image, 'name': name}
                       for image, name in zip(test_img, test_name)]
    print('test len {}'.format(len(data_dicts_test)))
    test_ds = Dataset(data=data_dicts_test, transform=test_transforms)
    test_loader = DataLoader(
        test_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=None, pin_memory=True
    )
    return test_loader, test_transforms
================================================
FILE: Finetune/AbdomenAtlas/dataset/dataset_list/AbdomenAtlas1.0.txt
================================================
BDMAP_00000001
BDMAP_00000002
BDMAP_00000003
BDMAP_00000004
BDMAP_00000005
BDMAP_00000006
BDMAP_00000007
BDMAP_00000008
BDMAP_00000009
BDMAP_00000010
BDMAP_00000011
BDMAP_00000012
BDMAP_00000013
BDMAP_00000014
BDMAP_00000015
BDMAP_00000016
BDMAP_00000017
BDMAP_00000018
BDMAP_00000019
BDMAP_00000020
BDMAP_00000021
BDMAP_00000022
BDMAP_00000023
BDMAP_00000024
BDMAP_00000025
BDMAP_00000026
BDMAP_00000027
BDMAP_00000028
BDMAP_00000029
BDMAP_00000030
BDMAP_00000031
BDMAP_00000032
BDMAP_00000033
BDMAP_00000034
BDMAP_00000035
BDMAP_00000036
BDMAP_00000037
BDMAP_00000038
BDMAP_00000039
BDMAP_00000040
BDMAP_00000041
BDMAP_00000042
BDMAP_00000043
BDMAP_00000044
BDMAP_00000045
BDMAP_00000046
BDMAP_00000047
BDMAP_00000048
BDMAP_00000049
BDMAP_00000050
BDMAP_00000051
BDMAP_00000052
BDMAP_00000053
BDMAP_00000054
BDMAP_00000055
BDMAP_00000056
BDMAP_00000057
BDMAP_00000058
BDMAP_00000059
BDMAP_00000060
BDMAP_00000061
BDMAP_00000062
BDMAP_00000063
BDMAP_00000064
BDMAP_00000065
BDMAP_00000066
BDMAP_00000067
BDMAP_00000068
BDMAP_00000069
BDMAP_00000070
BDMAP_00000071
BDMAP_00000072
BDMAP_00000073
BDMAP_00000074
BDMAP_00000075
BDMAP_00000076
BDMAP_00000077
BDMAP_00000078
BDMAP_00000079
BDMAP_00000080
BDMAP_00000081
BDMAP_00000082
BDMAP_00000083
BDMAP_00000084
BDMAP_00000085
BDMAP_00000086
BDMAP_00000087
BDMAP_00000088
BDMAP_00000089
BDMAP_00000090
BDMAP_00000091
BDMAP_00000092
BDMAP_00000093
BDMAP_00000094
BDMAP_00000095
BDMAP_00000096
BDMAP_00000097
BDMAP_00000098
BDMAP_00000099
BDMAP_00000100
BDMAP_00000101
BDMAP_00000102
BDMAP_00000103
BDMAP_00000104
BDMAP_00000105
BDMAP_00000106
BDMAP_00000107
BDMAP_00000108
BDMAP_00000109
BDMAP_00000110
BDMAP_00000111
BDMAP_00000112
BDMAP_00000113
BDMAP_00000114
BDMAP_00000115
BDMAP_00000116
BDMAP_00000117
BDMAP_00000118
BDMAP_00000119
BDMAP_00000120
BDMAP_00000121
BDMAP_00000122
BDMAP_00000123
BDMAP_00000124
BDMAP_00000125
BDMAP_00000126
BDMAP_00000127
BDMAP_00000128
BDMAP_00000129
BDMAP_00000130
BDMAP_00000131
BDMAP_00000132
BDMAP_00000133
BDMAP_00000134
BDMAP_00000135
BDMAP_00000136
BDMAP_00000137
BDMAP_00000138
BDMAP_00000139
BDMAP_00000140
BDMAP_00000141
BDMAP_00000142
BDMAP_00000143
BDMAP_00000144
BDMAP_00000145
BDMAP_00000146
BDMAP_00000147
BDMAP_00000148
BDMAP_00000149
BDMAP_00000150
BDMAP_00000151
BDMAP_00000152
BDMAP_00000153
BDMAP_00000154
BDMAP_00000155
BDMAP_00000156
BDMAP_00000157
BDMAP_00000158
BDMAP_00000159
BDMAP_00000160
BDMAP_00000161
BDMAP_00000162
BDMAP_00000163
BDMAP_00000164
BDMAP_00000165
BDMAP_00000166
BDMAP_00000167
BDMAP_00000168
BDMAP_00000169
BDMAP_00000170
BDMAP_00000171
BDMAP_00000172
BDMAP_00000173
BDMAP_00000174
BDMAP_00000175
BDMAP_00000176
BDMAP_00000177
BDMAP_00000178
BDMAP_00000179
BDMAP_00000180
BDMAP_00000181
BDMAP_00000182
BDMAP_00000183
BDMAP_00000184
BDMAP_00000185
BDMAP_00000186
BDMAP_00000187
BDMAP_00000188
BDMAP_00000189
BDMAP_00000190
BDMAP_00000191
BDMAP_00000192
BDMAP_00000193
BDMAP_00000194
BDMAP_00000195
BDMAP_00000196
BDMAP_00000197
BDMAP_00000198
BDMAP_00000199
BDMAP_00000200
BDMAP_00000201
BDMAP_00000202
BDMAP_00000203
BDMAP_00000204
BDMAP_00000205
BDMAP_00000206
BDMAP_00000207
BDMAP_00000208
BDMAP_00000209
BDMAP_00000210
BDMAP_00000211
BDMAP_00000212
BDMAP_00000213
BDMAP_00000214
BDMAP_00000215
BDMAP_00000216
BDMAP_00000217
BDMAP_00000218
BDMAP_00000219
BDMAP_00000220
BDMAP_00000221
BDMAP_00000222
BDMAP_00000223
BDMAP_00000224
BDMAP_00000225
BDMAP_00000226
BDMAP_00000227
BDMAP_00000228
BDMAP_00000229
BDMAP_00000230
BDMAP_00000231
BDMAP_00000232
BDMAP_00000233
BDMAP_00000234
BDMAP_00000235
BDMAP_00000236
BDMAP_00000237
BDMAP_00000238
BDMAP_00000239
BDMAP_00000240
BDMAP_00000241
BDMAP_00000242
BDMAP_00000243
BDMAP_00000244
BDMAP_00000245
BDMAP_00000246
BDMAP_00000247
BDMAP_00000248
BDMAP_00000249
BDMAP_00000250
BDMAP_00000251
BDMAP_00000252
BDMAP_00000253
BDMAP_00000254
BDMAP_00000255
BDMAP_00000256
BDMAP_00000257
BDMAP_00000258
BDMAP_00000259
BDMAP_00000260
BDMAP_00000261
BDMAP_00000262
BDMAP_00000263
BDMAP_00000264
BDMAP_00000265
BDMAP_00000266
BDMAP_00000267
BDMAP_00000268
BDMAP_00000269
BDMAP_00000270
BDMAP_00000271
BDMAP_00000272
BDMAP_00000273
BDMAP_00000274
BDMAP_00000275
BDMAP_00000276
BDMAP_00000277
BDMAP_00000278
BDMAP_00000279
BDMAP_00000280
BDMAP_00000281
BDMAP_00000282
BDMAP_00000283
BDMAP_00000284
BDMAP_00000285
BDMAP_00000286
BDMAP_00000287
BDMAP_00000288
BDMAP_00000289
BDMAP_00000290
BDMAP_00000291
BDMAP_00000292
BDMAP_00000293
BDMAP_00000294
BDMAP_00000295
BDMAP_00000296
BDMAP_00000297
BDMAP_00000298
BDMAP_00000299
BDMAP_00000300
BDMAP_00000301
BDMAP_00000302
BDMAP_00000303
BDMAP_00000304
BDMAP_00000305
BDMAP_00000306
BDMAP_00000307
BDMAP_00000308
BDMAP_00000309
BDMAP_00000310
BDMAP_00000311
BDMAP_00000312
BDMAP_00000313
BDMAP_00000314
BDMAP_00000315
BDMAP_00000316
BDMAP_00000317
BDMAP_00000318
BDMAP_00000319
BDMAP_00000320
BDMAP_00000321
BDMAP_00000322
BDMAP_00000323
BDMAP_00000324
BDMAP_00000325
BDMAP_00000326
BDMAP_00000327
BDMAP_00000328
BDMAP_00000329
BDMAP_00000330
BDMAP_00000331
BDMAP_00000332
BDMAP_00000333
BDMAP_00000334
BDMAP_00000335
BDMAP_00000336
BDMAP_00000337
BDMAP_00000338
BDMAP_00000339
BDMAP_00000340
BDMAP_00000341
BDMAP_00000342
BDMAP_00000343
BDMAP_00000344
BDMAP_00000345
BDMAP_00000346
BDMAP_00000347
BDMAP_00000348
BDMAP_00000349
BDMAP_00000350
BDMAP_00000351
BDMAP_00000352
BDMAP_00000353
BDMAP_00000354
BDMAP_00000355
BDMAP_00000356
BDMAP_00000357
BDMAP_00000358
BDMAP_00000359
BDMAP_00000360
BDMAP_00000361
BDMAP_00000362
BDMAP_00000363
BDMAP_00000364
BDMAP_00000365
BDMAP_00000366
BDMAP_00000367
BDMAP_00000368
BDMAP_00000369
BDMAP_00000370
BDMAP_00000371
BDMAP_00000372
BDMAP_00000373
BDMAP_00000374
BDMAP_00000375
BDMAP_00000376
BDMAP_00000377
BDMAP_00000378
BDMAP_00000379
BDMAP_00000380
BDMAP_00000381
BDMAP_00000382
BDMAP_00000383
BDMAP_00000384
BDMAP_00000385
BDMAP_00000386
BDMAP_00000387
BDMAP_00000388
BDMAP_00000389
BDMAP_00000390
BDMAP_00000391
BDMAP_00000392
BDMAP_00000393
BDMAP_00000394
BDMAP_00000395
BDMAP_00000396
BDMAP_00000397
BDMAP_00000398
BDMAP_00000399
BDMAP_00000400
BDMAP_00000401
BDMAP_00000402
BDMAP_00000403
BDMAP_00000404
BDMAP_00000405
BDMAP_00000406
BDMAP_00000407
BDMAP_00000408
BDMAP_00000409
BDMAP_00000410
BDMAP_00000411
BDMAP_00000412
BDMAP_00000413
BDMAP_00000414
BDMAP_00000415
BDMAP_00000416
BDMAP_00000417
BDMAP_00000418
BDMAP_00000419
BDMAP_00000420
BDMAP_00000421
BDMAP_00000422
BDMAP_00000423
BDMAP_00000424
BDMAP_00000425
BDMAP_00000426
BDMAP_00000427
BDMAP_00000428
BDMAP_00000429
BDMAP_00000430
BDMAP_00000431
BDMAP_00000432
BDMAP_00000433
BDMAP_00000434
BDMAP_00000435
BDMAP_00000436
BDMAP_00000437
BDMAP_00000438
BDMAP_00000439
BDMAP_00000440
BDMAP_00000441
BDMAP_00000442
BDMAP_00000443
BDMAP_00000444
BDMAP_00000445
BDMAP_00000446
BDMAP_00000447
BDMAP_00000448
BDMAP_00000449
BDMAP_00000450
BDMAP_00000451
BDMAP_00000452
BDMAP_00000453
BDMAP_00000454
BDMAP_00000455
BDMAP_00000456
BDMAP_00000457
BDMAP_00000458
BDMAP_00000459
BDMAP_00000460
BDMAP_00000461
BDMAP_00000462
BDMAP_00000463
BDMAP_00000464
BDMAP_00000465
BDMAP_00000466
BDMAP_00000467
BDMAP_00000468
BDMAP_00000469
BDMAP_00000470
BDMAP_00000471
BDMAP_00000472
BDMAP_00000473
BDMAP_00000474
BDMAP_00000475
BDMAP_00000476
BDMAP_00000477
BDMAP_00000478
BDMAP_00000479
BDMAP_00000480
BDMAP_00000481
BDMAP_00000482
BDMAP_00000483
BDMAP_00000484
BDMAP_00000485
BDMAP_00000486
BDMAP_00000487
BDMAP_00000488
BDMAP_00000489
BDMAP_00000490
BDMAP_00000491
BDMAP_00000492
BDMAP_00000493
BDMAP_00000494
BDMAP_00000495
BDMAP_00000496
BDMAP_00000497
BDMAP_00000498
BDMAP_00000499
BDMAP_00000500
BDMAP_00000501
BDMAP_00000502
BDMAP_00000503
BDMAP_00000504
BDMAP_00000505
BDMAP_00000506
BDMAP_00000507
BDMAP_00000508
BDMAP_00000509
BDMAP_00000510
BDMAP_00000511
BDMAP_00000512
BDMAP_00000513
BDMAP_00000514
BDMAP_00000515
BDMAP_00000516
BDMAP_00000517
BDMAP_00000518
BDMAP_00000519
BDMAP_00000520
BDMAP_00000521
BDMAP_00000522
BDMAP_00000523
BDMAP_00000524
BDMAP_00000525
BDMAP_00000526
BDMAP_00000527
BDMAP_00000528
BDMAP_00000529
BDMAP_00000530
BDMAP_00000531
BDMAP_00000532
BDMAP_00000533
BDMAP_00000534
BDMAP_00000535
BDMAP_00000536
BDMAP_00000537
BDMAP_00000538
BDMAP_00000539
BDMAP_00000540
BDMAP_00000541
BDMAP_00000542
BDMAP_00000543
BDMAP_00000544
BDMAP_00000545
BDMAP_00000546
BDMAP_00000547
BDMAP_00000548
BDMAP_00000549
BDMAP_00000550
BDMAP_00000551
BDMAP_00000552
BDMAP_00000553
BDMAP_00000554
BDMAP_00000555
BDMAP_00000556
BDMAP_00000557
BDMAP_00000558
BDMAP_00000559
BDMAP_00000560
BDMAP_00000561
BDMAP_00000562
BDMAP_00000563
BDMAP_00000564
BDMAP_00000565
BDMAP_00000566
BDMAP_00000567
BDMAP_00000568
BDMAP_00000569
BDMAP_00000570
BDMAP_00000571
BDMAP_00000572
BDMAP_00000573
BDMAP_00000574
BDMAP_00000575
BDMAP_00000576
BDMAP_00000577
BDMAP_00000578
BDMAP_00000579
BDMAP_00000580
BDMAP_00000581
BDMAP_00000582
BDMAP_00000583
BDMAP_00000584
BDMAP_00000585
BDMAP_00000586
BDMAP_00000587
BDMAP_00000588
BDMAP_00000589
BDMAP_00000590
BDMAP_00000591
BDMAP_00000592
BDMAP_00000593
BDMAP_00000594
BDMAP_00000595
BDMAP_00000596
BDMAP_00000597
BDMAP_00000598
BDMAP_00000599
BDMAP_00000600
BDMAP_00000601
BDMAP_00000602
BDMAP_00000603
BDMAP_00000604
BDMAP_00000605
BDMAP_00000606
BDMAP_00000607
BDMAP_00000608
BDMAP_00000609
BDMAP_00000610
BDMAP_00000611
BDMAP_00000612
BDMAP_00000613
BDMAP_00000614
BDMAP_00000615
BDMAP_00000616
BDMAP_00000617
BDMAP_00000618
BDMAP_00000619
BDMAP_00000620
BDMAP_00000621
BDMAP_00000622
BDMAP_00000623
BDMAP_00000624
BDMAP_00000625
BDMAP_00000626
BDMAP_00000627
BDMAP_00000628
BDMAP_00000629
BDMAP_00000630
BDMAP_00000631
BDMAP_00000632
BDMAP_00000633
BDMAP_00000634
BDMAP_00000635
BDMAP_00000636
BDMAP_00000637
BDMAP_00000638
BDMAP_00000639
BDMAP_00000640
BDMAP_00000641
BDMAP_00000642
BDMAP_00000643
BDMAP_00000644
BDMAP_00000645
BDMAP_00000646
BDMAP_00000647
BDMAP_00000648
BDMAP_00000649
BDMAP_00000650
BDMAP_00000651
BDMAP_00000652
BDMAP_00000653
BDMAP_00000654
BDMAP_00000655
BDMAP_00000656
BDMAP_00000657
BDMAP_00000658
BDMAP_00000659
BDMAP_00000660
BDMAP_00000661
BDMAP_00000662
BDMAP_00000663
BDMAP_00000664
BDMAP_00000665
BDMAP_00000666
BDMAP_00000667
BDMAP_00000668
BDMAP_00000669
BDMAP_00000670
BDMAP_00000671
BDMAP_00000672
BDMAP_00000673
BDMAP_00000674
BDMAP_00000675
BDMAP_00000676
BDMAP_00000677
BDMAP_00000678
BDMAP_00000679
BDMAP_00000680
BDMAP_00000681
BDMAP_00000682
BDMAP_00000683
BDMAP_00000684
BDMAP_00000685
BDMAP_00000686
BDMAP_00000687
BDMAP_00000688
BDMAP_00000689
BDMAP_00000690
BDMAP_00000691
BDMAP_00000692
BDMAP_00000693
BDMAP_00000694
BDMAP_00000695
BDMAP_00000696
BDMAP_00000697
BDMAP_00000698
BDMAP_00000699
BDMAP_00000700
BDMAP_00000701
BDMAP_00000702
BDMAP_00000703
BDMAP_00000704
BDMAP_00000705
BDMAP_00000706
BDMAP_00000707
BDMAP_00000708
BDMAP_00000709
BDMAP_00000710
BDMAP_00000711
BDMAP_00000712
BDMAP_00000713
BDMAP_00000714
BDMAP_00000715
BDMAP_00000716
BDMAP_00000717
BDMAP_00000718
BDMAP_00000719
BDMAP_00000720
BDMAP_00000721
BDMAP_00000722
BDMAP_00000723
BDMAP_00000724
BDMAP_00000725
BDMAP_00000726
BDMAP_00000727
BDMAP_00000728
BDMAP_00000729
BDMAP_00000730
BDMAP_00000731
BDMAP_00000732
BDMAP_00000733
BDMAP_00000734
BDMAP_00000735
BDMAP_00000736
BDMAP_00000737
BDMAP_00000738
BDMAP_00000739
BDMAP_00000740
BDMAP_00000741
BDMAP_00000742
BDMAP_00000743
BDMAP_00000744
BDMAP_00000745
BDMAP_00000746
BDMAP_00000747
BDMAP_00000748
BDMAP_00000749
BDMAP_00000750
BDMAP_00000751
BDMAP_00000752
BDMAP_00000753
BDMAP_00000754
BDMAP_00000755
BDMAP_00000756
BDMAP_00000757
BDMAP_00000758
BDMAP_00000759
BDMAP_00000760
BDMAP_00000761
BDMAP_00000762
BDMAP_00000763
BDMAP_00000764
BDMAP_00000765
BDMAP_00000766
BDMAP_00000767
BDMAP_00000768
BDMAP_00000769
BDMAP_00000770
BDMAP_00000771
BDMAP_00000772
BDMAP_00000773
BDMAP_00000774
BDMAP_00000775
BDMAP_00000776
BDMAP_00000777
BDMAP_00000778
BDMAP_00000779
BDMAP_00000780
BDMAP_00000781
BDMAP_00000782
BDMAP_00000783
BDMAP_00000784
BDMAP_00000785
BDMAP_00000786
BDMAP_00000787
BDMAP_00000788
BDMAP_00000789
BDMAP_00000790
BDMAP_00000791
BDMAP_00000792
BDMAP_00000793
BDMAP_00000794
BDMAP_00000795
BDMAP_00000796
BDMAP_00000797
BDMAP_00000798
BDMAP_00000799
BDMAP_00000800
BDMAP_00000801
BDMAP_00000802
BDMAP_00000803
BDMAP_00000804
BDMAP_00000805
BDMAP_00000806
BDMAP_00000807
BDMAP_00000808
BDMAP_00000809
BDMAP_00000810
BDMAP_00000811
BDMAP_00000812
BDMAP_00000813
BDMAP_00000814
BDMAP_00000815
BDMAP_00000816
BDMAP_00000817
BDMAP_00000818
BDMAP_00000819
BDMAP_00000820
BDMAP_00000821
BDMAP_00000822
BDMAP_00000823
BDMAP_00000824
BDMAP_00000825
BDMAP_00000826
BDMAP_00000827
BDMAP_00000828
BDMAP_00000829
BDMAP_00000830
BDMAP_00000831
BDMAP_00000832
BDMAP_00000833
BDMAP_00000834
BDMAP_00000835
BDMAP_00000836
BDMAP_00000837
BDMAP_00000838
BDMAP_00000839
BDMAP_00000840
BDMAP_00000841
BDMAP_00000842
BDMAP_00000843
BDMAP_00000844
BDMAP_00000845
BDMAP_00000846
BDMAP_00000847
BDMAP_00000848
BDMAP_00000849
BDMAP_00000850
BDMAP_00000851
BDMAP_00000852
BDMAP_00000853
BDMAP_00000854
BDMAP_00000855
BDMAP_00000856
BDMAP_00000857
BDMAP_00000858
BDMAP_00000859
BDMAP_00000860
BDMAP_00000861
BDMAP_00000862
BDMAP_00000863
BDMAP_00000864
BDMAP_00000865
BDMAP_00000866
BDMAP_00000867
BDMAP_00000868
BDMAP_00000869
BDMAP_00000870
BDMAP_00000871
BDMAP_00000872
BDMAP_00000873
BDMAP_00000874
BDMAP_00000875
BDMAP_00000876
BDMAP_00000877
BDMAP_00000878
BDMAP_00000879
BDMAP_00000880
BDMAP_00000881
BDMAP_00000882
BDMAP_00000883
BDMAP_00000884
BDMAP_00000885
BDMAP_00000886
BDMAP_00000887
BDMAP_00000888
BDMAP_00000889
BDMAP_00000890
BDMAP_00000891
BDMAP_00000892
BDMAP_00000893
BDMAP_00000894
BDMAP_00000895
BDMAP_00000896
BDMAP_00000897
BDMAP_00000898
BDMAP_00000899
BDMAP_00000900
BDMAP_00000901
BDMAP_00000902
BDMAP_00000903
BDMAP_00000904
BDMAP_00000905
BDMAP_00000906
BDMAP_00000907
BDMAP_00000908
BDMAP_00000909
BDMAP_00000910
BDMAP_00000911
BDMAP_00000912
BDMAP_00000913
BDMAP_00000914
BDMAP_00000915
BDMAP_00000916
BDMAP_00000917
BDMAP_00000918
BDMAP_00000919
BDMAP_00000920
BDMAP_00000921
BDMAP_00000922
BDMAP_00000923
BDMAP_00000924
BDMAP_00000925
BDMAP_00000926
BDMAP_00000927
BDMAP_00000928
BDMAP_00000929
BDMAP_00000930
BDMAP_00000931
BDMAP_00000932
BDMAP_00000933
BDMAP_00000934
BDMAP_00000935
BDMAP_00000936
BDMAP_00000937
BDMAP_00000938
BDMAP_00000939
BDMAP_00000940
BDMAP_00000941
BDMAP_00000942
BDMAP_00000943
BDMAP_00000944
BDMAP_00000945
BDMAP_00000946
BDMAP_00000947
BDMAP_00000948
BDMAP_00000949
BDMAP_00000950
BDMAP_00000951
BDMAP_00000952
BDMAP_00000953
BDMAP_00000954
BDMAP_00000955
BDMAP_00000956
BDMAP_00000957
BDMAP_00000958
BDMAP_00000959
BDMAP_00000960
BDMAP_00000961
BDMAP_00000962
BDMAP_00000963
BDMAP_00000964
BDMAP_00000965
BDMAP_00000966
BDMAP_00000967
BDMAP_00000968
BDMAP_00000969
BDMAP_00000970
BDMAP_00000971
BDMAP_00000972
BDMAP_00000973
BDMAP_00000974
BDMAP_00000975
BDMAP_00000976
BDMAP_00000977
BDMAP_00000978
BDMAP_00000979
BDMAP_00000980
BDMAP_00000981
BDMAP_00000982
BDMAP_00000983
BDMAP_00000984
BDMAP_00000985
BDMAP_00000986
BDMAP_00000987
BDMAP_00000988
BDMAP_00000989
BDMAP_00000990
BDMAP_00000991
BDMAP_00000992
BDMAP_00000993
BDMAP_00000994
BDMAP_00000995
BDMAP_00000996
BDMAP_00000997
BDMAP_00000998
BDMAP_00000999
BDMAP_00001000
BDMAP_00001001
BDMAP_00001002
BDMAP_00001003
BDMAP_00001004
BDMAP_00001005
BDMAP_00001006
BDMAP_00001007
BDMAP_00001008
BDMAP_00001009
BDMAP_00001010
BDMAP_00001011
BDMAP_00001012
BDMAP_00001013
BDMAP_00001014
BDMAP_00001015
BDMAP_00001016
BDMAP_00001017
BDMAP_00001018
BDMAP_00001019
BDMAP_00001020
BDMAP_00001021
BDMAP_00001022
BDMAP_00001023
BDMAP_00001024
BDMAP_00001025
BDMAP_00001026
BDMAP_00001027
BDMAP_00001028
BDMAP_00001029
BDMAP_00001030
BDMAP_00001031
BDMAP_00001032
BDMAP_00001033
BDMAP_00001034
BDMAP_00001035
BDMAP_00001036
BDMAP_00001037
BDMAP_00001038
BDMAP_00001039
BDMAP_00001040
BDMAP_00001041
BDMAP_00001042
BDMAP_00001043
BDMAP_00001044
BDMAP_00001045
BDMAP_00001046
BDMAP_00001047
BDMAP_00001048
BDMAP_00001049
BDMAP_00001050
BDMAP_00001051
BDMAP_00001052
BDMAP_00001053
BDMAP_00001054
BDMAP_00001055
BDMAP_00001056
BDMAP_00001057
BDMAP_00001058
BDMAP_00001059
BDMAP_00001060
BDMAP_00001061
BDMAP_00001062
BDMAP_00001063
BDMAP_00001064
BDMAP_00001065
BDMAP_00001066
BDMAP_00001067
BDMAP_00001068
BDMAP_00001069
BDMAP_00001070
BDMAP_00001071
BDMAP_00001072
BDMAP_00001073
BDMAP_00001074
BDMAP_00001075
BDMAP_00001076
BDMAP_00001077
BDMAP_00001078
BDMAP_00001079
BDMAP_00001080
BDMAP_00001081
BDMAP_00001082
BDMAP_00001083
BDMAP_00001084
BDMAP_00001085
BDMAP_00001086
BDMAP_00001087
BDMAP_00001088
BDMAP_00001089
BDMAP_00001090
BDMAP_00001091
BDMAP_00001092
BDMAP_00001093
BDMAP_00001094
BDMAP_00001095
BDMAP_00001096
BDMAP_00001097
BDMAP_00001098
BDMAP_00001099
BDMAP_00001100
BDMAP_00001101
BDMAP_00001102
BDMAP_00001103
BDMAP_00001104
BDMAP_00001105
BDMAP_00001106
BDMAP_00001107
BDMAP_00001108
BDMAP_00001109
BDMAP_00001110
BDMAP_00001111
BDMAP_00001112
BDMAP_00001113
BDMAP_00001114
BDMAP_00001115
BDMAP_00001116
BDMAP_00001117
BDMAP_00001118
BDMAP_00001119
BDMAP_00001120
BDMAP_00001121
BDMAP_00001122
BDMAP_00001123
BDMAP_00001124
BDMAP_00001125
BDMAP_00001126
BDMAP_00001127
BDMAP_00001128
BDMAP_00001129
BDMAP_00001130
BDMAP_00001131
BDMAP_00001132
BDMAP_00001133
BDMAP_00001134
BDMAP_00001135
BDMAP_00001136
BDMAP_00001137
BDMAP_00001138
BDMAP_00001139
BDMAP_00001140
BDMAP_00001141
BDMAP_00001142
BDMAP_00001143
BDMAP_00001144
BDMAP_00001145
BDMAP_00001146
BDMAP_00001147
BDMAP_00001148
BDMAP_00001149
BDMAP_00001150
BDMAP_00001151
BDMAP_00001152
BDMAP_00001153
BDMAP_00001154
BDMAP_00001155
BDMAP_00001156
BDMAP_00001157
BDMAP_00001158
BDMAP_00001159
BDMAP_00001160
BDMAP_00001161
BDMAP_00001162
BDMAP_00001163
BDMAP_00001164
BDMAP_00001165
BDMAP_00001166
BDMAP_00001167
BDMAP_00001168
BDMAP_00001169
BDMAP_00001170
BDMAP_00001171
BDMAP_00001172
BDMAP_00001173
BDMAP_00001174
BDMAP_00001175
BDMAP_00001176
BDMAP_00001177
BDMAP_00001178
BDMAP_00001179
BDMAP_00001180
BDMAP_00001181
BDMAP_00001182
BDMAP_00001183
BDMAP_00001184
BDMAP_00001185
BDMAP_00001186
BDMAP_00001187
BDMAP_00001188
BDMAP_00001189
BDMAP_00001190
BDMAP_00001191
BDMAP_00001192
BDMAP_00001193
BDMAP_00001194
BDMAP_00001195
BDMAP_00001196
BDMAP_00001197
BDMAP_00001198
BDMAP_00001199
BDMAP_00001200
BDMAP_00001201
BDMAP_00001202
BDMAP_00001203
BDMAP_00001204
BDMAP_00001205
BDMAP_00001206
BDMAP_00001207
BDMAP_00001208
BDMAP_00001209
BDMAP_00001210
BDMAP_00001211
BDMAP_00001212
BDMAP_00001213
BDMAP_00001214
BDMAP_00001215
BDMAP_00001216
BDMAP_00001217
BDMAP_00001218
BDMAP_00001219
BDMAP_00001220
BDMAP_00001221
BDMAP_00001222
BDMAP_00001223
BDMAP_00001224
BDMAP_00001225
BDMAP_00001226
BDMAP_00001227
BDMAP_00001228
BDMAP_00001229
BDMAP_00001230
BDMAP_00001231
BDMAP_00001232
BDMAP_00001233
BDMAP_00001234
BDMAP_00001235
BDMAP_00001236
BDMAP_00001237
BDMAP_00001238
BDMAP_00001239
BDMAP_00001240
BDMAP_00001241
BDMAP_00001242
BDMAP_00001243
BDMAP_00001244
BDMAP_00001245
BDMAP_00001246
BDMAP_00001247
BDMAP_00001248
BDMAP_00001249
BDMAP_00001250
BDMAP_00001251
BDMAP_00001252
BDMAP_00001253
BDMAP_00001254
BDMAP_00001255
BDMAP_00001256
BDMAP_00001257
BDMAP_00001258
BDMAP_00001259
BDMAP_00001260
BDMAP_00001261
BDMAP_00001262
BDMAP_00001263
BDMAP_00001264
BDMAP_00001265
BDMAP_00001266
BDMAP_00001267
BDMAP_00001268
BDMAP_00001269
BDMAP_00001270
BDMAP_00001271
BDMAP_00001272
BDMAP_00001273
BDMAP_00001274
BDMAP_00001275
BDMAP_00001276
BDMAP_00001277
BDMAP_00001278
BDMAP_00001279
BDMAP_00001280
BDMAP_00001281
BDMAP_00001282
BDMAP_00001283
BDMAP_00001284
BDMAP_00001285
BDMAP_00001286
BDMAP_00001287
BDMAP_00001288
BDMAP_00001289
BDMAP_00001290
BDMAP_00001291
BDMAP_00001292
BDMAP_00001293
BDMAP_00001294
BDMAP_00001295
BDMAP_00001296
BDMAP_00001297
BDMAP_00001298
BDMAP_00001299
BDMAP_00001300
BDMAP_00001301
BDMAP_00001302
BDMAP_00001303
BDMAP_00001304
BDMAP_00001305
BDMAP_00001306
BDMAP_00001307
BDMAP_00001308
BDMAP_00001309
BDMAP_00001310
BDMAP_00001311
BDMAP_00001312
BDMAP_00001313
BDMAP_00001314
BDMAP_00001315
BDMAP_00001316
BDMAP_00001317
BDMAP_00001318
BDMAP_00001319
BDMAP_00001320
BDMAP_00001321
BDMAP_00001322
BDMAP_00001323
BDMAP_00001324
BDMAP_00001325
BDMAP_00001326
BDMAP_00001327
BDMAP_00001328
BDMAP_00001329
BDMAP_00001330
BDMAP_00001331
BDMAP_00001332
BDMAP_00001333
BDMAP_00001334
BDMAP_00001335
BDMAP_00001336
BDMAP_00001337
BDMAP_00001338
BDMAP_00001339
BDMAP_00001340
BDMAP_00001341
BDMAP_00001342
BDMAP_00001343
BDMAP_00001344
BDMAP_00001345
BDMAP_00001346
BDMAP_00001347
BDMAP_00001348
BDMAP_00001349
BDMAP_00001350
BDMAP_00001351
BDMAP_00001352
BDMAP_00001353
BDMAP_00001354
BDMAP_00001355
BDMAP_00001356
BDMAP_00001357
BDMAP_00001358
BDMAP_00001359
BDMAP_00001360
BDMAP_00001361
BDMAP_00001362
BDMAP_00001363
BDMAP_00001364
BDMAP_00001365
BDMAP_00001366
BDMAP_00001367
BDMAP_00001368
BDMAP_00001369
BDMAP_00001370
BDMAP_00001371
BDMAP_00001372
BDMAP_00001373
BDMAP_00001374
BDMAP_00001375
BDMAP_00001376
BDMAP_00001377
BDMAP_00001378
BDMAP_00001379
BDMAP_00001380
BDMAP_00001381
BDMAP_00001382
BDMAP_00001383
BDMAP_00001384
BDMAP_00001385
BDMAP_00001386
BDMAP_00001387
BDMAP_00001388
BDMAP_00001389
BDMAP_00001390
BDMAP_00001391
BDMAP_00001392
BDMAP_00001393
BDMAP_00001394
BDMAP_00001395
BDMAP_00001396
BDMAP_00001397
BDMAP_00001398
BDMAP_00001399
BDMAP_00001400
BDMAP_00001401
BDMAP_00001402
BDMAP_00001403
BDMAP_00001404
BDMAP_00001405
BDMAP_00001406
BDMAP_00001407
BDMAP_00001408
BDMAP_00001409
BDMAP_00001410
BDMAP_00001411
BDMAP_00001412
BDMAP_00001413
BDMAP_00001414
BDMAP_00001415
BDMAP_00001416
BDMAP_00001417
BDMAP_00001418
BDMAP_00001419
BDMAP_00001420
BDMAP_00001421
BDMAP_00001422
BDMAP_00001423
BDMAP_00001424
BDMAP_00001425
BDMAP_00001426
BDMAP_00001427
BDMAP_00001428
BDMAP_00001429
BDMAP_00001430
BDMAP_00001431
BDMAP_00001432
BDMAP_00001433
BDMAP_00001434
BDMAP_00001435
BDMAP_00001436
BDMAP_00001437
BDMAP_00001438
BDMAP_00001439
BDMAP_00001440
BDMAP_00001441
BDMAP_00001442
BDMAP_00001443
BDMAP_00001444
BDMAP_00001445
BDMAP_00001446
BDMAP_00001447
BDMAP_00001448
BDMAP_00001449
BDMAP_00001450
BDMAP_00001451
BDMAP_00001452
BDMAP_00001453
BDMAP_00001454
BDMAP_00001455
BDMAP_00001456
BDMAP_00001457
BDMAP_00001458
BDMAP_00001459
BDMAP_00001460
BDMAP_00001461
BDMAP_00001462
BDMAP_00001463
BDMAP_00001464
BDMAP_00001465
BDMAP_00001466
BDMAP_00001467
BDMAP_00001468
BDMAP_00001469
BDMAP_00001470
BDMAP_00001471
BDMAP_00001472
BDMAP_00001473
BDMAP_00001474
BDMAP_00001475
BDMAP_00001476
BDMAP_00001477
BDMAP_00001478
BDMAP_00001479
BDMAP_00001480
BDMAP_00001481
BDMAP_00001482
BDMAP_00001483
BDMAP_00001484
BDMAP_00001485
BDMAP_00001486
BDMAP_00001487
BDMAP_00001488
BDMAP_00001489
BDMAP_00001490
BDMAP_00001491
BDMAP_00001492
BDMAP_00001493
BDMAP_00001494
BDMAP_00001495
BDMAP_00001496
BDMAP_00001497
BDMAP_00001498
BDMAP_00001499
BDMAP_00001500
BDMAP_00001501
BDMAP_00001502
BDMAP_00001503
BDMAP_00001504
BDMAP_00001505
BDMAP_00001506
BDMAP_00001507
BDMAP_00001508
BDMAP_00001509
BDMAP_00001510
BDMAP_00001511
BDMAP_00001512
BDMAP_00001513
BDMAP_00001514
BDMAP_00001515
BDMAP_00001516
BDMAP_00001517
BDMAP_00001518
BDMAP_00001519
BDMAP_00001520
BDMAP_00001521
BDMAP_00001522
BDMAP_00001523
BDMAP_00001524
BDMAP_00001525
BDMAP_00001526
BDMAP_00001527
BDMAP_00001528
BDMAP_00001529
BDMAP_00001530
BDMAP_00001531
BDMAP_00001532
BDMAP_00001533
BDMAP_00001534
BDMAP_00001535
BDMAP_00001536
BDMAP_00001537
BDMAP_00001538
BDMAP_00001539
BDMAP_00001540
BDMAP_00001541
BDMAP_00001542
BDMAP_00001543
BDMAP_00001544
BDMAP_00001545
BDMAP_00001546
BDMAP_00001547
BDMAP_00001548
BDMAP_00001549
BDMAP_00001550
BDMAP_00001551
BDMAP_00001552
BDMAP_00001553
BDMAP_00001554
BDMAP_00001555
BDMAP_00001556
BDMAP_00001557
BDMAP_00001558
BDMAP_00001559
BDMAP_00001560
BDMAP_00001561
BDMAP_00001562
BDMAP_00001563
BDMAP_00001564
BDMAP_00001565
BDMAP_00001566
BDMAP_00001567
BDMAP_00001568
BDMAP_00001569
BDMAP_00001570
BDMAP_00001571
BDMAP_00001572
BDMAP_00001573
BDMAP_00001574
BDMAP_00001575
BDMAP_00001576
BDMAP_00001577
BDMAP_00001578
BDMAP_00001579
BDMAP_00001580
BDMAP_00001581
BDMAP_00001582
BDMAP_00001583
BDMAP_00001584
BDMAP_00001585
BDMAP_00001586
BDMAP_00001587
BDMAP_00001588
BDMAP_00001589
BDMAP_00001590
BDMAP_00001591
BDMAP_00001592
BDMAP_00001593
BDMAP_00001594
BDMAP_00001595
BDMAP_00001596
BDMAP_00001597
BDMAP_00001598
BDMAP_00001599
BDMAP_00001600
BDMAP_00001601
BDMAP_00001602
BDMAP_00001603
BDMAP_00001604
BDMAP_00001605
BDMAP_00001606
BDMAP_00001607
BDMAP_00001608
BDMAP_00001609
BDMAP_00001610
BDMAP_00001611
BDMAP_00001612
BDMAP_00001613
BDMAP_00001614
BDMAP_00001615
BDMAP_00001616
BDMAP_00001617
BDMAP_00001618
BDMAP_00001619
BDMAP_00001620
BDMAP_00001621
BDMAP_00001622
BDMAP_00001623
BDMAP_00001624
BDMAP_00001625
BDMAP_00001626
BDMAP_00001627
BDMAP_00001628
BDMAP_00001629
BDMAP_00001630
BDMAP_00001631
BDMAP_00001632
BDMAP_00001633
BDMAP_00001634
BDMAP_00001635
BDMAP_00001636
BDMAP_00001637
BDMAP_00001638
BDMAP_00001639
BDMAP_00001640
BDMAP_00001641
BDMAP_00001642
BDMAP_00001643
BDMAP_00001644
BDMAP_00001645
BDMAP_00001646
BDMAP_00001647
BDMAP_00001648
BDMAP_00001649
BDMAP_00001650
BDMAP_00001651
BDMAP_00001652
BDMAP_00001653
BDMAP_00001654
BDMAP_00001655
BDMAP_00001656
BDMAP_00001657
BDMAP_00001658
BDMAP_00001659
BDMAP_00001660
BDMAP_00001661
BDMAP_00001662
BDMAP_00001663
BDMAP_00001664
BDMAP_00001665
BDMAP_00001666
BDMAP_00001667
BDMAP_00001668
BDMAP_00001669
BDMAP_00001670
BDMAP_00001671
BDMAP_00001672
BDMAP_00001673
BDMAP_00001674
BDMAP_00001675
BDMAP_00001676
BDMAP_00001677
BDMAP_00001678
BDMAP_00001679
BDMAP_00001680
BDMAP_00001681
BDMAP_00001682
BDMAP_00001683
BDMAP_00001684
BDMAP_00001685
BDMAP_00001686
BDMAP_00001687
BDMAP_00001688
BDMAP_00001689
BDMAP_00001690
BDMAP_00001691
BDMAP_00001692
BDMAP_00001693
BDMAP_00001694
BDMAP_00001695
BDMAP_00001696
BDMAP_00001697
BDMAP_00001698
BDMAP_00001699
BDMAP_00001700
BDMAP_00001701
BDMAP_00001702
BDMAP_00001703
BDMAP_00001704
BDMAP_00001705
BDMAP_00001706
BDMAP_00001707
BDMAP_00001708
BDMAP_00001709
BDMAP_00001710
BDMAP_00001711
BDMAP_00001712
BDMAP_00001713
BDMAP_00001714
BDMAP_00001715
BDMAP_00001716
BDMAP_00001717
BDMAP_00001718
BDMAP_00001719
BDMAP_00001720
BDMAP_00001721
BDMAP_00001722
BDMAP_00001723
BDMAP_00001724
BDMAP_00001725
BDMAP_00001726
BDMAP_00001727
BDMAP_00001728
BDMAP_00001729
BDMAP_00001730
BDMAP_00001731
BDMAP_00001732
BDMAP_00001733
BDMAP_00001734
BDMAP_00001735
BDMAP_00001736
BDMAP_00001737
BDMAP_00001738
BDMAP_00001739
BDMAP_00001740
BDMAP_00001741
BDMAP_00001742
BDMAP_00001743
BDMAP_00001744
BDMAP_00001745
BDMAP_00001746
BDMAP_00001747
BDMAP_00001748
BDMAP_00001749
BDMAP_00001750
BDMAP_00001751
BDMAP_00001752
BDMAP_00001753
BDMAP_00001754
BDMAP_00001755
BDMAP_00001756
BDMAP_00001757
BDMAP_00001758
BDMAP_00001759
BDMAP_00001760
BDMAP_00001761
BDMAP_00001762
BDMAP_00001763
BDMAP_00001764
BDMAP_00001765
BDMAP_00001766
BDMAP_00001767
BDMAP_00001768
BDMAP_00001769
BDMAP_00001770
BDMAP_00001771
BDMAP_00001772
BDMAP_00001773
BDMAP_00001774
BDMAP_00001775
BDMAP_00001776
BDMAP_00001777
BDMAP_00001778
BDMAP_00001779
BDMAP_00001780
BDMAP_00001781
BDMAP_00001782
BDMAP_00001783
BDMAP_00001784
BDMAP_00001785
BDMAP_00001786
BDMAP_00001787
BDMAP_00001788
BDMAP_00001789
BDMAP_00001790
BDMAP_00001791
BDMAP_00001792
BDMAP_00001793
BDMAP_00001794
BDMAP_00001795
BDMAP_00001796
BDMAP_00001797
BDMAP_00001798
BDMAP_00001799
BDMAP_00001800
BDMAP_00001801
BDMAP_00001802
BDMAP_00001803
BDMAP_00001804
BDMAP_00001805
BDMAP_00001806
BDMAP_00001807
BDMAP_00001808
BDMAP_00001809
BDMAP_00001810
BDMAP_00001811
BDMAP_00001812
BDMAP_00001813
BDMAP_00001814
BDMAP_00001815
BDMAP_00001816
BDMAP_00001817
BDMAP_00001818
BDMAP_00001819
BDMAP_00001820
BDMAP_00001821
BDMAP_00001822
BDMAP_00001823
BDMAP_00001824
BDMAP_00001825
BDMAP_00001826
BDMAP_00001827
BDMAP_00001828
BDMAP_00001829
BDMAP_00001830
BDMAP_00001831
BDMAP_00001832
BDMAP_00001833
BDMAP_00001834
BDMAP_00001835
BDMAP_00001836
BDMAP_00001837
BDMAP_00001838
BDMAP_00001839
BDMAP_00001840
BDMAP_00001841
BDMAP_00001842
BDMAP_00001843
BDMAP_00001844
BDMAP_00001845
BDMAP_00001846
BDMAP_00001847
BDMAP_00001848
BDMAP_00001849
BDMAP_00001850
BDMAP_00001851
BDMAP_00001852
BDMAP_00001853
BDMAP_00001854
BDMAP_00001855
BDMAP_00001856
BDMAP_00001857
BDMAP_00001858
BDMAP_00001859
BDMAP_00001860
BDMAP_00001861
BDMAP_00001862
BDMAP_00001863
BDMAP_00001864
BDMAP_00001865
BDMAP_00001866
BDMAP_00001867
BDMAP_00001868
BDMAP_00001869
BDMAP_00001870
BDMAP_00001871
BDMAP_00001872
BDMAP_00001873
BDMAP_00001874
BDMAP_00001875
BDMAP_00001876
BDMAP_00001877
BDMAP_00001878
BDMAP_00001879
BDMAP_00001880
BDMAP_00001881
BDMAP_00001882
BDMAP_00001883
BDMAP_00001884
BDMAP_00001885
BDMAP_00001886
BDMAP_00001887
BDMAP_00001888
BDMAP_00001889
BDMAP_00001890
BDMAP_00001891
BDMAP_00001892
BDMAP_00001893
BDMAP_00001894
BDMAP_00001895
BDMAP_00001896
BDMAP_00001897
BDMAP_00001898
BDMAP_00001899
BDMAP_00001900
BDMAP_00001901
BDMAP_00001902
BDMAP_00001903
BDMAP_00001904
BDMAP_00001905
BDMAP_00001906
BDMAP_00001907
BDMAP_00001908
BDMAP_00001909
BDMAP_00001910
BDMAP_00001911
BDMAP_00001912
BDMAP_00001913
BDMAP_00001914
BDMAP_00001915
BDMAP_00001916
BDMAP_00001917
BDMAP_00001918
BDMAP_00001919
BDMAP_00001920
BDMAP_00001921
BDMAP_00001922
BDMAP_00001923
BDMAP_00001924
BDMAP_00001925
BDMAP_00001926
BDMAP_00001927
BDMAP_00001928
BDMAP_00001929
BDMAP_00001930
BDMAP_00001931
BDMAP_00001932
BDMAP_00001933
BDMAP_00001934
BDMAP_00001935
BDMAP_00001936
BDMAP_00001937
BDMAP_00001938
BDMAP_00001939
BDMAP_00001940
BDMAP_00001941
BDMAP_00001942
BDMAP_00001943
BDMAP_00001944
BDMAP_00001945
BDMAP_00001946
BDMAP_00001947
BDMAP_00001948
BDMAP_00001949
BDMAP_00001950
BDMAP_00001951
BDMAP_00001952
BDMAP_00001953
BDMAP_00001954
BDMAP_00001955
BDMAP_00001956
BDMAP_00001957
BDMAP_00001958
BDMAP_00001959
BDMAP_00001960
BDMAP_00001961
BDMAP_00001962
BDMAP_00001963
BDMAP_00001964
BDMAP_00001965
BDMAP_00001966
BDMAP_00001967
BDMAP_00001968
BDMAP_00001969
BDMAP_00001970
BDMAP_00001971
BDMAP_00001972
BDMAP_00001973
BDMAP_00001974
BDMAP_00001975
BDMAP_00001976
BDMAP_00001977
BDMAP_00001978
BDMAP_00001979
BDMAP_00001980
BDMAP_00001981
BDMAP_00001982
BDMAP_00001983
BDMAP_00001984
BDMAP_00001985
BDMAP_00001986
BDMAP_00001987
BDMAP_00001988
BDMAP_00001989
BDMAP_00001990
BDMAP_00001991
BDMAP_00001992
BDMAP_00001993
BDMAP_00001994
BDMAP_00001995
BDMAP_00001996
BDMAP_00001997
BDMAP_00001998
BDMAP_00001999
BDMAP_00002000
BDMAP_00002001
BDMAP_00002002
BDMAP_00002003
BDMAP_00002004
BDMAP_00002005
BDMAP_00002006
BDMAP_00002007
BDMAP_00002008
BDMAP_00002009
BDMAP_00002010
BDMAP_00002011
BDMAP_00002012
BDMAP_00002013
BDMAP_00002014
BDMAP_00002015
BDMAP_00002016
BDMAP_00002017
BDMAP_00002018
BDMAP_00002019
BDMAP_00002020
BDMAP_00002021
BDMAP_00002022
BDMAP_00002023
BDMAP_00002024
BDMAP_00002025
BDMAP_00002026
BDMAP_00002027
BDMAP_00002028
BDMAP_00002029
BDMAP_00002030
BDMAP_00002031
BDMAP_00002032
BDMAP_00002033
BDMAP_00002034
BDMAP_00002035
BDMAP_00002036
BDMAP_00002037
BDMAP_00002038
BDMAP_00002039
BDMAP_00002040
BDMAP_00002041
BDMAP_00002042
BDMAP_00002043
BDMAP_00002044
BDMAP_00002045
BDMAP_00002046
BDMAP_00002047
BDMAP_00002048
BDMAP_00002049
BDMAP_00002050
BDMAP_00002051
BDMAP_00002052
BDMAP_00002053
BDMAP_00002054
BDMAP_00002055
BDMAP_00002056
BDMAP_00002057
BDMAP_00002058
BDMAP_00002059
BDMAP_00002060
BDMAP_00002061
BDMAP_00002062
BDMAP_00002063
BDMAP_00002064
BDMAP_00002065
BDMAP_00002066
BDMAP_00002067
BDMAP_00002068
BDMAP_00002069
BDMAP_00002070
BDMAP_00002071
BDMAP_00002072
BDMAP_00002073
BDMAP_00002074
BDMAP_00002075
BDMAP_00002076
BDMAP_00002077
BDMAP_00002078
BDMAP_00002079
BDMAP_00002080
BDMAP_00002081
BDMAP_00002082
BDMAP_00002083
BDMAP_00002084
BDMAP_00002085
BDMAP_00002086
BDMAP_00002087
BDMAP_00002088
BDMAP_00002089
BDMAP_00002090
BDMAP_00002091
BDMAP_00002092
BDMAP_00002093
BDMAP_00002094
BDMAP_00002095
BDMAP_00002096
BDMAP_00002097
BDMAP_00002098
BDMAP_00002099
BDMAP_00002100
BDMAP_00002101
BDMAP_00002102
BDMAP_00002103
BDMAP_00002104
BDMAP_00002105
BDMAP_00002106
BDMAP_00002107
BDMAP_00002108
BDMAP_00002109
BDMAP_00002110
BDMAP_00002111
BDMAP_00002112
BDMAP_00002113
BDMAP_00002114
BDMAP_00002115
BDMAP_00002116
BDMAP_00002117
BDMAP_00002118
BDMAP_00002119
BDMAP_00002120
BDMAP_00002121
BDMAP_00002122
BDMAP_00002123
BDMAP_00002124
BDMAP_00002125
BDMAP_00002126
BDMAP_00002127
BDMAP_00002128
BDMAP_00002129
BDMAP_00002130
BDMAP_00002131
BDMAP_00002132
BDMAP_00002133
BDMAP_00002134
BDMAP_00002135
BDMAP_00002136
BDMAP_00002137
BDMAP_00002138
BDMAP_00002139
BDMAP_00002140
BDMAP_00002141
BDMAP_00002142
BDMAP_00002143
BDMAP_00002144
BDMAP_00002145
BDMAP_00002146
BDMAP_00002147
BDMAP_00002148
BDMAP_00002149
BDMAP_00002150
BDMAP_00002151
BDMAP_00002152
BDMAP_00002153
BDMAP_00002154
BDMAP_00002155
BDMAP_00002156
BDMAP_00002157
BDMAP_00002158
BDMAP_00002159
BDMAP_00002160
BDMAP_00002161
BDMAP_00002162
BDMAP_00002163
BDMAP_00002164
BDMAP_00002165
BDMAP_00002166
BDMAP_00002167
BDMAP_00002168
BDMAP_00002169
BDMAP_00002170
BDMAP_00002171
BDMAP_00002172
BDMAP_00002173
BDMAP_00002174
BDMAP_00002175
BDMAP_00002176
BDMAP_00002177
BDMAP_00002178
BDMAP_00002179
BDMAP_00002180
BDMAP_00002181
BDMAP_00002182
BDMAP_00002183
BDMAP_00002184
BDMAP_00002185
BDMAP_00002186
BDMAP_00002187
BDMAP_00002188
BDMAP_00002189
BDMAP_00002190
BDMAP_00002191
BDMAP_00002192
BDMAP_00002193
BDMAP_00002194
BDMAP_00002195
BDMAP_00002196
BDMAP_00002197
BDMAP_00002198
BDMAP_00002199
BDMAP_00002200
BDMAP_00002201
BDMAP_00002202
BDMAP_00002203
BDMAP_00002204
BDMAP_00002205
BDMAP_00002206
BDMAP_00002207
BDMAP_00002208
BDMAP_00002209
BDMAP_00002210
BDMAP_00002211
BDMAP_00002212
BDMAP_00002213
BDMAP_00002214
BDMAP_00002215
BDMAP_00002216
BDMAP_00002217
BDMAP_00002218
BDMAP_00002219
BDMAP_00002220
BDMAP_00002221
BDMAP_00002222
BDMAP_00002223
BDMAP_00002224
BDMAP_00002225
BDMAP_00002226
BDMAP_00002227
BDMAP_00002228
BDMAP_00002229
BDMAP_00002230
BDMAP_00002231
BDMAP_00002232
BDMAP_00002233
BDMAP_00002234
BDMAP_00002235
BDMAP_00002236
BDMAP_00002237
BDMAP_00002238
BDMAP_00002239
BDMAP_00002240
BDMAP_00002241
BDMAP_00002242
BDMAP_00002243
BDMAP_00002244
BDMAP_00002245
BDMAP_00002246
BDMAP_00002247
BDMAP_00002248
BDMAP_00002249
BDMAP_00002250
BDMAP_00002251
BDMAP_00002252
BDMAP_00002253
BDMAP_00002254
BDMAP_00002255
BDMAP_00002256
BDMAP_00002257
BDMAP_00002258
BDMAP_00002259
BDMAP_00002260
BDMAP_00002261
BDMAP_00002262
BDMAP_00002263
BDMAP_00002264
BDMAP_00002265
BDMAP_00002266
BDMAP_00002267
BDMAP_00002268
BDMAP_00002269
BDMAP_00002270
BDMAP_00002271
BDMAP_00002272
BDMAP_00002273
BDMAP_00002274
BDMAP_00002275
BDMAP_00002276
BDMAP_00002277
BDMAP_00002278
BDMAP_00002279
BDMAP_00002280
BDMAP_00002281
BDMAP_00002282
BDMAP_00002283
BDMAP_00002284
BDMAP_00002285
BDMAP_00002286
BDMAP_00002287
BDMAP_00002288
BDMAP_00002289
BDMAP_00002290
BDMAP_00002291
BDMAP_00002292
BDMAP_00002293
BDMAP_00002294
BDMAP_00002295
BDMAP_00002296
BDMAP_00002297
BDMAP_00002298
BDMAP_00002299
BDMAP_00002300
BDMAP_00002301
BDMAP_00002302
BDMAP_00002303
BDMAP_00002304
BDMAP_00002305
BDMAP_00002306
BDMAP_00002307
BDMAP_00002308
BDMAP_00002309
BDMAP_00002310
BDMAP_00002311
BDMAP_00002312
BDMAP_00002313
BDMAP_00002314
BDMAP_00002315
BDMAP_00002316
BDMAP_00002317
BDMAP_00002318
BDMAP_00002319
BDMAP_00002320
BDMAP_00002321
BDMAP_00002322
BDMAP_00002323
BDMAP_00002324
BDMAP_00002325
BDMAP_00002326
BDMAP_00002327
BDMAP_00002328
BDMAP_00002329
BDMAP_00002330
BDMAP_00002331
BDMAP_00002332
BDMAP_00002333
BDMAP_00002334
BDMAP_00002335
BDMAP_00002336
BDMAP_00002337
BDMAP_00002338
BDMAP_00002339
BDMAP_00002340
BDMAP_00002341
BDMAP_00002342
BDMAP_00002343
BDMAP_00002344
BDMAP_00002345
BDMAP_00002346
BDMAP_00002347
BDMAP_00002348
BDMAP_00002349
BDMAP_00002350
BDMAP_00002351
BDMAP_00002352
BDMAP_00002353
BDMAP_00002354
BDMAP_00002355
BDMAP_00002356
BDMAP_00002357
BDMAP_00002358
BDMAP_00002359
BDMAP_00002360
BDMAP_00002361
BDMAP_00002362
BDMAP_00002363
BDMAP_00002364
BDMAP_00002365
BDMAP_00002366
BDMAP_00002367
BDMAP_00002368
BDMAP_00002369
BDMAP_00002370
BDMAP_00002371
BDMAP_00002372
BDMAP_00002373
BDMAP_00002374
BDMAP_00002375
BDMAP_00002376
BDMAP_00002377
BDMAP_00002378
BDMAP_00002379
BDMAP_00002380
BDMAP_00002381
BDMAP_00002382
BDMAP_00002383
BDMAP_00002384
BDMAP_00002385
BDMAP_00002386
BDMAP_00002387
BDMAP_00002388
BDMAP_00002389
BDMAP_00002390
BDMAP_00002391
BDMAP_00002392
BDMAP_00002393
BDMAP_00002394
BDMAP_00002395
BDMAP_00002396
BDMAP_00002397
BDMAP_00002398
BDMAP_00002399
BDMAP_00002400
BDMAP_00002401
BDMAP_00002402
BDMAP_00002403
BDMAP_00002404
BDMAP_00002405
BDMAP_00002406
BDMAP_00002407
BDMAP_00002408
BDMAP_00002409
BDMAP_00002410
BDMAP_00002411
BDMAP_00002412
BDMAP_00002413
BDMAP_00002414
BDMAP_00002415
BDMAP_00002416
BDMAP_00002417
BDMAP_00002418
BDMAP_00002419
BDMAP_00002420
BDMAP_00002421
BDMAP_00002422
BDMAP_00002423
BDMAP_00002424
BDMAP_00002425
BDMAP_00002426
BDMAP_00002427
BDMAP_00002428
BDMAP_00002429
BDMAP_00002430
BDMAP_00002431
BDMAP_00002432
BDMAP_00002433
BDMAP_00002434
BDMAP_00002435
BDMAP_00002436
BDMAP_00002437
BDMAP_00002438
BDMAP_00002439
BDMAP_00002440
BDMAP_00002441
BDMAP_00002442
BDMAP_00002443
BDMAP_00002444
BDMAP_00002445
BDMAP_00002446
BDMAP_00002447
BDMAP_00002448
BDMAP_00002449
BDMAP_00002450
BDMAP_00002451
BDMAP_00002452
BDMAP_00002453
BDMAP_00002454
BDMAP_00002455
BDMAP_00002456
BDMAP_00002457
BDMAP_00002458
BDMAP_00002459
BDMAP_00002460
BDMAP_00002461
BDMAP_00002462
BDMAP_00002463
BDMAP_00002464
BDMAP_00002465
BDMAP_00002466
BDMAP_00002467
BDMAP_00002468
BDMAP_00002469
BDMAP_00002470
BDMAP_00002471
BDMAP_00002472
BDMAP_00002473
BDMAP_00002474
BDMAP_00002475
BDMAP_00002476
BDMAP_00002477
BDMAP_00002478
BDMAP_00002479
BDMAP_00002480
BDMAP_00002481
BDMAP_00002482
BDMAP_00002483
BDMAP_00002484
BDMAP_00002485
BDMAP_00002486
BDMAP_00002487
BDMAP_00002488
BDMAP_00002489
BDMAP_00002490
BDMAP_00002491
BDMAP_00002492
BDMAP_00002493
BDMAP_00002494
BDMAP_00002495
BDMAP_00002496
BDMAP_00002497
BDMAP_00002498
BDMAP_00002499
BDMAP_00002500
BDMAP_00002501
BDMAP_00002502
BDMAP_00002503
BDMAP_00002504
BDMAP_00002505
BDMAP_00002506
BDMAP_00002507
BDMAP_00002508
BDMAP_00002509
BDMAP_00002510
BDMAP_00002511
BDMAP_00002512
BDMAP_00002513
BDMAP_00002514
BDMAP_00002515
BDMAP_00002516
BDMAP_00002517
BDMAP_00002518
BDMAP_00002519
BDMAP_00002520
BDMAP_00002521
BDMAP_00002522
BDMAP_00002523
BDMAP_00002524
BDMAP_00002525
BDMAP_00002526
BDMAP_00002527
BDMAP_00002528
BDMAP_00002529
BDMAP_00002530
BDMAP_00002531
BDMAP_00002532
BDMAP_00002533
BDMAP_00002534
BDMAP_00002535
BDMAP_00002536
BDMAP_00002537
BDMAP_00002538
BDMAP_00002539
BDMAP_00002540
BDMAP_00002541
BDMAP_00002542
BDMAP_00002543
BDMAP_00002544
BDMAP_00002545
BDMAP_00002546
BDMAP_00002547
BDMAP_00002548
BDMAP_00002549
BDMAP_00002550
BDMAP_00002551
BDMAP_00002552
BDMAP_00002553
BDMAP_00002554
BDMAP_00002555
BDMAP_00002556
BDMAP_00002557
BDMAP_00002558
BDMAP_00002559
BDMAP_00002560
BDMAP_00002561
BDMAP_00002562
BDMAP_00002563
BDMAP_00002564
BDMAP_00002565
BDMAP_00002566
BDMAP_00002567
BDMAP_00002568
BDMAP_00002569
BDMAP_00002570
BDMAP_00002571
BDMAP_00002572
BDMAP_00002573
BDMAP_00002574
BDMAP_00002575
BDMAP_00002576
BDMAP_00002577
BDMAP_00002578
BDMAP_00002579
BDMAP_00002580
BDMAP_00002581
BDMAP_00002582
BDMAP_00002583
BDMAP_00002584
BDMAP_00002585
BDMAP_00002586
BDMAP_00002587
BDMAP_00002588
BDMAP_00002589
BDMAP_00002590
BDMAP_00002591
BDMAP_00002592
BDMAP_00002593
BDMAP_00002594
BDMAP_00002595
BDMAP_00002596
BDMAP_00002597
BDMAP_00002598
BDMAP_00002599
BDMAP_00002600
BDMAP_00002601
BDMAP_00002602
BDMAP_00002603
BDMAP_00002604
BDMAP_00002605
BDMAP_00002606
BDMAP_00002607
BDMAP_00002608
BDMAP_00002609
BDMAP_00002610
BDMAP_00002611
BDMAP_00002612
BDMAP_00002613
BDMAP_00002614
BDMAP_00002615
BDMAP_00002616
BDMAP_00002617
BDMAP_00002618
BDMAP_00002619
BDMAP_00002620
BDMAP_00002621
BDMAP_00002622
BDMAP_00002623
BDMAP_00002624
BDMAP_00002625
BDMAP_00002626
BDMAP_00002627
BDMAP_00002628
BDMAP_00002629
BDMAP_00002630
BDMAP_00002631
BDMAP_00002632
BDMAP_00002633
BDMAP_00002634
BDMAP_00002635
BDMAP_00002636
BDMAP_00002637
BDMAP_00002638
BDMAP_00002639
BDMAP_00002640
BDMAP_00002641
BDMAP_00002642
BDMAP_00002643
BDMAP_00002644
BDMAP_00002645
BDMAP_00002646
BDMAP_00002647
BDMAP_00002648
BDMAP_00002649
BDMAP_00002650
BDMAP_00002651
BDMAP_00002652
BDMAP_00002653
BDMAP_00002654
BDMAP_00002655
BDMAP_00002656
BDMAP_00002657
BDMAP_00002658
BDMAP_00002659
BDMAP_00002660
BDMAP_00002661
BDMAP_00002662
BDMAP_00002663
BDMAP_00002664
BDMAP_00002665
BDMAP_00002666
BDMAP_00002667
BDMAP_00002668
BDMAP_00002669
BDMAP_00002670
BDMAP_00002671
BDMAP_00002672
BDMAP_00002673
BDMAP_00002674
BDMAP_00002675
BDMAP_00002676
BDMAP_00002677
BDMAP_00002678
BDMAP_00002679
BDMAP_00002680
BDMAP_00002681
BDMAP_00002682
BDMAP_00002683
BDMAP_00002684
BDMAP_00002685
BDMAP_00002686
BDMAP_00002687
BDMAP_00002688
BDMAP_00002689
BDMAP_00002690
BDMAP_00002691
BDMAP_00002692
BDMAP_00002693
BDMAP_00002694
BDMAP_00002695
BDMAP_00002696
BDMAP_00002697
BDMAP_00002698
BDMAP_00002699
BDMAP_00002700
BDMAP_00002701
BDMAP_00002702
BDMAP_00002703
BDMAP_00002704
BDMAP_00002705
BDMAP_00002706
BDMAP_00002707
BDMAP_00002708
BDMAP_00002709
BDMAP_00002710
BDMAP_00002711
BDMAP_00002712
BDMAP_00002713
BDMAP_00002714
BDMAP_00002715
BDMAP_00002716
BDMAP_00002717
BDMAP_00002718
BDMAP_00002719
BDMAP_00002720
BDMAP_00002721
BDMAP_00002722
BDMAP_00002723
BDMAP_00002724
BDMAP_00002725
BDMAP_00002726
BDMAP_00002727
BDMAP_00002728
BDMAP_00002729
BDMAP_00002730
BDMAP_00002731
BDMAP_00002732
BDMAP_00002733
BDMAP_00002734
BDMAP_00002735
BDMAP_00002736
BDMAP_00002737
BDMAP_00002738
BDMAP_00002739
BDMAP_00002740
BDMAP_00002741
BDMAP_00002742
BDMAP_00002743
BDMAP_00002744
BDMAP_00002745
BDMAP_00002746
BDMAP_00002747
BDMAP_00002748
BDMAP_00002749
BDMAP_00002750
BDMAP_00002751
BDMAP_00002752
BDMAP_00002753
BDMAP_00002754
BDMAP_00002755
BDMAP_00002756
BDMAP_00002757
BDMAP_00002758
BDMAP_00002759
BDMAP_00002760
BDMAP_00002761
BDMAP_00002762
BDMAP_00002763
BDMAP_00002764
BDMAP_00002765
BDMAP_00002766
BDMAP_00002767
BDMAP_00002768
BDMAP_00002769
BDMAP_00002770
BDMAP_00002771
BDMAP_00002772
BDMAP_00002773
BDMAP_00002774
BDMAP_00002775
BDMAP_00002776
BDMAP_00002777
BDMAP_00002778
BDMAP_00002779
BDMAP_00002780
BDMAP_00002781
BDMAP_00002782
BDMAP_00002783
BDMAP_00002784
BDMAP_00002785
BDMAP_00002786
BDMAP_00002787
BDMAP_00002788
BDMAP_00002789
BDMAP_00002790
BDMAP_00002791
BDMAP_00002792
BDMAP_00002793
BDMAP_00002794
BDMAP_00002795
BDMAP_00002796
BDMAP_00002797
BDMAP_00002798
BDMAP_00002799
BDMAP_00002800
BDMAP_00002801
BDMAP_00002802
BDMAP_00002803
BDMAP_00002804
BDMAP_00002805
BDMAP_00002806
BDMAP_00002807
BDMAP_00002808
BDMAP_00002809
BDMAP_00002810
BDMAP_00002811
BDMAP_00002812
BDMAP_00002813
BDMAP_00002814
BDMAP_00002815
BDMAP_00002816
BDMAP_00002817
BDMAP_00002818
BDMAP_00002819
BDMAP_00002820
BDMAP_00002821
BDMAP_00002822
BDMAP_00002823
BDMAP_00002824
BDMAP_00002825
BDMAP_00002826
BDMAP_00002827
BDMAP_00002828
BDMAP_00002829
BDMAP_00002830
BDMAP_00002831
BDMAP_00002832
BDMAP_00002833
BDMAP_00002834
BDMAP_00002835
BDMAP_00002836
BDMAP_00002837
BDMAP_00002838
BDMAP_00002839
BDMAP_00002840
BDMAP_00002841
BDMAP_00002842
BDMAP_00002843
BDMAP_00002844
BDMAP_00002845
BDMAP_00002846
BDMAP_00002847
BDMAP_00002848
BDMAP_00002849
BDMAP_00002850
BDMAP_00002851
BDMAP_00002852
BDMAP_00002853
BDMAP_00002854
BDMAP_00002855
BDMAP_00002856
BDMAP_00002857
BDMAP_00002858
BDMAP_00002859
BDMAP_00002860
BDMAP_00002861
BDMAP_00002862
BDMAP_00002863
BDMAP_00002864
BDMAP_00002865
BDMAP_00002866
BDMAP_00002867
BDMAP_00002868
BDMAP_00002869
BDMAP_00002870
BDMAP_00002871
BDMAP_00002872
BDMAP_00002873
BDMAP_00002874
BDMAP_00002875
BDMAP_00002876
BDMAP_00002877
BDMAP_00002878
BDMAP_00002879
BDMAP_00002880
BDMAP_00002881
BDMAP_00002882
BDMAP_00002883
BDMAP_00002884
BDMAP_00002885
BDMAP_00002886
BDMAP_00002887
BDMAP_00002888
BDMAP_00002889
BDMAP_00002890
BDMAP_00002891
BDMAP_00002892
BDMAP_00002893
BDMAP_00002894
BDMAP_00002895
BDMAP_00002896
BDMAP_00002897
BDMAP_00002898
BDMAP_00002899
BDMAP_00002900
BDMAP_00002901
BDMAP_00002902
BDMAP_00002903
BDMAP_00002904
BDMAP_00002905
BDMAP_00002906
BDMAP_00002907
BDMAP_00002908
BDMAP_00002909
BDMAP_00002910
BDMAP_00002911
BDMAP_00002912
BDMAP_00002913
BDMAP_00002914
BDMAP_00002915
BDMAP_00002916
BDMAP_00002917
BDMAP_00002918
BDMAP_00002919
BDMAP_00002920
BDMAP_00002921
BDMAP_00002922
BDMAP_00002923
BDMAP_00002924
BDMAP_00002925
BDMAP_00002926
BDMAP_00002927
BDMAP_00002928
BDMAP_00002929
BDMAP_00002930
BDMAP_00002931
BDMAP_00002932
BDMAP_00002933
BDMAP_00002934
BDMAP_00002935
BDMAP_00002936
BDMAP_00002937
BDMAP_00002938
BDMAP_00002939
BDMAP_00002940
BDMAP_00002941
BDMAP_00002942
BDMAP_00002943
BDMAP_00002944
BDMAP_00002945
BDMAP_00002946
BDMAP_00002947
BDMAP_00002948
BDMAP_00002949
BDMAP_00002950
BDMAP_00002951
BDMAP_00002952
BDMAP_00002953
BDMAP_00002954
BDMAP_00002955
BDMAP_00002956
BDMAP_00002957
BDMAP_00002958
BDMAP_00002959
BDMAP_00002960
BDMAP_00002961
BDMAP_00002962
BDMAP_00002963
BDMAP_00002964
BDMAP_00002965
BDMAP_00002966
BDMAP_00002967
BDMAP_00002968
BDMAP_00002969
BDMAP_00002970
BDMAP_00002971
BDMAP_00002972
BDMAP_00002973
BDMAP_00002974
BDMAP_00002975
BDMAP_00002976
BDMAP_00002977
BDMAP_00002978
BDMAP_00002979
BDMAP_00002980
BDMAP_00002981
BDMAP_00002982
BDMAP_00002983
BDMAP_00002984
BDMAP_00002985
BDMAP_00002986
BDMAP_00002987
BDMAP_00002988
BDMAP_00002989
BDMAP_00002990
BDMAP_00002991
BDMAP_00002992
BDMAP_00002993
BDMAP_00002994
BDMAP_00002995
BDMAP_00002996
BDMAP_00002997
BDMAP_00002998
BDMAP_00002999
BDMAP_00003000
BDMAP_00003001
BDMAP_00003002
BDMAP_00003003
BDMAP_00003004
BDMAP_00003005
BDMAP_00003006
BDMAP_00003007
BDMAP_00003008
BDMAP_00003009
BDMAP_00003010
BDMAP_00003011
BDMAP_00003012
BDMAP_00003013
BDMAP_00003014
BDMAP_00003015
BDMAP_00003016
BDMAP_00003017
BDMAP_00003018
BDMAP_00003019
BDMAP_00003020
BDMAP_00003021
BDMAP_00003022
BDMAP_00003023
BDMAP_00003024
BDMAP_00003025
BDMAP_00003026
BDMAP_00003027
BDMAP_00003028
BDMAP_00003029
BDMAP_00003030
BDMAP_00003031
BDMAP_00003032
BDMAP_00003033
BDMAP_00003034
BDMAP_00003035
BDMAP_00003036
BDMAP_00003037
BDMAP_00003038
BDMAP_00003039
BDMAP_00003040
BDMAP_00003041
BDMAP_00003042
BDMAP_00003043
BDMAP_00003044
BDMAP_00003045
BDMAP_00003046
BDMAP_00003047
BDMAP_00003048
BDMAP_00003049
BDMAP_00003050
BDMAP_00003051
BDMAP_00003052
BDMAP_00003053
BDMAP_00003054
BDMAP_00003055
BDMAP_00003056
BDMAP_00003057
BDMAP_00003058
BDMAP_00003059
BDMAP_00003060
BDMAP_00003061
BDMAP_00003062
BDMAP_00003063
BDMAP_00003064
BDMAP_00003065
BDMAP_00003066
BDMAP_00003067
BDMAP_00003068
BDMAP_00003069
BDMAP_00003070
BDMAP_00003071
BDMAP_00003072
BDMAP_00003073
BDMAP_00003074
BDMAP_00003075
BDMAP_00003076
BDMAP_00003077
BDMAP_00003078
BDMAP_00003079
BDMAP_00003080
BDMAP_00003081
BDMAP_00003082
BDMAP_00003083
BDMAP_00003084
BDMAP_00003085
BDMAP_00003086
BDMAP_00003087
BDMAP_00003088
BDMAP_00003089
BDMAP_00003090
BDMAP_00003091
BDMAP_00003092
BDMAP_00003093
BDMAP_00003094
BDMAP_00003095
BDMAP_00003096
BDMAP_00003097
BDMAP_00003098
BDMAP_00003099
BDMAP_00003100
BDMAP_00003101
BDMAP_00003102
BDMAP_00003103
BDMAP_00003104
BDMAP_00003105
BDMAP_00003106
BDMAP_00003107
BDMAP_00003108
BDMAP_00003109
BDMAP_00003110
BDMAP_00003111
BDMAP_00003112
BDMAP_00003113
BDMAP_00003114
BDMAP_00003115
BDMAP_00003116
BDMAP_00003117
BDMAP_00003118
BDMAP_00003119
BDMAP_00003120
BDMAP_00003121
BDMAP_00003122
BDMAP_00003123
BDMAP_00003124
BDMAP_00003125
BDMAP_00003126
BDMAP_00003127
BDMAP_00003128
BDMAP_00003129
BDMAP_00003130
BDMAP_00003131
BDMAP_00003132
BDMAP_00003133
BDMAP_00003134
BDMAP_00003135
BDMAP_00003136
BDMAP_00003137
BDMAP_00003138
BDMAP_00003139
BDMAP_00003140
BDMAP_00003141
BDMAP_00003142
BDMAP_00003143
BDMAP_00003144
BDMAP_00003145
BDMAP_00003146
BDMAP_00003147
BDMAP_00003148
BDMAP_00003149
BDMAP_00003150
BDMAP_00003151
BDMAP_00003152
BDMAP_00003153
BDMAP_00003154
BDMAP_00003155
BDMAP_00003156
BDMAP_00003157
BDMAP_00003158
BDMAP_00003159
BDMAP_00003160
BDMAP_00003161
BDMAP_00003162
BDMAP_00003163
BDMAP_00003164
BDMAP_00003165
BDMAP_00003166
BDMAP_00003167
BDMAP_00003168
BDMAP_00003169
BDMAP_00003170
BDMAP_00003171
BDMAP_00003172
BDMAP_00003173
BDMAP_00003174
BDMAP_00003175
BDMAP_00003176
BDMAP_00003177
BDMAP_00003178
BDMAP_00003179
BDMAP_00003180
BDMAP_00003181
BDMAP_00003182
BDMAP_00003183
BDMAP_00003184
BDMAP_00003185
BDMAP_00003186
BDMAP_00003187
BDMAP_00003188
BDMAP_00003189
BDMAP_00003190
BDMAP_00003191
BDMAP_00003192
BDMAP_00003193
BDMAP_00003194
BDMAP_00003195
BDMAP_00003196
BDMAP_00003197
BDMAP_00003198
BDMAP_00003199
BDMAP_00003200
BDMAP_00003201
BDMAP_00003202
BDMAP_00003203
BDMAP_00003204
BDMAP_00003205
BDMAP_00003206
BDMAP_00003207
BDMAP_00003208
BDMAP_00003209
BDMAP_00003210
BDMAP_00003211
BDMAP_00003212
BDMAP_00003213
BDMAP_00003214
BDMAP_00003215
BDMAP_00003216
BDMAP_00003217
BDMAP_00003218
BDMAP_00003219
BDMAP_00003220
BDMAP_00003221
BDMAP_00003222
BDMAP_00003223
BDMAP_00003224
BDMAP_00003225
BDMAP_00003226
BDMAP_00003227
BDMAP_00003228
BDMAP_00003229
BDMAP_00003230
BDMAP_00003231
BDMAP_00003232
BDMAP_00003233
BDMAP_00003234
BDMAP_00003235
BDMAP_00003236
BDMAP_00003237
BDMAP_00003238
BDMAP_00003239
BDMAP_00003240
BDMAP_00003241
BDMAP_00003242
BDMAP_00003243
BDMAP_00003244
BDMAP_00003245
BDMAP_00003246
BDMAP_00003247
BDMAP_00003248
BDMAP_00003249
BDMAP_00003250
BDMAP_00003251
BDMAP_00003252
BDMAP_00003253
BDMAP_00003254
BDMAP_00003255
BDMAP_00003256
BDMAP_00003257
BDMAP_00003258
BDMAP_00003259
BDMAP_00003260
BDMAP_00003261
BDMAP_00003262
BDMAP_00003263
BDMAP_00003264
BDMAP_00003265
BDMAP_00003266
BDMAP_00003267
BDMAP_00003268
BDMAP_00003269
BDMAP_00003270
BDMAP_00003271
BDMAP_00003272
BDMAP_00003273
BDMAP_00003274
BDMAP_00003275
BDMAP_00003276
BDMAP_00003277
BDMAP_00003278
BDMAP_00003279
BDMAP_00003280
BDMAP_00003281
BDMAP_00003282
BDMAP_00003283
BDMAP_00003284
BDMAP_00003285
BDMAP_00003286
BDMAP_00003287
BDMAP_00003288
BDMAP_00003289
BDMAP_00003290
BDMAP_00003291
BDMAP_00003292
BDMAP_00003293
BDMAP_00003294
BDMAP_00003295
BDMAP_00003296
BDMAP_00003297
BDMAP_00003298
BDMAP_00003299
BDMAP_00003300
BDMAP_00003301
BDMAP_00003302
BDMAP_00003303
BDMAP_00003304
BDMAP_00003305
BDMAP_00003306
BDMAP_00003307
BDMAP_00003308
BDMAP_00003309
BDMAP_00003310
BDMAP_00003311
BDMAP_00003312
BDMAP_00003313
BDMAP_00003314
BDMAP_00003315
BDMAP_00003316
BDMAP_00003317
BDMAP_00003318
BDMAP_00003319
BDMAP_00003320
BDMAP_00003321
BDMAP_00003322
BDMAP_00003323
BDMAP_00003324
BDMAP_00003325
BDMAP_00003326
BDMAP_00003327
BDMAP_00003328
BDMAP_00003329
BDMAP_00003330
BDMAP_00003331
BDMAP_00003332
BDMAP_00003333
BDMAP_00003334
BDMAP_00003335
BDMAP_00003336
BDMAP_00003337
BDMAP_00003338
BDMAP_00003339
BDMAP_00003340
BDMAP_00003341
BDMAP_00003342
BDMAP_00003343
BDMAP_00003344
BDMAP_00003345
BDMAP_00003346
BDMAP_00003347
BDMAP_00003348
BDMAP_00003349
BDMAP_00003350
BDMAP_00003351
BDMAP_00003352
BDMAP_00003353
BDMAP_00003354
BDMAP_00003355
BDMAP_00003356
BDMAP_00003357
BDMAP_00003358
BDMAP_00003359
BDMAP_00003360
BDMAP_00003361
BDMAP_00003362
BDMAP_00003363
BDMAP_00003364
BDMAP_00003365
BDMAP_00003366
BDMAP_00003367
BDMAP_00003368
BDMAP_00003369
BDMAP_00003370
BDMAP_00003371
BDMAP_00003372
BDMAP_00003373
BDMAP_00003374
BDMAP_00003375
BDMAP_00003376
BDMAP_00003377
BDMAP_00003378
BDMAP_00003379
BDMAP_00003380
BDMAP_00003381
BDMAP_00003382
BDMAP_00003383
BDMAP_00003384
BDMAP_00003385
BDMAP_00003386
BDMAP_00003387
BDMAP_00003388
BDMAP_00003389
BDMAP_00003390
BDMAP_00003391
BDMAP_00003392
BDMAP_00003393
BDMAP_00003394
BDMAP_00003395
BDMAP_00003396
BDMAP_00003397
BDMAP_00003398
BDMAP_00003399
BDMAP_00003400
BDMAP_00003401
BDMAP_00003402
BDMAP_00003403
BDMAP_00003404
BDMAP_00003405
BDMAP_00003406
BDMAP_00003407
BDMAP_00003408
BDMAP_00003409
BDMAP_00003410
BDMAP_00003411
BDMAP_00003412
BDMAP_00003413
BDMAP_00003414
BDMAP_00003415
BDMAP_00003416
BDMAP_00003417
BDMAP_00003418
BDMAP_00003419
BDMAP_00003420
BDMAP_00003421
BDMAP_00003422
BDMAP_00003423
BDMAP_00003424
BDMAP_00003425
BDMAP_00003426
BDMAP_00003427
BDMAP_00003428
BDMAP_00003429
BDMAP_00003430
BDMAP_00003431
BDMAP_00003432
BDMAP_00003433
BDMAP_00003434
BDMAP_00003435
BDMAP_00003436
BDMAP_00003437
BDMAP_00003438
BDMAP_00003439
BDMAP_00003440
BDMAP_00003441
BDMAP_00003442
BDMAP_00003443
BDMAP_00003444
BDMAP_00003445
BDMAP_00003446
BDMAP_00003447
BDMAP_00003448
BDMAP_00003449
BDMAP_00003450
BDMAP_00003451
BDMAP_00003452
BDMAP_00003453
BDMAP_00003454
BDMAP_00003455
BDMAP_00003456
BDMAP_00003457
BDMAP_00003458
BDMAP_00003459
BDMAP_00003460
BDMAP_00003461
BDMAP_00003462
BDMAP_00003463
BDMAP_00003464
BDMAP_00003465
BDMAP_00003466
BDMAP_00003467
BDMAP_00003468
BDMAP_00003469
BDMAP_00003470
BDMAP_00003471
BDMAP_00003472
BDMAP_00003473
BDMAP_00003474
BDMAP_00003475
BDMAP_00003476
BDMAP_00003477
BDMAP_00003478
BDMAP_00003479
BDMAP_00003480
BDMAP_00003481
BDMAP_00003482
BDMAP_00003483
BDMAP_00003484
BDMAP_00003485
BDMAP_00003486
BDMAP_00003487
BDMAP_00003488
BDMAP_00003489
BDMAP_00003490
BDMAP_00003491
BDMAP_00003492
BDMAP_00003493
BDMAP_00003494
BDMAP_00003495
BDMAP_00003496
BDMAP_00003497
BDMAP_00003498
BDMAP_00003499
BDMAP_00003500
BDMAP_00003501
BDMAP_00003502
BDMAP_00003503
BDMAP_00003504
BDMAP_00003505
BDMAP_00003506
BDMAP_00003507
BDMAP_00003508
BDMAP_00003509
BDMAP_00003510
BDMAP_00003511
BDMAP_00003512
BDMAP_00003513
BDMAP_00003514
BDMAP_00003515
BDMAP_00003516
BDMAP_00003517
BDMAP_00003518
BDMAP_00003519
BDMAP_00003520
BDMAP_00003521
BDMAP_00003522
BDMAP_00003523
BDMAP_00003524
BDMAP_00003525
BDMAP_00003526
BDMAP_00003527
BDMAP_00003528
BDMAP_00003529
BDMAP_00003530
BDMAP_00003531
BDMAP_00003532
BDMAP_00003533
BDMAP_00003534
BDMAP_00003535
BDMAP_00003536
BDMAP_00003537
BDMAP_00003538
BDMAP_00003539
BDMAP_00003540
BDMAP_00003541
BDMAP_00003542
BDMAP_00003543
BDMAP_00003544
BDMAP_00003545
BDMAP_00003546
BDMAP_00003547
BDMAP_00003548
BDMAP_00003549
BDMAP_00003550
BDMAP_00003551
BDMAP_00003552
BDMAP_00003553
BDMAP_00003554
BDMAP_00003555
BDMAP_00003556
BDMAP_00003557
BDMAP_00003558
BDMAP_00003559
BDMAP_00003560
BDMAP_00003561
BDMAP_00003562
BDMAP_00003563
BDMAP_00003564
BDMAP_00003565
BDMAP_00003566
BDMAP_00003567
BDMAP_00003568
BDMAP_00003569
BDMAP_00003570
BDMAP_00003571
BDMAP_00003572
BDMAP_00003573
BDMAP_00003574
BDMAP_00003575
BDMAP_00003576
BDMAP_00003577
BDMAP_00003578
BDMAP_00003579
BDMAP_00003580
BDMAP_00003581
BDMAP_00003582
BDMAP_00003583
BDMAP_00003584
BDMAP_00003585
BDMAP_00003586
BDMAP_00003587
BDMAP_00003588
BDMAP_00003589
BDMAP_00003590
BDMAP_00003591
BDMAP_00003592
BDMAP_00003593
BDMAP_00003594
BDMAP_00003595
BDMAP_00003596
BDMAP_00003597
BDMAP_00003598
BDMAP_00003599
BDMAP_00003600
BDMAP_00003601
BDMAP_00003602
BDMAP_00003603
BDMAP_00003604
BDMAP_00003605
BDMAP_00003606
BDMAP_00003607
BDMAP_00003608
BDMAP_00003609
BDMAP_00003610
BDMAP_00003611
BDMAP_00003612
BDMAP_00003613
BDMAP_00003614
BDMAP_00003615
BDMAP_00003616
BDMAP_00003617
BDMAP_00003618
BDMAP_00003619
BDMAP_00003620
BDMAP_00003621
BDMAP_00003622
BDMAP_00003623
BDMAP_00003624
BDMAP_00003625
BDMAP_00003626
BDMAP_00003627
BDMAP_00003628
BDMAP_00003629
BDMAP_00003630
BDMAP_00003631
BDMAP_00003632
BDMAP_00003633
BDMAP_00003634
BDMAP_00003635
BDMAP_00003636
BDMAP_00003637
BDMAP_00003638
BDMAP_00003639
BDMAP_00003640
BDMAP_00003641
BDMAP_00003642
BDMAP_00003643
BDMAP_00003644
BDMAP_00003645
BDMAP_00003646
BDMAP_00003647
BDMAP_00003648
BDMAP_00003649
BDMAP_00003650
BDMAP_00003651
BDMAP_00003652
BDMAP_00003653
BDMAP_00003654
BDMAP_00003655
BDMAP_00003656
BDMAP_00003657
BDMAP_00003658
BDMAP_00003659
BDMAP_00003660
BDMAP_00003661
BDMAP_00003662
BDMAP_00003663
BDMAP_00003664
BDMAP_00003665
BDMAP_00003666
BDMAP_00003667
BDMAP_00003668
BDMAP_00003669
BDMAP_00003670
BDMAP_00003671
BDMAP_00003672
BDMAP_00003673
BDMAP_00003674
BDMAP_00003675
BDMAP_00003676
BDMAP_00003677
BDMAP_00003678
BDMAP_00003679
BDMAP_00003680
BDMAP_00003681
BDMAP_00003682
BDMAP_00003683
BDMAP_00003684
BDMAP_00003685
BDMAP_00003686
BDMAP_00003687
BDMAP_00003688
BDMAP_00003689
BDMAP_00003690
BDMAP_00003691
BDMAP_00003692
BDMAP_00003693
BDMAP_00003694
BDMAP_00003695
BDMAP_00003696
BDMAP_00003697
BDMAP_00003698
BDMAP_00003699
BDMAP_00003700
BDMAP_00003701
BDMAP_00003702
BDMAP_00003703
BDMAP_00003704
BDMAP_00003705
BDMAP_00003706
BDMAP_00003707
BDMAP_00003708
BDMAP_00003709
BDMAP_00003710
BDMAP_00003711
BDMAP_00003712
BDMAP_00003713
BDMAP_00003714
BDMAP_00003715
BDMAP_00003716
BDMAP_00003717
BDMAP_00003718
BDMAP_00003719
BDMAP_00003720
BDMAP_00003721
BDMAP_00003722
BDMAP_00003723
BDMAP_00003724
BDMAP_00003725
BDMAP_00003726
BDMAP_00003727
BDMAP_00003728
BDMAP_00003729
BDMAP_00003730
BDMAP_00003731
BDMAP_00003732
BDMAP_00003733
BDMAP_00003734
BDMAP_00003735
BDMAP_00003736
BDMAP_00003737
BDMAP_00003738
BDMAP_00003739
BDMAP_00003740
BDMAP_00003741
BDMAP_00003742
BDMAP_00003743
BDMAP_00003744
BDMAP_00003745
BDMAP_00003746
BDMAP_00003747
BDMAP_00003748
BDMAP_00003749
BDMAP_00003750
BDMAP_00003751
BDMAP_00003752
BDMAP_00003753
BDMAP_00003754
BDMAP_00003755
BDMAP_00003756
BDMAP_00003757
BDMAP_00003758
BDMAP_00003759
BDMAP_00003760
BDMAP_00003761
BDMAP_00003762
BDMAP_00003763
BDMAP_00003764
BDMAP_00003765
BDMAP_00003766
BDMAP_00003767
BDMAP_00003768
BDMAP_00003769
BDMAP_00003770
BDMAP_00003771
BDMAP_00003772
BDMAP_00003773
BDMAP_00003774
BDMAP_00003775
BDMAP_00003776
BDMAP_00003777
BDMAP_00003778
BDMAP_00003779
BDMAP_00003780
BDMAP_00003781
BDMAP_00003782
BDMAP_00003783
BDMAP_00003784
BDMAP_00003785
BDMAP_00003786
BDMAP_00003787
BDMAP_00003788
BDMAP_00003789
BDMAP_00003790
BDMAP_00003791
BDMAP_00003792
BDMAP_00003793
BDMAP_00003794
BDMAP_00003795
BDMAP_00003796
BDMAP_00003797
BDMAP_00003798
BDMAP_00003799
BDMAP_00003800
BDMAP_00003801
BDMAP_00003802
BDMAP_00003803
BDMAP_00003804
BDMAP_00003805
BDMAP_00003806
BDMAP_00003807
BDMAP_00003808
BDMAP_00003809
BDMAP_00003810
BDMAP_00003811
BDMAP_00003812
BDMAP_00003813
BDMAP_00003814
BDMAP_00003815
BDMAP_00003816
BDMAP_00003817
BDMAP_00003818
BDMAP_00003819
BDMAP_00003820
BDMAP_00003821
BDMAP_00003822
BDMAP_00003823
BDMAP_00003824
BDMAP_00003825
BDMAP_00003826
BDMAP_00003827
BDMAP_00003828
BDMAP_00003829
BDMAP_00003830
BDMAP_00003831
BDMAP_00003832
BDMAP_00003833
BDMAP_00003834
BDMAP_00003835
BDMAP_00003836
BDMAP_00003837
BDMAP_00003838
BDMAP_00003839
BDMAP_00003840
BDMAP_00003841
BDMAP_00003842
BDMAP_00003843
BDMAP_00003844
BDMAP_00003845
BDMAP_00003846
BDMAP_00003847
BDMAP_00003848
BDMAP_00003849
BDMAP_00003850
BDMAP_00003851
BDMAP_00003852
BDMAP_00003853
BDMAP_00003854
BDMAP_00003855
BDMAP_00003856
BDMAP_00003857
BDMAP_00003858
BDMAP_00003859
BDMAP_00003860
BDMAP_00003861
BDMAP_00003862
BDMAP_00003863
BDMAP_00003864
BDMAP_00003865
BDMAP_00003866
BDMAP_00003867
BDMAP_00003868
BDMAP_00003869
BDMAP_00003870
BDMAP_00003871
BDMAP_00003872
BDMAP_00003873
BDMAP_00003874
BDMAP_00003875
BDMAP_00003876
BDMAP_00003877
BDMAP_00003878
BDMAP_00003879
BDMAP_00003880
BDMAP_00003881
BDMAP_00003882
BDMAP_00003883
BDMAP_00003884
BDMAP_00003885
BDMAP_00003886
BDMAP_00003887
BDMAP_00003888
BDMAP_00003889
BDMAP_00003890
BDMAP_00003891
BDMAP_00003892
BDMAP_00003893
BDMAP_00003894
BDMAP_00003895
BDMAP_00003896
BDMAP_00003897
BDMAP_00003898
BDMAP_00003899
BDMAP_00003900
BDMAP_00003901
BDMAP_00003902
BDMAP_00003903
BDMAP_00003904
BDMAP_00003905
BDMAP_00003906
BDMAP_00003907
BDMAP_00003908
BDMAP_00003909
BDMAP_00003910
BDMAP_00003911
BDMAP_00003912
BDMAP_00003913
BDMAP_00003914
BDMAP_00003915
BDMAP_00003916
BDMAP_00003917
BDMAP_00003918
BDMAP_00003919
BDMAP_00003920
BDMAP_00003921
BDMAP_00003922
BDMAP_00003923
BDMAP_00003924
BDMAP_00003925
BDMAP_00003926
BDMAP_00003927
BDMAP_00003928
BDMAP_00003929
BDMAP_00003930
BDMAP_00003931
BDMAP_00003932
BDMAP_00003933
BDMAP_00003934
BDMAP_00003935
BDMAP_00003936
BDMAP_00003937
BDMAP_00003938
BDMAP_00003939
BDMAP_00003940
BDMAP_00003941
BDMAP_00003942
BDMAP_00003943
BDMAP_00003944
BDMAP_00003945
BDMAP_00003946
BDMAP_00003947
BDMAP_00003948
BDMAP_00003949
BDMAP_00003950
BDMAP_00003951
BDMAP_00003952
BDMAP_00003953
BDMAP_00003954
BDMAP_00003955
BDMAP_00003956
BDMAP_00003957
BDMAP_00003958
BDMAP_00003959
BDMAP_00003960
BDMAP_00003961
BDMAP_00003962
BDMAP_00003963
BDMAP_00003964
BDMAP_00003965
BDMAP_00003966
BDMAP_00003967
BDMAP_00003968
BDMAP_00003969
BDMAP_00003970
BDMAP_00003971
BDMAP_00003972
BDMAP_00003973
BDMAP_00003974
BDMAP_00003975
BDMAP_00003976
BDMAP_00003977
BDMAP_00003978
BDMAP_00003979
BDMAP_00003980
BDMAP_00003981
BDMAP_00003982
BDMAP_00003983
BDMAP_00003984
BDMAP_00003985
BDMAP_00003986
BDMAP_00003987
BDMAP_00003988
BDMAP_00003989
BDMAP_00003990
BDMAP_00003991
BDMAP_00003992
BDMAP_00003993
BDMAP_00003994
BDMAP_00003995
BDMAP_00003996
BDMAP_00003997
BDMAP_00003998
BDMAP_00003999
BDMAP_00004000
BDMAP_00004001
BDMAP_00004002
BDMAP_00004003
BDMAP_00004004
BDMAP_00004005
BDMAP_00004006
BDMAP_00004007
BDMAP_00004008
BDMAP_00004009
BDMAP_00004010
BDMAP_00004011
BDMAP_00004012
BDMAP_00004013
BDMAP_00004014
BDMAP_00004015
BDMAP_00004016
BDMAP_00004017
BDMAP_00004018
BDMAP_00004019
BDMAP_00004020
BDMAP_00004021
BDMAP_00004022
BDMAP_00004023
BDMAP_00004024
BDMAP_00004025
BDMAP_00004026
BDMAP_00004027
BDMAP_00004028
BDMAP_00004029
BDMAP_00004030
BDMAP_00004031
BDMAP_00004032
BDMAP_00004033
BDMAP_00004034
BDMAP_00004035
BDMAP_00004036
BDMAP_00004037
BDMAP_00004038
BDMAP_00004039
BDMAP_00004040
BDMAP_00004041
BDMAP_00004042
BDMAP_00004043
BDMAP_00004044
BDMAP_00004045
BDMAP_00004046
BDMAP_00004047
BDMAP_00004048
BDMAP_00004049
BDMAP_00004050
BDMAP_00004051
BDMAP_00004052
BDMAP_00004053
BDMAP_00004054
BDMAP_00004055
BDMAP_00004056
BDMAP_00004057
BDMAP_00004058
BDMAP_00004059
BDMAP_00004060
BDMAP_00004061
BDMAP_00004062
BDMAP_00004063
BDMAP_00004064
BDMAP_00004065
BDMAP_00004066
BDMAP_00004067
BDMAP_00004068
BDMAP_00004069
BDMAP_00004070
BDMAP_00004071
BDMAP_00004072
BDMAP_00004073
BDMAP_00004074
BDMAP_00004075
BDMAP_00004076
BDMAP_00004077
BDMAP_00004078
BDMAP_00004079
BDMAP_00004080
BDMAP_00004081
BDMAP_00004082
BDMAP_00004083
BDMAP_00004084
BDMAP_00004085
BDMAP_00004086
BDMAP_00004087
BDMAP_00004088
BDMAP_00004089
BDMAP_00004090
BDMAP_00004091
BDMAP_00004092
BDMAP_00004093
BDMAP_00004094
BDMAP_00004095
BDMAP_00004096
BDMAP_00004097
BDMAP_00004098
BDMAP_00004099
BDMAP_00004100
BDMAP_00004101
BDMAP_00004102
BDMAP_00004103
BDMAP_00004104
BDMAP_00004105
BDMAP_00004106
BDMAP_00004107
BDMAP_00004108
BDMAP_00004109
BDMAP_00004110
BDMAP_00004111
BDMAP_00004112
BDMAP_00004113
BDMAP_00004114
BDMAP_00004115
BDMAP_00004116
BDMAP_00004117
BDMAP_00004118
BDMAP_00004119
BDMAP_00004120
BDMAP_00004121
BDMAP_00004122
BDMAP_00004123
BDMAP_00004124
BDMAP_00004125
BDMAP_00004126
BDMAP_00004127
BDMAP_00004128
BDMAP_00004129
BDMAP_00004130
BDMAP_00004131
BDMAP_00004132
BDMAP_00004133
BDMAP_00004134
BDMAP_00004135
BDMAP_00004136
BDMAP_00004137
BDMAP_00004138
BDMAP_00004139
BDMAP_00004140
BDMAP_00004141
BDMAP_00004142
BDMAP_00004143
BDMAP_00004144
BDMAP_00004145
BDMAP_00004146
BDMAP_00004147
BDMAP_00004148
BDMAP_00004149
BDMAP_00004150
BDMAP_00004151
BDMAP_00004152
BDMAP_00004153
BDMAP_00004154
BDMAP_00004155
BDMAP_00004156
BDMAP_00004157
BDMAP_00004158
BDMAP_00004159
BDMAP_00004160
BDMAP_00004161
BDMAP_00004162
BDMAP_00004163
BDMAP_00004164
BDMAP_00004165
BDMAP_00004166
BDMAP_00004167
BDMAP_00004168
BDMAP_00004169
BDMAP_00004170
BDMAP_00004171
BDMAP_00004172
BDMAP_00004173
BDMAP_00004174
BDMAP_00004175
BDMAP_00004176
BDMAP_00004177
BDMAP_00004178
BDMAP_00004179
BDMAP_00004180
BDMAP_00004181
BDMAP_00004182
BDMAP_00004183
BDMAP_00004184
BDMAP_00004185
BDMAP_00004186
BDMAP_00004187
BDMAP_00004188
BDMAP_00004189
BDMAP_00004190
BDMAP_00004191
BDMAP_00004192
BDMAP_00004193
BDMAP_00004194
BDMAP_00004195
BDMAP_00004196
BDMAP_00004197
BDMAP_00004198
BDMAP_00004199
BDMAP_00004200
BDMAP_00004201
BDMAP_00004202
BDMAP_00004203
BDMAP_00004204
BDMAP_00004205
BDMAP_00004206
BDMAP_00004207
BDMAP_00004208
BDMAP_00004209
BDMAP_00004210
BDMAP_00004211
BDMAP_00004212
BDMAP_00004213
BDMAP_00004214
BDMAP_00004215
BDMAP_00004216
BDMAP_00004217
BDMAP_00004218
BDMAP_00004219
BDMAP_00004220
BDMAP_00004221
BDMAP_00004222
BDMAP_00004223
BDMAP_00004224
BDMAP_00004225
BDMAP_00004226
BDMAP_00004227
BDMAP_00004228
BDMAP_00004229
BDMAP_00004230
BDMAP_00004231
BDMAP_00004232
BDMAP_00004233
BDMAP_00004234
BDMAP_00004235
BDMAP_00004236
BDMAP_00004237
BDMAP_00004238
BDMAP_00004239
BDMAP_00004240
BDMAP_00004241
BDMAP_00004242
BDMAP_00004243
BDMAP_00004244
BDMAP_00004245
BDMAP_00004246
BDMAP_00004247
BDMAP_00004248
BDMAP_00004249
BDMAP_00004250
BDMAP_00004251
BDMAP_00004252
BDMAP_00004253
BDMAP_00004254
BDMAP_00004255
BDMAP_00004256
BDMAP_00004257
BDMAP_00004258
BDMAP_00004259
BDMAP_00004260
BDMAP_00004261
BDMAP_00004262
BDMAP_00004263
BDMAP_00004264
BDMAP_00004265
BDMAP_00004266
BDMAP_00004267
BDMAP_00004268
BDMAP_00004269
BDMAP_00004270
BDMAP_00004271
BDMAP_00004272
BDMAP_00004273
BDMAP_00004274
BDMAP_00004275
BDMAP_00004276
BDMAP_00004277
BDMAP_00004278
BDMAP_00004279
BDMAP_00004280
BDMAP_00004281
BDMAP_00004282
BDMAP_00004283
BDMAP_00004284
BDMAP_00004285
BDMAP_00004286
BDMAP_00004287
BDMAP_00004288
BDMAP_00004289
BDMAP_00004290
BDMAP_00004291
BDMAP_00004292
BDMAP_00004293
BDMAP_00004294
BDMAP_00004295
BDMAP_00004296
BDMAP_00004297
BDMAP_00004298
BDMAP_00004299
BDMAP_00004300
BDMAP_00004301
BDMAP_00004302
BDMAP_00004303
BDMAP_00004304
BDMAP_00004305
BDMAP_00004306
BDMAP_00004307
BDMAP_00004308
BDMAP_00004309
BDMAP_00004310
BDMAP_00004311
BDMAP_00004312
BDMAP_00004313
BDMAP_00004314
BDMAP_00004315
BDMAP_00004316
BDMAP_00004317
BDMAP_00004318
BDMAP_00004319
BDMAP_00004320
BDMAP_00004321
BDMAP_00004322
BDMAP_00004323
BDMAP_00004324
BDMAP_00004325
BDMAP_00004326
BDMAP_00004327
BDMAP_00004328
BDMAP_00004329
BDMAP_00004330
BDMAP_00004331
BDMAP_00004332
BDMAP_00004333
BDMAP_00004334
BDMAP_00004335
BDMAP_00004336
BDMAP_00004337
BDMAP_00004338
BDMAP_00004339
BDMAP_00004340
BDMAP_00004341
BDMAP_00004342
BDMAP_00004343
BDMAP_00004344
BDMAP_00004345
BDMAP_00004346
BDMAP_00004347
BDMAP_00004348
BDMAP_00004349
BDMAP_00004350
BDMAP_00004351
BDMAP_00004352
BDMAP_00004353
BDMAP_00004354
BDMAP_00004355
BDMAP_00004356
BDMAP_00004357
BDMAP_00004358
BDMAP_00004359
BDMAP_00004360
BDMAP_00004361
BDMAP_00004362
BDMAP_00004363
BDMAP_00004364
BDMAP_00004365
BDMAP_00004366
BDMAP_00004367
BDMAP_00004368
BDMAP_00004369
BDMAP_00004370
BDMAP_00004371
BDMAP_00004372
BDMAP_00004373
BDMAP_00004374
BDMAP_00004375
BDMAP_00004376
BDMAP_00004377
BDMAP_00004378
BDMAP_00004379
BDMAP_00004380
BDMAP_00004381
BDMAP_00004382
BDMAP_00004383
BDMAP_00004384
BDMAP_00004385
BDMAP_00004386
BDMAP_00004387
BDMAP_00004388
BDMAP_00004389
BDMAP_00004390
BDMAP_00004391
BDMAP_00004392
BDMAP_00004393
BDMAP_00004394
BDMAP_00004395
BDMAP_00004396
BDMAP_00004397
BDMAP_00004398
BDMAP_00004399
BDMAP_00004400
BDMAP_00004401
BDMAP_00004402
BDMAP_00004403
BDMAP_00004404
BDMAP_00004405
BDMAP_00004406
BDMAP_00004407
BDMAP_00004408
BDMAP_00004409
BDMAP_00004410
BDMAP_00004411
BDMAP_00004412
BDMAP_00004413
BDMAP_00004414
BDMAP_00004415
BDMAP_00004416
BDMAP_00004417
BDMAP_00004418
BDMAP_00004419
BDMAP_00004420
BDMAP_00004421
BDMAP_00004422
BDMAP_00004423
BDMAP_00004424
BDMAP_00004425
BDMAP_00004426
BDMAP_00004427
BDMAP_00004428
BDMAP_00004429
BDMAP_00004430
BDMAP_00004431
BDMAP_00004432
BDMAP_00004433
BDMAP_00004434
BDMAP_00004435
BDMAP_00004436
BDMAP_00004437
BDMAP_00004438
BDMAP_00004439
BDMAP_00004440
BDMAP_00004441
BDMAP_00004442
BDMAP_00004443
BDMAP_00004444
BDMAP_00004445
BDMAP_00004446
BDMAP_00004447
BDMAP_00004448
BDMAP_00004449
BDMAP_00004450
BDMAP_00004451
BDMAP_00004452
BDMAP_00004453
BDMAP_00004454
BDMAP_00004455
BDMAP_00004456
BDMAP_00004457
BDMAP_00004458
BDMAP_00004459
BDMAP_00004460
BDMAP_00004461
BDMAP_00004462
BDMAP_00004463
BDMAP_00004464
BDMAP_00004465
BDMAP_00004466
BDMAP_00004467
BDMAP_00004468
BDMAP_00004469
BDMAP_00004470
BDMAP_00004471
BDMAP_00004472
BDMAP_00004473
BDMAP_00004474
BDMAP_00004475
BDMAP_00004476
BDMAP_00004477
BDMAP_00004478
BDMAP_00004479
BDMAP_00004480
BDMAP_00004481
BDMAP_00004482
BDMAP_00004483
BDMAP_00004484
BDMAP_00004485
BDMAP_00004486
BDMAP_00004487
BDMAP_00004488
BDMAP_00004489
BDMAP_00004490
BDMAP_00004491
BDMAP_00004492
BDMAP_00004493
BDMAP_00004494
BDMAP_00004495
BDMAP_00004496
BDMAP_00004497
BDMAP_00004498
BDMAP_00004499
BDMAP_00004500
BDMAP_00004501
BDMAP_00004502
BDMAP_00004503
BDMAP_00004504
BDMAP_00004505
BDMAP_00004506
BDMAP_00004507
BDMAP_00004508
BDMAP_00004509
BDMAP_00004510
BDMAP_00004511
BDMAP_00004512
BDMAP_00004513
BDMAP_00004514
BDMAP_00004515
BDMAP_00004516
BDMAP_00004517
BDMAP_00004518
BDMAP_00004519
BDMAP_00004520
BDMAP_00004521
BDMAP_00004522
BDMAP_00004523
BDMAP_00004524
BDMAP_00004525
BDMAP_00004526
BDMAP_00004527
BDMAP_00004528
BDMAP_00004529
BDMAP_00004530
BDMAP_00004531
BDMAP_00004532
BDMAP_00004533
BDMAP_00004534
BDMAP_00004535
BDMAP_00004536
BDMAP_00004537
BDMAP_00004538
BDMAP_00004539
BDMAP_00004540
BDMAP_00004541
BDMAP_00004542
BDMAP_00004543
BDMAP_00004544
BDMAP_00004545
BDMAP_00004546
BDMAP_00004547
BDMAP_00004548
BDMAP_00004549
BDMAP_00004550
BDMAP_00004551
BDMAP_00004552
BDMAP_00004553
BDMAP_00004554
BDMAP_00004555
BDMAP_00004556
BDMAP_00004557
BDMAP_00004558
BDMAP_00004559
BDMAP_00004560
BDMAP_00004561
BDMAP_00004562
BDMAP_00004563
BDMAP_00004564
BDMAP_00004565
BDMAP_00004566
BDMAP_00004567
BDMAP_00004568
BDMAP_00004569
BDMAP_00004570
BDMAP_00004571
BDMAP_00004572
BDMAP_00004573
BDMAP_00004574
BDMAP_00004575
BDMAP_00004576
BDMAP_00004577
BDMAP_00004578
BDMAP_00004579
BDMAP_00004580
BDMAP_00004581
BDMAP_00004582
BDMAP_00004583
BDMAP_00004584
BDMAP_00004585
BDMAP_00004586
BDMAP_00004587
BDMAP_00004588
BDMAP_00004589
BDMAP_00004590
BDMAP_00004591
BDMAP_00004592
BDMAP_00004593
BDMAP_00004594
BDMAP_00004595
BDMAP_00004596
BDMAP_00004597
BDMAP_00004598
BDMAP_00004599
BDMAP_00004600
BDMAP_00004601
BDMAP_00004602
BDMAP_00004603
BDMAP_00004604
BDMAP_00004605
BDMAP_00004606
BDMAP_00004607
BDMAP_00004608
BDMAP_00004609
BDMAP_00004610
BDMAP_00004611
BDMAP_00004612
BDMAP_00004613
BDMAP_00004614
BDMAP_00004615
BDMAP_00004616
BDMAP_00004617
BDMAP_00004618
BDMAP_00004619
BDMAP_00004620
BDMAP_00004621
BDMAP_00004622
BDMAP_00004623
BDMAP_00004624
BDMAP_00004625
BDMAP_00004626
BDMAP_00004627
BDMAP_00004628
BDMAP_00004629
BDMAP_00004630
BDMAP_00004631
BDMAP_00004632
BDMAP_00004633
BDMAP_00004634
BDMAP_00004635
BDMAP_00004636
BDMAP_00004637
BDMAP_00004638
BDMAP_00004639
BDMAP_00004640
BDMAP_00004641
BDMAP_00004642
BDMAP_00004643
BDMAP_00004644
BDMAP_00004645
BDMAP_00004646
BDMAP_00004647
BDMAP_00004648
BDMAP_00004649
BDMAP_00004650
BDMAP_00004651
BDMAP_00004652
BDMAP_00004653
BDMAP_00004654
BDMAP_00004655
BDMAP_00004656
BDMAP_00004657
BDMAP_00004658
BDMAP_00004659
BDMAP_00004660
BDMAP_00004661
BDMAP_00004662
BDMAP_00004663
BDMAP_00004664
BDMAP_00004665
BDMAP_00004666
BDMAP_00004667
BDMAP_00004668
BDMAP_00004669
BDMAP_00004670
BDMAP_00004671
BDMAP_00004672
BDMAP_00004673
BDMAP_00004674
BDMAP_00004675
BDMAP_00004676
BDMAP_00004677
BDMAP_00004678
BDMAP_00004679
BDMAP_00004680
BDMAP_00004681
BDMAP_00004682
BDMAP_00004683
BDMAP_00004684
BDMAP_00004685
BDMAP_00004686
BDMAP_00004687
BDMAP_00004688
BDMAP_00004689
BDMAP_00004690
BDMAP_00004691
BDMAP_00004692
BDMAP_00004693
BDMAP_00004694
BDMAP_00004695
BDMAP_00004696
BDMAP_00004697
BDMAP_00004698
BDMAP_00004699
BDMAP_00004700
BDMAP_00004701
BDMAP_00004702
BDMAP_00004703
BDMAP_00004704
BDMAP_00004705
BDMAP_00004706
BDMAP_00004707
BDMAP_00004708
BDMAP_00004709
BDMAP_00004710
BDMAP_00004711
BDMAP_00004712
BDMAP_00004713
BDMAP_00004714
BDMAP_00004715
BDMAP_00004716
BDMAP_00004717
BDMAP_00004718
BDMAP_00004719
BDMAP_00004720
BDMAP_00004721
BDMAP_00004722
BDMAP_00004723
BDMAP_00004724
BDMAP_00004725
BDMAP_00004726
BDMAP_00004727
BDMAP_00004728
BDMAP_00004729
BDMAP_00004730
BDMAP_00004731
BDMAP_00004732
BDMAP_00004733
BDMAP_00004734
BDMAP_00004735
BDMAP_00004736
BDMAP_00004737
BDMAP_00004738
BDMAP_00004739
BDMAP_00004740
BDMAP_00004741
BDMAP_00004742
BDMAP_00004743
BDMAP_00004744
BDMAP_00004745
BDMAP_00004746
BDMAP_00004747
BDMAP_00004748
BDMAP_00004749
BDMAP_00004750
BDMAP_00004751
BDMAP_00004752
BDMAP_00004753
BDMAP_00004754
BDMAP_00004755
BDMAP_00004756
BDMAP_00004757
BDMAP_00004758
BDMAP_00004759
BDMAP_00004760
BDMAP_00004761
BDMAP_00004762
BDMAP_00004763
BDMAP_00004764
BDMAP_00004765
BDMAP_00004766
BDMAP_00004767
BDMAP_00004768
BDMAP_00004769
BDMAP_00004770
BDMAP_00004771
BDMAP_00004772
BDMAP_00004773
BDMAP_00004774
BDMAP_00004775
BDMAP_00004776
BDMAP_00004777
BDMAP_00004778
BDMAP_00004779
BDMAP_00004780
BDMAP_00004781
BDMAP_00004782
BDMAP_00004783
BDMAP_00004784
BDMAP_00004785
BDMAP_00004786
BDMAP_00004787
BDMAP_00004788
BDMAP_00004789
BDMAP_00004790
BDMAP_00004791
BDMAP_00004792
BDMAP_00004793
BDMAP_00004794
BDMAP_00004795
BDMAP_00004796
BDMAP_00004797
BDMAP_00004798
BDMAP_00004799
BDMAP_00004800
BDMAP_00004801
BDMAP_00004802
BDMAP_00004803
BDMAP_00004804
BDMAP_00004805
BDMAP_00004806
BDMAP_00004807
BDMAP_00004808
BDMAP_00004809
BDMAP_00004810
BDMAP_00004811
BDMAP_00004812
BDMAP_00004813
BDMAP_00004814
BDMAP_00004815
BDMAP_00004816
BDMAP_00004817
BDMAP_00004818
BDMAP_00004819
BDMAP_00004820
BDMAP_00004821
BDMAP_00004822
BDMAP_00004823
BDMAP_00004824
BDMAP_00004825
BDMAP_00004826
BDMAP_00004827
BDMAP_00004828
BDMAP_00004829
BDMAP_00004830
BDMAP_00004831
BDMAP_00004832
BDMAP_00004833
BDMAP_00004834
BDMAP_00004835
BDMAP_00004836
BDMAP_00004837
BDMAP_00004838
BDMAP_00004839
BDMAP_00004840
BDMAP_00004841
BDMAP_00004842
BDMAP_00004843
BDMAP_00004844
BDMAP_00004845
BDMAP_00004846
BDMAP_00004847
BDMAP_00004848
BDMAP_00004849
BDMAP_00004850
BDMAP_00004851
BDMAP_00004852
BDMAP_00004853
BDMAP_00004854
BDMAP_00004855
BDMAP_00004856
BDMAP_00004857
BDMAP_00004858
BDMAP_00004859
BDMAP_00004860
BDMAP_00004861
BDMAP_00004862
BDMAP_00004863
BDMAP_00004864
BDMAP_00004865
BDMAP_00004866
BDMAP_00004867
BDMAP_00004868
BDMAP_00004869
BDMAP_00004870
BDMAP_00004871
BDMAP_00004872
BDMAP_00004873
BDMAP_00004874
BDMAP_00004875
BDMAP_00004876
BDMAP_00004877
BDMAP_00004878
BDMAP_00004879
BDMAP_00004880
BDMAP_00004881
BDMAP_00004882
BDMAP_00004883
BDMAP_00004884
BDMAP_00004885
BDMAP_00004886
BDMAP_00004887
BDMAP_00004888
BDMAP_00004889
BDMAP_00004890
BDMAP_00004891
BDMAP_00004892
BDMAP_00004893
BDMAP_00004894
BDMAP_00004895
BDMAP_00004896
BDMAP_00004897
BDMAP_00004898
BDMAP_00004899
BDMAP_00004900
BDMAP_00004901
BDMAP_00004902
BDMAP_00004903
BDMAP_00004904
BDMAP_00004905
BDMAP_00004906
BDMAP_00004907
BDMAP_00004908
BDMAP_00004909
BDMAP_00004910
BDMAP_00004911
BDMAP_00004912
BDMAP_00004913
BDMAP_00004914
BDMAP_00004915
BDMAP_00004916
BDMAP_00004917
BDMAP_00004918
BDMAP_00004919
BDMAP_00004920
BDMAP_00004921
BDMAP_00004922
BDMAP_00004923
BDMAP_00004924
BDMAP_00004925
BDMAP_00004926
BDMAP_00004927
BDMAP_00004928
BDMAP_00004929
BDMAP_00004930
BDMAP_00004931
BDMAP_00004932
BDMAP_00004933
BDMAP_00004934
BDMAP_00004935
BDMAP_00004936
BDMAP_00004937
BDMAP_00004938
BDMAP_00004939
BDMAP_00004940
BDMAP_00004941
BDMAP_00004942
BDMAP_00004943
BDMAP_00004944
BDMAP_00004945
BDMAP_00004946
BDMAP_00004947
BDMAP_00004948
BDMAP_00004949
BDMAP_00004950
BDMAP_00004951
BDMAP_00004952
BDMAP_00004953
BDMAP_00004954
BDMAP_00004955
BDMAP_00004956
BDMAP_00004957
BDMAP_00004958
BDMAP_00004959
BDMAP_00004960
BDMAP_00004961
BDMAP_00004962
BDMAP_00004963
BDMAP_00004964
BDMAP_00004965
BDMAP_00004966
BDMAP_00004967
BDMAP_00004968
BDMAP_00004969
BDMAP_00004970
BDMAP_00004971
BDMAP_00004972
BDMAP_00004973
BDMAP_00004974
BDMAP_00004975
BDMAP_00004976
BDMAP_00004977
BDMAP_00004978
BDMAP_00004979
BDMAP_00004980
BDMAP_00004981
BDMAP_00004982
BDMAP_00004983
BDMAP_00004984
BDMAP_00004985
BDMAP_00004986
BDMAP_00004987
BDMAP_00004988
BDMAP_00004989
BDMAP_00004990
BDMAP_00004991
BDMAP_00004992
BDMAP_00004993
BDMAP_00004994
BDMAP_00004995
BDMAP_00004996
BDMAP_00004997
BDMAP_00004998
BDMAP_00004999
BDMAP_00005000
BDMAP_00005001
BDMAP_00005002
BDMAP_00005003
BDMAP_00005004
BDMAP_00005005
BDMAP_00005006
BDMAP_00005007
BDMAP_00005008
BDMAP_00005009
BDMAP_00005010
BDMAP_00005011
BDMAP_00005012
BDMAP_00005013
BDMAP_00005014
BDMAP_00005015
BDMAP_00005016
BDMAP_00005017
BDMAP_00005018
BDMAP_00005019
BDMAP_00005020
BDMAP_00005021
BDMAP_00005022
BDMAP_00005023
BDMAP_00005024
BDMAP_00005025
BDMAP_00005026
BDMAP_00005027
BDMAP_00005028
BDMAP_00005029
BDMAP_00005030
BDMAP_00005031
BDMAP_00005032
BDMAP_00005033
BDMAP_00005034
BDMAP_00005035
BDMAP_00005036
BDMAP_00005037
BDMAP_00005038
BDMAP_00005039
BDMAP_00005040
BDMAP_00005041
BDMAP_00005042
BDMAP_00005043
BDMAP_00005044
BDMAP_00005045
BDMAP_00005046
BDMAP_00005047
BDMAP_00005048
BDMAP_00005049
BDMAP_00005050
BDMAP_00005051
BDMAP_00005052
BDMAP_00005053
BDMAP_00005054
BDMAP_00005055
BDMAP_00005056
BDMAP_00005057
BDMAP_00005058
BDMAP_00005059
BDMAP_00005060
BDMAP_00005061
BDMAP_00005062
BDMAP_00005063
BDMAP_00005064
BDMAP_00005065
BDMAP_00005066
BDMAP_00005067
BDMAP_00005068
BDMAP_00005069
BDMAP_00005070
BDMAP_00005071
BDMAP_00005072
BDMAP_00005073
BDMAP_00005074
BDMAP_00005075
BDMAP_00005076
BDMAP_00005077
BDMAP_00005078
BDMAP_00005079
BDMAP_00005080
BDMAP_00005081
BDMAP_00005082
BDMAP_00005083
BDMAP_00005084
BDMAP_00005085
BDMAP_00005086
BDMAP_00005087
BDMAP_00005088
BDMAP_00005089
BDMAP_00005090
BDMAP_00005091
BDMAP_00005092
BDMAP_00005093
BDMAP_00005094
BDMAP_00005095
BDMAP_00005096
BDMAP_00005097
BDMAP_00005098
BDMAP_00005099
BDMAP_00005100
BDMAP_00005101
BDMAP_00005102
BDMAP_00005103
BDMAP_00005104
BDMAP_00005105
BDMAP_00005106
BDMAP_00005107
BDMAP_00005108
BDMAP_00005109
BDMAP_00005110
BDMAP_00005111
BDMAP_00005112
BDMAP_00005113
BDMAP_00005114
BDMAP_00005115
BDMAP_00005116
BDMAP_00005117
BDMAP_00005118
BDMAP_00005119
BDMAP_00005120
BDMAP_00005121
BDMAP_00005122
BDMAP_00005123
BDMAP_00005124
BDMAP_00005125
BDMAP_00005126
BDMAP_00005127
BDMAP_00005128
BDMAP_00005129
BDMAP_00005130
BDMAP_00005131
BDMAP_00005132
BDMAP_00005133
BDMAP_00005134
BDMAP_00005135
BDMAP_00005136
BDMAP_00005137
BDMAP_00005138
BDMAP_00005139
BDMAP_00005140
BDMAP_00005141
BDMAP_00005142
BDMAP_00005143
BDMAP_00005144
BDMAP_00005145
BDMAP_00005146
BDMAP_00005147
BDMAP_00005148
BDMAP_00005149
BDMAP_00005150
BDMAP_00005151
BDMAP_00005152
BDMAP_00005153
BDMAP_00005154
BDMAP_00005155
BDMAP_00005156
BDMAP_00005157
BDMAP_00005158
BDMAP_00005159
BDMAP_00005160
BDMAP_00005161
BDMAP_00005162
BDMAP_00005163
BDMAP_00005164
BDMAP_00005165
BDMAP_00005166
BDMAP_00005167
BDMAP_00005168
BDMAP_00005169
BDMAP_00005170
BDMAP_00005171
BDMAP_00005172
BDMAP_00005173
BDMAP_00005174
BDMAP_00005175
BDMAP_00005176
BDMAP_00005177
BDMAP_00005178
BDMAP_00005179
BDMAP_00005180
BDMAP_00005181
BDMAP_00005182
BDMAP_00005183
BDMAP_00005184
BDMAP_00005185
BDMAP_00005186
BDMAP_00005187
BDMAP_00005188
BDMAP_00005189
BDMAP_00005190
BDMAP_00005191
BDMAP_00005192
BDMAP_00005193
BDMAP_00005194
BDMAP_00005195
================================================
FILE: Finetune/AbdomenAtlas/main.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn.parallel
import torch.utils.data.distributed
from optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR
from trainer import run_training
from dataset.dataloader_bdmap import get_loader_Atlas
import torch.nn as nn
from monai.inferers import sliding_window_inference
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
import warnings
warnings.filterwarnings('ignore')
# os.environ['CUDA_VISIBLE_DEVICES'] = "7"
# Rendezvous address/port used by torch.distributed when --distributed is set
# (single-node setup: every spawned worker connects to localhost:28890).
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
import resource
# Raise the soft open-file limit to 8192 (hard limit unchanged) so many
# DataLoader workers / cached dataset files do not hit "Too many open files".
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# ---------------------------------------------------------------------------
# Command-line interface for AbdomenAtlas fine-tuning.
# NOTE(review): several "boolean" options below (--cache_dataset,
# --save_checkpoint, --noamp, --use_normal_dataset, --use_checkpoint,
# --use_ssl_pretrained) use default=True/False without action="store_true";
# any value supplied on the command line arrives as a non-empty string and is
# therefore truthy regardless of its text. They appear to be toggled only by
# editing the defaults here — confirm before relying on CLI overrides.
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
# Checkpointing / logging
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./pretrained_models/", type=str, help="pretrained checkpoint directory"
)
# Model output channels: 9 foreground organ classes + background (AbdomenAtlas1.0).
parser.add_argument("--out_channels", default=10, type=int, help="number of output channels")
parser.add_argument(
    "--pretrained_model_name",
    default="model.pt",
    type=str,
    help="pretrained model name",
)
# Cubic patch edge length (voxels) shared by --roi_x/--roi_y/--roi_z below.
roi = 96
# Dataset locations: image root plus txt list of case IDs under --data_txt_path.
parser.add_argument("--data_dir", default="/project/medimgfmod/CT/AbdomenAtlasMini1.0/", type=str,
                    help="dataset directory")
parser.add_argument("--data_txt_path", default='./dataset/dataset_list', help="dataset json file")
parser.add_argument("--dataset_list", default=['AbdomenAtlas1.0'], help="dataset json file")
parser.add_argument("--cache_dataset", default=True, help="use monai CACHE Dataset class")
parser.add_argument("--cache_dir", default='./cache', help="CACHE dir")
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
# Training schedule
parser.add_argument("--max_epochs", default=100, type=int, help="max number of training epochs")
parser.add_argument("--warmup_epochs", default=5, type=int, help="number of warmup epochs")
parser.add_argument("--val_every", default=1, type=int, help="validation frequency")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
# Optimizer
parser.add_argument("--optim_lr", default=1e-3, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=1e-5, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", default=False, help="do NOT use amp for training")
# Distributed training (torch.distributed; see MASTER_ADDR/PORT above)
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
# Model architecture
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
# Intensity windowing (HU range a_min..a_max mapped to b_min..b_max) and resampling spacing
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=2.0, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
# Regularization / augmentation probabilities
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
# Inference / schedule / loss details
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--smooth_dr", default=1e-6, type=float, help="constant added to dice denominator to avoid nan")
parser.add_argument("--smooth_nr", default=0.0, type=float, help="constant added to dice numerator to avoid zero")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=True, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Entry point: parse CLI arguments and launch training.

    In distributed mode, spawns one ``main_worker`` process per visible GPU
    and scales ``world_size`` by the per-node GPU count; otherwise runs a
    single worker on GPU 0 in the current process.
    """
    args = parser.parse_args()
    # amp is the positive form of the --noamp flag.
    args.amp = not args.noamp
    if not args.distributed:
        # Single-process path: run directly on GPU 0.
        main_worker(gpu=0, args=args)
        return
    args.ngpus_per_node = torch.cuda.device_count()
    print("Found total gpus", args.ngpus_per_node)
    args.world_size = args.ngpus_per_node * args.world_size
    mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))
def main_worker(gpu, args):
if args.distributed:
torch.multiprocessing.set_start_method("fork", force=True)
np.set_printoptions(formatter={"float": "{: 0.3f}".format}, suppress=True)
args.gpu = gpu
if args.distributed:
args.rank = args.rank * args.ngpus_per_node + gpu
dist.init_process_group(
backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank
)
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
args.test_mode = False
loader = get_loader_Atlas(args)
print(args.rank, " gpu", args.gpu)
if args.rank == 0:
print("Batch size is:", args.batch_size, "epochs", args.max_epochs)
inf_size = [args.roi_x, args.roi_y, args.roi_z]
if args.rank == 0:
os.makedirs(args.logdir, exist_ok=True)
logger = init_log('global', logging.INFO)
logger.propagate = 0
pretrained_dir = args.pretrained_dir
model = SwinUNETR(
img_size=(args.roi_x, args.roi_y, args.roi_z),
in_channels=args.in_channels,
out_channels=args.out_channels,
feature_size=args.feature_size,
drop_rate=0.0,
attn_drop_rate=0.0,
dropout_path_rate=args.dropout_path_rate,
use_checkpoint=args.use_checkpoint,
use_v2=True
)
if args.resume_ckpt:
model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))["state_dict"]
model.load_state_dict(model_dict)
print("Use resume weights")
if args.use_ssl_pretrained:
try:
model_dict = torch.load("./VoCo_10k.pt", map_location=torch.device('cpu'))
state_dict = model_dict
# state_dict = model_dict['net']
# fix potential differences in state dict keys from pre-training to
# fine-tuning
if "module." in list(state_dict.keys())[0]:
print("Tag 'module.' found in state dict - fixing!")
for key in list(state_dict.keys()):
state_dict[key.replace("module.", "")] = state_dict.pop(key)
if "swin_vit" in list(state_dict.keys())[0]:
print("Tag 'swin_vit' found in state dict - fixing!")
for key in list(state_dict.keys()):
state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
# We now load model weights, setting param `strict` to False, i.e.:
# this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves
# the decoder weights untouched (CNN UNet decoder).
model.load_state_dict(state_dict, strict=False)
print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
except ValueError:
raise ValueError("Self-supervised pre-trained weights not available for" + str(args.model_name))
if args.squared_dice:
dice_loss = DiceCELoss(
to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr
)
else:
dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)
post_label = AsDiscrete(to_onehot=args.out_channels)
post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
model_inferer = partial(
sliding_window_inference,
roi_size=inf_size,
sw_batch_size=args.sw_batch_size,
predictor=model,
overlap=args.infer_overlap,
)
pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
print("Total parameters count", pytorch_total_params)
best_acc = 0
start_epoch = 0
if args.checkpoint is not None:
checkpoint = torch.load(args.checkpoint, map_location="cpu")
from collections import OrderedDict
new_state_dict = OrderedDict()
for k, v in checkpoint["state_dict"].items():
new_state_dict[k.replace("backbone.", "")] = v
model.load_state_dict(new_state_dict, strict=False)
if "epoch" in checkpoint:
start_epoch = checkpoint["epoch"]
if "best_acc" in checkpoint:
best_acc = checkpoint["best_acc"]
print("=> loaded checkpoint '{}' (epoch {}) (bestacc {})".format(args.checkpoint, start_epoch, best_acc))
model.cuda()
if args.distributed:
torch.cuda.set_device(args.gpu)
if args.norm_name == "batch":
model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
model.cuda(args.gpu)
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)
if args.optim_name == "adam":
optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
elif args.optim_name == "adamw":
optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
# optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)
elif args.optim_name == "sgd":
optimizer = torch.optim.SGD(
model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight
)
else:
raise ValueError("Unsupported Optimization Procedure: " + str(args.optim_name))
if args.lrschedule == "warmup_cosine":
print(len(loader[0]))
max_steps = args.max_epochs * len(loader[0])
warmup_steps = args.warmup_epochs * len(loader[0])
scheduler = LinearWarmupCosineAnnealingLR(
optimizer, warmup_epochs=warmup_steps, max_epochs=max_steps
)
elif args.lrschedule == "cosine_anneal":
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)
if args.checkpoint is not None:
scheduler.step(epoch=start_epoch)
else:
scheduler = None
accuracy = run_training(
model=model,
train_loader=loader[0],
val_loader=loader[1],
optimizer=optimizer,
loss_func=dice_loss,
acc_func=dice_acc,
args=args,
model_inferer=model_inferer,
scheduler=scheduler,
start_epoch=start_epoch,
post_label=post_label,
post_pred=post_pred,
)
return accuracy
# (name, level) pairs that have already been configured, so handlers are attached once.
logs = set()


def init_log(name, level=logging.INFO):
    """Create and return a configured logger, attaching its handler only once.

    On the first call for a given ``(name, level)`` pair, a ``StreamHandler``
    with a timestamped format is attached; under SLURM, a filter suppresses
    output on all ranks except rank 0.

    Bug fix: repeated calls previously hit the early ``return`` with no value
    and handed ``None`` back to the caller; they now return the (already
    configured) logger.

    Args:
        name: logger name passed to ``logging.getLogger``.
        level: logging level for both the logger and its handler.

    Returns:
        The configured ``logging.Logger`` instance.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        # Already configured: return the cached logger instead of None.
        return logger
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        rank = int(os.environ["SLURM_PROCID"])
        # Only rank 0 emits records when running under SLURM.
        logger.addFilter(lambda record: rank == 0)
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
# Script entry point: run the fine-tuning pipeline defined in main() above.
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/AbdomenAtlas/optimizers/__init__.py
================================================
================================================
FILE: Finetune/AbdomenAtlas/optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
__all__ = ["LinearLR", "ExponentialLR"]
class _LRSchedulerMONAI(_LRScheduler):
    """Common base for range-test schedulers that sweep the learning rate
    from the optimizer's base LR toward ``end_lr`` over ``num_iter`` steps.

    Subclasses implement ``get_lr`` to define the interpolation curve.
    """

    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            end_lr: the final learning rate.
            num_iter: the number of iterations over which the test occurs.
            last_epoch: the index of last epoch.
        Returns:
            None
        """
        self.end_lr = end_lr
        self.num_iter = num_iter
        super().__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
    """Linearly increases the learning rate between two boundaries over a number of
    iterations.
    """

    def get_lr(self):
        # Fraction of the sweep completed; reaches 1.0 at the final iteration.
        fraction = self.last_epoch / (self.num_iter - 1)
        return [lr0 + fraction * (self.end_lr - lr0) for lr0 in self.base_lrs]
class ExponentialLR(_LRSchedulerMONAI):
    """Exponentially increases the learning rate between two boundaries over a number of
    iterations.
    """

    def get_lr(self):
        # Geometric interpolation: lr = base * (end/base)^fraction.
        fraction = self.last_epoch / (self.num_iter - 1)
        return [lr0 * (self.end_lr / lr0) ** fraction for lr0 in self.base_lrs]
class WarmupCosineSchedule(LambdaLR):
    """Linear warmup and then cosine decay.
    Based on https://huggingface.co/ implementation.
    """

    def __init__(
        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
    ) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            warmup_steps: number of warmup iterations.
            t_total: total number of training iterations.
            cycles: cosine cycles parameter.
            last_epoch: the index of last epoch.
        Returns:
            None
        """
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super().__init__(optimizer, self.lr_lambda, last_epoch)

    def lr_lambda(self, step):
        # Warmup phase: ramp the multiplier linearly from 0 to 1.
        if step < self.warmup_steps:
            return step / max(1.0, self.warmup_steps)
        # Decay phase: cosine from 1 down to 0, clamped at 0.
        span = max(1, self.t_total - self.warmup_steps)
        progress = (step - self.warmup_steps) / span
        cosine = 0.5 * (1.0 + math.cos(2.0 * math.pi * self.cycles * progress))
        return max(0.0, cosine)
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    """Linear warmup from ``warmup_start_lr`` to the base LR over ``warmup_epochs``
    steps, then cosine annealing down to ``eta_min`` at ``max_epochs``.

    NOTE(review): despite the parameter names, the unit is whatever granularity
    ``step()`` is called at — this repo constructs it with iteration counts
    (``warmup_epochs * len(loader)``) and steps once per iteration.
    """
    def __init__(
        self,
        optimizer: Optimizer,
        warmup_epochs: int,
        max_epochs: int,
        warmup_start_lr: float = 0.0,
        eta_min: float = 0.0,
        last_epoch: int = -1,
    ) -> None:
        """
        Args:
            optimizer (Optimizer): Wrapped optimizer.
            warmup_epochs (int): Maximum number of iterations for linear warmup
            max_epochs (int): Maximum number of iterations
            warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
            eta_min (float): Minimum learning rate. Default: 0.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min
        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)
    def get_lr(self) -> List[float]:
        """
        Compute learning rate using chainable form of the scheduler
        """
        if not self._get_lr_called_within_step:
            warnings.warn(
                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
            )
        if self.last_epoch == 0:
            # First step: start the warmup at warmup_start_lr.
            return [self.warmup_start_lr] * len(self.base_lrs)
        elif self.last_epoch < self.warmup_epochs:
            # Linear warmup: add a constant increment to the current lr each step.
            # NOTE(review): divides by (warmup_epochs - 1); warmup_epochs == 1
            # would raise ZeroDivisionError — confirm callers never pass 1.
            return [
                group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        elif self.last_epoch == self.warmup_epochs:
            # Warmup finished exactly: pin to the base learning rates.
            return self.base_lrs
        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
            # Cosine-cycle boundary: apply the first cosine decrement from the current lr.
            return [
                group["lr"]
                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        # Chainable cosine decay: scale the current lr by the ratio of successive
        # cosine values so that repeated step() calls trace the annealing curve.
        return [
            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            / (
                1
                + math.cos(
                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
                )
            )
            * (group["lr"] - self.eta_min)
            + self.eta_min
            for group in self.optimizer.param_groups
        ]
    def _get_closed_form_lr(self) -> List[float]:
        """
        Called when epoch is passed as a param to the `step` function of the scheduler.
        """
        if self.last_epoch < self.warmup_epochs:
            # Closed-form linear warmup (same NOTE as above re: warmup_epochs == 1).
            return [
                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr in self.base_lrs
            ]
        # Closed-form cosine annealing between base_lr and eta_min.
        return [
            self.eta_min
            + 0.5
            * (base_lr - self.eta_min)
            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            for base_lr in self.base_lrs
        ]
================================================
FILE: Finetune/AbdomenAtlas/preprocess/try_load.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from dataset.dataloader_bdmap import get_loader_Atlas
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from tqdm import tqdm
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
from utils.utils import *
import cv2
from PIL import Image
# os.environ['CUDA_VISIBLE_DEVICES'] = "0"
# Rendezvous defaults for torch.distributed when launched as a single process.
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
import resource
# Raise the soft open-file limit: many dataloader workers can exhaust the default.
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# CLI: dataset locations, sampling, intensity windowing, spacing, ROI, and augmentation knobs.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs_scratch_v2/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/project/medimgfmod/CT/AbdomenAtlasMini1.0/", type=str, help="dataset directory")
parser.add_argument("--data_txt_path", default='./dataset/dataset_list', help="dataset json file")
parser.add_argument("--dataset_list", default=['AbdomenAtlas1.0'], help="dataset json file")
# pos/neg sample counts for RandCropByPosNegLabeld-style cropping.
parser.add_argument("--pos", default=1, type=int, help="number of positive sample")
parser.add_argument("--neg", default=0, type=int, help="number of negative sample")
# Cubic ROI edge length shared by the --roi_x/y/z defaults below.
roi=96
parser.add_argument("--cache_dataset", default=False, help="use monai CACHE Dataset class")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=8, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=1, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
# CT intensity window [a_min, a_max] scaled into [b_min, b_max].
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
# Target voxel spacing (mm) for resampling.
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=2.0, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=16, type=int, help="number of workers")
# Augmentation probabilities consumed by the transform pipeline.
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
import warnings
warnings.filterwarnings('ignore')
def main():
    """Sanity-check the AbdomenAtlas data pipeline.

    Iterates the training loader once and prints each batch's image/label
    shapes and the set of label values present, so image/label correspondence
    after the transform pipeline can be verified (see readme.md). The
    commented-out code below additionally dumps per-slice PNGs (image +
    palette-colored label) for visual inspection.
    """
    args = parser.parse_args()
    args.test_mode = True
    loader = get_loader_Atlas(args)
    # num = 0
    # vis_path = './vis/'
    # check_dir(vis_path)
    with torch.no_grad():
        for batch_data in tqdm(loader[0]):
            image, label = batch_data["image"], batch_data["label"]
            print(image.shape, label.shape, torch.unique(label))
            # img = image[0][0].data.cpu().numpy()
            # label = label[0][0].data.cpu().numpy()
            #
            # h, w, c = img.shape
            # cmap = color_map()
            #
            # for j in range(c):
            #     im = img[:, :, j]
            #     la = label[:, :, j]
            #
            #     if len(list(np.unique(la))) > 1:
            #         im = (255 * im).astype(np.uint8)
            #         la = Image.fromarray(la.astype(np.uint8), mode='P')
            #         la.putpalette(cmap)
            #         num += 1
            #
            #         cv2.imwrite(vis_path+str(num)+'_im.png', im)
            #         la.save(vis_path+str(num)+'_lab.png')
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/AbdomenAtlas/readme.md
================================================
# VoCo for AbdomenAtlas
CVPR 2024 paper, [**"VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis"**](https://arxiv.org/abs/2402.17300)
Authors: Linshan Wu, Jiaxin Zhuang, and Hao Chen
Code for AbdomenAtlasMini1.0 Training and Inference.
## Usage
### Pre-training
Please refer to the official [VoCo repo](https://github.com/Luffy03/VoCo)
### Requirement
I have stored all the required checkpoints and running logs in the project.
Our Segmentation Training codes are based on [MONAI](https://github.com/Project-MONAI/research-contributions).
Please also refer to the requirements.txt.
### Training
First edit the data_path of AbdomenAtlasMini1.0 in 'train.sh'
```
data_dir=YOUR AbdomenAtlasMini1.0 PATH
```
Reading 9 separate label files is not efficient during training, and we also found some bugs in
the original [data_loader](https://github.com/MrGiovanni/SuPreM/blob/d8a948c96e56f2050109c3ce418bc4caa09420a5/supervised_pretraining/dataset/dataloader_bdmap.py#L147)
(the label data is loaded but the meta_keys of the labels are not, so the subsequent transforms produce images and labels that do not correspond; we provide '/preprocess/try_load.py' for visualization). Thus, we first merge all 9 label files into one.
```
# preprocess, in exe function of check.py , path=YOUR AbdomenAtlasMini1.0 PATH
python check.py
# merge all 9 organ label files to one label.nii.gz
```
After pre-processing, start training:
```
# bash
sh train.sh
# Or using slurm
sbatch train.slurm
```
To accelerate training, we use 'PersistentDataset' to pre-cache data.
```
# in train.sh
cache_dataset=False
# Or with adequate space
cache_dataset=True
cache_dir=Your path to save cache
```
### Inference
First edit the test and prediction path of AbdomenAtlasMini1.0 in 'Atlas_test.sh'
```
test_data_path=Your path to AbdomenAtlasTest
save_prediction_path=Your path to save the prediction AbdomenAtlasTest
```
Inference implementation
```
# bash
sh Atlas_test.sh
```
Inference Visualization
```
# We provide check_pred_vis() function in check.py for you to visualize the predictions
python check.py
```
## Acknowledgement
We thank [MONAI](https://github.com/Project-MONAI/research-contributions) and [SuPreM](https://github.com/MrGiovanni/SuPreM) for part of their codes.
## Citation ✏️ 📄
If you find this repo useful for your research, please consider citing the paper as follows:
```
@inproceedings{VoCo,
title={VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis},
author={Wu, Linshan and Zhuang, Jiaxin and Chen, Hao},
booktitle={IEEE Conf. Comput. Vis. Pattern Recog.},
year={2024}
}
```
================================================
FILE: Finetune/AbdomenAtlas/requirements.txt
================================================
# packages in environment at /home/lwubf/anaconda3/envs/nnunet:
#
# Name Version Build Channel
_libgcc_mutex 0.1 main
absl-py 2.1.0
ca-certificates 2023.12.12 h06a4308_0
certifi 2022.12.7
charset-normalizer 2.1.1
cmake 3.25.0
contourpy 1.2.0
cycler 0.12.1
einops 0.7.0
elasticdeform 0.5.0
filelock 3.9.0
fonttools 4.50.0
fsspec 2024.2.0
grpcio 1.62.0
huggingface-hub 0.21.4
idna 3.4
importlib-metadata 7.0.1
importlib_resources 6.4.0
inquirerpy 0.3.4
Jinja2 3.1.2
kiwisolver 1.4.5
ld_impl_linux-64 2.38 h1181459_1
libffi 3.3 he6710b0_2
libgcc-ng 9.1.0 hdf63c60_0
libstdcxx-ng 9.1.0 hdf63c60_0
lit 15.0.7
Markdown 3.5.2
MarkupSafe 2.1.5
matplotlib 3.8.3
monai 1.3.0
mpmath 1.3.0
ncurses 6.3 h7f8727e_2
networkx 3.2.1
nibabel 5.2.0
numpy 1.26.4
opencv-python 4.9.0.80
openssl 1.1.1w h7f8727e_0
packaging 23.2
pfzy 0.3.4
pillow 10.2.0
pip 23.3.1 py39h06a4308_0
prompt-toolkit 3.0.43
protobuf 4.25.3
pyparsing 3.1.2
python 3.9.12 h12debd9_1
python-dateutil 2.9.0.post0
PyYAML 6.0.1
readline 8.1.2 h7f8727e_1
requests 2.28.1
scipy 1.12.0
setuptools 68.2.2 py39h06a4308_0
SimpleITK 2.0.2
six 1.16.0
sqlite 3.38.5 hc218d9a_0
sympy 1.12
tensorboard 2.16.2
tensorboard-data-server 0.7.2
tensorboardX 2.6.2.2
tk 8.6.12 h1ccaba5_0
torch 2.0.1+cu118
torchaudio 2.0.2+cu118
torchvision 0.15.2+cu118
tqdm 4.66.2
triton 2.0.0
typing_extensions 4.8.0
tzdata 2024a h04d1e81_0
urllib3 1.26.13
wcwidth 0.2.13
Werkzeug 3.0.1
wheel 0.41.2 py39h06a4308_0
xz 5.2.5 h7f8727e_1
zipp 3.17.0
zlib 1.2.12 h7f8727e_2
================================================
FILE: Finetune/AbdomenAtlas/train.sh
================================================
# Timestamp used to name this run's log file.
now=$(date +"%Y%m%d_%H%M%S")
logdir=runs/logs
mkdir -p $logdir
# Path to the AbdomenAtlasMini1.0 dataset — edit to your location (see readme.md).
data_dir=/project/medimgfmod/CT/AbdomenAtlasMini1.0/
# Set cache_dataset=True (with enough space at cache_dir) to pre-cache via PersistentDataset.
cache_dataset=False
cache_dir=/scratch/medimgfmod/CT/cache/Atlas
# Launch training; stdout is mirrored into a timestamped log under $logdir.
torchrun --master_port=21472 main.py \
--data_dir $data_dir --cache_dataset $cache_dataset --cache_dir $cache_dir --logdir $logdir | tee $logdir/$now.txt
================================================
FILE: Finetune/AbdomenAtlas/train.slurm
================================================
#!/bin/bash
# NOTE: Lines starting with "#SBATCH" are valid SLURM commands or statements,
# while those starting with "#" and "##SBATCH" are comments.
#SBATCH -J Atlas
#SBATCH -t 72:00:00 #Maximum runtime of 72 hours
# Enable email notifications when job begins and ends
#SBATCH --mail-user=lwubf@connect.ust.hk #Update your email address
#SBATCH --mail-type=begin
#SBATCH --mail-type=end
# Choose partition (queue) with "gpu"
#SBATCH -p project
# To use 16 CPU cores and 1 GPU device in a node
#SBATCH -N 1 -n 16 --gres=gpu:1
# Setup runtime environment if necessary
source ~/.bashrc
source activate nnunet
# Go to the job submission directory and run your application
cd /home/lwubf/AbdomenAtlas/
sh train.sh
================================================
FILE: Finetune/AbdomenAtlas/trainer.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import numpy as np
import torch
import torch.nn.parallel
import torch.utils.data.distributed
from tensorboardX import SummaryWriter
from torch.cuda.amp import GradScaler, autocast
from utils.utils import AverageMeter, distributed_all_gather
from monai.data import decollate_batch
from utils.mixup import mixup
def train_epoch(model, loader, optimizer, scheduler, scaler, epoch, loss_func, args):
    """Run one training epoch and return the running-average loss.

    Applies mixup augmentation to each batch, uses AMP via ``scaler`` when
    ``args.amp`` is set, steps the LR scheduler once per iteration, and
    averages the loss across ranks when ``args.distributed``.
    """
    model.train()
    start_time = time.time()
    run_loss = AverageMeter()
    for idx, batch_data in enumerate(loader):
        data, target = batch_data["image"], batch_data["label"]
        data, target = data.cuda(), target.cuda()
        # Mix image and label together so both see the same batch permutation.
        data, target = mixup([data, target])
        # Zero gradients by dropping them (same effect as zero_grad(set_to_none=True)).
        for param in model.parameters():
            param.grad = None
        with autocast(enabled=args.amp):
            logits = model(data)
            loss = loss_func(logits, target)
        if args.amp:
            scaler.scale(loss).backward()
            scaler.step(optimizer)
            scaler.update()
        else:
            loss.backward()
            optimizer.step()
        if args.distributed:
            # Gather per-rank losses; ranks past sampler.valid_length hold padded samples.
            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)
            run_loss.update(
                np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size
            )
        else:
            run_loss.update(loss.item(), n=args.batch_size)
        lr = optimizer.param_groups[0]["lr"]
        if scheduler is not None:
            # Scheduler is stepped per iteration, not per epoch (see main()'s step counts).
            scheduler.step()
        if args.rank == 0 and (idx + 1) % 200 == 0:
            print(
                "Epoch {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
                "loss: {:.4f}".format(run_loss.avg),
                "lr: {:.8f}".format(lr),
                "time {:.2f}s".format(time.time() - start_time),
            )
            start_time = time.time()
    # Release gradients before validation/checkpointing to free memory.
    for param in model.parameters():
        param.grad = None
    return run_loss.avg
def val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):
    """Run one validation epoch and return the running-average accuracy.

    Uses ``model_inferer`` (sliding-window inference) when provided, otherwise
    a direct forward pass. ``post_label``/``post_pred`` discretize labels and
    logits before ``acc_func`` (a Dice metric in main()) is computed.
    """
    model.eval()
    run_acc = AverageMeter()
    start_time = time.time()
    with torch.no_grad():
        for idx, batch_data in enumerate(loader):
            data, target = batch_data["image"], batch_data["label"]
            data, target = data.cuda(), target.cuda()
            with autocast(enabled=args.amp):
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            # Only the first item of the batch is scored — assumes val batch size 1 (TODO confirm).
            val_labels_list = [target[0]]
            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
            val_outputs_list = [logits[0]]
            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
            # Reset so aggregate() reflects only the current case.
            acc_func.reset()
            acc_func(y_pred=val_output_convert, y=val_labels_convert)
            acc, not_nans = acc_func.aggregate()
            acc = acc.cuda(args.rank)
            if args.distributed:
                # Pool accuracies across ranks, weighting by the non-NaN class counts.
                acc_list, not_nans_list = distributed_all_gather(
                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length
                )
                for al, nl in zip(acc_list, not_nans_list):
                    run_acc.update(al, n=nl)
            else:
                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            if args.rank == 0:
                avg_acc = np.mean(run_acc.avg)
                print(
                    "Val {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
                    "acc",
                    avg_acc,
                    "time {:.2f}s".format(time.time() - start_time),
                )
                start_time = time.time()
            torch.cuda.empty_cache()
    return run_acc.avg
def save_checkpoint(model, epoch, args, filename="model.pt", best_acc=0, optimizer=None, scheduler=None):
    """Serialize training state to ``args.logdir/filename``.

    Saves epoch, best accuracy, and the model weights; optimizer and scheduler
    state are included only when provided.
    """
    # Unwrap DDP so weight keys are stored without the "module." prefix.
    weights = model.module.state_dict() if args.distributed else model.state_dict()
    payload = {"epoch": epoch, "best_acc": best_acc, "state_dict": weights}
    if optimizer is not None:
        payload["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        payload["scheduler"] = scheduler.state_dict()
    path = os.path.join(args.logdir, filename)
    torch.save(payload, path)
    print("Saving checkpoint", path)
def run_training(
    model,
    train_loader,
    val_loader,
    optimizer,
    loss_func,
    acc_func,
    args,
    model_inferer=None,
    scheduler=None,
    start_epoch=0,
    post_label=None,
    post_pred=None,
):
    """Main train/validate loop; returns the best validation accuracy reached.

    Trains for ``args.max_epochs`` epochs, validating every ``args.val_every``
    epochs. On rank 0 (when ``args.save_checkpoint`` is set) the latest state
    is written to ``model_final.pt`` each epoch and copied to ``model.pt``
    whenever validation accuracy improves.
    """
    scaler = None
    if args.amp:
        # One GradScaler shared across the whole run for mixed-precision training.
        scaler = GradScaler()
    val_acc_max = 0.0
    for epoch in range(start_epoch, args.max_epochs):
        if args.distributed:
            # Re-seed the sampler's shuffle so each epoch sees a new permutation.
            train_loader.sampler.set_epoch(epoch)
            torch.distributed.barrier()
        print(args.rank, time.ctime(), "Epoch:", epoch)
        epoch_time = time.time()
        train_loss = train_epoch(
            model, train_loader, optimizer, scheduler, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args
        )
        if args.rank == 0:
            print(
                "Final training {}/{}".format(epoch, args.max_epochs - 1),
                "loss: {:.4f}".format(train_loss),
                "time {:.2f}s".format(time.time() - epoch_time),
            )
        b_new_best = False
        if (epoch + 1) % args.val_every == 0:
            if args.distributed:
                torch.distributed.barrier()
            epoch_time = time.time()
            val_avg_acc = val_epoch(
                model,
                val_loader,
                epoch=epoch,
                acc_func=acc_func,
                model_inferer=model_inferer,
                args=args,
                post_label=post_label,
                post_pred=post_pred,
            )
            # Collapse per-class accuracies into a single scalar.
            val_avg_acc = np.mean(val_avg_acc)
            if args.rank == 0:
                print(
                    "Final validation {}/{}".format(epoch, args.max_epochs - 1),
                    "acc",
                    val_avg_acc,
                    "time {:.2f}s".format(time.time() - epoch_time),
                )
                if val_avg_acc > val_acc_max:
                    print("new best ({:.6f} --> {:.6f}). ".format(val_acc_max, val_avg_acc))
                    val_acc_max = val_avg_acc
                    b_new_best = True
                if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                    save_checkpoint(
                        model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler
                    )
            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename="model_final.pt")
                if b_new_best:
                    # Promote the latest checkpoint to the best-model slot.
                    print("Copying to model.pt new best model!!!!")
                    shutil.copyfile(os.path.join(args.logdir, "model_final.pt"), os.path.join(args.logdir, "model.pt"))
    print("Training Finished !, Best Accuracy: ", val_acc_max)
    return val_acc_max
================================================
FILE: Finetune/AbdomenAtlas/utils/__init__.py
================================================
================================================
FILE: Finetune/AbdomenAtlas/utils/data_trans.py
================================================
import math
import os
from copy import deepcopy
import numpy as np
import torch
import pickle
from monai import data, transforms
from monai.data import *
from monai.transforms import *
from torch.utils.data import DataLoader, ConcatDataset
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler that deals each rank an interleaved share of the
    dataset, optionally padding so every rank gets the same number of samples.

    ``valid_length`` records how many of this rank's indices are real (not
    padding); callers use it to flag padded batches during all-gather.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # Count of real (non-padded) indices this rank will receive.
        self.valid_length = len(list(range(len(self.dataset)))[self.rank: self.total_size: self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            # Deterministic shuffle keyed on the epoch so all ranks agree.
            gen = torch.Generator()
            gen.manual_seed(self.epoch)
            order = torch.randperm(len(self.dataset), generator=gen).tolist()
        else:
            order = list(range(len(self.dataset)))
        if self.make_even:
            shortfall = self.total_size - len(order)
            if shortfall > 0:
                if shortfall < len(order):
                    # Pad by repeating the head of the ordering.
                    order += order[:shortfall]
                else:
                    # Dataset smaller than the shortfall: pad with random picks.
                    picks = np.random.randint(low=0, high=len(order), size=shortfall)
                    order += [order[i] for i in picks]
            assert len(order) == self.total_size
        # Interleaved split: rank r takes positions r, r+R, r+2R, ...
        order = order[self.rank: self.total_size: self.num_replicas]
        self.num_samples = len(order)
        return iter(order)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
def get_trans(args):
    """Build the deterministic and random MONAI transform lists for training.

    Returns:
        (base_trans, random_trans): ``base_trans`` loads, reorients, resamples,
        intensity-windows, crops-to-foreground, and pads volumes; ``random_trans``
        performs label-guided cropping plus flip/rotate/intensity augmentation.
    """
    base_trans = [
        LoadImaged(keys=["image", "label"]),
        EnsureChannelFirstd(keys=["image", "label"]),
        Orientationd(keys=["image", "label"], axcodes="RAS"),
        # Resample to the target spacing; nearest-neighbour for labels.
        Spacingd(keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z),
                 mode=("bilinear", "nearest")),
        # Window CT intensities into [0, 1].
        ScaleIntensityRanged(
            keys=["image"],
            a_min=args.a_min,
            a_max=args.a_max,
            b_min=0.0,
            b_max=1.0,
            clip=True,
        ),
        CropForegroundd(keys=["image", "label"], source_key="image"),
        # Guarantee at least one full ROI so random cropping never fails.
        SpatialPadd(keys=["image", "label"], spatial_size=(args.roi_x, args.roi_y, args.roi_z),
                    mode='constant'),
        # prob=0: a no-op shift — presumably kept for pipeline/caching consistency (TODO confirm).
        transforms.RandShiftIntensityd(keys="image", offsets=0.1, prob=0),
    ]
    random_trans = [
        # Sample ROIs biased toward labeled voxels (pos/neg ratio from args).
        RandCropByPosNegLabeld(
            keys=["image", "label"],
            label_key="label",
            spatial_size=(args.roi_x, args.roi_y, args.roi_z),
            pos=args.pos,
            neg=args.neg,
            num_samples=args.sw_batch_size,
            image_key="image",
            image_threshold=0,
        ),
        transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=0),
        transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=1),
        transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=2),
        transforms.RandRotate90d(keys=["image", "label"], prob=args.RandRotate90d_prob, max_k=3),
        transforms.RandScaleIntensityd(keys="image", factors=0.1, prob=args.RandScaleIntensityd_prob),
        transforms.RandShiftIntensityd(keys="image", offsets=0.1, prob=args.RandShiftIntensityd_prob),
        # Drop metadata keys that the collate step does not expect.
        Delete_keys(keys=["image", "label"]),
    ]
    return base_trans, random_trans
class Delete_keys(MapTransform):
    """Strip bookkeeping entries ('name' and image/label meta dicts) from a
    data dictionary before batching.
    """

    def __call__(self, data):
        d = dict(data)
        for key in ("name", "image_meta_dict", "label_meta_dict"):
            d.pop(key, None)
        return d
================================================
FILE: Finetune/AbdomenAtlas/utils/mixup.py
================================================
import torch
import numpy as np
def mixup(inputs):
    """CutMix-style augmentation: splice each volume with a batch-permuted
    partner at a random cut plane along one spatial axis.

    Bug fixes vs. the original:
      * the cut axis was drawn (``np.random.rand()``) separately for every
        tensor in ``inputs``, so an image and its label could be cut along
        different axes and end up misaligned — the axis is now drawn once;
      * the cut position ``lam`` was always derived from ``size(2)`` even when
        slicing dim 3 — it is now derived from the axis actually sliced
        (identical behavior for the cubic ROIs used in this repo).

    Args:
        inputs: list of 5-D tensors (e.g. ``[image, label]``) sharing the same
            batch and spatial sizes; all are mixed with the same permutation
            and the same cut.

    Returns:
        List of mixed tensors, same shapes as the inputs.
    """
    batch_size = inputs[0].size(0)
    # One shared permutation pairs each sample with its mixing partner.
    perm = torch.randperm(batch_size).tolist()
    # Beta(0.2, 0.2) concentrates mass near 0 and 1 (mostly-one-sample mixes).
    frac = np.random.beta(0.2, 0.2)
    # Choose the cut axis ONCE so every tensor (image and label) is cut identically.
    cut_dim3 = np.random.rand() < 0.5
    new_inputs = []
    for inp in inputs:
        partner = inp[perm]
        if cut_dim3:
            lam = int(frac * inp.size(3))
            mixed = torch.cat([inp[:, :, :, :lam, :],
                               partner[:, :, :, lam:, :]], dim=3)
        else:
            lam = int(frac * inp.size(2))
            mixed = torch.cat([inp[:, :, :lam, :, :],
                               partner[:, :, lam:, :, :]], dim=2)
        new_inputs.append(mixed)
    return new_inputs
================================================
FILE: Finetune/AbdomenAtlas/utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
import os
import SimpleITK as sitk
def resample_3d(img, target_size):
    """Resample a 3-D volume to ``target_size`` with nearest-neighbour
    interpolation (order=0), suitable for label maps."""
    imx, imy, imz = img.shape
    tx, ty, tz = target_size
    # Per-axis zoom factors mapping the source grid onto the target grid.
    factors = (tx / float(imx), ty / float(imy), tz / float(imz))
    return ndimage.zoom(img, factors, order=0, prefilter=False)
def dice(x, y):
    """Binary Dice overlap between prediction ``x`` and ground truth ``y``.

    Returns 0.0 when the ground truth is empty (original convention).
    The triple-nested ``np.sum(np.sum(np.sum(...)))`` calls were redundant —
    a single ``np.sum`` already reduces over all axes.
    """
    intersect = np.sum(x * y)
    y_sum = np.sum(y)
    if y_sum == 0:
        return 0.0
    x_sum = np.sum(x)
    return 2 * intersect / (x_sum + y_sum)
class AverageMeter(object):
    """Tracks the most recent value and running mean of a stream of scalars
    (or numpy arrays, e.g. per-class accuracies)."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running mean."""
        self.val = val
        self.count += n
        self.sum += val * n
        # np.where keeps this elementwise-safe when counts are arrays;
        # falls back to the raw sum while the count is zero.
        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)
def distributed_all_gather(
    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
    """All-gather each tensor in ``tensor_list`` across every distributed rank.

    Args:
        tensor_list: tensors to gather; every rank must call with the same
            number of tensors and matching shapes/dtypes (required by
            ``torch.distributed.all_gather``).
        valid_batch_size: if given, only the first ``min(valid_batch_size,
            world_size)`` gathered entries are kept per tensor.
        out_numpy: if True, each gathered tensor is moved to CPU and converted
            to a numpy array.
        world_size: number of ranks; queried from ``torch.distributed`` when None.
        no_barrier: skip the synchronizing barrier before gathering.
        is_valid: this rank's validity flag; when given (and ``valid_batch_size``
            is not), entries gathered from ranks whose flag is falsy are dropped.

    Returns:
        A list with one entry per input tensor; each entry is the list of
        gathered tensors (or numpy arrays when ``out_numpy``).

    NOTE(review): passing both ``valid_batch_size`` and ``is_valid`` leaves
    ``is_valid`` as a plain bool (the elif skips the tensor conversion), so the
    ``torch.zeros_like(is_valid)`` below would fail — callers appear to pass at
    most one of the two; confirm before relying on the combination.
    """
    if world_size is None:
        world_size = torch.distributed.get_world_size()
    if valid_batch_size is not None:
        # valid_batch_size takes precedence over is_valid (note the elif).
        valid_batch_size = min(valid_batch_size, world_size)
    elif is_valid is not None:
        # Promote the flag to a bool tensor on the same device so it can be gathered.
        is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
    if not no_barrier:
        torch.distributed.barrier()
    tensor_list_out = []
    with torch.no_grad():
        if is_valid is not None:
            # Gather every rank's validity flag first so all ranks filter identically.
            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
            torch.distributed.all_gather(is_valid_list, is_valid)
            is_valid = [x.item() for x in is_valid_list]
        for tensor in tensor_list:
            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
            torch.distributed.all_gather(gather_list, tensor)
            if valid_batch_size is not None:
                gather_list = gather_list[:valid_batch_size]
            elif is_valid is not None:
                # Filters on the 0-d bool tensors in is_valid_list (truthiness of a
                # 0-d tensor is its scalar value), not the converted is_valid list.
                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
            if out_numpy:
                gather_list = [t.cpu().numpy() for t in gather_list]
            tensor_list_out.append(gather_list)
    return tensor_list_out
def color_map(dataset='pascal'):
    """Build a 256-entry uint8 RGB palette for rendering segmentation labels.

    'pascal'/'coco' use the standard PASCAL VOC bit-interleaved colour code;
    'cityscapes' uses the official 19-class Cityscapes palette (index 255 and
    index 19 are black / ignore).

    Returns:
        np.ndarray of shape (256, 3), dtype uint8.
    """
    cmap = np.zeros((256, 3), dtype='uint8')
    if dataset in ('pascal', 'coco'):
        for label in range(256):
            r = g = b = 0
            value = label
            # Spread the label's bit triplets across the high bits of R/G/B.
            for shift in range(7, -1, -1):
                r |= (value & 1) << shift
                g |= ((value >> 1) & 1) << shift
                b |= ((value >> 2) & 1) << shift
                value >>= 3
            cmap[label] = (r, g, b)
    elif dataset == 'cityscapes':
        palette = [
            (128, 64, 128), (244, 35, 232), (70, 70, 70), (102, 102, 156),
            (190, 153, 153), (153, 153, 153), (250, 170, 30), (220, 220, 0),
            (107, 142, 35), (152, 251, 152), (70, 130, 180), (220, 20, 60),
            (255, 0, 0), (0, 0, 142), (0, 0, 70), (0, 60, 100),
            (0, 80, 100), (0, 0, 230), (119, 11, 32), (0, 0, 0),
        ]
        for idx, rgb in enumerate(palette):
            cmap[idx] = rgb
        cmap[255] = (0, 0, 0)
    return cmap
def check_dir(dir):
    """Create directory ``dir`` (including parents) if it does not exist.

    Uses ``exist_ok=True`` instead of the original check-then-create pattern,
    which could raise FileExistsError when another process created the
    directory between the ``os.path.exists`` check and ``os.makedirs``.
    """
    os.makedirs(dir, exist_ok=True)
def read(img):
    """Load an image file via SimpleITK and return it as a numpy array.

    The array axes are transposed (1, 2, 0); SimpleITK yields arrays with the
    slice axis first, so this presumably moves slices to the last axis —
    TODO confirm against the callers' expected (H, W, D) layout.
    """
    volume = sitk.GetArrayFromImage(sitk.ReadImage(img))
    return volume.transpose(1, 2, 0)
================================================
FILE: Finetune/Amos/check_test.py
================================================
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from utils.data_test import get_loader
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
import zipfile
import shutil
import SimpleITK as sitk
from tqdm import tqdm
from utils.utils import *
from PIL import Image
def norm(img):
    """Window a CT slice to [0, 250] and rescale to uint8 [0, 255] for viewing.

    Fixes the original implementation, which zeroed only intensities below
    -175 and left values in (-175, 0) negative after dividing by 250; casting
    those to uint8 wrapped around and rendered dark tissue as bright
    artifacts. Values in [0, 250] map exactly as before.

    Args:
        img: 2-D array of CT intensities (HU).

    Returns:
        uint8 array of the same shape, scaled for display.
    """
    out = np.clip(img, 0, 250) / 250
    return (255 * out).astype(np.uint8)
def check_size():
    """Render the first labelled case as per-slice PNGs for visual inspection.

    Writes one greyscale ``*_raw.png`` and one palettized ``*_pred.png`` per
    axial slice into ``view_path``. Paths are hard-coded to a local Windows
    layout (the author also used a test-set variant of these paths); adjust
    before running elsewhere.
    """
    data_path = "D:\data/amos22\imagesTr"
    pred_path = 'D:\data/amos22\labelsTr'
    view_path = './pred/view_tr'
    check_dir(view_path)
    cmap = color_map()

    cases = os.listdir(pred_path)
    case = cases[0]  # only the first case is inspected

    img_itk = sitk.ReadImage(os.path.join(data_path, case))
    img = sitk.GetArrayFromImage(img_itk)
    print(img_itk.GetSpacing(), img_itk.GetDirection())

    pred_itk = sitk.ReadImage(os.path.join(pred_path, case))
    pred = sitk.GetArrayFromImage(pred_itk)
    print(pred_itk.GetSpacing(), pred_itk.GetDirection())
    print(img.shape, pred.shape)

    import cv2  # local import, as in the original (cv2 only needed here)
    c, h, w = img.shape
    for j in range(c):
        label_img = Image.fromarray(pred[j, :, :].astype(np.uint8), mode='P')
        label_img.putpalette(cmap)
        gray = norm(img[j, :, :])
        cv2.imwrite(view_path + '/' + str(j) + '_raw.png', gray)
        label_img.save(view_path + '/' + str(j) + '_pred.png')
def rename():
    """Rename every prediction file in ``./pred/test`` to ``<stem>.nii.gz``.

    Strips the last 13 characters of each filename (presumably a
    ``_0000.nii.gz``-style suffix plus one extra character — confirm against
    the actual prediction filenames) before appending ``.nii.gz``.
    """
    pred_path = './pred/test'
    for fname in os.listdir(pred_path):
        src = os.path.join(pred_path, fname)
        dst = os.path.join(pred_path, fname[:-13] + '.nii.gz')
        os.rename(src, dst)
def check_direction():
    """Print voxel spacing and direction cosines for every volume in a folder.

    The first two ``data_path`` assignments are dead code kept as a record of
    the directions observed per dataset; only the last (BTCV) path is scanned.
    """
    data_path = "D:\data\FLARE22\imagesTr"  # observed direction (-1, -1, 1)
    data_path = "D:\data/amos22\imagesTr"  # observed direction (1, -1, 1)
    data_path = 'D:\data\BTCV\imagesTr'  # observed direction (1, 1, 1)
    for fname in tqdm(os.listdir(data_path)):
        itk_image = sitk.ReadImage(os.path.join(data_path, fname))
        print(itk_image.GetSpacing(), itk_image.GetDirection())
# Script entry point: print ITK spacing/direction metadata for the configured dataset.
if __name__ == "__main__":
    check_direction()
================================================
FILE: Finetune/Amos/dataset/__init__.py
================================================
================================================
FILE: Finetune/Amos/dataset/dataset.json
================================================
{
"description": "0",
"labels": {
"0": "background",
"1": "Liver",
"10": "Esophagus",
"11": "Stomach",
"12": "Duodenum",
"13": "Left Kidney",
"2": "Right kidney",
"3": "Spleen",
"4": "Pancreas",
"5": "Aorta",
"6": "Inferior vena cava",
"7": "Right adrenal gland",
"8": "Left adrenal gland",
"9": "Gallbladder"
},
"licence": "hands off!",
"modality": {
"0": "CT"
},
"name": "FLARE22",
"numTest": 200,
"numTraining": 50,
"reference": "0",
"release": "0.0",
"tensorImageSize": "4D",
"test": [
"./imagesTs/FLARETs_0001_0000.nii.gz",
"./imagesTs/FLARETs_0002_0000.nii.gz",
"./imagesTs/FLARETs_0003_0000.nii.gz",
"./imagesTs/FLARETs_0004_0000.nii.gz",
"./imagesTs/FLARETs_0005_0000.nii.gz",
"./imagesTs/FLARETs_0006_0000.nii.gz",
"./imagesTs/FLARETs_0007_0000.nii.gz",
"./imagesTs/FLARETs_0008_0000.nii.gz",
"./imagesTs/FLARETs_0009_0000.nii.gz",
"./imagesTs/FLARETs_0010_0000.nii.gz",
"./imagesTs/FLARETs_0011_0000.nii.gz",
"./imagesTs/FLARETs_0012_0000.nii.gz",
"./imagesTs/FLARETs_0013_0000.nii.gz",
"./imagesTs/FLARETs_0014_0000.nii.gz",
"./imagesTs/FLARETs_0015_0000.nii.gz",
"./imagesTs/FLARETs_0016_0000.nii.gz",
"./imagesTs/FLARETs_0017_0000.nii.gz",
"./imagesTs/FLARETs_0018_0000.nii.gz",
"./imagesTs/FLARETs_0019_0000.nii.gz",
"./imagesTs/FLARETs_0020_0000.nii.gz",
"./imagesTs/FLARETs_0021_0000.nii.gz",
"./imagesTs/FLARETs_0022_0000.nii.gz",
"./imagesTs/FLARETs_0023_0000.nii.gz",
"./imagesTs/FLARETs_0024_0000.nii.gz",
"./imagesTs/FLARETs_0025_0000.nii.gz",
"./imagesTs/FLARETs_0026_0000.nii.gz",
"./imagesTs/FLARETs_0027_0000.nii.gz",
"./imagesTs/FLARETs_0028_0000.nii.gz",
"./imagesTs/FLARETs_0029_0000.nii.gz",
"./imagesTs/FLARETs_0030_0000.nii.gz",
"./imagesTs/FLARETs_0031_0000.nii.gz",
"./imagesTs/FLARETs_0032_0000.nii.gz",
"./imagesTs/FLARETs_0033_0000.nii.gz",
"./imagesTs/FLARETs_0034_0000.nii.gz",
"./imagesTs/FLARETs_0035_0000.nii.gz",
"./imagesTs/FLARETs_0036_0000.nii.gz",
"./imagesTs/FLARETs_0037_0000.nii.gz",
"./imagesTs/FLARETs_0038_0000.nii.gz",
"./imagesTs/FLARETs_0039_0000.nii.gz",
"./imagesTs/FLARETs_0040_0000.nii.gz",
"./imagesTs/FLARETs_0041_0000.nii.gz",
"./imagesTs/FLARETs_0042_0000.nii.gz",
"./imagesTs/FLARETs_0043_0000.nii.gz",
"./imagesTs/FLARETs_0044_0000.nii.gz",
"./imagesTs/FLARETs_0045_0000.nii.gz",
"./imagesTs/FLARETs_0046_0000.nii.gz",
"./imagesTs/FLARETs_0047_0000.nii.gz",
"./imagesTs/FLARETs_0048_0000.nii.gz",
"./imagesTs/FLARETs_0049_0000.nii.gz",
"./imagesTs/FLARETs_0050_0000.nii.gz",
"./imagesTs/FLARETs_0051_0000.nii.gz",
"./imagesTs/FLARETs_0052_0000.nii.gz",
"./imagesTs/FLARETs_0053_0000.nii.gz",
"./imagesTs/FLARETs_0054_0000.nii.gz",
"./imagesTs/FLARETs_0055_0000.nii.gz",
"./imagesTs/FLARETs_0056_0000.nii.gz",
"./imagesTs/FLARETs_0057_0000.nii.gz",
"./imagesTs/FLARETs_0058_0000.nii.gz",
"./imagesTs/FLARETs_0059_0000.nii.gz",
"./imagesTs/FLARETs_0060_0000.nii.gz",
"./imagesTs/FLARETs_0061_0000.nii.gz",
"./imagesTs/FLARETs_0062_0000.nii.gz",
"./imagesTs/FLARETs_0063_0000.nii.gz",
"./imagesTs/FLARETs_0064_0000.nii.gz",
"./imagesTs/FLARETs_0065_0000.nii.gz",
"./imagesTs/FLARETs_0066_0000.nii.gz",
"./imagesTs/FLARETs_0067_0000.nii.gz",
"./imagesTs/FLARETs_0068_0000.nii.gz",
"./imagesTs/FLARETs_0069_0000.nii.gz",
"./imagesTs/FLARETs_0070_0000.nii.gz",
"./imagesTs/FLARETs_0071_0000.nii.gz",
"./imagesTs/FLARETs_0072_0000.nii.gz",
"./imagesTs/FLARETs_0073_0000.nii.gz",
"./imagesTs/FLARETs_0074_0000.nii.gz",
"./imagesTs/FLARETs_0075_0000.nii.gz",
"./imagesTs/FLARETs_0076_0000.nii.gz",
"./imagesTs/FLARETs_0077_0000.nii.gz",
"./imagesTs/FLARETs_0078_0000.nii.gz",
"./imagesTs/FLARETs_0079_0000.nii.gz",
"./imagesTs/FLARETs_0080_0000.nii.gz",
"./imagesTs/FLARETs_0081_0000.nii.gz",
"./imagesTs/FLARETs_0082_0000.nii.gz",
"./imagesTs/FLARETs_0083_0000.nii.gz",
"./imagesTs/FLARETs_0084_0000.nii.gz",
"./imagesTs/FLARETs_0085_0000.nii.gz",
"./imagesTs/FLARETs_0086_0000.nii.gz",
"./imagesTs/FLARETs_0087_0000.nii.gz",
"./imagesTs/FLARETs_0088_0000.nii.gz",
"./imagesTs/FLARETs_0089_0000.nii.gz",
"./imagesTs/FLARETs_0090_0000.nii.gz",
"./imagesTs/FLARETs_0091_0000.nii.gz",
"./imagesTs/FLARETs_0092_0000.nii.gz",
"./imagesTs/FLARETs_0093_0000.nii.gz",
"./imagesTs/FLARETs_0094_0000.nii.gz",
"./imagesTs/FLARETs_0095_0000.nii.gz",
"./imagesTs/FLARETs_0096_0000.nii.gz",
"./imagesTs/FLARETs_0097_0000.nii.gz",
"./imagesTs/FLARETs_0098_0000.nii.gz",
"./imagesTs/FLARETs_0099_0000.nii.gz",
"./imagesTs/FLARETs_0100_0000.nii.gz",
"./imagesTs/FLARETs_0101_0000.nii.gz",
"./imagesTs/FLARETs_0102_0000.nii.gz",
"./imagesTs/FLARETs_0103_0000.nii.gz",
"./imagesTs/FLARETs_0104_0000.nii.gz",
"./imagesTs/FLARETs_0105_0000.nii.gz",
"./imagesTs/FLARETs_0106_0000.nii.gz",
"./imagesTs/FLARETs_0107_0000.nii.gz",
"./imagesTs/FLARETs_0108_0000.nii.gz",
"./imagesTs/FLARETs_0109_0000.nii.gz",
"./imagesTs/FLARETs_0110_0000.nii.gz",
"./imagesTs/FLARETs_0111_0000.nii.gz",
"./imagesTs/FLARETs_0112_0000.nii.gz",
"./imagesTs/FLARETs_0113_0000.nii.gz",
"./imagesTs/FLARETs_0114_0000.nii.gz",
"./imagesTs/FLARETs_0115_0000.nii.gz",
"./imagesTs/FLARETs_0116_0000.nii.gz",
"./imagesTs/FLARETs_0117_0000.nii.gz",
"./imagesTs/FLARETs_0118_0000.nii.gz",
"./imagesTs/FLARETs_0119_0000.nii.gz",
"./imagesTs/FLARETs_0120_0000.nii.gz",
"./imagesTs/FLARETs_0121_0000.nii.gz",
"./imagesTs/FLARETs_0122_0000.nii.gz",
"./imagesTs/FLARETs_0123_0000.nii.gz",
"./imagesTs/FLARETs_0124_0000.nii.gz",
"./imagesTs/FLARETs_0125_0000.nii.gz",
"./imagesTs/FLARETs_0126_0000.nii.gz",
"./imagesTs/FLARETs_0127_0000.nii.gz",
"./imagesTs/FLARETs_0128_0000.nii.gz",
"./imagesTs/FLARETs_0129_0000.nii.gz",
"./imagesTs/FLARETs_0130_0000.nii.gz",
"./imagesTs/FLARETs_0131_0000.nii.gz",
"./imagesTs/FLARETs_0132_0000.nii.gz",
"./imagesTs/FLARETs_0133_0000.nii.gz",
"./imagesTs/FLARETs_0134_0000.nii.gz",
"./imagesTs/FLARETs_0135_0000.nii.gz",
"./imagesTs/FLARETs_0136_0000.nii.gz",
"./imagesTs/FLARETs_0137_0000.nii.gz",
"./imagesTs/FLARETs_0138_0000.nii.gz",
"./imagesTs/FLARETs_0139_0000.nii.gz",
"./imagesTs/FLARETs_0140_0000.nii.gz",
"./imagesTs/FLARETs_0141_0000.nii.gz",
"./imagesTs/FLARETs_0142_0000.nii.gz",
"./imagesTs/FLARETs_0143_0000.nii.gz",
"./imagesTs/FLARETs_0144_0000.nii.gz",
"./imagesTs/FLARETs_0145_0000.nii.gz",
"./imagesTs/FLARETs_0146_0000.nii.gz",
"./imagesTs/FLARETs_0147_0000.nii.gz",
"./imagesTs/FLARETs_0148_0000.nii.gz",
"./imagesTs/FLARETs_0149_0000.nii.gz",
"./imagesTs/FLARETs_0150_0000.nii.gz",
"./imagesTs/FLARETs_0151_0000.nii.gz",
"./imagesTs/FLARETs_0152_0000.nii.gz",
"./imagesTs/FLARETs_0153_0000.nii.gz",
"./imagesTs/FLARETs_0154_0000.nii.gz",
"./imagesTs/FLARETs_0155_0000.nii.gz",
"./imagesTs/FLARETs_0156_0000.nii.gz",
"./imagesTs/FLARETs_0157_0000.nii.gz",
"./imagesTs/FLARETs_0158_0000.nii.gz",
"./imagesTs/FLARETs_0159_0000.nii.gz",
"./imagesTs/FLARETs_0160_0000.nii.gz",
"./imagesTs/FLARETs_0161_0000.nii.gz",
"./imagesTs/FLARETs_0162_0000.nii.gz",
"./imagesTs/FLARETs_0163_0000.nii.gz",
"./imagesTs/FLARETs_0164_0000.nii.gz",
"./imagesTs/FLARETs_0165_0000.nii.gz",
"./imagesTs/FLARETs_0166_0000.nii.gz",
"./imagesTs/FLARETs_0167_0000.nii.gz",
"./imagesTs/FLARETs_0168_0000.nii.gz",
"./imagesTs/FLARETs_0169_0000.nii.gz",
"./imagesTs/FLARETs_0170_0000.nii.gz",
"./imagesTs/FLARETs_0171_0000.nii.gz",
"./imagesTs/FLARETs_0172_0000.nii.gz",
"./imagesTs/FLARETs_0173_0000.nii.gz",
"./imagesTs/FLARETs_0174_0000.nii.gz",
"./imagesTs/FLARETs_0175_0000.nii.gz",
"./imagesTs/FLARETs_0176_0000.nii.gz",
"./imagesTs/FLARETs_0177_0000.nii.gz",
"./imagesTs/FLARETs_0178_0000.nii.gz",
"./imagesTs/FLARETs_0179_0000.nii.gz",
"./imagesTs/FLARETs_0180_0000.nii.gz",
"./imagesTs/FLARETs_0181_0000.nii.gz",
"./imagesTs/FLARETs_0182_0000.nii.gz",
"./imagesTs/FLARETs_0183_0000.nii.gz",
"./imagesTs/FLARETs_0184_0000.nii.gz",
"./imagesTs/FLARETs_0185_0000.nii.gz",
"./imagesTs/FLARETs_0186_0000.nii.gz",
"./imagesTs/FLARETs_0187_0000.nii.gz",
"./imagesTs/FLARETs_0188_0000.nii.gz",
"./imagesTs/FLARETs_0189_0000.nii.gz",
"./imagesTs/FLARETs_0190_0000.nii.gz",
"./imagesTs/FLARETs_0191_0000.nii.gz",
"./imagesTs/FLARETs_0192_0000.nii.gz",
"./imagesTs/FLARETs_0193_0000.nii.gz",
"./imagesTs/FLARETs_0194_0000.nii.gz",
"./imagesTs/FLARETs_0195_0000.nii.gz",
"./imagesTs/FLARETs_0196_0000.nii.gz",
"./imagesTs/FLARETs_0197_0000.nii.gz",
"./imagesTs/FLARETs_0198_0000.nii.gz",
"./imagesTs/FLARETs_0199_0000.nii.gz",
"./imagesTs/FLARETs_0200_0000.nii.gz"
],
"validation": [{
"image": "./imagesTr/FLARE22_Tr_0001_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0001.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0002_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0002.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0003_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0003.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0004_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0004.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0005_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0005.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0006_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0006.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0007_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0007.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0008_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0008.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0009_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0009.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0010_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0010.nii.gz"
}
],
"training": [
{
"image": "./imagesTr/FLARE22_Tr_0011_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0011.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0012_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0012.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0013_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0013.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0014_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0014.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0015_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0015.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0016_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0016.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0017_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0017.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0018_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0018.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0019_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0019.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0020_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0020.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0021_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0021.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0022_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0022.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0023_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0023.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0024_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0024.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0025_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0025.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0026_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0026.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0027_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0027.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0028_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0028.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0029_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0029.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0030_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0030.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0031_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0031.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0032_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0032.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0033_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0033.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0034_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0034.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0035_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0035.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0036_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0036.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0037_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0037.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0038_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0038.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0039_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0039.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0040_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0040.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0041_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0041.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0042_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0042.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0043_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0043.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0044_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0044.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0045_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0045.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0046_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0046.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0047_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0047.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0048_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0048.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0049_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0049.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0050_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0050.nii.gz"
}
]
}
================================================
FILE: Finetune/Amos/dataset/dataset_test50.json
================================================
{
"description": "0",
"labels": {
"0": "background",
"1": "Liver",
"10": "Esophagus",
"11": "Stomach",
"12": "Duodenum",
"13": "Left Kidney",
"2": "Right kidney",
"3": "Spleen",
"4": "Pancreas",
"5": "Aorta",
"6": "Inferior vena cava",
"7": "Right adrenal gland",
"8": "Left adrenal gland",
"9": "Gallbladder"
},
"licence": "hands off!",
"modality": {
"0": "CT"
},
"name": "FLARE22",
"numTest": 200,
"numTraining": 50,
"reference": "0",
"release": "0.0",
"tensorImageSize": "4D",
"test": [
"./imagesTs/FLARETs_0001_0000.nii.gz",
"./imagesTs/FLARETs_0002_0000.nii.gz",
"./imagesTs/FLARETs_0003_0000.nii.gz",
"./imagesTs/FLARETs_0004_0000.nii.gz",
"./imagesTs/FLARETs_0005_0000.nii.gz",
"./imagesTs/FLARETs_0006_0000.nii.gz",
"./imagesTs/FLARETs_0007_0000.nii.gz",
"./imagesTs/FLARETs_0008_0000.nii.gz",
"./imagesTs/FLARETs_0009_0000.nii.gz",
"./imagesTs/FLARETs_0010_0000.nii.gz",
"./imagesTs/FLARETs_0011_0000.nii.gz",
"./imagesTs/FLARETs_0012_0000.nii.gz",
"./imagesTs/FLARETs_0013_0000.nii.gz",
"./imagesTs/FLARETs_0014_0000.nii.gz",
"./imagesTs/FLARETs_0015_0000.nii.gz",
"./imagesTs/FLARETs_0016_0000.nii.gz",
"./imagesTs/FLARETs_0017_0000.nii.gz",
"./imagesTs/FLARETs_0018_0000.nii.gz",
"./imagesTs/FLARETs_0019_0000.nii.gz",
"./imagesTs/FLARETs_0020_0000.nii.gz",
"./imagesTs/FLARETs_0021_0000.nii.gz",
"./imagesTs/FLARETs_0022_0000.nii.gz",
"./imagesTs/FLARETs_0023_0000.nii.gz",
"./imagesTs/FLARETs_0024_0000.nii.gz",
"./imagesTs/FLARETs_0025_0000.nii.gz",
"./imagesTs/FLARETs_0026_0000.nii.gz",
"./imagesTs/FLARETs_0027_0000.nii.gz",
"./imagesTs/FLARETs_0028_0000.nii.gz",
"./imagesTs/FLARETs_0029_0000.nii.gz",
"./imagesTs/FLARETs_0030_0000.nii.gz",
"./imagesTs/FLARETs_0031_0000.nii.gz",
"./imagesTs/FLARETs_0032_0000.nii.gz",
"./imagesTs/FLARETs_0033_0000.nii.gz",
"./imagesTs/FLARETs_0034_0000.nii.gz",
"./imagesTs/FLARETs_0035_0000.nii.gz",
"./imagesTs/FLARETs_0036_0000.nii.gz",
"./imagesTs/FLARETs_0037_0000.nii.gz",
"./imagesTs/FLARETs_0038_0000.nii.gz",
"./imagesTs/FLARETs_0039_0000.nii.gz",
"./imagesTs/FLARETs_0040_0000.nii.gz",
"./imagesTs/FLARETs_0041_0000.nii.gz",
"./imagesTs/FLARETs_0042_0000.nii.gz",
"./imagesTs/FLARETs_0043_0000.nii.gz",
"./imagesTs/FLARETs_0044_0000.nii.gz",
"./imagesTs/FLARETs_0045_0000.nii.gz",
"./imagesTs/FLARETs_0046_0000.nii.gz",
"./imagesTs/FLARETs_0047_0000.nii.gz",
"./imagesTs/FLARETs_0048_0000.nii.gz",
"./imagesTs/FLARETs_0049_0000.nii.gz",
"./imagesTs/FLARETs_0050_0000.nii.gz"
],
"validation": [{
"image": "./imagesTr/FLARE22_Tr_0001_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0001.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0002_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0002.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0003_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0003.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0004_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0004.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0005_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0005.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0006_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0006.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0007_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0007.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0008_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0008.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0009_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0009.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0010_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0010.nii.gz"
}
],
"training": [
{
"image": "./imagesTr/FLARE22_Tr_0011_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0011.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0012_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0012.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0013_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0013.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0014_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0014.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0015_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0015.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0016_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0016.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0017_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0017.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0018_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0018.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0019_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0019.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0020_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0020.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0021_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0021.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0022_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0022.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0023_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0023.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0024_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0024.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0025_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0025.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0026_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0026.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0027_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0027.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0028_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0028.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0029_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0029.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0030_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0030.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0031_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0031.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0032_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0032.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0033_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0033.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0034_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0034.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0035_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0035.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0036_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0036.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0037_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0037.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0038_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0038.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0039_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0039.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0040_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0040.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0041_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0041.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0042_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0042.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0043_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0043.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0044_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0044.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0045_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0045.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0046_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0046.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0047_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0047.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0048_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0048.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0049_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0049.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0050_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0050.nii.gz"
}
]
}
================================================
FILE: Finetune/Amos/dataset_CT.json
================================================
{"name": "AMOS", "description": "Amos: A large-scale abdominal multi-organ benchmark for versatile medical image segmentation", "author": "Yuanfeng Ji", "reference": "SRIDB x CUHKSZ x HKU x LGCHSZ x LGPHSZ", "licence": "CC-BY-SA 4.0", "release": "1.0 01/05/2022", "contact": "u3008013@connect.hku.hk", "tensorImageSize": "3D", "modality": {"0": "CT"},
"labels": {"0": "background",
"1": "spleen",
"2": "right kidney",
"3": "left kidney",
"4": "gall bladder",
"5": "esophagus", "6": "liver",
"7": "stomach", "8": "arota",
"9": "postcava", "10": "pancreas",
"11": "right adrenal gland", "12": "left adrenal gland",
"13": "duodenum", "14": "bladder", "15": "prostate/uterus"},
"numTraining": 240, "numValidation": 120, "numTest": 240,
"training": [{"image": "./imagesTr/amos_0001.nii.gz", "label": "./labelsTr/amos_0001.nii.gz"}, {"image": "./imagesTr/amos_0004.nii.gz", "label": "./labelsTr/amos_0004.nii.gz"}, {"image": "./imagesTr/amos_0005.nii.gz", "label": "./labelsTr/amos_0005.nii.gz"}, {"image": "./imagesTr/amos_0006.nii.gz", "label": "./labelsTr/amos_0006.nii.gz"}, {"image": "./imagesTr/amos_0007.nii.gz", "label": "./labelsTr/amos_0007.nii.gz"}, {"image": "./imagesTr/amos_0009.nii.gz", "label": "./labelsTr/amos_0009.nii.gz"}, {"image": "./imagesTr/amos_0010.nii.gz", "label": "./labelsTr/amos_0010.nii.gz"}, {"image": "./imagesTr/amos_0011.nii.gz", "label": "./labelsTr/amos_0011.nii.gz"}, {"image": "./imagesTr/amos_0014.nii.gz", "label": "./labelsTr/amos_0014.nii.gz"}, {"image": "./imagesTr/amos_0015.nii.gz", "label": "./labelsTr/amos_0015.nii.gz"}, {"image": "./imagesTr/amos_0016.nii.gz", "label": "./labelsTr/amos_0016.nii.gz"}, {"image": "./imagesTr/amos_0017.nii.gz", "label": "./labelsTr/amos_0017.nii.gz"}, {"image": "./imagesTr/amos_0019.nii.gz", "label": "./labelsTr/amos_0019.nii.gz"}, {"image": "./imagesTr/amos_0021.nii.gz", "label": "./labelsTr/amos_0021.nii.gz"}, {"image": "./imagesTr/amos_0023.nii.gz", "label": "./labelsTr/amos_0023.nii.gz"}, {"image": "./imagesTr/amos_0024.nii.gz", "label": "./labelsTr/amos_0024.nii.gz"}, {"image": "./imagesTr/amos_0025.nii.gz", "label": "./labelsTr/amos_0025.nii.gz"}, {"image": "./imagesTr/amos_0027.nii.gz", "label": "./labelsTr/amos_0027.nii.gz"}, {"image": "./imagesTr/amos_0030.nii.gz", "label": "./labelsTr/amos_0030.nii.gz"}, {"image": "./imagesTr/amos_0033.nii.gz", "label": "./labelsTr/amos_0033.nii.gz"}, {"image": "./imagesTr/amos_0035.nii.gz", "label": "./labelsTr/amos_0035.nii.gz"}, {"image": "./imagesTr/amos_0036.nii.gz", "label": "./labelsTr/amos_0036.nii.gz"}, {"image": "./imagesTr/amos_0038.nii.gz", "label": "./labelsTr/amos_0038.nii.gz"}, {"image": "./imagesTr/amos_0042.nii.gz", "label": "./labelsTr/amos_0042.nii.gz"}, {"image": 
"./imagesTr/amos_0043.nii.gz", "label": "./labelsTr/amos_0043.nii.gz"}, {"image": "./imagesTr/amos_0044.nii.gz", "label": "./labelsTr/amos_0044.nii.gz"}, {"image": "./imagesTr/amos_0045.nii.gz", "label": "./labelsTr/amos_0045.nii.gz"}, {"image": "./imagesTr/amos_0047.nii.gz", "label": "./labelsTr/amos_0047.nii.gz"}, {"image": "./imagesTr/amos_0048.nii.gz", "label": "./labelsTr/amos_0048.nii.gz"}, {"image": "./imagesTr/amos_0049.nii.gz", "label": "./labelsTr/amos_0049.nii.gz"}, {"image": "./imagesTr/amos_0050.nii.gz", "label": "./labelsTr/amos_0050.nii.gz"}, {"image": "./imagesTr/amos_0052.nii.gz", "label": "./labelsTr/amos_0052.nii.gz"}, {"image": "./imagesTr/amos_0054.nii.gz", "label": "./labelsTr/amos_0054.nii.gz"}, {"image": "./imagesTr/amos_0057.nii.gz", "label": "./labelsTr/amos_0057.nii.gz"}, {"image": "./imagesTr/amos_0058.nii.gz", "label": "./labelsTr/amos_0058.nii.gz"}, {"image": "./imagesTr/amos_0059.nii.gz", "label": "./labelsTr/amos_0059.nii.gz"}, {"image": "./imagesTr/amos_0060.nii.gz", "label": "./labelsTr/amos_0060.nii.gz"}, {"image": "./imagesTr/amos_0064.nii.gz", "label": "./labelsTr/amos_0064.nii.gz"}, {"image": "./imagesTr/amos_0066.nii.gz", "label": "./labelsTr/amos_0066.nii.gz"}, {"image": "./imagesTr/amos_0067.nii.gz", "label": "./labelsTr/amos_0067.nii.gz"}, {"image": "./imagesTr/amos_0069.nii.gz", "label": "./labelsTr/amos_0069.nii.gz"}, {"image": "./imagesTr/amos_0071.nii.gz", "label": "./labelsTr/amos_0071.nii.gz"}, {"image": "./imagesTr/amos_0072.nii.gz", "label": "./labelsTr/amos_0072.nii.gz"}, {"image": "./imagesTr/amos_0075.nii.gz", "label": "./labelsTr/amos_0075.nii.gz"}, {"image": "./imagesTr/amos_0076.nii.gz", "label": "./labelsTr/amos_0076.nii.gz"}, {"image": "./imagesTr/amos_0077.nii.gz", "label": "./labelsTr/amos_0077.nii.gz"}, {"image": "./imagesTr/amos_0078.nii.gz", "label": "./labelsTr/amos_0078.nii.gz"}, {"image": "./imagesTr/amos_0079.nii.gz", "label": "./labelsTr/amos_0079.nii.gz"}, {"image": "./imagesTr/amos_0081.nii.gz", 
"label": "./labelsTr/amos_0081.nii.gz"}, {"image": "./imagesTr/amos_0083.nii.gz", "label": "./labelsTr/amos_0083.nii.gz"}, {"image": "./imagesTr/amos_0084.nii.gz", "label": "./labelsTr/amos_0084.nii.gz"}, {"image": "./imagesTr/amos_0086.nii.gz", "label": "./labelsTr/amos_0086.nii.gz"}, {"image": "./imagesTr/amos_0088.nii.gz", "label": "./labelsTr/amos_0088.nii.gz"}, {"image": "./imagesTr/amos_0089.nii.gz", "label": "./labelsTr/amos_0089.nii.gz"}, {"image": "./imagesTr/amos_0092.nii.gz", "label": "./labelsTr/amos_0092.nii.gz"}, {"image": "./imagesTr/amos_0094.nii.gz", "label": "./labelsTr/amos_0094.nii.gz"}, {"image": "./imagesTr/amos_0097.nii.gz", "label": "./labelsTr/amos_0097.nii.gz"}, {"image": "./imagesTr/amos_0098.nii.gz", "label": "./labelsTr/amos_0098.nii.gz"}, {"image": "./imagesTr/amos_0099.nii.gz", "label": "./labelsTr/amos_0099.nii.gz"}, {"image": "./imagesTr/amos_0102.nii.gz", "label": "./labelsTr/amos_0102.nii.gz"}, {"image": "./imagesTr/amos_0103.nii.gz", "label": "./labelsTr/amos_0103.nii.gz"}, {"image": "./imagesTr/amos_0104.nii.gz", "label": "./labelsTr/amos_0104.nii.gz"}, {"image": "./imagesTr/amos_0105.nii.gz", "label": "./labelsTr/amos_0105.nii.gz"}, {"image": "./imagesTr/amos_0109.nii.gz", "label": "./labelsTr/amos_0109.nii.gz"}, {"image": "./imagesTr/amos_0110.nii.gz", "label": "./labelsTr/amos_0110.nii.gz"}, {"image": "./imagesTr/amos_0111.nii.gz", "label": "./labelsTr/amos_0111.nii.gz"}, {"image": "./imagesTr/amos_0113.nii.gz", "label": "./labelsTr/amos_0113.nii.gz"}, {"image": "./imagesTr/amos_0115.nii.gz", "label": "./labelsTr/amos_0115.nii.gz"}, {"image": "./imagesTr/amos_0116.nii.gz", "label": "./labelsTr/amos_0116.nii.gz"}, {"image": "./imagesTr/amos_0118.nii.gz", "label": "./labelsTr/amos_0118.nii.gz"}, {"image": "./imagesTr/amos_0119.nii.gz", "label": "./labelsTr/amos_0119.nii.gz"}, {"image": "./imagesTr/amos_0121.nii.gz", "label": "./labelsTr/amos_0121.nii.gz"}, {"image": "./imagesTr/amos_0124.nii.gz", "label": 
"./labelsTr/amos_0124.nii.gz"}, {"image": "./imagesTr/amos_0125.nii.gz", "label": "./labelsTr/amos_0125.nii.gz"}, {"image": "./imagesTr/amos_0126.nii.gz", "label": "./labelsTr/amos_0126.nii.gz"}, {"image": "./imagesTr/amos_0127.nii.gz", "label": "./labelsTr/amos_0127.nii.gz"}, {"image": "./imagesTr/amos_0129.nii.gz", "label": "./labelsTr/amos_0129.nii.gz"}, {"image": "./imagesTr/amos_0131.nii.gz", "label": "./labelsTr/amos_0131.nii.gz"}, {"image": "./imagesTr/amos_0133.nii.gz", "label": "./labelsTr/amos_0133.nii.gz"}, {"image": "./imagesTr/amos_0134.nii.gz", "label": "./labelsTr/amos_0134.nii.gz"}, {"image": "./imagesTr/amos_0135.nii.gz", "label": "./labelsTr/amos_0135.nii.gz"}, {"image": "./imagesTr/amos_0137.nii.gz", "label": "./labelsTr/amos_0137.nii.gz"}, {"image": "./imagesTr/amos_0138.nii.gz", "label": "./labelsTr/amos_0138.nii.gz"}, {"image": "./imagesTr/amos_0141.nii.gz", "label": "./labelsTr/amos_0141.nii.gz"}, {"image": "./imagesTr/amos_0142.nii.gz", "label": "./labelsTr/amos_0142.nii.gz"}, {"image": "./imagesTr/amos_0143.nii.gz", "label": "./labelsTr/amos_0143.nii.gz"}, {"image": "./imagesTr/amos_0147.nii.gz", "label": "./labelsTr/amos_0147.nii.gz"}, {"image": "./imagesTr/amos_0149.nii.gz", "label": "./labelsTr/amos_0149.nii.gz"}, {"image": "./imagesTr/amos_0152.nii.gz", "label": "./labelsTr/amos_0152.nii.gz"}, {"image": "./imagesTr/amos_0153.nii.gz", "label": "./labelsTr/amos_0153.nii.gz"}, {"image": "./imagesTr/amos_0154.nii.gz", "label": "./labelsTr/amos_0154.nii.gz"}, {"image": "./imagesTr/amos_0156.nii.gz", "label": "./labelsTr/amos_0156.nii.gz"}, {"image": "./imagesTr/amos_0158.nii.gz", "label": "./labelsTr/amos_0158.nii.gz"}, {"image": "./imagesTr/amos_0159.nii.gz", "label": "./labelsTr/amos_0159.nii.gz"}, {"image": "./imagesTr/amos_0160.nii.gz", "label": "./labelsTr/amos_0160.nii.gz"}, {"image": "./imagesTr/amos_0161.nii.gz", "label": "./labelsTr/amos_0161.nii.gz"}, {"image": "./imagesTr/amos_0162.nii.gz", "label": "./labelsTr/amos_0162.nii.gz"}, 
{"image": "./imagesTr/amos_0166.nii.gz", "label": "./labelsTr/amos_0166.nii.gz"}, {"image": "./imagesTr/amos_0170.nii.gz", "label": "./labelsTr/amos_0170.nii.gz"}, {"image": "./imagesTr/amos_0171.nii.gz", "label": "./labelsTr/amos_0171.nii.gz"}, {"image": "./imagesTr/amos_0172.nii.gz", "label": "./labelsTr/amos_0172.nii.gz"}, {"image": "./imagesTr/amos_0173.nii.gz", "label": "./labelsTr/amos_0173.nii.gz"}, {"image": "./imagesTr/amos_0175.nii.gz", "label": "./labelsTr/amos_0175.nii.gz"}, {"image": "./imagesTr/amos_0177.nii.gz", "label": "./labelsTr/amos_0177.nii.gz"}, {"image": "./imagesTr/amos_0179.nii.gz", "label": "./labelsTr/amos_0179.nii.gz"}, {"image": "./imagesTr/amos_0180.nii.gz", "label": "./labelsTr/amos_0180.nii.gz"}, {"image": "./imagesTr/amos_0181.nii.gz", "label": "./labelsTr/amos_0181.nii.gz"}, {"image": "./imagesTr/amos_0184.nii.gz", "label": "./labelsTr/amos_0184.nii.gz"}, {"image": "./imagesTr/amos_0185.nii.gz", "label": "./labelsTr/amos_0185.nii.gz"}, {"image": "./imagesTr/amos_0186.nii.gz", "label": "./labelsTr/amos_0186.nii.gz"}, {"image": "./imagesTr/amos_0188.nii.gz", "label": "./labelsTr/amos_0188.nii.gz"}, {"image": "./imagesTr/amos_0190.nii.gz", "label": "./labelsTr/amos_0190.nii.gz"}, {"image": "./imagesTr/amos_0192.nii.gz", "label": "./labelsTr/amos_0192.nii.gz"}, {"image": "./imagesTr/amos_0193.nii.gz", "label": "./labelsTr/amos_0193.nii.gz"}, {"image": "./imagesTr/amos_0195.nii.gz", "label": "./labelsTr/amos_0195.nii.gz"}, {"image": "./imagesTr/amos_0196.nii.gz", "label": "./labelsTr/amos_0196.nii.gz"}, {"image": "./imagesTr/amos_0197.nii.gz", "label": "./labelsTr/amos_0197.nii.gz"}, {"image": "./imagesTr/amos_0198.nii.gz", "label": "./labelsTr/amos_0198.nii.gz"}, {"image": "./imagesTr/amos_0199.nii.gz", "label": "./labelsTr/amos_0199.nii.gz"}, {"image": "./imagesTr/amos_0212.nii.gz", "label": "./labelsTr/amos_0212.nii.gz"}, {"image": "./imagesTr/amos_0214.nii.gz", "label": "./labelsTr/amos_0214.nii.gz"}, {"image": 
"./imagesTr/amos_0215.nii.gz", "label": "./labelsTr/amos_0215.nii.gz"}, {"image": "./imagesTr/amos_0217.nii.gz", "label": "./labelsTr/amos_0217.nii.gz"}, {"image": "./imagesTr/amos_0224.nii.gz", "label": "./labelsTr/amos_0224.nii.gz"}, {"image": "./imagesTr/amos_0225.nii.gz", "label": "./labelsTr/amos_0225.nii.gz"}, {"image": "./imagesTr/amos_0226.nii.gz", "label": "./labelsTr/amos_0226.nii.gz"}, {"image": "./imagesTr/amos_0230.nii.gz", "label": "./labelsTr/amos_0230.nii.gz"}, {"image": "./imagesTr/amos_0231.nii.gz", "label": "./labelsTr/amos_0231.nii.gz"}, {"image": "./imagesTr/amos_0235.nii.gz", "label": "./labelsTr/amos_0235.nii.gz"}, {"image": "./imagesTr/amos_0237.nii.gz", "label": "./labelsTr/amos_0237.nii.gz"}, {"image": "./imagesTr/amos_0239.nii.gz", "label": "./labelsTr/amos_0239.nii.gz"}, {"image": "./imagesTr/amos_0242.nii.gz", "label": "./labelsTr/amos_0242.nii.gz"}, {"image": "./imagesTr/amos_0245.nii.gz", "label": "./labelsTr/amos_0245.nii.gz"}, {"image": "./imagesTr/amos_0248.nii.gz", "label": "./labelsTr/amos_0248.nii.gz"}, {"image": "./imagesTr/amos_0249.nii.gz", "label": "./labelsTr/amos_0249.nii.gz"}, {"image": "./imagesTr/amos_0254.nii.gz", "label": "./labelsTr/amos_0254.nii.gz"}, {"image": "./imagesTr/amos_0259.nii.gz", "label": "./labelsTr/amos_0259.nii.gz"}, {"image": "./imagesTr/amos_0263.nii.gz", "label": "./labelsTr/amos_0263.nii.gz"}, {"image": "./imagesTr/amos_0264.nii.gz", "label": "./labelsTr/amos_0264.nii.gz"}, {"image": "./imagesTr/amos_0268.nii.gz", "label": "./labelsTr/amos_0268.nii.gz"}, {"image": "./imagesTr/amos_0272.nii.gz", "label": "./labelsTr/amos_0272.nii.gz"}, {"image": "./imagesTr/amos_0273.nii.gz", "label": "./labelsTr/amos_0273.nii.gz"}, {"image": "./imagesTr/amos_0274.nii.gz", "label": "./labelsTr/amos_0274.nii.gz"}, {"image": "./imagesTr/amos_0276.nii.gz", "label": "./labelsTr/amos_0276.nii.gz"}, {"image": "./imagesTr/amos_0279.nii.gz", "label": "./labelsTr/amos_0279.nii.gz"}, {"image": "./imagesTr/amos_0281.nii.gz", 
"label": "./labelsTr/amos_0281.nii.gz"}, {"image": "./imagesTr/amos_0282.nii.gz", "label": "./labelsTr/amos_0282.nii.gz"}, {"image": "./imagesTr/amos_0288.nii.gz", "label": "./labelsTr/amos_0288.nii.gz"}, {"image": "./imagesTr/amos_0294.nii.gz", "label": "./labelsTr/amos_0294.nii.gz"}, {"image": "./imagesTr/amos_0296.nii.gz", "label": "./labelsTr/amos_0296.nii.gz"}, {"image": "./imagesTr/amos_0297.nii.gz", "label": "./labelsTr/amos_0297.nii.gz"}, {"image": "./imagesTr/amos_0299.nii.gz", "label": "./labelsTr/amos_0299.nii.gz"}, {"image": "./imagesTr/amos_0301.nii.gz", "label": "./labelsTr/amos_0301.nii.gz"}, {"image": "./imagesTr/amos_0302.nii.gz", "label": "./labelsTr/amos_0302.nii.gz"}, {"image": "./imagesTr/amos_0307.nii.gz", "label": "./labelsTr/amos_0307.nii.gz"}, {"image": "./imagesTr/amos_0317.nii.gz", "label": "./labelsTr/amos_0317.nii.gz"}, {"image": "./imagesTr/amos_0320.nii.gz", "label": "./labelsTr/amos_0320.nii.gz"}, {"image": "./imagesTr/amos_0321.nii.gz", "label": "./labelsTr/amos_0321.nii.gz"}, {"image": "./imagesTr/amos_0330.nii.gz", "label": "./labelsTr/amos_0330.nii.gz"}, {"image": "./imagesTr/amos_0332.nii.gz", "label": "./labelsTr/amos_0332.nii.gz"}, {"image": "./imagesTr/amos_0336.nii.gz", "label": "./labelsTr/amos_0336.nii.gz"}, {"image": "./imagesTr/amos_0337.nii.gz", "label": "./labelsTr/amos_0337.nii.gz"}, {"image": "./imagesTr/amos_0341.nii.gz", "label": "./labelsTr/amos_0341.nii.gz"}, {"image": "./imagesTr/amos_0348.nii.gz", "label": "./labelsTr/amos_0348.nii.gz"}, {"image": "./imagesTr/amos_0349.nii.gz", "label": "./labelsTr/amos_0349.nii.gz"}, {"image": "./imagesTr/amos_0350.nii.gz", "label": "./labelsTr/amos_0350.nii.gz"}, {"image": "./imagesTr/amos_0351.nii.gz", "label": "./labelsTr/amos_0351.nii.gz"}, {"image": "./imagesTr/amos_0353.nii.gz", "label": "./labelsTr/amos_0353.nii.gz"}, {"image": "./imagesTr/amos_0358.nii.gz", "label": "./labelsTr/amos_0358.nii.gz"}, {"image": "./imagesTr/amos_0361.nii.gz", "label": 
"./labelsTr/amos_0361.nii.gz"}, {"image": "./imagesTr/amos_0362.nii.gz", "label": "./labelsTr/amos_0362.nii.gz"}, {"image": "./imagesTr/amos_0366.nii.gz", "label": "./labelsTr/amos_0366.nii.gz"}, {"image": "./imagesTr/amos_0367.nii.gz", "label": "./labelsTr/amos_0367.nii.gz"}, {"image": "./imagesTr/amos_0370.nii.gz", "label": "./labelsTr/amos_0370.nii.gz"}, {"image": "./imagesTr/amos_0371.nii.gz", "label": "./labelsTr/amos_0371.nii.gz"}, {"image": "./imagesTr/amos_0374.nii.gz", "label": "./labelsTr/amos_0374.nii.gz"}, {"image": "./imagesTr/amos_0376.nii.gz", "label": "./labelsTr/amos_0376.nii.gz"}, {"image": "./imagesTr/amos_0378.nii.gz", "label": "./labelsTr/amos_0378.nii.gz"}, {"image": "./imagesTr/amos_0379.nii.gz", "label": "./labelsTr/amos_0379.nii.gz"}, {"image": "./imagesTr/amos_0380.nii.gz", "label": "./labelsTr/amos_0380.nii.gz"}, {"image": "./imagesTr/amos_0381.nii.gz", "label": "./labelsTr/amos_0381.nii.gz"}, {"image": "./imagesTr/amos_0383.nii.gz", "label": "./labelsTr/amos_0383.nii.gz"}, {"image": "./imagesTr/amos_0384.nii.gz", "label": "./labelsTr/amos_0384.nii.gz"}, {"image": "./imagesTr/amos_0387.nii.gz", "label": "./labelsTr/amos_0387.nii.gz"}, {"image": "./imagesTr/amos_0388.nii.gz", "label": "./labelsTr/amos_0388.nii.gz"}, {"image": "./imagesTr/amos_0390.nii.gz", "label": "./labelsTr/amos_0390.nii.gz"}, {"image": "./imagesTr/amos_0391.nii.gz", "label": "./labelsTr/amos_0391.nii.gz"}, {"image": "./imagesTr/amos_0392.nii.gz", "label": "./labelsTr/amos_0392.nii.gz"}, {"image": "./imagesTr/amos_0395.nii.gz", "label": "./labelsTr/amos_0395.nii.gz"}, {"image": "./imagesTr/amos_0396.nii.gz", "label": "./labelsTr/amos_0396.nii.gz"}, {"image": "./imagesTr/amos_0398.nii.gz", "label": "./labelsTr/amos_0398.nii.gz"}, {"image": "./imagesTr/amos_0400.nii.gz", "label": "./labelsTr/amos_0400.nii.gz"}, {"image": "./imagesTr/amos_0401.nii.gz", "label": "./labelsTr/amos_0401.nii.gz"}, {"image": "./imagesTr/amos_0402.nii.gz", "label": "./labelsTr/amos_0402.nii.gz"}, 
{"image": "./imagesTr/amos_0403.nii.gz", "label": "./labelsTr/amos_0403.nii.gz"}, {"image": "./imagesTr/amos_0404.nii.gz", "label": "./labelsTr/amos_0404.nii.gz"}, {"image": "./imagesTr/amos_0405.nii.gz", "label": "./labelsTr/amos_0405.nii.gz"}, {"image": "./imagesTr/amos_0406.nii.gz", "label": "./labelsTr/amos_0406.nii.gz"}, {"image": "./imagesTr/amos_0408.nii.gz", "label": "./labelsTr/amos_0408.nii.gz"}, {"image": "./imagesTr/amos_0410.nii.gz", "label": "./labelsTr/amos_0410.nii.gz"}],
"validation": [{"image": "./imagesVa/amos_0008.nii.gz", "label": "./labelsVa/amos_0008.nii.gz"}, {"image": "./imagesVa/amos_0013.nii.gz", "label": "./labelsVa/amos_0013.nii.gz"}, {"image": "./imagesVa/amos_0018.nii.gz", "label": "./labelsVa/amos_0018.nii.gz"}, {"image": "./imagesVa/amos_0022.nii.gz", "label": "./labelsVa/amos_0022.nii.gz"}, {"image": "./imagesVa/amos_0029.nii.gz", "label": "./labelsVa/amos_0029.nii.gz"}, {"image": "./imagesVa/amos_0032.nii.gz", "label": "./labelsVa/amos_0032.nii.gz"}, {"image": "./imagesVa/amos_0034.nii.gz", "label": "./labelsVa/amos_0034.nii.gz"}, {"image": "./imagesVa/amos_0040.nii.gz", "label": "./labelsVa/amos_0040.nii.gz"}, {"image": "./imagesVa/amos_0041.nii.gz", "label": "./labelsVa/amos_0041.nii.gz"}, {"image": "./imagesVa/amos_0051.nii.gz", "label": "./labelsVa/amos_0051.nii.gz"}, {"image": "./imagesVa/amos_0056.nii.gz", "label": "./labelsVa/amos_0056.nii.gz"}, {"image": "./imagesVa/amos_0061.nii.gz", "label": "./labelsVa/amos_0061.nii.gz"}, {"image": "./imagesVa/amos_0063.nii.gz", "label": "./labelsVa/amos_0063.nii.gz"}, {"image": "./imagesVa/amos_0070.nii.gz", "label": "./labelsVa/amos_0070.nii.gz"}, {"image": "./imagesVa/amos_0073.nii.gz", "label": "./labelsVa/amos_0073.nii.gz"}, {"image": "./imagesVa/amos_0085.nii.gz", "label": "./labelsVa/amos_0085.nii.gz"}, {"image": "./imagesVa/amos_0087.nii.gz", "label": "./labelsVa/amos_0087.nii.gz"}, {"image": "./imagesVa/amos_0090.nii.gz", "label": "./labelsVa/amos_0090.nii.gz"}, {"image": "./imagesVa/amos_0106.nii.gz", "label": "./labelsVa/amos_0106.nii.gz"}, {"image": "./imagesVa/amos_0108.nii.gz", "label": "./labelsVa/amos_0108.nii.gz"}, {"image": "./imagesVa/amos_0112.nii.gz", "label": "./labelsVa/amos_0112.nii.gz"}, {"image": "./imagesVa/amos_0117.nii.gz", "label": "./labelsVa/amos_0117.nii.gz"}, {"image": "./imagesVa/amos_0120.nii.gz", "label": "./labelsVa/amos_0120.nii.gz"}, {"image": "./imagesVa/amos_0123.nii.gz", "label": "./labelsVa/amos_0123.nii.gz"}, {"image": 
"./imagesVa/amos_0128.nii.gz", "label": "./labelsVa/amos_0128.nii.gz"}, {"image": "./imagesVa/amos_0132.nii.gz", "label": "./labelsVa/amos_0132.nii.gz"}, {"image": "./imagesVa/amos_0136.nii.gz", "label": "./labelsVa/amos_0136.nii.gz"}, {"image": "./imagesVa/amos_0140.nii.gz", "label": "./labelsVa/amos_0140.nii.gz"}, {"image": "./imagesVa/amos_0144.nii.gz", "label": "./labelsVa/amos_0144.nii.gz"}, {"image": "./imagesVa/amos_0150.nii.gz", "label": "./labelsVa/amos_0150.nii.gz"}, {"image": "./imagesVa/amos_0155.nii.gz", "label": "./labelsVa/amos_0155.nii.gz"}, {"image": "./imagesVa/amos_0157.nii.gz", "label": "./labelsVa/amos_0157.nii.gz"}, {"image": "./imagesVa/amos_0167.nii.gz", "label": "./labelsVa/amos_0167.nii.gz"}, {"image": "./imagesVa/amos_0174.nii.gz", "label": "./labelsVa/amos_0174.nii.gz"}, {"image": "./imagesVa/amos_0176.nii.gz", "label": "./labelsVa/amos_0176.nii.gz"}, {"image": "./imagesVa/amos_0189.nii.gz", "label": "./labelsVa/amos_0189.nii.gz"}, {"image": "./imagesVa/amos_0191.nii.gz", "label": "./labelsVa/amos_0191.nii.gz"}, {"image": "./imagesVa/amos_0194.nii.gz", "label": "./labelsVa/amos_0194.nii.gz"}, {"image": "./imagesVa/amos_0200.nii.gz", "label": "./labelsVa/amos_0200.nii.gz"}, {"image": "./imagesVa/amos_0202.nii.gz", "label": "./labelsVa/amos_0202.nii.gz"}, {"image": "./imagesVa/amos_0203.nii.gz", "label": "./labelsVa/amos_0203.nii.gz"}, {"image": "./imagesVa/amos_0204.nii.gz", "label": "./labelsVa/amos_0204.nii.gz"}, {"image": "./imagesVa/amos_0206.nii.gz", "label": "./labelsVa/amos_0206.nii.gz"}, {"image": "./imagesVa/amos_0207.nii.gz", "label": "./labelsVa/amos_0207.nii.gz"}, {"image": "./imagesVa/amos_0208.nii.gz", "label": "./labelsVa/amos_0208.nii.gz"}, {"image": "./imagesVa/amos_0216.nii.gz", "label": "./labelsVa/amos_0216.nii.gz"}, {"image": "./imagesVa/amos_0218.nii.gz", "label": "./labelsVa/amos_0218.nii.gz"}, {"image": "./imagesVa/amos_0219.nii.gz", "label": "./labelsVa/amos_0219.nii.gz"}, {"image": "./imagesVa/amos_0223.nii.gz", 
"label": "./labelsVa/amos_0223.nii.gz"}, {"image": "./imagesVa/amos_0228.nii.gz", "label": "./labelsVa/amos_0228.nii.gz"}, {"image": "./imagesVa/amos_0233.nii.gz", "label": "./labelsVa/amos_0233.nii.gz"}, {"image": "./imagesVa/amos_0238.nii.gz", "label": "./labelsVa/amos_0238.nii.gz"}, {"image": "./imagesVa/amos_0244.nii.gz", "label": "./labelsVa/amos_0244.nii.gz"}, {"image": "./imagesVa/amos_0247.nii.gz", "label": "./labelsVa/amos_0247.nii.gz"}, {"image": "./imagesVa/amos_0250.nii.gz", "label": "./labelsVa/amos_0250.nii.gz"}, {"image": "./imagesVa/amos_0255.nii.gz", "label": "./labelsVa/amos_0255.nii.gz"}, {"image": "./imagesVa/amos_0257.nii.gz", "label": "./labelsVa/amos_0257.nii.gz"}, {"image": "./imagesVa/amos_0258.nii.gz", "label": "./labelsVa/amos_0258.nii.gz"}, {"image": "./imagesVa/amos_0278.nii.gz", "label": "./labelsVa/amos_0278.nii.gz"}, {"image": "./imagesVa/amos_0280.nii.gz", "label": "./labelsVa/amos_0280.nii.gz"}, {"image": "./imagesVa/amos_0283.nii.gz", "label": "./labelsVa/amos_0283.nii.gz"}, {"image": "./imagesVa/amos_0284.nii.gz", "label": "./labelsVa/amos_0284.nii.gz"}, {"image": "./imagesVa/amos_0286.nii.gz", "label": "./labelsVa/amos_0286.nii.gz"}, {"image": "./imagesVa/amos_0287.nii.gz", "label": "./labelsVa/amos_0287.nii.gz"}, {"image": "./imagesVa/amos_0289.nii.gz", "label": "./labelsVa/amos_0289.nii.gz"}, {"image": "./imagesVa/amos_0290.nii.gz", "label": "./labelsVa/amos_0290.nii.gz"}, {"image": "./imagesVa/amos_0292.nii.gz", "label": "./labelsVa/amos_0292.nii.gz"}, {"image": "./imagesVa/amos_0293.nii.gz", "label": "./labelsVa/amos_0293.nii.gz"}, {"image": "./imagesVa/amos_0304.nii.gz", "label": "./labelsVa/amos_0304.nii.gz"}, {"image": "./imagesVa/amos_0308.nii.gz", "label": "./labelsVa/amos_0308.nii.gz"}, {"image": "./imagesVa/amos_0309.nii.gz", "label": "./labelsVa/amos_0309.nii.gz"}, {"image": "./imagesVa/amos_0310.nii.gz", "label": "./labelsVa/amos_0310.nii.gz"}, {"image": "./imagesVa/amos_0311.nii.gz", "label": 
"./labelsVa/amos_0311.nii.gz"}, {"image": "./imagesVa/amos_0313.nii.gz", "label": "./labelsVa/amos_0313.nii.gz"}, {"image": "./imagesVa/amos_0316.nii.gz", "label": "./labelsVa/amos_0316.nii.gz"}, {"image": "./imagesVa/amos_0318.nii.gz", "label": "./labelsVa/amos_0318.nii.gz"}, {"image": "./imagesVa/amos_0323.nii.gz", "label": "./labelsVa/amos_0323.nii.gz"}, {"image": "./imagesVa/amos_0325.nii.gz", "label": "./labelsVa/amos_0325.nii.gz"}, {"image": "./imagesVa/amos_0326.nii.gz", "label": "./labelsVa/amos_0326.nii.gz"}, {"image": "./imagesVa/amos_0328.nii.gz", "label": "./labelsVa/amos_0328.nii.gz"}, {"image": "./imagesVa/amos_0333.nii.gz", "label": "./labelsVa/amos_0333.nii.gz"}, {"image": "./imagesVa/amos_0334.nii.gz", "label": "./labelsVa/amos_0334.nii.gz"}, {"image": "./imagesVa/amos_0339.nii.gz", "label": "./labelsVa/amos_0339.nii.gz"}, {"image": "./imagesVa/amos_0342.nii.gz", "label": "./labelsVa/amos_0342.nii.gz"}, {"image": "./imagesVa/amos_0344.nii.gz", "label": "./labelsVa/amos_0344.nii.gz"}, {"image": "./imagesVa/amos_0346.nii.gz", "label": "./labelsVa/amos_0346.nii.gz"}, {"image": "./imagesVa/amos_0352.nii.gz", "label": "./labelsVa/amos_0352.nii.gz"}, {"image": "./imagesVa/amos_0356.nii.gz", "label": "./labelsVa/amos_0356.nii.gz"}, {"image": "./imagesVa/amos_0357.nii.gz", "label": "./labelsVa/amos_0357.nii.gz"}, {"image": "./imagesVa/amos_0363.nii.gz", "label": "./labelsVa/amos_0363.nii.gz"}, {"image": "./imagesVa/amos_0364.nii.gz", "label": "./labelsVa/amos_0364.nii.gz"}, {"image": "./imagesVa/amos_0365.nii.gz", "label": "./labelsVa/amos_0365.nii.gz"}, {"image": "./imagesVa/amos_0368.nii.gz", "label": "./labelsVa/amos_0368.nii.gz"}, {"image": "./imagesVa/amos_0372.nii.gz", "label": "./labelsVa/amos_0372.nii.gz"}, {"image": "./imagesVa/amos_0373.nii.gz", "label": "./labelsVa/amos_0373.nii.gz"}, {"image": "./imagesVa/amos_0377.nii.gz", "label": "./labelsVa/amos_0377.nii.gz"}, {"image": "./imagesVa/amos_0385.nii.gz", "label": "./labelsVa/amos_0385.nii.gz"}, 
{"image": "./imagesVa/amos_0397.nii.gz", "label": "./labelsVa/amos_0397.nii.gz"}, {"image": "./imagesVa/amos_0399.nii.gz", "label": "./labelsVa/amos_0399.nii.gz"}, {"image": "./imagesVa/amos_0409.nii.gz", "label": "./labelsVa/amos_0409.nii.gz"}],
"test": [{"image": "./imagesTs/amos_0002.nii.gz"}, {"image": "./imagesTs/amos_0003.nii.gz"}, {"image": "./imagesTs/amos_0012.nii.gz"}, {"image": "./imagesTs/amos_0020.nii.gz"}, {"image": "./imagesTs/amos_0026.nii.gz"}, {"image": "./imagesTs/amos_0028.nii.gz"}, {"image": "./imagesTs/amos_0031.nii.gz"}, {"image": "./imagesTs/amos_0037.nii.gz"}, {"image": "./imagesTs/amos_0039.nii.gz"}, {"image": "./imagesTs/amos_0046.nii.gz"}, {"image": "./imagesTs/amos_0053.nii.gz"}, {"image": "./imagesTs/amos_0055.nii.gz"}, {"image": "./imagesTs/amos_0062.nii.gz"}, {"image": "./imagesTs/amos_0065.nii.gz"}, {"image": "./imagesTs/amos_0068.nii.gz"}, {"image": "./imagesTs/amos_0074.nii.gz"}, {"image": "./imagesTs/amos_0080.nii.gz"}, {"image": "./imagesTs/amos_0082.nii.gz"}, {"image": "./imagesTs/amos_0091.nii.gz"}, {"image": "./imagesTs/amos_0093.nii.gz"}, {"image": "./imagesTs/amos_0095.nii.gz"}, {"image": "./imagesTs/amos_0096.nii.gz"}, {"image": "./imagesTs/amos_0100.nii.gz"}, {"image": "./imagesTs/amos_0101.nii.gz"}, {"image": "./imagesTs/amos_0107.nii.gz"}, {"image": "./imagesTs/amos_0114.nii.gz"}, {"image": "./imagesTs/amos_0122.nii.gz"}, {"image": "./imagesTs/amos_0130.nii.gz"}, {"image": "./imagesTs/amos_0139.nii.gz"}, {"image": "./imagesTs/amos_0145.nii.gz"}, {"image": "./imagesTs/amos_0146.nii.gz"}, {"image": "./imagesTs/amos_0148.nii.gz"}, {"image": "./imagesTs/amos_0151.nii.gz"}, {"image": "./imagesTs/amos_0163.nii.gz"}, {"image": "./imagesTs/amos_0164.nii.gz"}, {"image": "./imagesTs/amos_0165.nii.gz"}, {"image": "./imagesTs/amos_0168.nii.gz"}, {"image": "./imagesTs/amos_0169.nii.gz"}, {"image": "./imagesTs/amos_0178.nii.gz"}, {"image": "./imagesTs/amos_0182.nii.gz"}, {"image": "./imagesTs/amos_0183.nii.gz"}, {"image": "./imagesTs/amos_0187.nii.gz"}, {"image": "./imagesTs/amos_0201.nii.gz"}, {"image": "./imagesTs/amos_0205.nii.gz"}, {"image": "./imagesTs/amos_0209.nii.gz"}, {"image": "./imagesTs/amos_0210.nii.gz"}, {"image": "./imagesTs/amos_0211.nii.gz"}, {"image": 
"./imagesTs/amos_0213.nii.gz"}, {"image": "./imagesTs/amos_0220.nii.gz"}, {"image": "./imagesTs/amos_0221.nii.gz"}, {"image": "./imagesTs/amos_0222.nii.gz"}, {"image": "./imagesTs/amos_0227.nii.gz"}, {"image": "./imagesTs/amos_0229.nii.gz"}, {"image": "./imagesTs/amos_0232.nii.gz"}, {"image": "./imagesTs/amos_0234.nii.gz"}, {"image": "./imagesTs/amos_0236.nii.gz"}, {"image": "./imagesTs/amos_0240.nii.gz"}, {"image": "./imagesTs/amos_0241.nii.gz"}, {"image": "./imagesTs/amos_0243.nii.gz"}, {"image": "./imagesTs/amos_0246.nii.gz"}, {"image": "./imagesTs/amos_0251.nii.gz"}, {"image": "./imagesTs/amos_0252.nii.gz"}, {"image": "./imagesTs/amos_0253.nii.gz"}, {"image": "./imagesTs/amos_0256.nii.gz"}, {"image": "./imagesTs/amos_0260.nii.gz"}, {"image": "./imagesTs/amos_0261.nii.gz"}, {"image": "./imagesTs/amos_0262.nii.gz"}, {"image": "./imagesTs/amos_0265.nii.gz"}, {"image": "./imagesTs/amos_0266.nii.gz"}, {"image": "./imagesTs/amos_0267.nii.gz"}, {"image": "./imagesTs/amos_0269.nii.gz"}, {"image": "./imagesTs/amos_0270.nii.gz"}, {"image": "./imagesTs/amos_0271.nii.gz"}, {"image": "./imagesTs/amos_0275.nii.gz"}, {"image": "./imagesTs/amos_0277.nii.gz"}, {"image": "./imagesTs/amos_0285.nii.gz"}, {"image": "./imagesTs/amos_0291.nii.gz"}, {"image": "./imagesTs/amos_0295.nii.gz"}, {"image": "./imagesTs/amos_0298.nii.gz"}, {"image": "./imagesTs/amos_0300.nii.gz"}, {"image": "./imagesTs/amos_0303.nii.gz"}, {"image": "./imagesTs/amos_0305.nii.gz"}, {"image": "./imagesTs/amos_0306.nii.gz"}, {"image": "./imagesTs/amos_0312.nii.gz"}, {"image": "./imagesTs/amos_0314.nii.gz"}, {"image": "./imagesTs/amos_0315.nii.gz"}, {"image": "./imagesTs/amos_0319.nii.gz"}, {"image": "./imagesTs/amos_0322.nii.gz"}, {"image": "./imagesTs/amos_0324.nii.gz"}, {"image": "./imagesTs/amos_0327.nii.gz"}, {"image": "./imagesTs/amos_0329.nii.gz"}, {"image": "./imagesTs/amos_0331.nii.gz"}, {"image": "./imagesTs/amos_0335.nii.gz"}, {"image": "./imagesTs/amos_0338.nii.gz"}, {"image": 
"./imagesTs/amos_0340.nii.gz"}, {"image": "./imagesTs/amos_0343.nii.gz"}, {"image": "./imagesTs/amos_0345.nii.gz"}, {"image": "./imagesTs/amos_0347.nii.gz"}, {"image": "./imagesTs/amos_0354.nii.gz"}, {"image": "./imagesTs/amos_0355.nii.gz"}, {"image": "./imagesTs/amos_0359.nii.gz"}, {"image": "./imagesTs/amos_0360.nii.gz"}, {"image": "./imagesTs/amos_0369.nii.gz"}, {"image": "./imagesTs/amos_0375.nii.gz"}, {"image": "./imagesTs/amos_0382.nii.gz"}, {"image": "./imagesTs/amos_0386.nii.gz"}, {"image": "./imagesTs/amos_0389.nii.gz"}, {"image": "./imagesTs/amos_0393.nii.gz"}, {"image": "./imagesTs/amos_0394.nii.gz"}, {"image": "./imagesTs/amos_0407.nii.gz"}, {"image": "./imagesTs/amos_0411.nii.gz"}, {"image": "./imagesTs/amos_0412.nii.gz"}, {"image": "./imagesTs/amos_0413.nii.gz"}, {"image": "./imagesTs/amos_0414.nii.gz"}, {"image": "./imagesTs/amos_0415.nii.gz"}, {"image": "./imagesTs/amos_0416.nii.gz"}, {"image": "./imagesTs/amos_0417.nii.gz"}, {"image": "./imagesTs/amos_0418.nii.gz"}, {"image": "./imagesTs/amos_0419.nii.gz"}, {"image": "./imagesTs/amos_0420.nii.gz"}, {"image": "./imagesTs/amos_0421.nii.gz"}, {"image": "./imagesTs/amos_0422.nii.gz"}, {"image": "./imagesTs/amos_0423.nii.gz"}, {"image": "./imagesTs/amos_0424.nii.gz"}, {"image": "./imagesTs/amos_0425.nii.gz"}, {"image": "./imagesTs/amos_0426.nii.gz"}, {"image": "./imagesTs/amos_0427.nii.gz"}, {"image": "./imagesTs/amos_0428.nii.gz"}, {"image": "./imagesTs/amos_0429.nii.gz"}, {"image": "./imagesTs/amos_0430.nii.gz"}, {"image": "./imagesTs/amos_0431.nii.gz"}, {"image": "./imagesTs/amos_0432.nii.gz"}, {"image": "./imagesTs/amos_0433.nii.gz"}, {"image": "./imagesTs/amos_0434.nii.gz"}, {"image": "./imagesTs/amos_0435.nii.gz"}, {"image": "./imagesTs/amos_0436.nii.gz"}, {"image": "./imagesTs/amos_0437.nii.gz"}, {"image": "./imagesTs/amos_0438.nii.gz"}, {"image": "./imagesTs/amos_0439.nii.gz"}, {"image": "./imagesTs/amos_0440.nii.gz"}, {"image": "./imagesTs/amos_0441.nii.gz"}, {"image": 
"./imagesTs/amos_0442.nii.gz"}, {"image": "./imagesTs/amos_0443.nii.gz"}, {"image": "./imagesTs/amos_0444.nii.gz"}, {"image": "./imagesTs/amos_0445.nii.gz"}, {"image": "./imagesTs/amos_0446.nii.gz"}, {"image": "./imagesTs/amos_0447.nii.gz"}, {"image": "./imagesTs/amos_0448.nii.gz"}, {"image": "./imagesTs/amos_0449.nii.gz"}, {"image": "./imagesTs/amos_0450.nii.gz"}, {"image": "./imagesTs/amos_0451.nii.gz"}, {"image": "./imagesTs/amos_0452.nii.gz"}, {"image": "./imagesTs/amos_0453.nii.gz"}, {"image": "./imagesTs/amos_0454.nii.gz"}, {"image": "./imagesTs/amos_0455.nii.gz"}, {"image": "./imagesTs/amos_0456.nii.gz"}, {"image": "./imagesTs/amos_0457.nii.gz"}, {"image": "./imagesTs/amos_0458.nii.gz"}, {"image": "./imagesTs/amos_0459.nii.gz"}, {"image": "./imagesTs/amos_0460.nii.gz"}, {"image": "./imagesTs/amos_0461.nii.gz"}, {"image": "./imagesTs/amos_0462.nii.gz"}, {"image": "./imagesTs/amos_0463.nii.gz"}, {"image": "./imagesTs/amos_0464.nii.gz"}, {"image": "./imagesTs/amos_0465.nii.gz"}, {"image": "./imagesTs/amos_0466.nii.gz"}, {"image": "./imagesTs/amos_0467.nii.gz"}, {"image": "./imagesTs/amos_0468.nii.gz"}, {"image": "./imagesTs/amos_0469.nii.gz"}, {"image": "./imagesTs/amos_0470.nii.gz"}, {"image": "./imagesTs/amos_0471.nii.gz"}, {"image": "./imagesTs/amos_0472.nii.gz"}, {"image": "./imagesTs/amos_0473.nii.gz"}, {"image": "./imagesTs/amos_0474.nii.gz"}, {"image": "./imagesTs/amos_0475.nii.gz"}, {"image": "./imagesTs/amos_0476.nii.gz"}, {"image": "./imagesTs/amos_0477.nii.gz"}, {"image": "./imagesTs/amos_0478.nii.gz"}, {"image": "./imagesTs/amos_0479.nii.gz"}, {"image": "./imagesTs/amos_0480.nii.gz"}, {"image": "./imagesTs/amos_0481.nii.gz"}, {"image": "./imagesTs/amos_0482.nii.gz"}, {"image": "./imagesTs/amos_0483.nii.gz"}, {"image": "./imagesTs/amos_0484.nii.gz"}, {"image": "./imagesTs/amos_0485.nii.gz"}, {"image": "./imagesTs/amos_0486.nii.gz"}, {"image": "./imagesTs/amos_0487.nii.gz"}, {"image": "./imagesTs/amos_0488.nii.gz"}, {"image": 
"./imagesTs/amos_0489.nii.gz"}, {"image": "./imagesTs/amos_0490.nii.gz"}, {"image": "./imagesTs/amos_0491.nii.gz"}, {"image": "./imagesTs/amos_0492.nii.gz"}, {"image": "./imagesTs/amos_0493.nii.gz"}, {"image": "./imagesTs/amos_0494.nii.gz"}, {"image": "./imagesTs/amos_0495.nii.gz"}, {"image": "./imagesTs/amos_0496.nii.gz"}, {"image": "./imagesTs/amos_0497.nii.gz"}, {"image": "./imagesTs/amos_0498.nii.gz"}, {"image": "./imagesTs/amos_0499.nii.gz"}, {"image": "./imagesTs/amos_0500.nii.gz"}]}
================================================
FILE: Finetune/Amos/gen_json.py
================================================
from typing import Tuple
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
def get_identifiers_from_splitted_files(folder: str):
    """Return the unique case identifiers found in *folder*.

    Filenames are expected to follow the nnUNet convention and end with a
    12-character suffix such as ``_0000.nii.gz``; stripping the last 12
    characters yields the case identifier (e.g. ``amos_0001``).
    """
    filenames = subfiles(folder, suffix='.nii.gz', join=False)
    identifiers = [name[:-12] for name in filenames]
    return np.unique(identifiers)
def generate_dataset_json(output_file: str, imagesTr_dir: str, imagesTs_dir: str, modalities: dict,
                          labels: dict, dataset_name: str, sort_keys=True, license: str = "hands off!",
                          dataset_description: str = "", dataset_reference="", dataset_release='0.0'):
    """Write an nnUNet-style dataset json listing training and test cases.

    :param output_file: full path of the json file to write, e.g.
        'DATASET_PATH/dataset_CT.json', where the folder DATASET_PATH points
        to is the one with the imagesTr and labelsTr subfolders
    :param imagesTr_dir: path to the imagesTr folder of the dataset
    :param imagesTs_dir: path to the imagesTs folder of the dataset; may be None
    :param modalities: dict mapping channel index (int or str key) to modality
        name, in the same order as the image channels (key "0" corresponds to
        _0000.nii.gz, etc). Example: {"0": "CT"}
    :param labels: dict with int->str (key->value) mapping the label IDs to
        label names. Note that 0 is always supposed to be background!
        Example: {0: 'background', 1: 'edema', 2: 'enhancing tumor'}
    :param dataset_name: the name of the dataset; can be anything you want
    :param sort_keys: whether to sort the keys in the written json
    :param license: license string stored in the json
    :param dataset_description: free-text description stored in the json
    :param dataset_reference: website of the dataset, if available
    :param dataset_release: release/version string stored in the json
    :return: None; the json is written to ``output_file``
    """
    train_identifiers = get_identifiers_from_splitted_files(imagesTr_dir)
    if imagesTs_dir is not None:
        test_identifiers = get_identifiers_from_splitted_files(imagesTs_dir)
    else:
        test_identifiers = []
    json_dict = {}
    json_dict['name'] = dataset_name
    json_dict['description'] = dataset_description
    json_dict['tensorImageSize'] = "4D"
    json_dict['reference'] = dataset_reference
    # key spelling 'licence' kept as-is for nnUNet compatibility
    json_dict['licence'] = license
    json_dict['release'] = dataset_release
    # Normalise keys to strings so the json is well-formed even when callers
    # pass int keys.
    json_dict['modality'] = {str(k): v for k, v in modalities.items()}
    json_dict['labels'] = {str(k): v for k, v in labels.items()}
    json_dict['numTraining'] = len(train_identifiers)
    json_dict['numTest'] = len(test_identifiers)
    json_dict['training'] = [
        {'image': "./imagesTr/%s_0000.nii.gz" % i, "label": "./labelsTr/%s.nii.gz" % i}
        for i in train_identifiers
    ]
    json_dict['test'] = ["./imagesTs/%s_0000.nii.gz" % i for i in test_identifiers]
    if not output_file.endswith("dataset_CT.json"):
        print("WARNING: output file name is not dataset_CT.json! This may be intentional or not. You decide. "
              "Proceeding anyways...")
    # os.path.join on a single argument is a no-op; pass the path directly.
    save_json(json_dict, output_file, sort_keys=sort_keys)
if __name__ == '__main__':
    # Generate the FLARE22 dataset json. Raw strings are required for the
    # Windows paths: '\d', '\F', '\i' are invalid escape sequences that raise
    # DeprecationWarning today and will become a SyntaxError in future Python.
    generate_dataset_json(output_file='dataset/dataset.json',
                          imagesTr_dir=r'D:\data\FLARE22\imagesTr',
                          imagesTs_dir=r'D:\data\FLARE22\imagesTs',
                          modalities={"0": "CT"},
                          labels={"0": "background",
                                  "1": "Liver",
                                  "2": "Right kidney",
                                  "3": "Spleen",
                                  "4": "Pancreas",
                                  "5": "Aorta",
                                  "6": "Inferior vena cava",
                                  "7": "Right adrenal gland",
                                  "8": "Left adrenal gland",
                                  "9": "Gallbladder",
                                  "10": "Esophagus",
                                  "11": "Stomach",
                                  "12": "Duodenum",
                                  "13": "Left Kidney"
                                  },
                          dataset_name="FLARE22",
                          dataset_description='0',
                          dataset_reference='0')
# nnUNet_predict -i nnUNet_raw_data_base/nnUNet_raw_data/Task022_FLARE22/imagesTs -o eval -t 22 -tr nnUNetTrainerV2_FLARE_Big -m 3d_fullres -p nnUNetPlansFLARE22Big --all_in_gpu True
================================================
FILE: Finetune/Amos/inferers.py
================================================
"""Multiview inferer."""
import warnings
from typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union
import torch
import torch.nn.functional as F
from monai.data.utils import compute_importance_map, dense_patch_slices, get_valid_patch_size
from monai.transforms import Resize
from monai.utils import (
BlendMode,
PytorchPadMode,
convert_data_type,
ensure_tuple,
fall_back_tuple,
look_up_option,
optional_import,
)
from monai.inferers.utils import _get_scan_interval
# from utils import view_ops
# from utils import view_transforms
tqdm, _ = optional_import("tqdm", name="tqdm")
def double_sliding_window_inference(
    inputs: torch.Tensor,
    view: int,
    roi_size: Union[Sequence[int], int],
    sw_batch_size: int,
    predictor: Callable[..., Union[torch.Tensor, Sequence[torch.Tensor], Dict[Any, torch.Tensor]]],
    overlap: float = 0.25,
    mode: Union[BlendMode, str] = BlendMode.CONSTANT,
    sigma_scale: Union[Sequence[float], float] = 0.125,
    padding_mode: Union[PytorchPadMode, str] = PytorchPadMode.CONSTANT,
    cval: float = 0.0,
    sw_device: Union[torch.device, str, None] = None,
    device: Union[torch.device, str, None] = None,
    progress: bool = False,
    roi_weight_map: Union[torch.Tensor, None] = None,
    *args: Any,
    **kwargs: Any,
) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[Any, torch.Tensor]]:
    """
    Sliding window inference on two `inputs` with `predictor`.
    The outputs of `predictor` could be a tensor, a tuple, or a dictionary of tensors.
    Each output in the tuple or dict value is allowed to have different resolutions with respect to the input.
    e.g., the input patch spatial size is [128,128,128], the output (a tuple of two patches) patch sizes
    could be ([128,64,256], [64,32,128]).
    In this case, the parameter `overlap` and `roi_size` need to be carefully chosen to ensure the output ROI is still
    an integer. If the predictor's input and output spatial sizes are not equal, we recommend choosing the parameters
    so that `overlap*roi_size*output_size/input_size` is an integer (for each spatial dimension).
    When roi_size is larger than the inputs' spatial size, the input image are padded during inference.
    To maintain the same spatial sizes, the output image will be cropped to the original input size.
    Args:
        inputs: input image to be processed (assuming NCHW[D])
        view: index of the primary permutation view; the second branch uses the next
            view index modulo the number of permutation transforms (see the loop body).
        roi_size: the spatial window size for inferences.
            When its components have None or non-positives, the corresponding inputs dimension will be used.
            if the components of the `roi_size` are non-positive values, the transform will use the
            corresponding components of img size. For example, `roi_size=(32, -1)` will be adapted
            to `(32, 64)` if the second spatial dimension size of img is `64`.
        sw_batch_size: the batch size to run window slices.
        predictor: given input tensor ``patch_data`` in shape NCHW[D],
            The outputs of the function call ``predictor(patch_data)`` should be a tensor, a tuple, or a dictionary
            with Tensor values. Each output in the tuple or dict value should have the same batch_size, i.e. NM'H'W'[D'];
            where H'W'[D'] represents the output patch's spatial size, M is the number of output channels,
            N is `sw_batch_size`, e.g., the input shape is (7, 1, 128,128,128),
            the output could be a tuple of two tensors, with shapes: ((7, 5, 128, 64, 256), (7, 4, 64, 32, 128)).
            In this case, the parameter `overlap` and `roi_size` need to be carefully chosen
            to ensure the scaled output ROI sizes are still integers.
            If the `predictor`'s input and output spatial sizes are different,
            we recommend choosing the parameters so that ``overlap*roi_size*zoom_scale`` is an integer for each dimension.
        overlap: Amount of overlap between scans.
        mode: {``"constant"``, ``"gaussian"``}
            How to blend output of overlapping windows. Defaults to ``"constant"``.
            - ``"constant``": gives equal weight to all predictions.
            - ``"gaussian``": gives less weight to predictions on edges of windows.
        sigma_scale: the standard deviation coefficient of the Gaussian window when `mode` is ``"gaussian"``.
            Default: 0.125. Actual window sigma is ``sigma_scale`` * ``dim_size``.
            When sigma_scale is a sequence of floats, the values denote sigma_scale at the corresponding
            spatial dimensions.
        padding_mode: {``"constant"``, ``"reflect"``, ``"replicate"``, ``"circular"``}
            Padding mode for ``inputs``, when ``roi_size`` is larger than inputs. Defaults to ``"constant"``
            See also: https://pytorch.org/docs/stable/generated/torch.nn.functional.pad.html
        cval: fill value for 'constant' padding mode. Default: 0
        sw_device: device for the window data.
            By default the device (and accordingly the memory) of the `inputs` is used.
            Normally `sw_device` should be consistent with the device where `predictor` is defined.
        device: device for the stitched output prediction.
            By default the device (and accordingly the memory) of the `inputs` is used. If for example
            set to device=torch.device('cpu') the gpu memory consumption is less and independent of the
            `inputs` and `roi_size`. Output is on the `device`.
        progress: whether to print a `tqdm` progress bar.
        roi_weight_map: pre-computed (non-negative) weight map for each ROI.
            If not given, and ``mode`` is not `constant`, this map will be computed on the fly.
        args: optional args to be passed to ``predictor``.
        kwargs: optional keyword args to be passed to ``predictor``.
    Note:
        - input must be channel-first and have a batch dim, supports N-D sliding window.
        - returns a pair (one stitched prediction per view branch).
    """
    compute_dtype = inputs.dtype
    num_spatial_dims = len(inputs.shape) - 2
    if overlap < 0 or overlap >= 1:
        raise ValueError("overlap must be >= 0 and < 1.")
    # determine image spatial size and batch size
    # Note: all input images must have the same image size and batch size
    batch_size, _, *image_size_ = inputs.shape
    if device is None:
        device = inputs.device
    if sw_device is None:
        sw_device = inputs.device
    roi_size = fall_back_tuple(roi_size, image_size_)
    # in case that image size is smaller than roi size
    image_size = tuple(max(image_size_[i], roi_size[i]) for i in range(num_spatial_dims))
    # Symmetric padding per spatial dim so every dim is at least roi_size;
    # pad_size is built in F.pad order (last dim first).
    pad_size = []
    for k in range(len(inputs.shape) - 1, 1, -1):
        diff = max(roi_size[k - 2] - inputs.shape[k], 0)
        half = diff // 2
        pad_size.extend([half, diff - half])
    inputs = F.pad(inputs, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)
    # inputs2 = F.pad(inputs2, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)
    scan_interval = _get_scan_interval(image_size, roi_size, num_spatial_dims, overlap)
    # Store all slices in list
    slices = dense_patch_slices(image_size, roi_size, scan_interval)
    num_win = len(slices)  # number of windows per image
    total_slices = num_win * batch_size  # total number of windows
    # Create window-level importance map
    valid_patch_size = get_valid_patch_size(image_size, roi_size)
    if valid_patch_size == roi_size and (roi_weight_map is not None):
        importance_map = roi_weight_map
    else:
        try:
            importance_map = compute_importance_map(valid_patch_size, mode=mode, sigma_scale=sigma_scale, device=device)
        except BaseException as e:
            raise RuntimeError(
                "Seems to be OOM. Please try smaller patch size or mode='constant' instead of mode='gaussian'."
            ) from e
    importance_map = convert_data_type(importance_map, torch.Tensor, device, compute_dtype)[0]  # type: ignore
    # handle non-positive weights
    # Clamp so no voxel gets zero weight, which would make the final division NaN.
    min_non_zero = max(importance_map[importance_map != 0].min().item(), 1e-3)
    importance_map = torch.clamp(importance_map.to(torch.float32), min=min_non_zero).to(compute_dtype)
    # Perform predictions
    dict_key, output_image_list_1, output_image_list_2, count_map_list = None, [], [], []
    _initialized_ss = -1
    is_tensor_output = True  # whether the predictor's output is a tensor (instead of dict/tuple)
    # for each patch
    for slice_g in tqdm(range(0, total_slices, sw_batch_size)) if progress else range(0, total_slices, sw_batch_size):
        slice_range = range(slice_g, min(slice_g + sw_batch_size, total_slices))
        unravel_slice = [
            [slice(int(idx / num_win), int(idx / num_win) + 1), slice(None)] + list(slices[idx % num_win])
            for idx in slice_range
        ]
        window_data = torch.cat([inputs[win_slice] for win_slice in unravel_slice]).to(sw_device)
        # NOTE(review): `view_transforms` and `view_ops` are referenced here, but the
        # corresponding imports at the top of this file are commented out, while the
        # permutation tables/helpers appear later in this module as plain names. As
        # written these three lines raise NameError at runtime — confirm the intended
        # module wiring before use.
        view_list = [view, (view + 1) % len(view_transforms.permutation_transforms)]
        window_data_list = [view_ops.get_permute_transform(0, dst)(window_data) for dst in view_list]
        # window_data_2 = torch.cat([inputs2[win_slice] for win_slice in unravel_slice]).to(sw_device)
        seg_prob_out_1, seg_prob_out_2 = predictor(window_data_list[0], window_data_list[1], view_list, *args, **kwargs)  # batched patch segmentation
        seg_prob_out_1, seg_prob_out_2 = view_ops.permute_inverse([seg_prob_out_1, seg_prob_out_2], view_list)
        # convert seg_prob_out to tuple seg_prob_tuple, this does not allocate new memory.
        seg_prob_tuple_1: Tuple[torch.Tensor, ...]
        seg_prob_tuple_2: Tuple[torch.Tensor, ...]
        if isinstance(seg_prob_out_1, torch.Tensor):
            seg_prob_tuple_1 = (seg_prob_out_1,)
            seg_prob_tuple_2 = (seg_prob_out_2,)
        elif isinstance(seg_prob_out_1, Mapping):
            if dict_key is None:
                dict_key = sorted(seg_prob_out_1.keys())  # track predictor's output keys
            seg_prob_tuple_1 = tuple(seg_prob_out_1[k] for k in dict_key)
            seg_prob_tuple_2 = tuple(seg_prob_out_2[k] for k in dict_key)
            is_tensor_output = False
        else:
            seg_prob_tuple_1 = ensure_tuple(seg_prob_out_1)
            seg_prob_tuple_2 = ensure_tuple(seg_prob_out_2)
            is_tensor_output = False
        # for each output in multi-output list
        for ss in range(len(seg_prob_tuple_1)):
            seg_prob_1 = seg_prob_tuple_1[ss].to(device)  # BxCxMxNxP or BxCxMxN
            seg_prob_2 = seg_prob_tuple_2[ss].to(device)
            # compute zoom scale: out_roi_size/in_roi_size
            zoom_scale = []
            for axis, (img_s_i, out_w_i, in_w_i) in enumerate(
                zip(image_size, seg_prob_1.shape[2:], window_data.shape[2:])
            ):
                _scale = out_w_i / float(in_w_i)
                if not (img_s_i * _scale).is_integer():
                    warnings.warn(
                        f"For spatial axis: {axis}, output[{ss}] will have non-integer shape. Spatial "
                        f"zoom_scale between output[{ss}] and input is {_scale}. Please pad inputs."
                    )
                zoom_scale.append(_scale)
            if _initialized_ss < ss:  # init. the ss-th buffer at the first iteration
                # construct multi-resolution outputs
                output_classes = seg_prob_1.shape[1]
                output_shape = [batch_size, output_classes] + [
                    int(image_size_d * zoom_scale_d) for image_size_d, zoom_scale_d in zip(image_size, zoom_scale)
                ]
                # allocate memory to store the full output and the count for overlapping parts
                output_image_list_1.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))
                output_image_list_2.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))
                count_map_list.append(torch.zeros([1, 1] + output_shape[2:], dtype=compute_dtype, device=device))
                _initialized_ss += 1
            # resizing the importance_map
            resizer = Resize(spatial_size=seg_prob_1.shape[2:], mode="nearest", anti_aliasing=False)
            # store the result in the proper location of the full output. Apply weights from importance map.
            for idx, original_idx in zip(slice_range, unravel_slice):
                # zoom roi
                original_idx_zoom = list(original_idx)  # 4D for 2D image, 5D for 3D image
                for axis in range(2, len(original_idx_zoom)):
                    zoomed_start = original_idx[axis].start * zoom_scale[axis - 2]
                    zoomed_end = original_idx[axis].stop * zoom_scale[axis - 2]
                    if not zoomed_start.is_integer() or (not zoomed_end.is_integer()):
                        warnings.warn(
                            f"For axis-{axis-2} of output[{ss}], the output roi range is not int. "
                            f"Input roi range is ({original_idx[axis].start}, {original_idx[axis].stop}). "
                            f"Spatial zoom_scale between output[{ss}] and input is {zoom_scale[axis - 2]}. "
                            f"Corresponding output roi range is ({zoomed_start}, {zoomed_end}).\n"
                            f"Please change overlap ({overlap}) or roi_size ({roi_size[axis-2]}) for axis-{axis-2}. "
                            "Tips: if overlap*roi_size*zoom_scale is an integer, it usually works."
                        )
                    original_idx_zoom[axis] = slice(int(zoomed_start), int(zoomed_end), None)
                importance_map_zoom = resizer(importance_map.unsqueeze(0))[0].to(compute_dtype)
                # store results and weights
                output_image_list_1[ss][original_idx_zoom] += importance_map_zoom * seg_prob_1[idx - slice_g]
                output_image_list_2[ss][original_idx_zoom] += importance_map_zoom * seg_prob_2[idx - slice_g]
                count_map_list[ss][original_idx_zoom] += (
                    importance_map_zoom.unsqueeze(0).unsqueeze(0).expand(count_map_list[ss][original_idx_zoom].shape)
                )
    # account for any overlapping sections
    for ss in range(len(output_image_list_1)):
        count_map_pop = count_map_list.pop(0)
        output_image_list_1[ss] = (output_image_list_1[ss] / count_map_pop).to(compute_dtype)
        output_image_list_2[ss] = (output_image_list_2[ss] / count_map_pop).to(compute_dtype)
    # remove padding if image_size smaller than roi_size
    for ss in range(len(output_image_list_1)):
        output_i_1, output_i_2 = output_image_list_1[ss], output_image_list_2[ss]
        if torch.isnan(output_i_1).any() or torch.isinf(output_i_1).any():
            warnings.warn("Sliding window inference results contain NaN or Inf.")
        if torch.isnan(output_i_2).any() or torch.isinf(output_i_2).any():
            warnings.warn("Sliding window inference results contain NaN or Inf.")
        # Map the padded-space crop back through the output/input zoom factor.
        zoom_scale = [
            seg_prob_map_shape_d / roi_size_d for seg_prob_map_shape_d, roi_size_d in zip(output_i_1.shape[2:], roi_size)
        ]
        final_slicing: List[slice] = []
        for sp in range(num_spatial_dims):
            slice_dim = slice(pad_size[sp * 2], image_size_[num_spatial_dims - sp - 1] + pad_size[sp * 2])
            slice_dim = slice(
                int(round(slice_dim.start * zoom_scale[num_spatial_dims - sp - 1])),
                int(round(slice_dim.stop * zoom_scale[num_spatial_dims - sp - 1])),
            )
            final_slicing.insert(0, slice_dim)
        while len(final_slicing) < len(output_i_1.shape):
            final_slicing.insert(0, slice(None))
        output_image_list_1[ss] = output_i_1[final_slicing]
        output_image_list_2[ss] = output_i_2[final_slicing]
    if dict_key is not None:  # if output of predictor is a dict
        final_output_1 = dict(zip(dict_key, output_image_list_1))
        final_output_2 = dict(zip(dict_key, output_image_list_2))
    else:
        final_output_1 = tuple(output_image_list_1)  # type: ignore
        final_output_2 = tuple(output_image_list_2)  # type: ignore
    final_output_1 = final_output_1[0] if is_tensor_output else final_output_1  # type: ignore
    final_output_2 = final_output_2[0] if is_tensor_output else final_output_2  # type: ignore
    return final_output_1, final_output_2
def one_hot(labels: torch.Tensor, num_classes: int, dtype: torch.dtype = torch.float, dim: int = 1) -> torch.Tensor:
    """Convert an integer label tensor into one-hot format along `dim`.

    The channel at `dim` (length 1, or created by appending singleton dims)
    is expanded to length `num_classes`, with a 1 at each label's index and
    0 elsewhere. The background label is included, so a binary mask should be
    treated as having two classes.

    Args:
        labels: tensor of integer labels; values are cast via ``labels.long()``.
        num_classes: length of the one-hot axis in the output.
        dtype: dtype of the returned one-hot tensor.
        dim: non-negative axis to expand from length 1 to `num_classes`.

    Returns:
        One-hot encoded tensor on the same device as `labels`.

    Raises:
        AssertionError: if `labels` has a channel of length != 1 at `dim`.
    """
    # Append singleton dimensions so that `dim` is a valid axis of `labels`.
    missing = dim + 1 - labels.ndim
    if missing > 0:
        labels = torch.reshape(labels, list(labels.shape) + [1] * missing)
    target_shape = list(labels.shape)
    if target_shape[dim] != 1:
        raise AssertionError("labels should have a channel with length equal to one.")
    target_shape[dim] = num_classes
    # Scatter a 1 into the class position indicated by each label value.
    encoded = torch.zeros(size=target_shape, dtype=dtype, device=labels.device)
    encoded.scatter_(dim=dim, index=labels.long(), value=1)
    return encoded
"""View operations."""
from typing import Sequence, Tuple
"""View operations.
Input format: [B, C, X, Y, Z, ...]
NOTE(meijieru): 0 is reserved for identify transform.
"""
from typing import Callable, Sequence, Union
import enum
import torch
RotateType = int
PermuteType = int
TransformFuncType = Callable[[torch.Tensor], torch.Tensor]
# A composition of multiple view transoforms.
TransformsType = Sequence[Union[PermuteType, RotateType]]
class GroupName(enum.Enum):
    """Families of view transforms that can be composed."""
    ROTATE = 1
    PERMUTE = 2
# Default application order for composed transforms: rotate first, then permute.
DEFAULT_ORDER = (GroupName.ROTATE, GroupName.PERMUTE)
# In-plane rotations (multiples of 90 degrees) over the last two axes (dims 3, 4)
# of a [B, C, X, Y, Z] tensor, keyed by rotation view index.
rotation_transforms = {
    0: lambda x: x,
    1: lambda x: x.rot90(1, (3, 4)),
    2: lambda x: x.rot90(2, (3, 4)),
    3: lambda x: x.rot90(3, (3, 4)),
}
# Inverse rotations: rot90(k) is undone by rot90(4 - k) over the same axes.
rotation_inverse_transforms = {
    0: lambda x: x,
    1: lambda x: x.rot90(3, (3, 4)),
    2: lambda x: x.rot90(2, (3, 4)),
    3: lambda x: x.rot90(1, (3, 4)),
}
# Spatial-axis permutations keyed by permutation view index. Each non-identity
# entry swaps exactly two axes, so every permutation here is its own inverse —
# which is why the inverse table below is identical to the forward table.
permutation_transforms = {
    0: lambda x: x,
    1: lambda x: x.permute(0, 1, 3, 2, 4),
    2: lambda x: x.permute(0, 1, 4, 3, 2),
}
permutation_inverse_transforms = {
    0: lambda x: x,
    1: lambda x: x.permute(0, 1, 3, 2, 4),
    2: lambda x: x.permute(0, 1, 4, 3, 2),
}
# Lookup of forward/backward transform tables by transform group.
all_forward_transforms = {
    GroupName.ROTATE: rotation_transforms,
    GroupName.PERMUTE: permutation_transforms,
}
all_backward_transforms = {
    GroupName.ROTATE: rotation_inverse_transforms,
    GroupName.PERMUTE: permutation_inverse_transforms,
}
def get_transforms_func(views: TransformsType,
                        orders: Sequence[GroupName] = DEFAULT_ORDER,
                        inverse: bool = False) -> TransformFuncType:
    """Build a function that applies the view transforms in `views` sequentially.

    Args:
        views: per-group view indices, aligned one-to-one with `orders`.
        orders: the transform groups to apply (default: rotate, then permute).
        inverse: if True, use the backward transform tables and apply them in
            reverse order, so the result undoes the forward composition.

    Returns:
        A callable mapping a tensor to its transformed view.

    Raises:
        ValueError: if `views` and `orders` differ in length.
    """
    if len(views) != len(orders):
        # Fix: previously raised a bare ValueError() with no diagnostic.
        raise ValueError(
            f"views and orders must have the same length, got {len(views)} and {len(orders)}."
        )
    all_transforms = (all_forward_transforms
                      if not inverse else all_backward_transforms)
    funcs = [
        all_transforms[group_name][view]
        for view, group_name in zip(views, orders)
    ]
    # Inverse transforms must be applied in reverse order to undo the forward pass.
    funcs = funcs if not inverse else funcs[::-1]

    def aux(val):
        for func in funcs:
            val = func(val)
        return val
    return aux
import torch
import numpy as np
def get_permute_transform(view_src: PermuteType,
                          view_dst: PermuteType) -> TransformFuncType:
    """Return a function mapping a tensor from view `view_src` to view `view_dst`.

    The tensor is first mapped back to the canonical view 0, then forward to the
    destination view; the result is made contiguous.
    """
    def transform(x: torch.Tensor) -> torch.Tensor:
        # Fix: these tables were accessed as `view_transforms.<name>`, but the
        # `view_transforms` import is commented out at the top of this file and
        # the tables are defined at module level here — the qualified access
        # raised NameError at runtime.
        x_view_0 = permutation_inverse_transforms[view_src](x)
        return permutation_transforms[view_dst](x_view_0).contiguous()
    return transform
def permute_inverse(xs: Sequence[torch.Tensor],
                    views: Sequence[PermuteType]) -> Sequence[torch.Tensor]:
    """Map each tensor in `xs` from its view in `views` back to the canonical view 0."""
    restored = []
    for tensor, src_view in zip(xs, views):
        restored.append(get_permute_transform(src_view, 0)(tensor))
    return restored
def permute_rand(
    x: torch.Tensor,
    num_samples: int = 2
) -> Tuple[Sequence[torch.Tensor], Sequence[PermuteType]]:
    """Sample `num_samples` distinct random permutation views of `x`.

    Args:
        x: tensor in the canonical view 0.
        num_samples: number of distinct views to draw (without replacement).

    Returns:
        A pair of (transformed tensors, corresponding view indices).

    Raises:
        ValueError: if more samples are requested than distinct views exist.
    """
    # Fix: `view_transforms.permutation_transforms` raised NameError because the
    # `view_transforms` import is commented out; the table lives in this module.
    num_permutes = len(permutation_transforms)
    if num_samples > num_permutes:
        raise ValueError('Duplicate samples.')
    view_dsts = np.random.permutation(num_permutes)[:num_samples].tolist()
    return [get_permute_transform(0, view)(x) for view in view_dsts], view_dsts
================================================
FILE: Finetune/Amos/main.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn.parallel
import torch.utils.data.distributed
from optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR
from trainer import run_training
from utils.data_utils import get_loader
import torch.nn as nn
from monai.inferers import sliding_window_inference
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
# NOTE(review): GPU index and rendezvous address/port are hard-coded here and
# override any externally configured environment — confirm this is intended on
# shared machines.
os.environ['CUDA_VISIBLE_DEVICES'] = "4"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
# Raise the soft open-file limit (multi-worker data loading can exceed the default).
import resource
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# Command-line interface.
# NOTE(review): several boolean options below (--save_checkpoint, --noamp,
# --use_normal_dataset, --use_checkpoint, --use_ssl_pretrained) use `default=...`
# without `action="store_true"`; any value supplied on the command line arrives
# as a non-empty string, which is always truthy (e.g. `--noamp False` still
# disables amp). Confirm these are only ever toggled by editing the defaults.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./pretrained_models/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/data/linshan/CTs/Amos2022/", type=str, help="dataset directory")
parser.add_argument("--json_list", default="dataset_CT.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_checkpoint",default="VoCo_10k.pt", type=str, help="VoCo_10k pretrained model")
parser.add_argument(
    "--pretrained_model_name",
    default="model_bestVal.pt",
    type=str,
    help="pretrained model name",
)
# Cubic ROI edge length shared by the --roi_x/y/z defaults below.
roi = 96
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
parser.add_argument("--max_epochs", default=1000, type=int, help="max number of training epochs")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=16, type=int, help="number of sliding window batch size")
parser.add_argument("--optim_lr", default=3e-4, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=0.005, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", default=False, help="do NOT use amp for training")
parser.add_argument("--val_every", default=50, type=int, help="validation frequency")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=16, type=int, help="number of output channels")
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
parser.add_argument("--warmup_epochs", default=50, type=int, help="number of warmup epochs")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--smooth_dr", default=1e-6, type=float, help="constant added to dice denominator to avoid nan")
parser.add_argument("--smooth_nr", default=0.0, type=float, help="constant added to dice numerator to avoid zero")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=True, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Parse CLI arguments and launch training: one worker process per GPU
    when --distributed is set, otherwise a single in-process worker on GPU 0."""
    args = parser.parse_args()
    args.amp = not args.noamp
    if not args.distributed:
        main_worker(gpu=0, args=args)
        return
    args.ngpus_per_node = torch.cuda.device_count()
    print("Found total gpus", args.ngpus_per_node)
    # Total world size = GPUs per node * number of nodes.
    args.world_size = args.ngpus_per_node * args.world_size
    mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))
def main_worker(gpu, args):
    """Per-process training entry point.

    Builds data loaders, the SwinUNETR model (optionally loading SSL-pretrained
    or checkpoint weights), loss, metric, optimizer and LR scheduler, then runs
    the training loop via ``run_training``.

    Args:
        gpu: local GPU index for this process (0 in non-distributed mode).
        args: parsed argparse namespace from this module's parser.

    Returns:
        The accuracy value returned by ``run_training``.
    """
    if args.distributed:
        torch.multiprocessing.set_start_method("fork", force=True)
    np.set_printoptions(formatter={"float": "{: 0.3f}".format}, suppress=True)
    args.gpu = gpu
    if args.distributed:
        # Global rank = node rank * GPUs per node + local GPU index.
        args.rank = args.rank * args.ngpus_per_node + gpu
        dist.init_process_group(
            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank
        )
    # NOTE(review): device 0 is hard-coded here even in distributed mode, while
    # the DDP branch below calls set_device(args.gpu) — every rank would first
    # pin GPU 0. This only works when CUDA_VISIBLE_DEVICES exposes a single GPU
    # (as set at the top of this file); confirm before multi-GPU use.
    torch.cuda.set_device(0)
    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.benchmark = True
    args.test_mode = False
    loader = get_loader(args)  # indexed below as loader[0]=train, loader[1]=val
    print(args.rank, " gpu", args.gpu)
    if args.rank == 0:
        print("Batch size is:", args.batch_size, "epochs", args.max_epochs)
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    if args.rank == 0:
        os.makedirs(args.logdir, exist_ok=True)
        # NOTE(review): init_log returns None on a repeated (name, level) call,
        # which would make the propagate assignment fail — confirm single call.
        logger = init_log('global', logging.INFO)
        logger.propagate = 0
    pretrained_dir = args.pretrained_dir
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=args.dropout_path_rate,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    if args.resume_ckpt:
        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))["state_dict"]
        model.load_state_dict(model_dict)
        print("Use pretrained weights")
    if args.use_ssl_pretrained:
        try:
            # model_VoCoEMA.pt
            # model_dict = torch.load("./pretrained_models/supervised_suprem_swinunetr_2100.pth", map_location=torch.device('cpu'))
            # model_dict = torch.load("./pretrained_models/model_VoCoEMA.pt", map_location=torch.device('cpu'))
            model_dict = torch.load(args.pretrained_checkpoint,
                                    map_location=torch.device('cpu'))
            state_dict = model_dict
            # state_dict = model_dict['net']
            # fix potential differences in state dict keys from pre-training to
            # fine-tuning
            if "module." in list(state_dict.keys())[0]:
                print("Tag 'module.' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("module.", "")] = state_dict.pop(key)
            if "swin_vit" in list(state_dict.keys())[0]:
                print("Tag 'swin_vit' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
            # We now load model weights, setting param `strict` to False, i.e.:
            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves
            # the decoder weights untouched (CNN UNet decoder).
            model.load_state_dict(state_dict, strict=False)
            print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
        except ValueError:
            # NOTE(review): `args.model_name` is not defined by this module's
            # parser, so this error path itself raises AttributeError; also,
            # torch.load failures (e.g. missing file) are not ValueError and
            # would not be caught here — confirm intended behavior.
            raise ValueError("Self-supervised pre-trained weights not available for" + str(args.model_name))
    if args.squared_dice:
        dice_loss = DiceCELoss(
            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr
        )
    else:
        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)
    post_label = AsDiscrete(to_onehot=args.out_channels)
    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
    # Validation-time inferer: sliding window with the training ROI size.
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print("Total parameters count", pytorch_total_params)
    best_acc = 0
    start_epoch = 0
    if args.checkpoint is not None:
        checkpoint = torch.load(args.checkpoint, map_location="cpu")
        from collections import OrderedDict
        new_state_dict = OrderedDict()
        # Strip any "backbone." prefix left over from the pre-training wrapper.
        for k, v in checkpoint["state_dict"].items():
            new_state_dict[k.replace("backbone.", "")] = v
        model.load_state_dict(new_state_dict, strict=False)
        if "epoch" in checkpoint:
            start_epoch = checkpoint["epoch"]
        if "best_acc" in checkpoint:
            best_acc = checkpoint["best_acc"]
        print("=> loaded checkpoint '{}' (epoch {}) (bestacc {})".format(args.checkpoint, start_epoch, best_acc))
    model.cuda(args.gpu)
    if args.distributed:
        torch.cuda.set_device(args.gpu)
        if args.norm_name == "batch":
            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model.cuda(args.gpu)
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)
    if args.optim_name == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "adamw":
        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
        # optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)
    elif args.optim_name == "sgd":
        optimizer = torch.optim.SGD(
            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight
        )
    else:
        raise ValueError("Unsupported Optimization Procedure: " + str(args.optim_name))
    if args.lrschedule == "warmup_cosine":
        scheduler = LinearWarmupCosineAnnealingLR(
            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs
        )
    elif args.lrschedule == "cosine_anneal":
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)
        if args.checkpoint is not None:
            # Fast-forward the schedule to the resumed epoch.
            # NOTE(review): step(epoch=...) is deprecated in recent PyTorch.
            scheduler.step(epoch=start_epoch)
    else:
        scheduler = None
    accuracy = run_training(
        model=model,
        train_loader=loader[0],
        val_loader=loader[1],
        optimizer=optimizer,
        loss_func=dice_loss,
        acc_func=dice_acc,
        args=args,
        model_inferer=model_inferer,
        scheduler=scheduler,
        start_epoch=start_epoch,
        post_label=post_label,
        post_pred=post_pred,
    )
    return accuracy
# Tracks (name, level) pairs already configured, so repeated init_log calls do
# not attach duplicate handlers to the same logger.
logs = set()

def init_log(name, level=logging.INFO):
    """Create (or fetch) a logger with a single formatted stream handler.

    Under SLURM, a filter drops records on every rank except global rank 0,
    so only one process emits log lines.

    Args:
        name: logger name passed to ``logging.getLogger``.
        level: level applied to both the logger and its handler.

    Returns:
        The configured ``logging.Logger``. Fix: previously a repeated call with
        the same (name, level) returned ``None``, which broke callers that use
        the result directly (e.g. ``init_log(...).propagate = 0``); the existing
        logger is now returned instead.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        # Already configured: return the existing logger without adding handlers.
        return logger
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        rank = int(os.environ["SLURM_PROCID"])
        # Suppress records on non-zero ranks to avoid duplicated output.
        logger.addFilter(lambda record: rank == 0)
    else:
        rank = 0
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
if __name__ == "__main__":
main()
================================================
FILE: Finetune/Amos/optimizers/__init__.py
================================================
================================================
FILE: Finetune/Amos/optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
# Public API of this module. The two cosine schedulers below are the ones
# actually imported by the training scripts, so they belong in __all__ too
# (the original list omitted them, hiding them from star-imports).
__all__ = ["LinearLR", "ExponentialLR", "WarmupCosineSchedule", "LinearWarmupCosineAnnealingLR"]
class _LRSchedulerMONAI(_LRScheduler):
    """Shared base for schedulers that sweep each parameter group's learning
    rate from its base value toward ``end_lr`` across ``num_iter`` steps."""

    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
        """
        Args:
            optimizer: optimizer whose learning rate is driven.
            end_lr: target learning rate reached at the end of the sweep.
            num_iter: number of iterations the sweep spans.
            last_epoch: index of the last epoch (-1 starts fresh).
        """
        self.num_iter = num_iter
        self.end_lr = end_lr
        super().__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
    """Linear sweep of the learning rate from each base LR to ``end_lr``."""

    def get_lr(self):
        # Fraction of the sweep completed; reaches 1 at step num_iter - 1.
        frac = self.last_epoch / (self.num_iter - 1)
        return [lr0 + frac * (self.end_lr - lr0) for lr0 in self.base_lrs]
class ExponentialLR(_LRSchedulerMONAI):
    """Geometric sweep of the learning rate from each base LR to ``end_lr``."""

    def get_lr(self):
        # Fraction of the sweep completed; reaches 1 at step num_iter - 1.
        frac = self.last_epoch / (self.num_iter - 1)
        return [lr0 * (self.end_lr / lr0) ** frac for lr0 in self.base_lrs]
class WarmupCosineSchedule(LambdaLR):
    """Linear warmup followed by cosine decay (huggingface-style schedule).

    The LR scale rises linearly from 0 to 1 over ``warmup_steps``, then
    follows ``cycles`` cosine half-waves from 1 down to 0 (clamped at 0)
    over the remaining ``t_total - warmup_steps`` iterations.
    """

    def __init__(
        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
    ) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            warmup_steps: number of linear-warmup iterations.
            t_total: total number of training iterations.
            cycles: cosine cycles parameter.
            last_epoch: index of the last epoch (-1 starts fresh).
        """
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super().__init__(optimizer, self.lr_lambda, last_epoch)

    def lr_lambda(self, step):
        if step < self.warmup_steps:
            # Warmup phase: ramp the scale linearly from 0 up to 1.
            return float(step) / float(max(1.0, self.warmup_steps))
        # Decay phase: cosine from 1 toward 0 over the remaining steps.
        span = max(1, self.t_total - self.warmup_steps)
        progress = float(step - self.warmup_steps) / float(span)
        cosine = math.cos(math.pi * float(self.cycles) * 2.0 * progress)
        # Clamp at zero so the scale never goes negative past the last half-wave.
        return max(0.0, 0.5 * (1.0 + cosine))
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    """Linear warmup from ``warmup_start_lr`` up to each base LR over
    ``warmup_epochs`` steps, then cosine annealing down to ``eta_min`` until
    ``max_epochs``. Implements both the chainable recurrence (``get_lr``) and
    the closed form used when ``step(epoch)`` is called with an explicit epoch.
    """

    def __init__(
        self,
        optimizer: Optimizer,
        warmup_epochs: int,
        max_epochs: int,
        warmup_start_lr: float = 0.0,
        eta_min: float = 0.0,
        last_epoch: int = -1,
    ) -> None:
        """
        Args:
            optimizer (Optimizer): Wrapped optimizer.
            warmup_epochs (int): Maximum number of iterations for linear warmup
            max_epochs (int): Maximum number of iterations
            warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
            eta_min (float): Minimum learning rate. Default: 0.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        # NOTE(review): the warmup interpolation below divides by
        # (warmup_epochs - 1), so warmup_epochs == 1 raises ZeroDivisionError —
        # confirm callers always pass warmup_epochs >= 2 (or 0).
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min
        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        """
        Compute learning rate using chainable form of the scheduler
        """
        if not self._get_lr_called_within_step:
            warnings.warn(
                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
            )
        # Very first step: every group starts at the warmup floor.
        if self.last_epoch == 0:
            return [self.warmup_start_lr] * len(self.base_lrs)
        elif self.last_epoch < self.warmup_epochs:
            # Warmup: add a constant linear increment to the current lr each step.
            return [
                group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        elif self.last_epoch == self.warmup_epochs:
            # Warmup just finished: groups are exactly at their base lrs.
            return self.base_lrs
        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
            # Boundary of a cosine period in the chainable recurrence.
            return [
                group["lr"]
                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        # Chainable cosine-annealing step: ratio of successive cosine factors
        # applied to each group's current lr (keeps the recurrence exact).
        return [
            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            / (
                1
                + math.cos(
                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
                )
            )
            * (group["lr"] - self.eta_min)
            + self.eta_min
            for group in self.optimizer.param_groups
        ]

    def _get_closed_form_lr(self) -> List[float]:
        """
        Called when epoch is passed as a param to the `step` function of the scheduler.
        """
        if self.last_epoch < self.warmup_epochs:
            # Closed-form linear warmup (same ZeroDivision caveat as __init__).
            return [
                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr in self.base_lrs
            ]
        # Closed-form cosine annealing between base_lr and eta_min.
        return [
            self.eta_min
            + 0.5
            * (base_lr - self.eta_min)
            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            for base_lr in self.base_lrs
        ]
================================================
FILE: Finetune/Amos/pre_cache.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from utils.data_test import get_loader
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import *
from monai.utils.enums import MetricReduction
from monai.handlers import StatsHandler, from_engine
from monai import data, transforms
from monai.data import *
# import resource
#
# rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
# resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
# print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# os.environ['CUDA_VISIBLE_DEVICES'] = "2"
# os.environ['MASTER_ADDR'] = 'localhost'
# os.environ['MASTER_PORT'] = '28890'
# ---- command-line interface -------------------------------------------------
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs_0.9129/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="D:\data/amos22", type=str, help="dataset directory")
parser.add_argument("--exp_name", default="logs_0.9129", type=str, help="experiment name")
parser.add_argument("--json_list", default="dataset_CT.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_model_name",
    default="model_best.pt",
    type=str,
    help="pretrained model name",
)
# Cubic sliding-window patch edge shared by the three --roi_* defaults below.
roi = 96
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=16, type=int, help="number of output channels")
# CT intensity window (HU) mapped to [b_min, b_max] by ScaleIntensityRanged.
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
# Target voxel spacing (mm) used by Spacingd.
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
def main():
    """Iterate the test loader once so the on-disk dataset cache gets filled.

    No inference happens here: each pass through the loader runs the
    deterministic preprocessing transforms, and the PersistentDataset built in
    ``utils.data_test.get_loader`` writes the results to its cache directory.
    """
    args = parser.parse_args()
    args.test_mode = True  # kept for parity with test.py; the loader shown in data_test.py does not read it
    val_loader, test_transforms = get_loader(args)
    with torch.no_grad():
        for idx, batch_data in enumerate(val_loader):
            print(idx)
            # Read the preprocessed image so the sample is fully materialised;
            # the value itself is unused (cache warming only).
            data = batch_data["image"]
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/Amos/test.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from utils.data_test import get_loader
import SimpleITK as sitk
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import *
from monai.utils.enums import MetricReduction
from monai.handlers import StatsHandler, from_engine
import matplotlib.pyplot as plt
from utils.utils import *
from PIL import Image
import resource
# Raise the soft open-file limit to 8192 — presumably because the cached
# dataset / many workers keep a large number of file handles open (TODO confirm).
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
from monai import data, transforms
from monai.data import *
# Pin the visible GPU and the (unused unless distributed) rendezvous endpoint.
os.environ['CUDA_VISIBLE_DEVICES'] = "5"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
# ---- command-line interface -------------------------------------------------
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/data/linshan/CTs/Amos2022/", type=str, help="dataset directory")
parser.add_argument("--exp_name", default="test", type=str, help="experiment name")
parser.add_argument("--json_list", default="dataset_CT.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_model_name",
    default="model.pt",
    type=str,
    help="pretrained model name",
)
# Cubic sliding-window patch edge shared by the three --roi_* defaults below.
roi = 96
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=2, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=16, type=int, help="number of output channels")
# CT intensity window (HU) mapped to [b_min, b_max] by ScaleIntensityRanged.
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
# Target voxel spacing (mm) used by Spacingd.
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
def main():
    """Run sliding-window inference on the AMOS test split and save predictions.

    Loads a pretrained SwinUNETR checkpoint, runs sliding-window inference
    over the test loader, inverts the preprocessing transforms and writes the
    argmax segmentation of each case to ``./pred/<exp_name>``.
    """
    args = parser.parse_args()
    args.test_mode = True

    # Output folder for the predicted segmentations.
    output_directory = "./pred/" + args.exp_name
    os.makedirs(output_directory, exist_ok=True)  # race-free vs. exists()+makedirs()

    val_loader, test_transforms = get_loader(args)
    pretrained_dir = args.pretrained_dir
    model_name = args.pretrained_model_name
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    pretrained_pth = os.path.join(pretrained_dir, model_name)

    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=0.0,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )

    # map_location keeps loading robust on CPU-only hosts or when the
    # checkpoint was saved from a different GPU index.
    model_dict = torch.load(pretrained_pth, map_location=device)["state_dict"]
    model.load_state_dict(model_dict, strict=True)
    model.eval()
    model.to(device)

    # enable cuDNN benchmark
    torch.backends.cudnn.benchmark = True

    # Undo the spacing/orientation preprocessing, take the channel argmax and
    # save each prediction with its original geometry.
    post_transforms = Compose([EnsureTyped(keys=["pred"]),
                               Invertd(keys=["pred"],
                                       transform=test_transforms,
                                       orig_keys="image",
                                       meta_keys="pred_meta_dict",
                                       orig_meta_keys="image_meta_dict",
                                       meta_key_postfix="meta_dict",
                                       nearest_interp=False,
                                       to_tensor=True),
                               AsDiscreted(keys="pred", argmax=True, to_onehot=None),
                               SaveImaged(keys="pred", meta_keys="pred_meta_dict", output_dir=output_directory,
                                          separate_folder=False, folder_layout=None,
                                          resample=False),
                               ])
    cmap = color_map()  # palette for optional visualisation; unused in the save path
    with torch.no_grad():
        for idx, batch_data in enumerate(val_loader):
            torch.cuda.empty_cache()
            img_name = batch_data["image_meta_dict"]["filename_or_obj"][0].split("/")[-1]
            # idx doubles as the running case counter (replaces the old manual `num`).
            print('img_name:', img_name, idx)
            if isinstance(batch_data, list):
                data = batch_data
            else:
                data = batch_data["image"].cuda()
            with autocast(enabled=True):
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            batch_data['pred'] = logits
            # Apply post-processing (inversion + argmax + save) per item.
            batch_data = [post_transforms(i) for i in
                          decollate_batch(batch_data)]
# Script entry point: run test-set inference.
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/Amos/train.sh
================================================
# Timestamped name for this run's console log.
now=$(date +"%Y%m%d_%H%M%S")
# Checkpoints and tensorboard logs are written here.
logdir=runs/logs
mkdir -p $logdir
# Single-node launch; stdout is mirrored into the log directory via tee.
torchrun --master_port=21198 main.py \
--logdir $logdir | tee $logdir/$now.txt
================================================
FILE: Finetune/Amos/trainer.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import numpy as np
import torch
import torch.nn.parallel
import torch.utils.data.distributed
from tensorboardX import SummaryWriter
from torch.cuda.amp import GradScaler, autocast
from utils.utils import AverageMeter, distributed_all_gather
from monai.data import decollate_batch
def train_epoch(model, loader, optimizer, scaler, epoch, loss_func, args):
    """Run one training pass over ``loader`` and return the running mean loss."""
    model.train()
    tic = time.time()
    loss_meter = AverageMeter()
    for step, batch in enumerate(loader):
        if isinstance(batch, list):
            data, target = batch
        else:
            data, target = batch["image"], batch["label"]
        data, target = data.cuda(), target.cuda()
        # Detach gradients entirely (equivalent to zeroing, but frees memory).
        for p in model.parameters():
            p.grad = None
        with autocast(enabled=args.amp):
            logits = model(data)
            loss = loss_func(logits, target)
        if args.amp:
            # Scaled backward + step so fp16 gradients don't underflow.
            scaler.scale(loss).backward()
            scaler.step(optimizer)
            scaler.update()
        else:
            loss.backward()
            optimizer.step()
        if args.distributed:
            # Average the loss across ranks (only valid, non-padded samples).
            gathered = distributed_all_gather([loss], out_numpy=True, is_valid=step < loader.sampler.valid_length)
            loss_meter.update(
                np.mean(np.mean(np.stack(gathered, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size
            )
        else:
            loss_meter.update(loss.item(), n=args.batch_size)
        if args.rank == 0:
            lr = optimizer.param_groups[0]["lr"]
            print(
                "Epoch {}/{} {}/{}".format(epoch, args.max_epochs, step, len(loader)),
                "loss: {:.4f}".format(loss_meter.avg),
                "lr: {:.8f}".format(lr),
                "time {:.2f}s".format(time.time() - tic),
            )
        tic = time.time()
    # Drop gradients before returning so validation holds no stale grad memory.
    for p in model.parameters():
        p.grad = None
    return loss_meter.avg
def val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):
    """Validate one epoch and return the running per-class accuracy average.

    Args:
        model: network to evaluate.
        loader: validation data loader (dicts with "image"/"label" or pairs).
        epoch: current epoch index (for logging only).
        acc_func: aggregating metric (called per batch, then ``aggregate()``).
        args: namespace with amp, distributed, rank, max_epochs.
        model_inferer: optional sliding-window wrapper; falls back to ``model``.
        post_label / post_pred: per-sample post-processing (e.g. one-hot /
            argmax) applied before the metric.
    """
    model.eval()
    run_acc = AverageMeter()
    start_time = time.time()
    with torch.no_grad():
        for idx, batch_data in enumerate(loader):
            if isinstance(batch_data, list):
                data, target = batch_data
            else:
                data, target = batch_data["image"], batch_data["label"]
            data, target = data.cuda(), target.cuda()
            with autocast(enabled=args.amp):
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            # If inference spilled to CPU, move the target to match.
            if not logits.is_cuda:
                target = target.cpu()
            # Split batch into per-sample tensors for the metric.
            val_labels_list = decollate_batch(target)
            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
            val_outputs_list = decollate_batch(logits)
            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
            # Reset so aggregate() reflects only the current batch.
            acc_func.reset()
            acc_func(y_pred=val_output_convert, y=val_labels_convert)
            acc, not_nans = acc_func.aggregate()
            acc = acc.cuda(args.rank)
            if args.distributed:
                # Gather per-rank accuracies; padded samples are excluded.
                acc_list, not_nans_list = distributed_all_gather(
                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length
                )
                for al, nl in zip(acc_list, not_nans_list):
                    run_acc.update(al, n=nl)
            else:
                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            if args.rank == 0:
                avg_acc = np.mean(run_acc.avg)
                print(
                    "Val {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
                    "acc",
                    avg_acc,
                    "time {:.2f}s".format(time.time() - start_time),
                )
            start_time = time.time()
    return run_acc.avg
def save_checkpoint(model, epoch, args, filename="model.pt", best_acc=0, optimizer=None, scheduler=None):
    """Serialize model (and optionally optimizer/scheduler) state into args.logdir."""
    # Unwrap the DDP module so keys are portable to single-GPU loading.
    if args.distributed:
        weights = model.module.state_dict()
    else:
        weights = model.state_dict()
    payload = {"epoch": epoch, "best_acc": best_acc, "state_dict": weights}
    if optimizer is not None:
        payload["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        payload["scheduler"] = scheduler.state_dict()
    target = os.path.join(args.logdir, filename)
    torch.save(payload, target)
    print("Saving checkpoint", target)
def run_training(
    model,
    train_loader,
    val_loader,
    optimizer,
    loss_func,
    acc_func,
    args,
    model_inferer=None,
    scheduler=None,
    start_epoch=0,
    post_label=None,
    post_pred=None,
):
    """Full training loop: train every epoch, validate every ``args.val_every``
    epochs, checkpoint on rank 0 and track the best validation accuracy.

    Args:
        model: network being trained (possibly DDP-wrapped).
        train_loader / val_loader: data loaders.
        optimizer: optimizer stepped inside ``train_epoch``.
        loss_func: training loss callable.
        acc_func: validation metric passed to ``val_epoch``.
        args: namespace with rank, amp, distributed, logdir, max_epochs,
            val_every, save_checkpoint (and batch_size/world_size for logging).
        model_inferer: optional sliding-window wrapper used at validation.
        scheduler: optional LR scheduler stepped once per epoch.
        start_epoch: epoch index to resume from.
        post_label / post_pred: per-sample post-processing for the metric.

    Returns:
        Best mean validation accuracy observed during training.
    """
    writer = None
    if args.logdir is not None and args.rank == 0:
        # Tensorboard logging only on rank 0.
        writer = SummaryWriter(log_dir=args.logdir)
        if args.rank == 0:
            print("Writing Tensorboard logs to ", args.logdir)
    scaler = None
    if args.amp:
        scaler = GradScaler()
    val_acc_max = 0.0
    for epoch in range(start_epoch, args.max_epochs):
        if args.distributed:
            # Re-seed the shard shuffle per epoch, then sync all ranks.
            train_loader.sampler.set_epoch(epoch)
            torch.distributed.barrier()
        print(args.rank, time.ctime(), "Epoch:", epoch)
        epoch_time = time.time()
        train_loss = train_epoch(
            model, train_loader, optimizer, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args
        )
        if args.rank == 0:
            print(
                "Final training {}/{}".format(epoch, args.max_epochs - 1),
                "loss: {:.4f}".format(train_loss),
                "time {:.2f}s".format(time.time() - epoch_time),
            )
        if args.rank == 0 and writer is not None:
            writer.add_scalar("train_loss", train_loss, epoch)
        b_new_best = False
        if (epoch + 1) % args.val_every == 0:
            if args.distributed:
                torch.distributed.barrier()
            epoch_time = time.time()
            val_avg_acc = val_epoch(
                model,
                val_loader,
                epoch=epoch,
                acc_func=acc_func,
                model_inferer=model_inferer,
                args=args,
                post_label=post_label,
                post_pred=post_pred,
            )
            # Collapse the per-class metric vector into a single mean score.
            val_avg_acc = np.mean(val_avg_acc)
            if args.rank == 0:
                print(
                    "Final validation {}/{}".format(epoch, args.max_epochs - 1),
                    "acc",
                    val_avg_acc,
                    "time {:.2f}s".format(time.time() - epoch_time),
                )
                if writer is not None:
                    writer.add_scalar("val_acc", val_avg_acc, epoch)
                if val_avg_acc > val_acc_max:
                    print("new best ({:.6f} --> {:.6f}). ".format(val_acc_max, val_avg_acc))
                    val_acc_max = val_avg_acc
                    b_new_best = True
                # Save the current (best-so-far) checkpoint as model.pt.
                if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                    save_checkpoint(
                        model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler
                    )
            # Always keep the latest weights as model_final.pt; when this epoch
            # set a new best, mirror it into model.pt as well.
            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename="model_final.pt")
                if b_new_best:
                    print("Copying to model.pt new best model!!!!")
                    shutil.copyfile(os.path.join(args.logdir, "model_final.pt"), os.path.join(args.logdir, "model.pt"))
        if scheduler is not None:
            scheduler.step()
    print("Training Finished !, Best Accuracy: ", val_acc_max)
    return val_acc_max
================================================
FILE: Finetune/Amos/utils/__init__.py
================================================
================================================
FILE: Finetune/Amos/utils/data_test.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler that can pad the index list (``make_even``) so every
    rank receives the same number of samples per epoch."""

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        # Per-rank quota, rounded up so total_size is a multiple of world size.
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # Count of real (non-padded) samples this rank will see.
        indices = list(range(len(self.dataset)))
        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            # Seed by epoch so every rank draws the identical permutation.
            g = torch.Generator()
            g.manual_seed(self.epoch)
            order = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            order = list(range(len(self.dataset)))
        if self.make_even and len(order) < self.total_size:
            shortfall = self.total_size - len(order)
            if shortfall < len(order):
                # Small shortfall: repeat the first few indices.
                order += order[:shortfall]
            else:
                # Large shortfall: pad with random repeats.
                picks = np.random.randint(low=0, high=len(order), size=shortfall)
                order += [order[i] for i in picks]
            assert len(order) == self.total_size
        # Strided shard: rank r takes indices r, r+R, r+2R, ...
        shard = order[self.rank : self.total_size : self.num_replicas]
        self.num_samples = len(shard)
        return iter(shard)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
def get_loader(args):
    """Build the AMOS test-split loader with disk-cached preprocessing.

    Reads the ``test`` entries of ``args.json_list`` (relative to
    ``args.data_dir``), applies the deterministic inference-time transforms
    and wraps the dataset in a ``PersistentDataset`` so preprocessed volumes
    are cached on disk between runs.

    Args:
        args: parsed CLI namespace; uses data_dir, json_list, space_*,
            a_min/a_max/b_min/b_max, distributed, batch_size, workers.
            Optional ``args.cache_dir`` overrides the persistent-cache
            location (previously a hard-coded machine-specific path).

    Returns:
        Tuple ``(loader, transform)`` — the DataLoader and the preprocessing
        transform (needed later by ``Invertd`` to undo it).
    """
    data_dir = args.data_dir
    datalist_json = os.path.join(data_dir, args.json_list)
    transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image"]),
            transforms.EnsureChannelFirstd(keys=["image"]),
            transforms.Orientationd(keys=["image"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image"], source_key="image"),
        ]
    )
    datalist = load_decathlon_datalist(datalist_json, True, "test", base_dir=data_dir)
    print('use persistent')
    # Cache dir used to be hard-coded to a machine-specific path; allow an
    # override through args while keeping the old default for compatibility.
    cache_dir = getattr(args, "cache_dir", '/data/linshan/cache/amos_test')
    ds = PersistentDataset(data=datalist,
                           transform=transform,
                           pickle_protocol=pickle.HIGHEST_PROTOCOL,
                           cache_dir=cache_dir)
    sampler = Sampler(ds) if args.distributed else None
    loader = data.DataLoader(
        ds,
        batch_size=args.batch_size,
        shuffle=(sampler is None),
        num_workers=args.workers,
        sampler=sampler,
        pin_memory=True,
    )
    return loader, transform
================================================
FILE: Finetune/Amos/utils/data_utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler with optional index padding (``make_even``) so each
    rank draws an identical number of samples per epoch."""

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        # Per-rank quota, rounded up so total_size divides evenly.
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # How many real (non-padded) samples this rank will receive.
        indices = list(range(len(self.dataset)))
        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            # Epoch-seeded generator: all ranks compute the same permutation.
            g = torch.Generator()
            g.manual_seed(self.epoch)
            order = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            order = list(range(len(self.dataset)))
        if self.make_even and len(order) < self.total_size:
            shortfall = self.total_size - len(order)
            if shortfall < len(order):
                # Small gap: reuse the leading indices.
                order += order[:shortfall]
            else:
                # Large gap: fill with random repeats.
                picks = np.random.randint(low=0, high=len(order), size=shortfall)
                order += [order[i] for i in picks]
            assert len(order) == self.total_size
        # Strided shard for this rank: r, r+R, r+2R, ...
        shard = order[self.rank : self.total_size : self.num_replicas]
        self.num_samples = len(shard)
        return iter(shard)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
def get_loader(args):
    """Build MONAI dataloaders for CT segmentation from a Decathlon-style JSON.

    Returns a single test loader when ``args.test_mode`` is truthy, otherwise
    a ``[train_loader, val_loader]`` pair. Uses the custom ``Sampler`` above
    when ``args.distributed`` is set.
    """
    data_dir = args.data_dir
    datalist_json = os.path.join(data_dir, args.json_list)
    # Training pipeline: load -> channel-first -> RAS orient -> resample to
    # (space_x, space_y, space_z) -> HU window to [b_min, b_max] -> crop to
    # image foreground -> random patch sampling -> flips / 90-degree rotations.
    train_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            # pos=9 / neg=1 heavily biases patch centers toward labelled
            # (foreground) voxels; num_samples patches are drawn per volume.
            transforms.RandCropByPosNegLabeld(
                keys=["image", "label"],
                label_key="label",
                spatial_size=(args.roi_x, args.roi_y, args.roi_z),
                pos=9,
                neg=1,
                num_samples=args.sw_batch_size,
                image_key="image",
                image_threshold=0,
            ),
            # transforms.RandCropByLabelClassesd(
            #     keys=["image", "label"],
            #     image_key="image",
            #     label_key="label",
            #     spatial_size=(args.roi_x, args.roi_y, args.roi_z),
            #     num_classes=args.out_channels,
            #     ratios=[0, *it.repeat(1, args.out_channels-1)],
            #     num_samples=args.sw_batch_size,
            #     image_threshold=0,
            #     warn=False,
            # ),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=0),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=1),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=2),
            transforms.RandRotate90d(keys=["image", "label"], prob=args.RandRotate90d_prob, max_k=3),
            #transforms.RandShiftIntensityd(keys="image", offsets=0.1, prob=args.RandShiftIntensityd_prob),
            transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    # Validation/test pipeline: same deterministic preprocessing, no cropping
    # to patches and no random augmentation.
    val_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    if args.test_mode:
        # Test mode evaluates the "validation" split of the JSON, one volume
        # per batch.
        test_files = load_decathlon_datalist(datalist_json, True, "validation", base_dir=data_dir)
        # NOTE(review): cache_dir is hard-coded to a machine-specific path
        # ('/data/linshan/cache/flare22') — confirm, or wire through args.
        test_ds = PersistentDataset(data=test_files,
                                    transform=val_transform,
                                    pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                    cache_dir='/data/linshan/cache/flare22')
        test_sampler = Sampler(test_ds, shuffle=False) if args.distributed else None
        test_loader = data.DataLoader(
            test_ds,
            batch_size=1,
            shuffle=False,
            num_workers=args.workers,
            sampler=test_sampler,
            pin_memory=True,
            persistent_workers=True,
        )
        loader = test_loader
    else:
        datalist = load_decathlon_datalist(datalist_json, True, "training", base_dir=data_dir)
        if args.use_normal_dataset:
            # Despite the flag name, this branch uses a disk-backed
            # PersistentDataset rather than a plain monai Dataset.
            print('use persistent')
            # NOTE(review): hard-coded cache path again ('/data/linshan/cache/amos').
            train_ds = PersistentDataset(data=datalist,
                                         transform=train_transform,
                                         pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                         cache_dir='/data/linshan/cache/amos')
            # train_ds = data.Dataset(data=datalist, transform=train_transform)
        else:
            # In-memory cache of (up to) 24 preprocessed volumes.
            train_ds = data.CacheDataset(
                data=datalist, transform=train_transform, cache_num=24, cache_rate=1.0, num_workers=args.workers
            )
        train_sampler = Sampler(train_ds) if args.distributed else None
        train_loader = data.DataLoader(
            train_ds,
            batch_size=args.batch_size,
            shuffle=(train_sampler is None),
            num_workers=args.workers,
            sampler=train_sampler,
            pin_memory=True,
        )
        val_files = load_decathlon_datalist(datalist_json, True, "validation", base_dir=data_dir)
        # val_ds = data.Dataset(data=val_files, transform=val_transform)
        val_ds = PersistentDataset(data=val_files,
                                   transform=val_transform,
                                   pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                   cache_dir='/data/linshan/cache/amos')
        val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None
        val_loader = data.DataLoader(
            val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=False
        )
        loader = [train_loader, val_loader]
    return loader
================================================
FILE: Finetune/Amos/utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
import os
def resample_3d(img, target_size):
    """Resize a 3-D volume to ``target_size`` by nearest-neighbour zooming.

    Args:
        img: 3-D array of shape (x, y, z).
        target_size: iterable of three ints, the desired output shape.

    Returns:
        The volume resampled with ``scipy.ndimage.zoom`` using order=0
        (nearest neighbour), which keeps discrete label values intact.
    """
    src_x, src_y, src_z = img.shape
    dst_x, dst_y, dst_z = target_size
    factors = (dst_x / src_x, dst_y / src_y, dst_z / src_z)
    return ndimage.zoom(img, factors, order=0, prefilter=False)
def dice(x, y):
    """Dice overlap between binary masks ``x`` (prediction) and ``y`` (reference).

    Returns:
        0.0 when ``y`` contains no foreground, otherwise
        ``2 * |x * y| / (|x| + |y|)``.
    """
    y_total = np.sum(y)
    if y_total == 0:
        # Empty reference mask: score the case as zero overlap.
        return 0.0
    overlap = np.sum(x * y)
    x_total = np.sum(x)
    return 2 * overlap / (x_total + y_total)
class AverageMeter(object):
    """Tracks the latest value and a weighted running average.

    Attributes:
        val: most recent value passed to :meth:`update`.
        sum: weighted sum of all values seen since the last reset.
        count: total weight accumulated so far.
        avg: running average ``sum / count``; computed with ``np.where`` so
            that array-valued updates (e.g. per-class Dice) average
            element-wise against a per-element count.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero out all accumulated statistics."""
        self.val = 0
        self.sum = 0
        self.count = 0
        self.avg = 0

    def update(self, val, n=1):
        """Record ``val`` with weight ``n`` and refresh the running average."""
        self.val = val
        self.sum += val * n
        self.count += n
        # Guarded division: where count is zero the raw sum is reported.
        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)
def distributed_all_gather(
    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
    """All-gather each tensor in ``tensor_list`` across all distributed ranks.

    Requires an initialised ``torch.distributed`` process group.

    Args:
        tensor_list: tensors local to this rank, one entry per quantity to gather.
        valid_batch_size: if given, only the first ``valid_batch_size`` gathered
            entries are kept (clamped to ``world_size``).
        out_numpy: if True, convert gathered tensors to CPU numpy arrays.
        world_size: process-group size; queried from torch.distributed when None.
        no_barrier: skip the synchronising barrier before gathering.
        is_valid: per-rank validity flag; gathered entries from ranks whose flag
            is falsy are dropped. NOTE: because of the ``elif`` below, this is
            only honoured when ``valid_batch_size`` is None.

    Returns:
        A list with one entry per input tensor; each entry is the list of
        gathered tensors (or arrays) from every kept rank.
    """
    if world_size is None:
        world_size = torch.distributed.get_world_size()
    if valid_batch_size is not None:
        valid_batch_size = min(valid_batch_size, world_size)
    elif is_valid is not None:
        # Promote the flag to a bool tensor on the same device as the payload
        # so it can itself be all-gathered.
        is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
    if not no_barrier:
        torch.distributed.barrier()
    tensor_list_out = []
    with torch.no_grad():
        if is_valid is not None:
            # First gather every rank's validity flag.
            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
            torch.distributed.all_gather(is_valid_list, is_valid)
            is_valid = [x.item() for x in is_valid_list]
        for tensor in tensor_list:
            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
            torch.distributed.all_gather(gather_list, tensor)
            if valid_batch_size is not None:
                gather_list = gather_list[:valid_batch_size]
            elif is_valid is not None:
                # Keep only entries from ranks that reported a valid batch.
                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
            if out_numpy:
                gather_list = [t.cpu().numpy() for t in gather_list]
            tensor_list_out.append(gather_list)
    return tensor_list_out
def color_map(dataset='pascal'):
    """Return a 256x3 uint8 colour palette for the given dataset name.

    'pascal' / 'coco' produce the standard PASCAL VOC bit-interleaved
    palette; 'cityscapes' uses the fixed 19-class Cityscapes colours; any
    other name returns an all-zero map.
    """
    cmap = np.zeros((256, 3), dtype='uint8')
    if dataset in ('pascal', 'coco'):
        for idx in range(256):
            rgb = [0, 0, 0]
            value = idx
            for shift in range(8):
                # Spread bits 0/1/2 of `value` into r/g/b, MSB first.
                for channel in range(3):
                    rgb[channel] |= ((value >> channel) & 1) << (7 - shift)
                value >>= 3
            cmap[idx] = np.array(rgb)
    elif dataset == 'cityscapes':
        palette = [
            (128, 64, 128), (244, 35, 232), (70, 70, 70), (102, 102, 156),
            (190, 153, 153), (153, 153, 153), (250, 170, 30), (220, 220, 0),
            (107, 142, 35), (152, 251, 152), (70, 130, 180), (220, 20, 60),
            (255, 0, 0), (0, 0, 142), (0, 0, 70), (0, 60, 100),
            (0, 80, 100), (0, 0, 230), (119, 11, 32), (0, 0, 0),
        ]
        for idx, colour in enumerate(palette):
            cmap[idx] = np.array(colour)
        # Ignore-index colour.
        cmap[255] = np.array([0, 0, 0])
    return cmap
def check_dir(dir):
    """Create directory ``dir`` (and any missing parents) if it does not exist.

    Uses ``exist_ok=True`` so a concurrent creator (e.g. another DDP rank or
    worker process racing between the old exists-check and makedirs) cannot
    trigger a FileExistsError.
    """
    os.makedirs(dir, exist_ok=True)
================================================
FILE: Finetune/Amos/val.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from utils.data_utils import get_loader
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
# Pin the run to the first GPU and set rendezvous env vars (MASTER_ADDR/PORT)
# used if torch.distributed is initialised.
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs_scratch_v2/", type=str, help="pretrained checkpoint directory"
)
# NOTE(review): defaults reference BTCV paths/names although this file lives
# under Finetune/Amos — confirm data_dir/exp_name/json_list before running.
parser.add_argument("--data_dir", default="/data/linshan/CTs/BTCV/", type=str, help="dataset directory")
parser.add_argument("--exp_name", default="BTCV_0.8451", type=str, help="experiment name")
parser.add_argument("--json_list", default="dataset_0.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_model_name",
    default="model_0.8451.pt",
    type=str,
    help="pretrained model name",
)
# Cubic sliding-window patch edge length shared by roi_x/roi_y/roi_z below.
roi=96
# NOTE(review): boolean-style options here (use_normal_dataset, use_checkpoint)
# use default=True without action="store_true", so any value supplied on the
# command line arrives as a truthy string — they are effectively always on.
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
# HU windowing: intensities in [a_min, a_max] are mapped to [b_min, b_max].
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
def main():
    """Run sliding-window inference of a SwinUNETR checkpoint over the
    validation split and print the running mean Dice per case.

    Loads the checkpoint strictly, evaluates with AMP autocast, and
    accumulates per-class Dice (background excluded) in an AverageMeter.
    """
    args = parser.parse_args()
    # Force get_loader into its test branch (single-volume validation loader).
    args.test_mode = True
    output_directory = "./outputs/" + args.exp_name
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)
    val_loader = get_loader(args)
    pretrained_dir = args.pretrained_dir
    model_name = args.pretrained_model_name
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    pretrained_pth = os.path.join(pretrained_dir, model_name)
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=0.0,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    # Patch-wise sliding-window predictor over the whole volume.
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    # strict=True: the checkpoint must match the model exactly.
    model_dict = torch.load(pretrained_pth)["state_dict"]
    model.load_state_dict(model_dict, strict=True)
    model.eval()
    model.to(device)
    acc_func = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
    run_acc = AverageMeter()
    post_label = AsDiscrete(to_onehot=args.out_channels)
    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
    with torch.no_grad():
        # all_dice/num/dice_list_case belong to the commented-out per-organ
        # Dice bookkeeping at the bottom of this file; unused in this path.
        all_dice = None
        num = np.zeros(13)
        dice_list_case = []
        for idx, batch_data in enumerate(val_loader):
            img_name = batch_data["image_meta_dict"]["filename_or_obj"][0].split("/")[-1]
            if isinstance(batch_data, list):
                data, target = batch_data
            else:
                data, target = batch_data["image"], batch_data["label"]
            data, target = data.cuda(), target.cuda()
            print(data.shape, target.shape)
            z = data.shape[-1]
            # NOTE(review): in-plane size is hard-coded to (263, 218) while the
            # slice count z is preserved — presumably matched to a specific
            # preprocessed dataset; confirm before reuse.
            data = F.interpolate(data, size=(263, 218, z), mode='trilinear')
            target = F.interpolate(target, size=(263, 218, z), mode='nearest')
            print(data.shape, target.shape)
            with autocast(enabled=True):
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            if not logits.is_cuda:
                target = target.cpu()
            # One-hot both prediction (after argmax) and label, then score.
            val_labels_list = decollate_batch(target)
            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
            val_outputs_list = decollate_batch(logits)
            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
            acc_func.reset()
            acc_func(y_pred=val_output_convert, y=val_labels_convert)
            acc, not_nans = acc_func.aggregate()
            # Weight each case by its number of non-NaN class scores.
            run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            print(np.mean(run_acc.avg))
            # # save predict
            # print(logits.shape)
            # val_outputs = torch.argmax(logits, 1).cpu().numpy()
            # np.save(os.path.join(output_directory, 'pre'+ img_name[3:-7]+'.npy'), val_outputs.astype(np.uint8)[0])
            # # save label
            # val_labels = target.cpu().numpy()
            # np.save(os.path.join(output_directory, 'label' + img_name[3:-7] + '.npy'), val_labels.astype(np.uint8)[0][0])
            #
            # # save input
            # img = data.cpu().numpy()
            # img = img * 255
            # print(np.max(img))
            # np.save(os.path.join(output_directory, 'img' + img_name[3:-7] + '.npy'), img.astype(np.uint8)[0][0])
if __name__ == "__main__":
    main()
# outputs = torch.argmax(logits, 1).cpu().numpy()
# outputs = outputs.astype(np.uint8)[0]
# val_labels = target.cpu().numpy()[0, 0, :, :, :]
#
# len_class = len(list(np.unique(val_labels))) - 1
# dice_list_sub = []
# for i in range(1, 14):
# # judge this class exist or not, ignore background
# num[i - 1] += (np.sum(val_labels == i) > 0).astype(np.uint8)
# organ_Dice = dice(outputs == i, val_labels == i)
# dice_list_sub.append(organ_Dice)
#
# mean_dice = np.sum(dice_list_sub) / len_class
# print("Mean Organ Dice: {}".format(mean_dice))
#
# # acc of each organ
# print("Organ Dice:", dice_list_sub)
#
# if all_dice is None:
# all_dice = (np.asarray(dice_list_sub)).copy()
# else:
# all_dice = all_dice + np.asarray(dice_list_sub)
# print("Organ Dice accumulate:", all_dice*100 / num)
#
# dice_list_case.append(mean_dice)
# print("Overall Mean Dice: {}".format(100*np.mean(dice_list_case)))
================================================
FILE: Finetune/BTCV/dataset/__init__.py
================================================
================================================
FILE: Finetune/BTCV/dataset/dataset_0.json
================================================
{
"description": "btcv yucheng",
"labels": {
"0": "background",
"1": "spleen",
"2": "rkid",
"3": "lkid",
"4": "gall",
"5": "eso",
"6": "liver",
"7": "sto",
"8": "aorta",
"9": "IVC",
"10": "veins",
"11": "pancreas",
"12": "rad",
"13": "lad"
},
"licence": "yt",
    "modality": {
"0": "CT"
},
"name": "btcv",
"numTest": 20,
"numTraining": 80,
"reference": "Vanderbilt University",
"release": "1.0 06/08/2015",
"tensorImageSize": "3D",
"test": [
"imagesTs/img0061.nii.gz",
"imagesTs/img0062.nii.gz",
"imagesTs/img0063.nii.gz",
"imagesTs/img0064.nii.gz",
"imagesTs/img0065.nii.gz",
"imagesTs/img0066.nii.gz",
"imagesTs/img0067.nii.gz",
"imagesTs/img0068.nii.gz",
"imagesTs/img0069.nii.gz",
"imagesTs/img0070.nii.gz",
"imagesTs/img0071.nii.gz",
"imagesTs/img0072.nii.gz",
"imagesTs/img0073.nii.gz",
"imagesTs/img0074.nii.gz",
"imagesTs/img0075.nii.gz",
"imagesTs/img0076.nii.gz",
"imagesTs/img0077.nii.gz",
"imagesTs/img0078.nii.gz",
"imagesTs/img0079.nii.gz",
"imagesTs/img0080.nii.gz"
],
"training": [
{
"image": "imagesTr/img0001.nii.gz",
"label": "labelsTr/label0001.nii.gz"
},
{
"image": "imagesTr/img0002.nii.gz",
"label": "labelsTr/label0002.nii.gz"
},
{
"image": "imagesTr/img0003.nii.gz",
"label": "labelsTr/label0003.nii.gz"
},
{
"image": "imagesTr/img0004.nii.gz",
"label": "labelsTr/label0004.nii.gz"
},
{
"image": "imagesTr/img0005.nii.gz",
"label": "labelsTr/label0005.nii.gz"
},
{
"image": "imagesTr/img0006.nii.gz",
"label": "labelsTr/label0006.nii.gz"
},
{
"image": "imagesTr/img0007.nii.gz",
"label": "labelsTr/label0007.nii.gz"
},
{
"image": "imagesTr/img0008.nii.gz",
"label": "labelsTr/label0008.nii.gz"
},
{
"image": "imagesTr/img0009.nii.gz",
"label": "labelsTr/label0009.nii.gz"
},
{
"image": "imagesTr/img0010.nii.gz",
"label": "labelsTr/label0010.nii.gz"
},
{
"image": "imagesTr/img0021.nii.gz",
"label": "labelsTr/label0021.nii.gz"
},
{
"image": "imagesTr/img0022.nii.gz",
"label": "labelsTr/label0022.nii.gz"
},
{
"image": "imagesTr/img0023.nii.gz",
"label": "labelsTr/label0023.nii.gz"
},
{
"image": "imagesTr/img0024.nii.gz",
"label": "labelsTr/label0024.nii.gz"
},
{
"image": "imagesTr/img0025.nii.gz",
"label": "labelsTr/label0025.nii.gz"
},
{
"image": "imagesTr/img0026.nii.gz",
"label": "labelsTr/label0026.nii.gz"
},
{
"image": "imagesTr/img0027.nii.gz",
"label": "labelsTr/label0027.nii.gz"
},
{
"image": "imagesTr/img0028.nii.gz",
"label": "labelsTr/label0028.nii.gz"
},
{
"image": "imagesTr/img0029.nii.gz",
"label": "labelsTr/label0029.nii.gz"
},
{
"image": "imagesTr/img0030.nii.gz",
"label": "labelsTr/label0030.nii.gz"
},
{
"image": "imagesTr/img0031.nii.gz",
"label": "labelsTr/label0031.nii.gz"
},
{
"image": "imagesTr/img0032.nii.gz",
"label": "labelsTr/label0032.nii.gz"
},
{
"image": "imagesTr/img0033.nii.gz",
"label": "labelsTr/label0033.nii.gz"
},
{
"image": "imagesTr/img0034.nii.gz",
"label": "labelsTr/label0034.nii.gz"
}
],
"validation": [
{
"image": "imagesTr/img0035.nii.gz",
"label": "labelsTr/label0035.nii.gz"
},
{
"image": "imagesTr/img0036.nii.gz",
"label": "labelsTr/label0036.nii.gz"
},
{
"image": "imagesTr/img0037.nii.gz",
"label": "labelsTr/label0037.nii.gz"
},
{
"image": "imagesTr/img0038.nii.gz",
"label": "labelsTr/label0038.nii.gz"
},
{
"image": "imagesTr/img0039.nii.gz",
"label": "labelsTr/label0039.nii.gz"
},
{
"image": "imagesTr/img0040.nii.gz",
"label": "labelsTr/label0040.nii.gz"
}
]
}
================================================
FILE: Finetune/BTCV/main.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn.parallel
import torch.utils.data.distributed
from optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR
from trainer import run_training
from utils.data_utils import get_loader
import torch.nn as nn
from monai.inferers import sliding_window_inference
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
# Pin the run to the first GPU and set rendezvous env vars (MASTER_ADDR/PORT)
# for torch.distributed initialisation.
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
import resource
# Raise the open-file soft limit: many dataloader workers with persistent
# caching can exhaust the default per-process file-descriptor limit.
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./pretrained_models/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="./data/BTCV/", type=str, help="YOUR btcv dataset directory")
parser.add_argument("--cache_dir", default="./data/cache/BTCV/", type=str, help="YOUR btcv dataset cache directory")
parser.add_argument("--json_list", default="dataset_0.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_checkpoint",default="VoCo_10k.pt", type=str, help="VoCo_10k pretrained model")
parser.add_argument(
    "--pretrained_model_name",
    default="model_bestVal.pt",
    type=str,
    help="pretrained model name",
)
# Cubic sliding-window patch edge length shared by roi_x/roi_y/roi_z below.
roi = 96
# NOTE(review): several boolean-style options below (save_checkpoint, noamp,
# use_normal_dataset, use_checkpoint, use_ssl_pretrained) use default=True
# without action="store_true", so any CLI value is a truthy string — they are
# effectively always on. In particular noamp=True means AMP is disabled.
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
parser.add_argument("--max_epochs", default=3000, type=int, help="max number of training epochs")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=16, type=int, help="number of sliding window batch size")
parser.add_argument("--optim_lr", default=3e-4, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=1e-5, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", default=True, help="do NOT use amp for training")
parser.add_argument("--val_every", default=50, type=int, help="validation frequency")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
# HU windowing: intensities in [a_min, a_max] are mapped to [b_min, b_max].
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.5, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
parser.add_argument("--warmup_epochs", default=100, type=int, help="number of warmup epochs")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--smooth_dr", default=1e-6, type=float, help="constant added to dice denominator to avoid nan")
parser.add_argument("--smooth_nr", default=0.0, type=float, help="constant added to dice numerator to avoid zero")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=True, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Parse CLI args and launch training — one spawned process per GPU when
    --distributed is set, otherwise a single in-process worker on GPU 0."""
    args = parser.parse_args()
    args.amp = not args.noamp
    if not args.distributed:
        main_worker(gpu=0, args=args)
        return
    gpu_count = torch.cuda.device_count()
    args.ngpus_per_node = gpu_count
    print("Found total gpus", gpu_count)
    # world_size becomes the total process count across all nodes.
    args.world_size = gpu_count * args.world_size
    mp.spawn(main_worker, nprocs=gpu_count, args=(args,))
def main_worker(gpu, args):
    """Per-process training entry point: builds loaders, SwinUNETR, losses,
    optimizer and scheduler, then hands off to ``run_training``.

    Args:
        gpu: local GPU index for this process (0 when not distributed).
        args: parsed CLI namespace from the module-level parser.

    Returns:
        The accuracy value returned by ``run_training``.
    """
    if args.distributed:
        torch.multiprocessing.set_start_method("fork", force=True)
    np.set_printoptions(formatter={"float": "{: 0.3f}".format}, suppress=True)
    args.gpu = gpu
    if args.distributed:
        # Global rank = node rank * gpus-per-node + local gpu index.
        args.rank = args.rank * args.ngpus_per_node + gpu
        dist.init_process_group(
            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank
        )
    # NOTE(review): device is pinned to 0 here regardless of args.gpu — works
    # because CUDA_VISIBLE_DEVICES="0" above exposes a single device, but
    # would be wrong for true multi-GPU; confirm (cf. set_device(args.gpu)
    # in the distributed branch further down).
    torch.cuda.set_device(0)
    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.benchmark = True
    args.test_mode = False
    loader = get_loader(args)
    print(args.rank, " gpu", args.gpu)
    if args.rank == 0:
        print("Batch size is:", args.batch_size, "epochs", args.max_epochs)
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    if args.rank == 0:
        os.makedirs(args.logdir, exist_ok=True)
        # init_log is defined later in this module.
        logger = init_log('global', logging.INFO)
        logger.propagate = 0
    pretrained_dir = args.pretrained_dir
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=args.dropout_path_rate,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    if args.resume_ckpt:
        # Resume from a fully-trained (supervised) checkpoint, strict load.
        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))["state_dict"]
        model.load_state_dict(model_dict)
        print("Use pretrained weights")
    if args.use_ssl_pretrained:
        try:
            # model_VoCoEMA.pt
            # model_dict = torch.load("./pretrained_models/supervised_suprem_swinunetr_2100.pth", map_location=torch.device('cpu'))
            # model_dict = torch.load("./pretrained_models/model_VoCoEMA.pt", map_location=torch.device('cpu'))
            model_dict = torch.load(args.pretrained_checkpoint,
                                    map_location=torch.device('cpu'))
            state_dict = model_dict
            # fix potential differences in state dict keys from pre-training to
            # fine-tuning
            if "module." in list(state_dict.keys())[0]:
                print("Tag 'module.' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("module.", "")] = state_dict.pop(key)
            if "swin_vit" in list(state_dict.keys())[0]:
                print("Tag 'swin_vit' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
            # We now load model weights, setting param `strict` to False, i.e.:
            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves
            # the decoder weights untouched (CNN UNet decoder).
            model.load_state_dict(state_dict, strict=False)
            print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
        except ValueError:
            raise ValueError("Self-supervised pre-trained weights not available for" + str(args.model_name))
    if args.squared_dice:
        dice_loss = DiceCELoss(
            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr
        )
    else:
        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)
    post_label = AsDiscrete(to_onehot=args.out_channels)
    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
    # Validation-time predictor: sliding window over the full volume.
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print("Total parameters count", pytorch_total_params)
    best_acc = 0
    start_epoch = 0
    if args.checkpoint is not None:
        # Resume full training state; strip a "backbone." prefix if present.
        checkpoint = torch.load(args.checkpoint, map_location="cpu")
        from collections import OrderedDict
        new_state_dict = OrderedDict()
        for k, v in checkpoint["state_dict"].items():
            new_state_dict[k.replace("backbone.", "")] = v
        model.load_state_dict(new_state_dict, strict=False)
        if "epoch" in checkpoint:
            start_epoch = checkpoint["epoch"]
        if "best_acc" in checkpoint:
            best_acc = checkpoint["best_acc"]
        print("=> loaded checkpoint '{}' (epoch {}) (bestacc {})".format(args.checkpoint, start_epoch, best_acc))
    model.cuda(args.gpu)
    if args.distributed:
        torch.cuda.set_device(args.gpu)
        if args.norm_name == "batch":
            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model.cuda(args.gpu)
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)
    if args.optim_name == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "adamw":
        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
        # optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)
    elif args.optim_name == "sgd":
        optimizer = torch.optim.SGD(
            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight
        )
    else:
        raise ValueError("Unsupported Optimization Procedure: " + str(args.optim_name))
    if args.lrschedule == "warmup_cosine":
        scheduler = LinearWarmupCosineAnnealingLR(
            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs
        )
    elif args.lrschedule == "cosine_anneal":
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)
        # Fast-forward the schedule when resuming mid-training.
        if args.checkpoint is not None:
            scheduler.step(epoch=start_epoch)
    else:
        scheduler = None
    accuracy = run_training(
        model=model,
        train_loader=loader[0],
        val_loader=loader[1],
        optimizer=optimizer,
        loss_func=dice_loss,
        acc_func=dice_acc,
        args=args,
        model_inferer=model_inferer,
        scheduler=scheduler,
        start_epoch=start_epoch,
        post_label=post_label,
        post_pred=post_pred,
    )
    return accuracy
# Registry of (name, level) pairs already configured, to avoid attaching
# duplicate handlers to the same logger.
logs = set()


def init_log(name, level=logging.INFO):
    """Create (or fetch) a stream logger that only emits on rank 0 under SLURM.

    Args:
        name: logger name passed to ``logging.getLogger``.
        level: level applied to both the logger and its stream handler.

    Returns:
        The configured ``logging.Logger``. Repeated calls with the same
        ``(name, level)`` pair return the already-configured logger instead
        of attaching a second handler.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        # Bug fix: this branch previously returned None, so any caller that
        # re-requested a logger got nothing back. Return the existing logger.
        return logger
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        rank = int(os.environ["SLURM_PROCID"])
        # Drop all records on non-zero ranks to avoid duplicated console
        # output in multi-process SLURM launches.
        logger.addFilter(lambda record: rank == 0)
    else:
        rank = 0
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
if __name__ == "__main__":
    # Script entry point: run the training pipeline defined in main() above.
    main()
================================================
FILE: Finetune/BTCV/optimizers/__init__.py
================================================
================================================
FILE: Finetune/BTCV/optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
__all__ = ["LinearLR", "ExponentialLR"]
class _LRSchedulerMONAI(_LRScheduler):
    """Base class for increasing the learning rate between two boundaries over a number
    of iterations"""

    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            end_lr: the final learning rate.
            num_iter: the number of iterations over which the test occurs.
            last_epoch: the index of last epoch.

        Returns:
            None
        """
        self.end_lr = end_lr
        self.num_iter = num_iter
        # Set attributes before the base constructor: _LRScheduler.__init__
        # performs an initial step() that calls the subclass's get_lr(),
        # which reads end_lr/num_iter.
        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
    """Linearly interpolate each parameter group's learning rate from its
    base LR to ``end_lr`` over ``num_iter`` iterations.
    """

    def get_lr(self):
        # Fraction of the schedule completed; reaches 1.0 on the final step.
        fraction = self.last_epoch / (self.num_iter - 1)
        rates = []
        for base_lr in self.base_lrs:
            rates.append(base_lr + fraction * (self.end_lr - base_lr))
        return rates
class ExponentialLR(_LRSchedulerMONAI):
    """Exponentially interpolate each parameter group's learning rate from
    its base LR to ``end_lr`` over ``num_iter`` iterations.
    """

    def get_lr(self):
        exponent = self.last_epoch / (self.num_iter - 1)
        # Geometric interpolation: base * (end / base) ** t, t in [0, 1].
        scaled = []
        for base_lr in self.base_lrs:
            scaled.append(base_lr * (self.end_lr / base_lr) ** exponent)
        return scaled
class WarmupCosineSchedule(LambdaLR):
    """Linear warmup followed by cosine decay.

    Based on the https://huggingface.co/ implementation.
    """

    def __init__(
        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
    ) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            warmup_steps: number of warmup iterations.
            t_total: total number of training iterations.
            cycles: cosine cycles parameter.
            last_epoch: the index of last epoch.

        Returns:
            None
        """
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super().__init__(optimizer, self.lr_lambda, last_epoch)

    def lr_lambda(self, step):
        # Warmup phase: multiplier ramps linearly from 0 to 1.
        if step < self.warmup_steps:
            return float(step) / float(max(1.0, self.warmup_steps))
        # Decay phase: cosine curve over the remaining steps, clamped at 0.
        decay_span = float(max(1, self.t_total - self.warmup_steps))
        progress = float(step - self.warmup_steps) / decay_span
        cosine = math.cos(math.pi * float(self.cycles) * 2.0 * progress)
        return max(0.0, 0.5 * (1.0 + cosine))
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    """Linear warmup for ``warmup_epochs`` steps, then cosine annealing from
    the base LRs down to ``eta_min`` over ``max_epochs - warmup_epochs`` steps.

    ``get_lr`` implements the chainable (incremental) form used by plain
    ``scheduler.step()``; ``_get_closed_form_lr`` is the closed form used
    when an explicit epoch is passed to ``step``.
    """

    def __init__(
        self,
        optimizer: Optimizer,
        warmup_epochs: int,
        max_epochs: int,
        warmup_start_lr: float = 0.0,
        eta_min: float = 0.0,
        last_epoch: int = -1,
    ) -> None:
        """
        Args:
            optimizer (Optimizer): Wrapped optimizer.
            warmup_epochs (int): Maximum number of iterations for linear warmup
            max_epochs (int): Maximum number of iterations
            warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
            eta_min (float): Minimum learning rate. Default: 0.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        # NOTE(review): warmup_epochs == 1 would divide by zero below via
        # ``warmup_epochs - 1`` — confirm callers always pass >= 2.
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min
        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        """
        Compute learning rate using chainable form of the scheduler
        """
        if not self._get_lr_called_within_step:
            warnings.warn(
                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
            )

        # First step: start at the warmup floor.
        if self.last_epoch == 0:
            return [self.warmup_start_lr] * len(self.base_lrs)
        # During warmup: add a constant linear increment each step.
        elif self.last_epoch < self.warmup_epochs:
            return [
                group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        # End of warmup: land exactly on the base learning rates.
        elif self.last_epoch == self.warmup_epochs:
            return self.base_lrs
        # Start of a new cosine period (only reached when stepping beyond
        # max_epochs): restart the incremental cosine recurrence.
        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
            return [
                group["lr"]
                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        # Regular cosine-annealing step, expressed incrementally from the
        # previous step's learning rate (ratio of consecutive cosine values).
        return [
            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            / (
                1
                + math.cos(
                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
                )
            )
            * (group["lr"] - self.eta_min)
            + self.eta_min
            for group in self.optimizer.param_groups
        ]

    def _get_closed_form_lr(self) -> List[float]:
        """
        Called when epoch is passed as a param to the `step` function of the scheduler.
        """
        if self.last_epoch < self.warmup_epochs:
            # Linear interpolation from warmup_start_lr up to each base LR.
            return [
                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr in self.base_lrs
            ]
        # Standard closed-form cosine annealing after warmup.
        return [
            self.eta_min
            + 0.5
            * (base_lr - self.eta_min)
            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            for base_lr in self.base_lrs
        ]
================================================
FILE: Finetune/BTCV/trainer.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import numpy as np
import torch
import torch.nn.parallel
import torch.utils.data.distributed
from tensorboardX import SummaryWriter
from torch.cuda.amp import GradScaler, autocast
from utils.utils import AverageMeter, distributed_all_gather
from monai.data import decollate_batch
def train_epoch(model, loader, optimizer, scheduler, scaler, epoch, loss_func, args):
    """Run one training epoch and return the running-average loss.

    Supports mixed precision via ``scaler`` when ``args.amp`` is set, and
    averages the loss across ranks when ``args.distributed`` is set. The
    scheduler (if any) is stepped once per iteration.
    """
    model.train()
    tic = time.time()
    loss_meter = AverageMeter()
    for step, batch in enumerate(loader):
        if isinstance(batch, list):
            image, label = batch
        else:
            image, label = batch["image"], batch["label"]
        image, label = image.cuda(), label.cuda()
        # Drop gradients entirely instead of zeroing them (cheaper).
        for p in model.parameters():
            p.grad = None
        with autocast(enabled=args.amp):
            preds = model(image)
            loss = loss_func(preds, label)
        if args.amp:
            scaler.scale(loss).backward()
            scaler.step(optimizer)
            scaler.update()
        else:
            loss.backward()
            optimizer.step()
        if args.distributed:
            # Gather per-rank losses, masking out sampler padding.
            gathered = distributed_all_gather(
                [loss], out_numpy=True, is_valid=step < loader.sampler.valid_length
            )
            loss_meter.update(
                np.mean(np.mean(np.stack(gathered, axis=0), axis=0), axis=0),
                n=args.batch_size * args.world_size,
            )
        else:
            loss_meter.update(loss.item(), n=args.batch_size)
        lr = optimizer.param_groups[0]["lr"]
        if scheduler is not None:
            scheduler.step()
        if args.rank == 0:
            print(
                "Epoch {}/{} {}/{}".format(epoch, args.max_epochs, step, len(loader)),
                "loss: {:.4f}".format(loss_meter.avg),
                "lr: {:.8f}".format(lr),
                "time {:.2f}s".format(time.time() - tic),
            )
        tic = time.time()
    # Release gradient memory before validation.
    for p in model.parameters():
        p.grad = None
    return loss_meter.avg
def val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):
    """Run one validation epoch and return the running average accuracy.

    Args:
        model: network to evaluate (switched to eval mode here).
        loader: validation loader yielding dicts or (image, label) pairs.
        epoch: current epoch index, used only for logging.
        acc_func: metric object with reset/__call__/aggregate (e.g. DiceMetric).
        args: run configuration (amp, distributed, rank, max_epochs, ...).
        model_inferer: optional inferer (e.g. sliding window); a plain
            forward pass is used when None.
        post_label / post_pred: transforms applied to each decollated label /
            prediction before the metric.

    Returns:
        The meter's running average (numpy array, per class).
    """
    model.eval()
    run_acc = AverageMeter()
    start_time = time.time()
    with torch.no_grad():
        for idx, batch_data in enumerate(loader):
            if isinstance(batch_data, list):
                data, target = batch_data
            else:
                data, target = batch_data["image"], batch_data["label"]
            data, target = data.cuda(), target.cuda()
            with autocast(enabled=args.amp):
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            # Keep target on the same device as the (possibly CPU) logits.
            if not logits.is_cuda:
                target = target.cpu()
            val_labels_list = decollate_batch(target)
            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
            val_outputs_list = decollate_batch(logits)
            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
            # Reset per batch: accumulation happens in run_acc, not acc_func.
            acc_func.reset()
            acc_func(y_pred=val_output_convert, y=val_labels_convert)
            acc, not_nans = acc_func.aggregate()
            acc = acc.cuda(args.rank)
            if args.distributed:
                # Average across ranks, masking out sampler padding.
                acc_list, not_nans_list = distributed_all_gather(
                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length
                )
                for al, nl in zip(acc_list, not_nans_list):
                    run_acc.update(al, n=nl)
            else:
                # not_nans weights the update so empty classes don't skew it.
                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            if args.rank == 0:
                avg_acc = np.mean(run_acc.avg)
                print(
                    "Val {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
                    "acc",
                    avg_acc,
                    "time {:.2f}s".format(time.time() - start_time),
                )
            start_time = time.time()
    torch.cuda.empty_cache()
    return run_acc.avg
def save_checkpoint(model, epoch, args, filename="model.pt", best_acc=0, optimizer=None, scheduler=None):
    """Serialize training state to ``args.logdir/filename``.

    For DDP-wrapped models the underlying module's state dict is saved so
    checkpoints load without the ``module.`` prefix. Optimizer and scheduler
    states are included only when provided.
    """
    if args.distributed:
        state_dict = model.module.state_dict()
    else:
        state_dict = model.state_dict()
    save_dict = {"epoch": epoch, "best_acc": best_acc, "state_dict": state_dict}
    if optimizer is not None:
        save_dict["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        save_dict["scheduler"] = scheduler.state_dict()
    path = os.path.join(args.logdir, filename)
    torch.save(save_dict, path)
    print("Saving checkpoint", path)
def run_training(
    model,
    train_loader,
    val_loader,
    optimizer,
    loss_func,
    acc_func,
    args,
    model_inferer=None,
    scheduler=None,
    start_epoch=0,
    post_label=None,
    post_pred=None,
):
    """Main train/validate loop.

    Trains for ``args.max_epochs`` epochs (resuming at ``start_epoch``),
    validating every ``args.val_every`` epochs. On rank 0: logs losses and
    accuracies to TensorBoard, always saves ``model_final.pt`` after a
    validation round, and copies it to ``model.pt`` when validation
    accuracy reaches a new maximum.

    Returns:
        The best (maximum) mean validation accuracy observed.
    """
    writer = None
    if args.logdir is not None and args.rank == 0:
        writer = SummaryWriter(log_dir=args.logdir)
        if args.rank == 0:
            print("Writing Tensorboard logs to ", args.logdir)
    scaler = None
    if args.amp:
        scaler = GradScaler()
    val_acc_max = 0.0
    for epoch in range(start_epoch, args.max_epochs):
        if args.distributed:
            # Re-seed the sampler so each epoch gets a fresh shard split.
            train_loader.sampler.set_epoch(epoch)
            torch.distributed.barrier()
        print(args.rank, time.ctime(), "Epoch:", epoch)
        epoch_time = time.time()
        train_loss = train_epoch(
            model, train_loader, optimizer, scheduler, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args
        )
        if args.rank == 0:
            print(
                "Final training {}/{}".format(epoch, args.max_epochs - 1),
                "loss: {:.4f}".format(train_loss),
                "time {:.2f}s".format(time.time() - epoch_time),
            )
        if args.rank == 0 and writer is not None:
            writer.add_scalar("train_loss", train_loss, epoch)
        b_new_best = False
        if (epoch + 1) % args.val_every == 0:
            if args.distributed:
                torch.distributed.barrier()
            epoch_time = time.time()
            val_avg_acc = val_epoch(
                model,
                val_loader,
                epoch=epoch,
                acc_func=acc_func,
                model_inferer=model_inferer,
                args=args,
                post_label=post_label,
                post_pred=post_pred,
            )
            # Reduce the per-class accuracies to a single scalar.
            val_avg_acc = np.mean(val_avg_acc)
            if args.rank == 0:
                print(
                    "Final validation {}/{}".format(epoch, args.max_epochs - 1),
                    "acc",
                    val_avg_acc,
                    "time {:.2f}s".format(time.time() - epoch_time),
                )
                if writer is not None:
                    writer.add_scalar("val_acc", val_avg_acc, epoch)
                if val_avg_acc > val_acc_max:
                    print("new best ({:.6f} --> {:.6f}). ".format(val_acc_max, val_avg_acc))
                    val_acc_max = val_avg_acc
                    b_new_best = True
                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                        # Best checkpoint keeps optimizer/scheduler for resuming.
                        save_checkpoint(
                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler
                        )
            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                # Always refresh the "final" snapshot after validation.
                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename="model_final.pt")
                if b_new_best:
                    print("Copying to model.pt new best model!!!!")
                    shutil.copyfile(os.path.join(args.logdir, "model_final.pt"), os.path.join(args.logdir, "model.pt"))

    print("Training Finished !, Best Accuracy: ", val_acc_max)
    return val_acc_max
================================================
FILE: Finetune/BTCV/utils/__init__.py
================================================
================================================
FILE: Finetune/BTCV/utils/data_test.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
class Sampler(torch.utils.data.Sampler):
    """Shard dataset indices across distributed ranks.

    With ``make_even`` the index list is padded so every rank yields the
    same number of samples per epoch; ``valid_length`` records how many of
    this rank's indices are real (non-padded) data.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if (num_replicas is None or rank is None) and not torch.distributed.is_available():
            raise RuntimeError("Requires distributed package to be available")
        if num_replicas is None:
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        # Ceil division so the last partial shard still yields a full round.
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # Count of real indices this rank receives (the slice stop clamps
        # to the dataset length before padding is applied).
        self.valid_length = len(range(self.rank, min(self.total_size, len(self.dataset)), self.num_replicas))

    def __iter__(self):
        if self.shuffle:
            # Seed by epoch so every rank draws the identical permutation.
            gen = torch.Generator()
            gen.manual_seed(self.epoch)
            indices = torch.randperm(len(self.dataset), generator=gen).tolist()
        else:
            indices = list(range(len(self.dataset)))
        if self.make_even:
            pad = self.total_size - len(indices)
            if pad > 0:
                if pad < len(indices):
                    # Small shortfall: repeat a prefix of the current order.
                    indices.extend(indices[:pad])
                else:
                    # Large shortfall: sample padding indices at random.
                    chosen = np.random.randint(low=0, high=len(indices), size=pad)
                    indices.extend([indices[c] for c in chosen])
            assert len(indices) == self.total_size
        indices = indices[self.rank : self.total_size : self.num_replicas]
        self.num_samples = len(indices)
        return iter(indices)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        """Record the epoch used to seed the per-epoch shuffle."""
        self.epoch = epoch
def get_loader(args):
    """Build the BTCV test-set DataLoader.

    Images are loaded, channel-first'ed, reoriented to RAS, resampled to the
    requested spacing, intensity-windowed to [b_min, b_max], and
    foreground-cropped. Preprocessed items are cached on disk through
    ``PersistentDataset``.

    Args:
        args: namespace providing data_dir, json_list, space_x/y/z,
            a_min/a_max/b_min/b_max, batch_size, workers, distributed, and
            optionally cache_dir for the persistent cache location.

    Returns:
        A DataLoader over the "test" split of the decathlon-style JSON list.
    """
    data_dir = args.data_dir
    datalist_json = os.path.join(data_dir, args.json_list)
    transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image"]),
            transforms.EnsureChannelFirstd(keys=["image"]),
            transforms.Orientationd(keys=["image"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image"], source_key="image"),
            transforms.ToTensord(keys=["image"]),
        ]
    )
    datalist = load_decathlon_datalist(datalist_json, True, "test", base_dir=data_dir)
    print('use persistent')
    # Fix: the cache directory was hard-coded to a machine-specific path.
    # Honor args.cache_dir when present (consistent with utils/data_utils.py)
    # and keep the old path as a backward-compatible default.
    cache_dir = getattr(args, "cache_dir", '/data/linshan/cache/BTCV_test')
    ds = PersistentDataset(data=datalist,
                           transform=transform,
                           pickle_protocol=pickle.HIGHEST_PROTOCOL,
                           cache_dir=cache_dir)
    sampler = Sampler(ds) if args.distributed else None
    loader = data.DataLoader(
        ds,
        batch_size=args.batch_size,
        shuffle=(sampler is None),
        num_workers=args.workers,
        sampler=sampler,
        pin_memory=True,
    )
    return loader
================================================
FILE: Finetune/BTCV/utils/data_utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
class Sampler(torch.utils.data.Sampler):
    """Rank-aware index sampler (DistributedSampler-style) with optional
    padding so each rank draws an equal number of samples per epoch."""

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.shuffle = shuffle
        self.make_even = make_even
        self.epoch = 0
        # Ceil division so every rank sees the same number of samples.
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        full_index = list(range(len(self.dataset)))
        # How many real (non-padded) indices this rank will receive.
        self.valid_length = len(full_index[self.rank : self.total_size : self.num_replicas])

    def __iter__(self):
        count = len(self.dataset)
        if self.shuffle:
            rng = torch.Generator()
            rng.manual_seed(self.epoch)  # identical permutation on all ranks
            order = torch.randperm(count, generator=rng).tolist()
        else:
            order = list(range(count))
        if self.make_even:
            missing = self.total_size - len(order)
            if missing > 0:
                if missing < len(order):
                    order = order + order[:missing]
                else:
                    draws = np.random.randint(low=0, high=len(order), size=missing)
                    order = order + [order[d] for d in draws]
            assert len(order) == self.total_size
        shard = order[self.rank : self.total_size : self.num_replicas]
        self.num_samples = len(shard)
        return iter(shard)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        """Record the epoch used to seed the per-epoch shuffle."""
        self.epoch = epoch
def get_loader(args):
    """Build BTCV train/val (or test) loaders from a decathlon-style JSON.

    Returns a single test loader when ``args.test_mode`` is True, otherwise
    ``[train_loader, val_loader]``. Training samples are random crops of
    size (roi_x, roi_y, roi_z) with a 3:1 positive:negative centre ratio,
    augmented with flips, 90-degree rotations, and intensity scale/shift;
    validation volumes are kept whole for sliding-window inference.
    """
    data_dir = args.data_dir
    datalist_json = os.path.join(data_dir, args.json_list)
    train_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            # Oversample foreground: 3 positive-centred crops per negative.
            transforms.RandCropByPosNegLabeld(
                keys=["image", "label"],
                label_key="label",
                spatial_size=(args.roi_x, args.roi_y, args.roi_z),
                pos=3,
                neg=1,
                num_samples=args.sw_batch_size,
                image_key="image",
                image_threshold=0,
            ),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=0),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=1),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=2),
            transforms.RandRotate90d(keys=["image", "label"], prob=args.RandRotate90d_prob, max_k=3),
            transforms.RandScaleIntensityd(keys="image", factors=0.1, prob=args.RandScaleIntensityd_prob),
            transforms.RandShiftIntensityd(keys="image", offsets=0.1, prob=args.RandShiftIntensityd_prob),
            # transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    val_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            # transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    if args.test_mode:
        # Test mode evaluates the JSON's "validation" split with whole volumes.
        test_files = load_decathlon_datalist(datalist_json, True, "validation", base_dir=data_dir)
        test_ds = PersistentDataset(data=test_files,
                                    transform=val_transform,
                                    pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                    cache_dir=args.cache_dir)
        test_sampler = Sampler(test_ds, shuffle=False) if args.distributed else None
        test_loader = data.DataLoader(
            test_ds,
            batch_size=1,
            shuffle=False,
            num_workers=args.workers,
            sampler=test_sampler,
            pin_memory=True,
            persistent_workers=True,
        )
        loader = test_loader
    else:
        datalist = load_decathlon_datalist(datalist_json, True, "training", base_dir=data_dir)
        if args.use_normal_dataset:
            # NOTE(review): despite the flag name, this branch builds a
            # disk-backed PersistentDataset, not a plain Dataset — confirm
            # the flag semantics with callers.
            print('use persistent')
            train_ds = PersistentDataset(data=datalist,
                                         transform=train_transform,
                                         pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                         cache_dir=args.cache_dir)
            # train_ds = data.Dataset(data=datalist, transform=train_transform)
        else:
            train_ds = data.CacheDataset(
                data=datalist, transform=train_transform, cache_num=24, cache_rate=1.0, num_workers=args.workers
            )
        train_sampler = Sampler(train_ds) if args.distributed else None
        train_loader = data.DataLoader(
            train_ds,
            batch_size=args.batch_size,
            shuffle=(train_sampler is None),
            num_workers=args.workers,
            sampler=train_sampler,
            pin_memory=True,
        )
        val_files = load_decathlon_datalist(datalist_json, True, "validation", base_dir=data_dir)
        # val_ds = data.Dataset(data=val_files, transform=val_transform)
        val_ds = PersistentDataset(data=val_files,
                                   transform=val_transform,
                                   pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                   cache_dir=args.cache_dir)
        val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None
        val_loader = data.DataLoader(
            val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=True
        )
        loader = [train_loader, val_loader]

    return loader
================================================
FILE: Finetune/BTCV/utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
def resample_3d(img, target_size):
    """Resample a 3-D volume to ``target_size`` with nearest-neighbour
    interpolation (suitable for label maps; no smoothing is applied)."""
    source_shape = img.shape
    zoom_factors = tuple(
        float(dst) / float(src) for src, dst in zip(source_shape, target_size)
    )
    return ndimage.zoom(img, zoom_factors, order=0, prefilter=False)
def dice(x, y):
    """Dice overlap between two binary masks.

    Returns 0.0 when the reference mask ``y`` is empty (avoids a 0/0).
    """
    y_total = np.sum(y)
    if y_total == 0:
        return 0.0
    overlap = np.sum(x * y)
    x_total = np.sum(x)
    return 2 * overlap / (x_total + y_total)
class AverageMeter(object):
    """Tracks the latest value and a running (possibly element-wise) average.

    ``val`` and ``n`` may be Python scalars or numpy arrays (e.g. per-class
    accuracies weighted by per-class ``not_nans`` counts), so ``avg`` is
    computed element-wise and falls back to the raw ``sum`` wherever
    ``count`` is zero.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Accumulate ``val`` with weight ``n`` and refresh ``avg``.

        Args:
            val: latest measurement (scalar or numpy array).
            n: weight / sample count for this measurement (scalar or array).
        """
        self.val = val
        self.sum += val * n
        self.count += n
        # Guard the division: the previous sum/count-then-np.where pattern
        # raised ZeroDivisionError for Python scalars when count == 0 and
        # emitted RuntimeWarnings for numpy inputs with zero entries.
        count = np.asarray(self.count, dtype=np.float64)
        total = np.asarray(self.sum, dtype=np.float64)
        with np.errstate(divide="ignore", invalid="ignore"):
            self.avg = np.where(count > 0, total / count, total)
def distributed_all_gather(
    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
    """All-gather each tensor in ``tensor_list`` across distributed ranks.

    Requires an initialized ``torch.distributed`` process group.

    Args:
        tensor_list: tensors to gather (one all_gather per tensor).
        valid_batch_size: if given, keep only the first this-many gathered
            entries (capped at world_size).
        out_numpy: convert gathered tensors to numpy arrays on CPU.
        world_size: number of ranks; defaults to the process-group size.
        no_barrier: skip the synchronizing barrier before gathering.
        is_valid: this rank's validity flag; entries gathered from ranks
            whose flag is False are dropped (used to ignore sampler padding).

    Returns:
        A list (same length as ``tensor_list``) of per-rank gathered lists.
    """
    if world_size is None:
        world_size = torch.distributed.get_world_size()
    if valid_batch_size is not None:
        valid_batch_size = min(valid_batch_size, world_size)
    elif is_valid is not None:
        # Turn the local bool into a tensor so it can itself be all-gathered.
        # NOTE(review): because of the elif, passing BOTH valid_batch_size
        # and is_valid leaves is_valid un-converted — callers in this repo
        # pass only one of the two; confirm before mixing them.
        is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
    if not no_barrier:
        torch.distributed.barrier()
    tensor_list_out = []
    with torch.no_grad():
        if is_valid is not None:
            # Exchange validity flags so every rank knows which peers to keep.
            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
            torch.distributed.all_gather(is_valid_list, is_valid)
            is_valid = [x.item() for x in is_valid_list]
        for tensor in tensor_list:
            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
            torch.distributed.all_gather(gather_list, tensor)
            if valid_batch_size is not None:
                gather_list = gather_list[:valid_batch_size]
            elif is_valid is not None:
                # Keep only entries whose source rank reported valid data.
                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
            if out_numpy:
                gather_list = [t.cpu().numpy() for t in gather_list]
            tensor_list_out.append(gather_list)
    return tensor_list_out
================================================
FILE: Finetune/BTCV/val.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from utils.data_utils import get_loader
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
# Pin the run to GPU 0 and set rendezvous env vars for single-node use.
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'

parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs_scratch_v2/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/data/linshan/CTs/BTCV/", type=str, help="dataset directory")
parser.add_argument("--exp_name", default="BTCV_0.8451", type=str, help="experiment name")
parser.add_argument("--json_list", default="dataset_0.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_model_name",
    default="model_0.8451.pt",
    type=str,
    help="pretrained model name",
)
# Shared sliding-window ROI edge length used by the --roi_* defaults below.
roi=96
# NOTE(review): the boolean-ish flags below use default=True without type=
# or action=, so any command-line string (even "false") parses as truthy —
# confirm this is intended before exposing them on the CLI.
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
# CT intensity window: HU values in [a_min, a_max] map to [b_min, b_max].
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
def main():
args = parser.parse_args()
args.test_mode = True
output_directory = "./outputs/" + args.exp_name
if not os.path.exists(output_directory):
os.makedirs(output_directory)
val_loader = get_loader(args)
pretrained_dir = args.pretrained_dir
model_name = args.pretrained_model_name
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
pretrained_pth = os.path.join(pretrained_dir, model_name)
model = SwinUNETR(
img_size=(args.roi_x, args.roi_y, args.roi_z),
in_channels=args.in_channels,
out_channels=args.out_channels,
feature_size=args.feature_size,
drop_rate=0.0,
attn_drop_rate=0.0,
dropout_path_rate=0.0,
use_checkpoint=args.use_checkpoint,
use_v2=True
)
inf_size = [args.roi_x, args.roi_y, args.roi_z]
model_inferer = partial(
sliding_window_inference,
roi_size=inf_size,
sw_batch_size=args.sw_batch_size,
predictor=model,
overlap=args.infer_overlap,
)
model_dict = torch.load(pretrained_pth)["state_dict"]
model.load_state_dict(model_dict, strict=True)
model.eval()
model.to(device)
acc_func = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
run_acc = AverageMeter()
post_label = AsDiscrete(to_onehot=args.out_channels)
post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
with torch.no_grad():
all_dice = None
num = np.zeros(13)
dice_list_case = []
for idx, batch_data in enumerate(val_loader):
# img_name = batch_data["image_meta_dict"]["filename_or_obj"][0].split("/")[-1]
if isinstance(batch_data, list):
data, target = batch_data
else:
data, target = batch_data["image"], batch_data["label"]
data, target = data.cuda(), target.cuda()
print(data.shape, target.shape)
z = data.shape[-1]
data = F.interpolate(data, size=(263, 218, z), mode='trilinear')
target = F.interpolate(target, size=(263, 218, z), mode='nearest')
print(data.shape, target.shape)
with autocast(enabled=True):
if model_inferer is not None:
logits = model_inferer(data)
else:
logits = model(data)
if not logits.is_cuda:
target = target.cpu()
val_labels_list = decollate_batch(target)
val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
val_outputs_list = decollate_batch(logits)
val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
acc_func.reset()
acc_func(y_pred=val_output_convert, y=val_labels_convert)
acc, not_nans = acc_func.aggregate()
run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
print(np.mean(run_acc.avg))
# # save predict
# print(logits.shape)
# val_outputs = torch.argmax(logits, 1).cpu().numpy()
# np.save(os.path.join(output_directory, 'pre'+ img_name[3:-7]+'.npy'), val_outputs.astype(np.uint8)[0])
# # save label
# val_labels = target.cpu().numpy()
# np.save(os.path.join(output_directory, 'label' + img_name[3:-7] + '.npy'), val_labels.astype(np.uint8)[0][0])
#
# # save input
# img = data.cpu().numpy()
# img = img * 255
# print(np.max(img))
# np.save(os.path.join(output_directory, 'img' + img_name[3:-7] + '.npy'), img.astype(np.uint8)[0][0])
if __name__ == "__main__":
main()
# outputs = torch.argmax(logits, 1).cpu().numpy()
# outputs = outputs.astype(np.uint8)[0]
# val_labels = target.cpu().numpy()[0, 0, :, :, :]
#
# len_class = len(list(np.unique(val_labels))) - 1
# dice_list_sub = []
# for i in range(1, 14):
# # judge this class exist or not, ignore background
# num[i - 1] += (np.sum(val_labels == i) > 0).astype(np.uint8)
# organ_Dice = dice(outputs == i, val_labels == i)
# dice_list_sub.append(organ_Dice)
#
# mean_dice = np.sum(dice_list_sub) / len_class
# print("Mean Organ Dice: {}".format(mean_dice))
#
# # acc of each organ
# print("Organ Dice:", dice_list_sub)
#
# if all_dice is None:
# all_dice = (np.asarray(dice_list_sub)).copy()
# else:
# all_dice = all_dice + np.asarray(dice_list_sub)
# print("Organ Dice accumulate:", all_dice*100 / num)
#
# dice_list_case.append(mean_dice)
# print("Overall Mean Dice: {}".format(100*np.mean(dice_list_case)))
================================================
FILE: Finetune/CC-CCII/csv/CC_CCII_fold0_train.csv
================================================
zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids
CP-20.zip,1,CP,2668,3259,52,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-11.zip,1,CP,1436,3940,45,2,"[3940, 3941]"
NCP-13.zip,2,NCP,364,1880,56,2,"[1879, 1880]"
CP-9.zip,1,CP,1369,3790,67,2,"[3790, 3791]"
NCP-21.zip,2,NCP,65,1263,128,2,"[1263, 1264]"
CP-23.zip,1,CP,661,3023,116,1,[3023]
CP-30.zip,1,CP,3937,5643,66,2,"[5643, 5644]"
CP-25.zip,1,CP,8,3514,36,2,"[3513, 3514]"
NCP-15.zip,2,NCP,421,1996,67,2,"[1995, 1996]"
CP-25.zip,1,CP,738,3100,110,1,[3100]
NCP-11.zip,2,NCP,304,1755,67,2,"[1754, 1755]"
NCP-22.zip,2,NCP,834,2348,226,2,"[2347, 2348]"
Normal-1.zip,0,Normal,1680,840,66,6,"[839, 840, 841, 842, 843, 844]"
CP-13.zip,1,CP,1519,4141,68,2,"[4141, 4142]"
NCP-12.zip,2,NCP,315,1777,107,2,"[1777, 1778]"
Normal-2.zip,0,Normal,1753,1088,66,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
CP-8.zip,1,CP,1341,3722,57,1,[3722]
CP-13.zip,1,CP,1491,4075,48,3,"[4074, 4075, 4076]"
CP-28.zip,1,CP,3785,5729,28,1,[5729]
NCP-6.zip,2,NCP,212,1568,165,2,"[1568, 1569]"
CP-12.zip,1,CP,1477,4035,54,2,"[4035, 4036]"
CP-16.zip,1,CP,1605,4293,23,1,[4293]
NCP-29.zip,2,NCP,926,2468,24,1,[2468]
CP-10.zip,1,CP,1394,3847,62,2,"[3847, 3848]"
NCP-21.zip,2,NCP,580,2318,58,2,"[2317, 2318]"
NCP-19.zip,2,NCP,526,2208,137,2,"[2208, 2209]"
CP-13.zip,1,CP,1494,4085,65,3,"[4083, 4084, 4085]"
Normal-27.zip,0,Normal,3895,5421,71,4,"[5418, 5419, 5420, 5421]"
NCP-8.zip,2,NCP,267,1680,129,2,"[1680, 1681]"
NCP-18.zip,2,NCP,49,1232,61,2,"[1231, 1232]"
CP-21.zip,1,CP,589,2951,300,1,[2951]
CP-25.zip,1,CP,8,3513,42,2,"[3513, 3514]"
CP-27.zip,1,CP,3765,5709,20,1,[5709]
NCP-4.zip,2,NCP,147,1438,173,2,"[1438, 1439]"
Normal-27.zip,0,Normal,3904,5436,82,1,[5436]
NCP-14.zip,2,NCP,384,1921,54,2,"[1920, 1921]"
CP-18.zip,1,CP,1780,3560,69,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-14.zip,1,CP,1522,4148,61,2,"[4148, 4149]"
NCP-8.zip,2,NCP,256,1658,139,2,"[1658, 1659]"
CP-10.zip,1,CP,1406,3874,60,2,"[3874, 3875]"
CP-4.zip,1,CP,1177,3395,210,1,[3395]
Normal-1.zip,0,Normal,1673,804,291,6,"[804, 805, 806, 807, 808, 809]"
NCP-2.zip,2,NCP,122,1385,149,2,"[1385, 1386]"
CP-9.zip,1,CP,1354,3752,46,3,"[3751, 3752, 3753]"
NCP-23.zip,2,NCP,922,2464,240,1,[2464]
CP-20.zip,1,CP,2668,3251,58,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-6.zip,0,Normal,1796,251,96,1,[251]
CP-9.zip,1,CP,1374,3803,50,2,"[3802, 3803]"
NCP-7.zip,2,NCP,237,1620,61,2,"[1619, 1620]"
NCP-13.zip,2,NCP,363,1878,58,2,"[1877, 1878]"
CP-1.zip,1,CP,1084,3130,67,1,[3130]
Normal-14.zip,0,Normal,2082,537,78,1,[537]
CP-18.zip,1,CP,1656,4344,26,1,[4344]
NCP-18.zip,2,NCP,491,2138,149,2,"[2138, 2139]"
CP-22.zip,1,CP,609,2971,76,1,[2971]
Normal-18.zip,0,Normal,2198,653,88,1,[653]
NCP-6.zip,2,NCP,212,1569,69,2,"[1568, 1569]"
CP-21.zip,1,CP,607,2969,178,1,[2969]
NCP-9.zip,2,NCP,269,1685,64,2,"[1684, 1685]"
CP-9.zip,1,CP,1364,3777,56,3,"[3776, 3777, 3778]"
CP-17.zip,1,CP,1622,4310,27,1,[4310]
CP-16.zip,1,CP,1601,4289,19,1,[4289]
CP-10.zip,1,CP,1388,3832,51,2,"[3831, 3832]"
Normal-27.zip,0,Normal,3908,5442,56,1,[5442]
CP-25.zip,1,CP,732,3094,159,1,[3094]
NCP-14.zip,2,NCP,40,1212,149,2,"[1212, 1213]"
NCP-21.zip,2,NCP,65,1264,54,2,"[1263, 1264]"
CP-12.zip,1,CP,1477,4036,54,2,"[4035, 4036]"
Normal-10.zip,0,Normal,1953,408,94,1,[408]
CP-15.zip,1,CP,1577,4265,22,1,[4265]
Normal-14.zip,0,Normal,2055,510,91,1,[510]
Normal-17.zip,0,Normal,2154,609,94,1,[609]
Normal-27.zip,0,Normal,3895,5418,61,4,"[5418, 5419, 5420, 5421]"
Normal-19.zip,0,Normal,2227,682,73,1,[682]
Normal-11.zip,0,Normal,1975,430,101,1,[430]
CP-15.zip,1,CP,1584,4272,20,1,[4272]
Normal-20.zip,0,Normal,2262,717,84,1,[717]
CP-14.zip,1,CP,1543,4200,190,3,"[4200, 4201, 4202]"
Normal-3.zip,0,Normal,753,188,300,1,[188]
CP-12.zip,1,CP,1475,4032,50,2,"[4031, 4032]"
NCP-16.zip,2,NCP,458,2071,55,2,"[2070, 2071]"
NCP-5.zip,2,NCP,180,1504,136,2,"[1504, 1505]"
CP-30.zip,1,CP,3938,5645,94,1,[5645]
CP-9.zip,1,CP,1364,3778,56,3,"[3776, 3777, 3778]"
Normal-23.zip,0,Normal,2632,142,39,1,[142]
Normal-5.zip,0,Normal,810,245,324,1,[245]
NCP-5.zip,2,NCP,174,1493,56,2,"[1492, 1493]"
CP-17.zip,1,CP,1632,4320,23,1,[4320]
NCP-2.zip,2,NCP,112,1366,56,2,"[1365, 1366]"
CP-18.zip,1,CP,1780,3554,67,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-20.zip,1,CP,2668,3252,51,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-6.zip,0,Normal,1820,275,83,1,[275]
Normal-1.zip,0,Normal,1673,809,57,6,"[804, 805, 806, 807, 808, 809]"
Normal-18.zip,0,Normal,2204,659,94,1,[659]
CP-14.zip,1,CP,1531,4169,59,2,"[4169, 4170]"
CP-12.zip,1,CP,1474,4030,62,2,"[4029, 4030]"
Normal-18.zip,0,Normal,2215,670,80,1,[670]
NCP-21.zip,2,NCP,579,2315,150,2,"[2315, 2316]"
NCP-28.zip,2,NCP,854,2374,265,1,[2374]
Normal-25.zip,0,Normal,3838,5350,201,1,[5350]
CP-9.zip,1,CP,1352,3747,61,1,[3747]
Normal-1.zip,0,Normal,1719,994,76,2,"[993, 994]"
NCP-28.zip,2,NCP,852,2372,47,2,"[2371, 2372]"
Normal-19.zip,0,Normal,2225,680,94,1,[680]
Normal-16.zip,0,Normal,2148,603,86,1,[603]
NCP-19.zip,2,NCP,544,2245,147,2,"[2245, 2246]"
CP-29.zip,1,CP,3826,5770,26,1,[5770]
NCP-7.zip,2,NCP,229,1602,156,2,"[1602, 1603]"
Normal-1.zip,0,Normal,1673,807,283,6,"[804, 805, 806, 807, 808, 809]"
Normal-6.zip,0,Normal,1823,278,85,1,[278]
NCP-27.zip,2,NCP,824,2335,259,1,[2335]
CP-18.zip,1,CP,1776,3535,64,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-18.zip,2,NCP,513,2183,68,2,"[2182, 2183]"
CP-30.zip,1,CP,3934,5639,77,3,"[5638, 5639, 5640]"
CP-4.zip,1,CP,1168,3386,203,1,[3386]
NCP-12.zip,2,NCP,323,1794,116,2,"[1794, 1795]"
CP-8.zip,1,CP,1340,3720,64,2,"[3720, 3721]"
CP-5.zip,1,CP,1223,3441,232,1,[3441]
NCP-4.zip,2,NCP,166,1477,58,2,"[1476, 1477]"
NCP-6.zip,2,NCP,219,1583,65,2,"[1582, 1583]"
NCP-1.zip,2,NCP,101,1340,57,2,"[1339, 1340]"
NCP-11.zip,2,NCP,298,1742,145,2,"[1742, 1743]"
Normal-1.zip,0,Normal,1684,874,71,5,"[870, 871, 873, 874, 875]"
CP-14.zip,1,CP,1554,4227,41,2,"[4226, 4227]"
NCP-18.zip,2,NCP,489,2134,139,2,"[2134, 2135]"
Normal-23.zip,0,Normal,2615,125,36,1,[125]
NCP-8.zip,2,NCP,2674,2693,45,1,[2693]
NCP-6.zip,2,NCP,226,1596,142,2,"[1596, 1597]"
NCP-10.zip,2,NCP,274,1695,67,2,"[1694, 1695]"
Normal-10.zip,0,Normal,1944,399,97,1,[399]
CP-6.zip,1,CP,1236,3454,159,1,[3454]
CP-20.zip,1,CP,2668,3257,53,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-23.zip,1,CP,670,3032,78,1,[3032]
NCP-20.zip,2,NCP,548,2253,144,2,"[2253, 2254]"
CP-18.zip,1,CP,1769,3516,23,1,[3516]
Normal-3.zip,0,Normal,754,189,308,1,[189]
NCP-7.zip,2,NCP,239,1623,146,2,"[1623, 1624]"
NCP-14.zip,2,NCP,392,1935,58,2,"[1934, 1935]"
Normal-6.zip,0,Normal,1824,279,86,1,[279]
Normal-2.zip,0,Normal,1753,1087,77,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-30.zip,2,NCP,997,2554,49,2,"[2553, 2554]"
CP-26.zip,1,CP,3727,5663,42,1,[5663]
CP-11.zip,1,CP,1433,3934,62,2,"[3934, 3935]"
Normal-18.zip,0,Normal,2187,642,92,1,[642]
NCP-2.zip,2,NCP,112,1365,133,2,"[1365, 1366]"
NCP-6.zip,2,NCP,219,1582,156,2,"[1582, 1583]"
Normal-10.zip,0,Normal,1939,394,93,1,[394]
CP-18.zip,1,CP,1775,3532,57,4,"[3530, 3531, 3532, 3533]"
CP-2.zip,1,CP,11,3165,268,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
Normal-12.zip,0,Normal,2012,467,102,1,[467]
CP-21.zip,1,CP,587,2949,151,1,[2949]
Normal-15.zip,0,Normal,2116,571,92,1,[571]
CP-1.zip,1,CP,10,3156,289,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
Normal-27.zip,0,Normal,3895,5419,61,4,"[5418, 5419, 5420, 5421]"
Normal-25.zip,0,Normal,3854,5366,197,1,[5366]
Normal-4.zip,0,Normal,771,206,306,1,[206]
NCP-3.zip,2,NCP,129,1403,132,2,"[1403, 1404]"
Normal-13.zip,0,Normal,2042,497,90,1,[497]
Normal-2.zip,0,Normal,1753,1090,296,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-17.zip,2,NCP,478,2111,145,2,"[2111, 2112]"
Normal-17.zip,0,Normal,2171,626,92,1,[626]
CP-10.zip,1,CP,1410,3884,51,2,"[3883, 3884]"
CP-3.zip,1,CP,1140,3358,370,1,[3358]
NCP-22.zip,2,NCP,885,2422,52,2,"[2422, 2423]"
NCP-27.zip,2,NCP,1050,2624,428,2,"[2623, 2624]"
NCP-17.zip,2,NCP,478,2112,61,2,"[2111, 2112]"
CP-20.zip,1,CP,2668,3254,47,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-16.zip,2,NCP,433,2019,120,2,"[2019, 2020]"
NCP-19.zip,2,NCP,517,2191,58,2,"[2190, 2191]"
Normal-24.zip,0,Normal,2657,167,27,1,[167]
CP-8.zip,1,CP,1339,3718,59,2,"[3718, 3719]"
NCP-17.zip,2,NCP,482,2119,139,2,"[2119, 2120]"
CP-17.zip,1,CP,1635,4323,27,1,[4323]
Normal-10.zip,0,Normal,1930,385,98,1,[385]
Normal-1.zip,0,Normal,1679,837,70,6,"[833, 834, 835, 836, 837, 838]"
NCP-25.zip,2,NCP,3942,5539,37,1,[5539]
Normal-17.zip,0,Normal,2180,635,95,1,[635]
Normal-1.zip,0,Normal,1680,839,66,6,"[839, 840, 841, 842, 843, 844]"
Normal-1.zip,0,Normal,1705,965,69,2,"[965, 966]"
NCP-5.zip,2,NCP,174,1492,134,2,"[1492, 1493]"
NCP-14.zip,2,NCP,386,1923,62,1,[1923]
CP-22.zip,1,CP,625,2987,100,1,[2987]
CP-20.zip,1,CP,2450,2929,90,2,"[2928, 2929]"
Normal-10.zip,0,Normal,1949,404,92,1,[404]
CP-14.zip,1,CP,1546,4208,58,2,"[4208, 4209]"
NCP-21.zip,2,NCP,63,1260,58,2,"[1259, 1260]"
Normal-23.zip,0,Normal,2624,134,38,1,[134]
NCP-10.zip,2,NCP,272,1690,153,2,"[1690, 1691]"
CP-5.zip,1,CP,1209,3427,313,1,[3427]
NCP-11.zip,2,NCP,293,1731,122,2,"[1731, 1732]"
CP-9.zip,1,CP,1383,3822,71,2,"[3821, 3822]"
Normal-4.zip,0,Normal,793,228,94,1,[228]
NCP-2.zip,2,NCP,1057,2633,570,1,[2633]
Normal-1.zip,0,Normal,1679,835,67,6,"[833, 834, 835, 836, 837, 838]"
CP-4.zip,1,CP,1185,3403,131,1,[3403]
CP-11.zip,1,CP,1446,3965,63,2,"[3965, 3966]"
CP-15.zip,1,CP,1576,4264,23,1,[4264]
CP-12.zip,1,CP,1487,4062,68,3,"[4061, 4062, 4063]"
CP-9.zip,1,CP,1381,3817,66,3,"[3815, 3816, 3817]"
CP-28.zip,1,CP,3767,5711,17,1,[5711]
Normal-23.zip,0,Normal,2610,120,41,1,[120]
CP-10.zip,1,CP,1394,3848,62,2,"[3847, 3848]"
NCP-4.zip,2,NCP,160,1465,61,2,"[1464, 1465]"
CP-14.zip,1,CP,1543,4201,57,3,"[4200, 4201, 4202]"
CP-23.zip,1,CP,652,3014,277,1,[3014]
CP-16.zip,1,CP,1607,4295,17,1,[4295]
Normal-18.zip,0,Normal,2213,668,84,1,[668]
Normal-16.zip,0,Normal,2121,576,87,1,[576]
Normal-23.zip,0,Normal,2627,137,41,1,[137]
NCP-21.zip,2,NCP,582,2322,54,2,"[2321, 2322]"
CP-19.zip,1,CP,2431,2893,361,1,[2893]
Normal-1.zip,0,Normal,1717,989,67,2,"[989, 990]"
CP-10.zip,1,CP,1385,3825,64,2,"[3825, 3826]"
CP-5.zip,1,CP,1198,3416,162,1,[3416]
NCP-21.zip,2,NCP,578,2314,55,2,"[2313, 2314]"
NCP-20.zip,2,NCP,56,1246,68,2,"[1245, 1246]"
NCP-19.zip,2,NCP,532,2222,139,2,"[2222, 2223]"
Normal-21.zip,0,Normal,2283,738,87,1,[738]
Normal-19.zip,0,Normal,2222,677,78,1,[677]
CP-9.zip,1,CP,1361,3770,50,2,"[3770, 3771]"
NCP-15.zip,2,NCP,420,1993,177,2,"[1993, 1994]"
CP-18.zip,1,CP,1776,3538,76,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
Normal-1.zip,0,Normal,1706,968,64,2,"[967, 968]"
CP-20.zip,1,CP,2668,3253,51,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-5.zip,2,NCP,171,1486,143,2,"[1486, 1487]"
Normal-3.zip,0,Normal,750,185,281,1,[185]
CP-18.zip,1,CP,1780,3565,80,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-13.zip,2,NCP,362,1876,63,2,"[1875, 1876]"
CP-6.zip,1,CP,1234,3452,191,1,[3452]
Normal-1.zip,0,Normal,1684,873,133,5,"[870, 871, 873, 874, 875]"
Normal-6.zip,0,Normal,1812,267,99,1,[267]
NCP-17.zip,2,NCP,474,2103,114,2,"[2103, 2104]"
Normal-7.zip,0,Normal,1857,312,80,1,[312]
Normal-12.zip,0,Normal,1992,447,104,1,[447]
CP-18.zip,1,CP,1664,4352,20,1,[4352]
Normal-27.zip,0,Normal,3895,5420,71,4,"[5418, 5419, 5420, 5421]"
NCP-19.zip,2,NCP,517,2190,139,2,"[2190, 2191]"
Normal-23.zip,0,Normal,2625,135,39,1,[135]
Normal-5.zip,0,Normal,811,246,124,1,[246]
CP-4.zip,1,CP,1162,3380,212,1,[3380]
CP-22.zip,1,CP,611,2973,76,1,[2973]
CP-9.zip,1,CP,1381,3815,261,3,"[3815, 3816, 3817]"
CP-9.zip,1,CP,1371,3794,200,3,"[3794, 3795, 3796]"
NCP-16.zip,2,NCP,432,2017,128,2,"[2017, 2018]"
Normal-20.zip,0,Normal,2278,733,90,1,[733]
Normal-19.zip,0,Normal,2240,695,78,1,[695]
CP-28.zip,1,CP,3786,5730,29,1,[5730]
Normal-15.zip,0,Normal,2097,552,89,1,[552]
NCP-18.zip,2,NCP,500,2156,162,2,"[2156, 2157]"
CP-9.zip,1,CP,1374,3802,50,2,"[3802, 3803]"
Normal-23.zip,0,Normal,2606,116,33,1,[116]
CP-26.zip,1,CP,3651,5550,395,1,[5550]
Normal-9.zip,0,Normal,1912,367,92,1,[367]
NCP-25.zip,2,NCP,3953,5466,44,1,[5466]
CP-25.zip,1,CP,724,3086,100,1,[3086]
Normal-21.zip,0,Normal,2292,747,82,1,[747]
CP-7.zip,1,CP,1262,3480,384,1,[3480]
Normal-10.zip,0,Normal,1931,386,80,1,[386]
NCP-20.zip,2,NCP,563,2284,141,2,"[2284, 2285]"
CP-2.zip,1,CP,1123,3341,213,1,[3341]
NCP-17.zip,2,NCP,486,2127,153,2,"[2127, 2128]"
CP-26.zip,1,CP,3733,5673,32,3,"[5673, 5674, 5675]"
CP-3.zip,1,CP,1152,3370,69,1,[3370]
NCP-28.zip,2,NCP,838,2353,89,1,[2353]
Normal-1.zip,0,Normal,1717,990,67,2,"[989, 990]"
NCP-30.zip,2,NCP,997,2553,54,2,"[2553, 2554]"
NCP-17.zip,2,NCP,48,1230,61,2,"[1229, 1230]"
NCP-17.zip,2,NCP,467,2089,138,2,"[2089, 2090]"
NCP-20.zip,2,NCP,564,2286,143,2,"[2286, 2287]"
Normal-7.zip,0,Normal,1854,309,82,1,[309]
Normal-2.zip,0,Normal,1747,1065,60,1,[1065]
NCP-19.zip,2,NCP,535,2228,47,2,"[2227, 2228]"
NCP-26.zip,2,NCP,3974,5508,52,1,[5508]
Normal-7.zip,0,Normal,1829,284,92,1,[284]
Normal-1.zip,0,Normal,1673,808,57,6,"[804, 805, 806, 807, 808, 809]"
NCP-2.zip,2,NCP,1271,2712,56,1,[2712]
CP-30.zip,1,CP,3934,5638,59,3,"[5638, 5639, 5640]"
NCP-26.zip,2,NCP,3979,5486,52,1,[5486]
NCP-20.zip,2,NCP,554,2265,128,2,"[2265, 2266]"
NCP-6.zip,2,NCP,221,1587,53,2,"[1586, 1587]"
NCP-20.zip,2,NCP,558,2273,119,2,"[2273, 2274]"
CP-8.zip,1,CP,1321,3678,58,2,"[3678, 3679]"
NCP-6.zip,2,NCP,226,1597,60,2,"[1596, 1597]"
NCP-21.zip,2,NCP,76,1286,51,2,"[1285, 1286]"
NCP-1.zip,2,NCP,1042,2613,143,2,"[2613, 2614]"
NCP-13.zip,2,NCP,366,1884,67,2,"[1883, 1884]"
NCP-18.zip,2,NCP,490,2136,147,2,"[2136, 2137]"
NCP-28.zip,2,NCP,856,2376,227,2,"[2376, 2377]"
CP-19.zip,1,CP,2445,2920,283,2,"[2920, 2921]"
Normal-1.zip,0,Normal,1673,806,59,6,"[804, 805, 806, 807, 808, 809]"
CP-25.zip,1,CP,9,3151,72,4,"[3148, 3149, 3150, 3151]"
Normal-25.zip,0,Normal,3847,5359,219,1,[5359]
Normal-12.zip,0,Normal,2005,460,77,1,[460]
CP-30.zip,1,CP,3936,5642,59,1,[5642]
NCP-12.zip,2,NCP,326,1800,117,2,"[1800, 1801]"
Normal-13.zip,0,Normal,2045,500,85,1,[500]
CP-15.zip,1,CP,1583,4271,18,1,[4271]
Normal-20.zip,0,Normal,2261,716,83,1,[716]
Normal-20.zip,0,Normal,2276,731,91,1,[731]
CP-18.zip,1,CP,1776,3536,75,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-27.zip,2,NCP,1034,2605,19,1,[2605]
NCP-16.zip,2,NCP,445,2044,139,2,"[2044, 2045]"
CP-12.zip,1,CP,1461,4001,53,2,"[4000, 4001]"
CP-12.zip,1,CP,1485,4056,114,3,"[4056, 4057, 4058]"
NCP-7.zip,2,NCP,231,1606,139,2,"[1606, 1607]"
NCP-13.zip,2,NCP,343,1838,55,2,"[1837, 1838]"
NCP-6.zip,2,NCP,202,1548,161,2,"[1548, 1549]"
Normal-17.zip,0,Normal,2160,615,96,1,[615]
CP-28.zip,1,CP,3780,5724,27,1,[5724]
CP-9.zip,1,CP,1354,3753,46,3,"[3751, 3752, 3753]"
CP-16.zip,1,CP,1598,4286,23,1,[4286]
CP-19.zip,1,CP,2445,2921,119,2,"[2920, 2921]"
CP-9.zip,1,CP,1361,3771,50,2,"[3770, 3771]"
NCP-15.zip,2,NCP,412,1974,54,2,"[1973, 1974]"
Normal-8.zip,0,Normal,1861,316,76,1,[316]
Normal-3.zip,0,Normal,1766,1150,57,3,"[1149, 1150, 1151]"
Normal-17.zip,0,Normal,2182,637,96,1,[637]
Normal-7.zip,0,Normal,1833,288,102,1,[288]
Normal-9.zip,0,Normal,1894,349,99,1,[349]
Normal-22.zip,0,Normal,2319,774,101,1,[774]
Normal-1.zip,0,Normal,1680,844,64,6,"[839, 840, 841, 842, 843, 844]"
CP-24.zip,1,CP,679,3041,94,1,[3041]
CP-30.zip,1,CP,3832,5776,23,1,[5776]
CP-25.zip,1,CP,720,3082,84,1,[3082]
Normal-19.zip,0,Normal,2235,690,89,1,[690]
CP-11.zip,1,CP,1429,3927,52,2,"[3926, 3927]"
Normal-7.zip,0,Normal,1835,290,83,1,[290]
NCP-7.zip,2,NCP,239,1624,61,2,"[1623, 1624]"
Normal-27.zip,0,Normal,3899,5430,76,2,"[5429, 5430]"
CP-4.zip,1,CP,1165,3383,151,1,[3383]
NCP-3.zip,2,NCP,1297,2738,56,1,[2738]
NCP-22.zip,2,NCP,832,2345,25,1,[2345]
NCP-25.zip,2,NCP,3952,5505,46,1,[5505]
NCP-26.zip,2,NCP,3977,5509,56,1,[5509]
CP-16.zip,1,CP,1609,4297,20,1,[4297]
Normal-21.zip,0,Normal,2294,749,103,1,[749]
NCP-25.zip,2,NCP,3967,5507,46,1,[5507]
CP-13.zip,1,CP,1495,4089,48,4,"[4086, 4087, 4088, 4089]"
CP-7.zip,1,CP,1317,3672,58,3,"[3670, 3671, 3672]"
Normal-26.zip,0,Normal,3877,5389,25,1,[5389]
CP-20.zip,1,CP,2766,3297,41,1,[3297]
CP-18.zip,1,CP,1661,4349,32,1,[4349]
NCP-19.zip,2,NCP,535,2227,112,2,"[2227, 2228]"
CP-2.zip,1,CP,1120,3338,159,1,[3338]
NCP-2.zip,2,NCP,118,1377,142,2,"[1377, 1378]"
Normal-7.zip,0,Normal,1843,298,96,1,[298]
NCP-15.zip,2,NCP,400,1950,155,1,[1950]
NCP-25.zip,2,NCP,3704,5531,60,1,[5531]
Normal-15.zip,0,Normal,2095,550,99,1,[550]
Normal-1.zip,0,Normal,1684,870,68,5,"[870, 871, 873, 874, 875]"
NCP-16.zip,2,NCP,44,1222,52,2,"[1221, 1222]"
NCP-11.zip,2,NCP,31,1194,137,2,"[1194, 1195]"
NCP-15.zip,2,NCP,409,1968,64,2,"[1967, 1968]"
NCP-16.zip,2,NCP,451,2057,48,3,"[2056, 2057, 2058]"
Normal-2.zip,0,Normal,1753,1086,77,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-8.zip,2,NCP,262,1670,139,2,"[1670, 1671]"
Normal-10.zip,0,Normal,1955,410,93,1,[410]
Normal-6.zip,0,Normal,1826,281,104,1,[281]
NCP-28.zip,2,NCP,852,2371,47,2,"[2371, 2372]"
NCP-27.zip,2,NCP,1000,2558,39,1,[2558]
CP-1.zip,1,CP,1072,3115,52,1,[3115]
Normal-13.zip,0,Normal,2052,507,71,1,[507]
CP-7.zip,1,CP,1314,3663,30,2,"[3663, 3664]"
NCP-21.zip,2,NCP,67,1267,70,2,"[1266, 1267]"
NCP-3.zip,2,NCP,132,1409,117,1,[1409]
Normal-18.zip,0,Normal,2205,660,91,1,[660]
Normal-14.zip,0,Normal,2054,509,88,1,[509]
Normal-5.zip,0,Normal,809,244,114,1,[244]
NCP-27.zip,2,NCP,1029,2599,39,1,[2599]
NCP-26.zip,2,NCP,3972,5481,58,1,[5481]
Normal-13.zip,0,Normal,2026,481,85,1,[481]
NCP-17.zip,2,NCP,47,1227,139,2,"[1227, 1228]"
CP-27.zip,1,CP,3763,5707,20,1,[5707]
Normal-6.zip,0,Normal,1798,253,93,1,[253]
NCP-9.zip,2,NCP,2703,2669,41,1,[2669]
CP-1.zip,1,CP,1071,3113,57,2,"[3113, 3114]"
NCP-16.zip,2,NCP,430,2014,64,2,"[2013, 2014]"
NCP-4.zip,2,NCP,144,1432,139,2,"[1432, 1433]"
Normal-4.zip,0,Normal,780,215,116,1,[215]
Normal-12.zip,0,Normal,2020,475,88,1,[475]
NCP-13.zip,2,NCP,366,1883,161,2,"[1883, 1884]"
Normal-2.zip,0,Normal,1761,1127,18,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-29.zip,2,NCP,899,2441,42,2,"[2440, 2441]"
CP-16.zip,1,CP,1612,4300,26,1,[4300]
NCP-15.zip,2,NCP,412,1973,129,2,"[1973, 1974]"
NCP-10.zip,2,NCP,2717,2710,42,1,[2710]
CP-19.zip,1,CP,1792,3214,71,2,"[3214, 3215]"
Normal-20.zip,0,Normal,2269,724,113,1,[724]
CP-11.zip,1,CP,1451,3976,51,2,"[3975, 3976]"
Normal-11.zip,0,Normal,1978,433,94,1,[433]
NCP-3.zip,2,NCP,1282,2723,70,1,[2723]
CP-23.zip,1,CP,654,3016,74,1,[3016]
NCP-13.zip,2,NCP,345,1842,62,2,"[1841, 1842]"
CP-22.zip,1,CP,610,2972,70,1,[2972]
CP-29.zip,1,CP,3799,5743,23,1,[5743]
NCP-18.zip,2,NCP,506,2168,124,2,"[2168, 2169]"
Normal-19.zip,0,Normal,2218,673,84,1,[673]
NCP-7.zip,2,NCP,243,1632,31,3,"[1631, 1632, 1633]"
NCP-25.zip,2,NCP,3948,5504,50,1,[5504]
CP-7.zip,1,CP,1312,3658,65,2,"[3658, 3659]"
NCP-16.zip,2,NCP,451,2058,23,3,"[2056, 2057, 2058]"
CP-12.zip,1,CP,1461,4000,53,2,"[4000, 4001]"
CP-1.zip,1,CP,10,3154,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-10.zip,1,CP,1388,3831,51,2,"[3831, 3832]"
Normal-1.zip,0,Normal,1702,957,69,2,"[957, 958]"
Normal-17.zip,0,Normal,2181,636,100,1,[636]
NCP-19.zip,2,NCP,521,2198,139,2,"[2198, 2199]"
Normal-9.zip,0,Normal,1922,377,87,1,[377]
Normal-8.zip,0,Normal,1872,327,86,1,[327]
CP-9.zip,1,CP,1369,3791,67,2,"[3790, 3791]"
CP-29.zip,1,CP,3815,5759,23,1,[5759]
NCP-2.zip,2,NCP,118,1378,60,2,"[1377, 1378]"
CP-19.zip,1,CP,1793,3216,69,1,[3216]
NCP-5.zip,2,NCP,178,1501,52,2,"[1500, 1501]"
CP-13.zip,1,CP,1495,4087,50,4,"[4086, 4087, 4088, 4089]"
CP-18.zip,1,CP,1780,3566,41,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-8.zip,1,CP,1323,3682,62,2,"[3682, 3683]"
CP-20.zip,1,CP,2754,3285,30,1,[3285]
Normal-26.zip,0,Normal,3865,5377,24,1,[5377]
Normal-23.zip,0,Normal,2614,124,37,1,[124]
CP-12.zip,1,CP,1465,4009,67,2,"[4009, 4010]"
CP-14.zip,1,CP,1537,4183,53,3,"[4182, 4183, 4184]"
Normal-1.zip,0,Normal,1719,993,76,2,"[993, 994]"
NCP-3.zip,2,NCP,128,1401,122,2,"[1401, 1402]"
CP-28.zip,1,CP,3778,5722,25,1,[5722]
NCP-1.zip,2,NCP,1018,2584,252,1,[2584]
NCP-9.zip,2,NCP,27,1187,33,2,"[1186, 1187]"
CP-13.zip,1,CP,1494,4084,65,3,"[4083, 4084, 4085]"
NCP-13.zip,2,NCP,344,1839,152,2,"[1839, 1840]"
CP-21.zip,1,CP,604,2966,134,1,[2966]
NCP-1.zip,2,NCP,1037,2608,32,1,[2608]
CP-12.zip,1,CP,1485,4057,49,3,"[4056, 4057, 4058]"
NCP-16.zip,2,NCP,45,1223,152,2,"[1223, 1224]"
Normal-14.zip,0,Normal,2058,513,95,1,[513]
NCP-12.zip,2,NCP,323,1795,49,2,"[1794, 1795]"
NCP-26.zip,2,NCP,3999,5496,52,1,[5496]
Normal-15.zip,0,Normal,2107,562,92,1,[562]
CP-12.zip,1,CP,1478,4038,53,2,"[4037, 4038]"
Normal-15.zip,0,Normal,2099,554,85,1,[554]
NCP-21.zip,2,NCP,64,1261,132,2,"[1261, 1262]"
CP-9.zip,1,CP,1384,3824,66,2,"[3823, 3824]"
NCP-18.zip,2,NCP,511,2178,132,2,"[2178, 2179]"
CP-6.zip,1,CP,1227,3445,307,1,[3445]
Normal-23.zip,0,Normal,2633,143,40,1,[143]
NCP-10.zip,2,NCP,2722,2678,53,1,[2678]
NCP-15.zip,2,NCP,427,2008,56,2,"[2007, 2008]"
NCP-23.zip,2,NCP,94,1324,153,2,"[1324, 1325]"
CP-19.zip,1,CP,2446,2922,690,1,[2922]
CP-26.zip,1,CP,3728,5664,229,1,[5664]
CP-20.zip,1,CP,2668,3249,45,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-27.zip,0,Normal,3899,5429,75,2,"[5429, 5430]"
Normal-9.zip,0,Normal,1902,357,93,1,[357]
NCP-9.zip,2,NCP,27,1186,75,2,"[1186, 1187]"
NCP-18.zip,2,NCP,508,2172,145,2,"[2172, 2173]"
Normal-8.zip,0,Normal,1862,317,91,1,[317]
NCP-3.zip,2,NCP,128,1402,52,2,"[1401, 1402]"
NCP-8.zip,2,NCP,257,1660,152,2,"[1660, 1661]"
NCP-30.zip,2,NCP,973,2516,57,1,[2516]
CP-9.zip,1,CP,1357,3759,61,3,"[3758, 3759, 3760]"
Normal-26.zip,0,Normal,3864,5376,178,1,[5376]
CP-25.zip,1,CP,727,3089,104,1,[3089]
NCP-8.zip,2,NCP,259,1664,155,2,"[1664, 1665]"
CP-10.zip,1,CP,1390,3838,56,3,"[3836, 3837, 3838]"
Normal-21.zip,0,Normal,2295,750,79,1,[750]
NCP-18.zip,2,NCP,49,1231,146,2,"[1231, 1232]"
CP-10.zip,1,CP,1391,3840,59,4,"[3839, 3840, 3841, 3842]"
NCP-17.zip,2,NCP,48,1229,145,2,"[1229, 1230]"
NCP-21.zip,2,NCP,73,1278,130,3,"[1278, 1279, 1280]"
NCP-11.zip,2,NCP,296,1738,58,2,"[1737, 1738]"
NCP-3.zip,2,NCP,129,1404,56,2,"[1403, 1404]"
NCP-12.zip,2,NCP,330,1808,153,2,"[1808, 1809]"
CP-14.zip,1,CP,1529,4165,100,3,"[4165, 4166, 4167]"
CP-4.zip,1,CP,1187,3405,325,1,[3405]
NCP-11.zip,2,NCP,307,1761,136,2,"[1761, 1762]"
CP-26.zip,1,CP,3725,5661,258,2,"[5660, 5661]"
Normal-10.zip,0,Normal,1950,405,102,1,[405]
CP-15.zip,1,CP,1563,4247,61,3,"[4245, 4246, 4247]"
NCP-4.zip,2,NCP,144,1433,58,2,"[1432, 1433]"
NCP-28.zip,2,NCP,855,2375,39,1,[2375]
Normal-1.zip,0,Normal,1726,1008,69,2,"[1007, 1008]"
CP-22.zip,1,CP,629,2991,304,1,[2991]
NCP-4.zip,2,NCP,142,1428,141,2,"[1428, 1429]"
CP-21.zip,1,CP,592,2954,104,1,[2954]
CP-1.zip,1,CP,10,3159,293,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-9.zip,1,CP,1357,3760,61,3,"[3758, 3759, 3760]"
Normal-24.zip,0,Normal,2648,158,32,1,[158]
NCP-9.zip,2,NCP,269,1684,153,2,"[1684, 1685]"
Normal-15.zip,0,Normal,2108,563,101,1,[563]
CP-25.zip,1,CP,9,3148,290,4,"[3148, 3149, 3150, 3151]"
NCP-13.zip,2,NCP,364,1879,132,2,"[1879, 1880]"
Normal-23.zip,0,Normal,2605,115,35,1,[115]
NCP-10.zip,2,NCP,282,1711,51,2,"[1710, 1711]"
CP-14.zip,1,CP,1546,4209,58,2,"[4208, 4209]"
NCP-29.zip,2,NCP,925,2467,22,1,[2467]
Normal-21.zip,0,Normal,2296,751,102,1,[751]
CP-2.zip,1,CP,1114,3332,361,1,[3332]
NCP-5.zip,2,NCP,19,1171,61,2,"[1170, 1171]"
NCP-13.zip,2,NCP,363,1877,139,2,"[1877, 1878]"
CP-12.zip,1,CP,1475,4031,50,2,"[4031, 4032]"
NCP-14.zip,2,NCP,399,1949,62,2,"[1948, 1949]"
CP-17.zip,1,CP,1626,4314,26,1,[4314]
CP-18.zip,1,CP,1780,3556,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
Normal-19.zip,0,Normal,2236,691,83,1,[691]
CP-15.zip,1,CP,1572,4260,19,1,[4260]
CP-6.zip,1,CP,1240,3458,137,1,[3458]
NCP-21.zip,2,NCP,76,1285,121,2,"[1285, 1286]"
CP-22.zip,1,CP,623,2985,463,1,[2985]
CP-27.zip,1,CP,3760,5704,23,1,[5704]
CP-23.zip,1,CP,672,3034,86,1,[3034]
NCP-1.zip,2,NCP,1026,2596,21,1,[2596]
CP-22.zip,1,CP,635,2997,106,1,[2997]
NCP-14.zip,2,NCP,375,1901,115,3,"[1901, 1902, 1903]"
NCP-11.zip,2,NCP,304,1754,161,2,"[1754, 1755]"
NCP-15.zip,2,NCP,408,1965,131,2,"[1965, 1966]"
NCP-9.zip,2,NCP,2702,2668,41,1,[2668]
CP-11.zip,1,CP,1452,3978,56,2,"[3977, 3978]"
NCP-29.zip,2,NCP,891,2430,22,1,[2430]
NCP-16.zip,2,NCP,458,2070,131,2,"[2070, 2071]"
Normal-2.zip,0,Normal,1753,1092,60,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
Normal-1.zip,0,Normal,1702,958,69,2,"[957, 958]"
Normal-2.zip,0,Normal,1761,1126,45,5,"[1125, 1126, 1127, 1128, 1129]"
CP-12.zip,1,CP,1487,4063,68,3,"[4061, 4062, 4063]"
NCP-25.zip,2,NCP,3958,5471,38,1,[5471]
CP-15.zip,1,CP,1556,4231,40,2,"[4230, 4231]"
NCP-16.zip,2,NCP,431,2015,160,2,"[2015, 2016]"
Normal-2.zip,0,Normal,1745,1060,298,3,"[1060, 1061, 1062]"
NCP-23.zip,2,NCP,906,2448,55,1,[2448]
CP-2.zip,1,CP,11,3163,265,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-17.zip,2,NCP,487,2130,70,2,"[2129, 2130]"
CP-16.zip,1,CP,1600,4288,19,1,[4288]
NCP-21.zip,2,NCP,580,2317,139,2,"[2317, 2318]"
Normal-1.zip,0,Normal,1673,805,59,6,"[804, 805, 806, 807, 808, 809]"
CP-29.zip,1,CP,3801,5745,26,1,[5745]
Normal-1.zip,0,Normal,1726,1007,69,2,"[1007, 1008]"
NCP-29.zip,2,NCP,893,2432,25,2,"[2432, 2433]"
CP-3.zip,1,CP,1143,3361,177,1,[3361]
CP-8.zip,1,CP,1343,3726,56,2,"[3726, 3727]"
NCP-2.zip,2,NCP,115,1371,118,2,"[1371, 1372]"
NCP-11.zip,2,NCP,31,1195,57,2,"[1194, 1195]"
CP-1.zip,1,CP,1071,3114,57,2,"[3113, 3114]"
NCP-23.zip,2,NCP,951,2494,38,1,[2494]
Normal-1.zip,0,Normal,1706,967,64,2,"[967, 968]"
NCP-8.zip,2,NCP,262,1671,58,2,"[1670, 1671]"
Normal-10.zip,0,Normal,1943,398,94,1,[398]
NCP-8.zip,2,NCP,257,1661,64,2,"[1660, 1661]"
Normal-24.zip,0,Normal,2644,154,39,1,[154]
NCP-15.zip,2,NCP,407,1964,52,2,"[1963, 1964]"
Normal-26.zip,0,Normal,3883,5395,61,1,[5395]
NCP-9.zip,2,NCP,2685,2698,52,1,[2698]
NCP-30.zip,2,NCP,992,2545,213,1,[2545]
CP-21.zip,1,CP,596,2958,255,1,[2958]
CP-7.zip,1,CP,1314,3664,30,2,"[3663, 3664]"
NCP-16.zip,2,NCP,432,2018,54,2,"[2017, 2018]"
NCP-14.zip,2,NCP,371,1894,59,2,"[1893, 1894]"
NCP-7.zip,2,NCP,2482,2685,45,1,[2685]
Normal-1.zip,0,Normal,1679,834,66,6,"[833, 834, 835, 836, 837, 838]"
CP-29.zip,1,CP,3824,5768,23,1,[5768]
Normal-2.zip,0,Normal,1753,1089,66,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
Normal-7.zip,0,Normal,1859,314,85,1,[314]
NCP-21.zip,2,NCP,578,2313,130,2,"[2313, 2314]"
CP-10.zip,1,CP,1402,3866,55,3,"[3865, 3866, 3867]"
Normal-4.zip,0,Normal,791,226,138,1,[226]
Normal-13.zip,0,Normal,2039,494,101,1,[494]
Normal-15.zip,0,Normal,2115,570,94,1,[570]
CP-12.zip,1,CP,1470,4021,54,2,"[4020, 4021]"
CP-24.zip,1,CP,695,3057,201,1,[3057]
Normal-12.zip,0,Normal,1994,449,95,1,[449]
Normal-5.zip,0,Normal,804,239,325,1,[239]
CP-17.zip,1,CP,1623,4311,23,1,[4311]
Normal-18.zip,0,Normal,2208,663,95,1,[663]
NCP-19.zip,2,NCP,526,2209,58,2,"[2208, 2209]"
NCP-16.zip,2,NCP,45,1224,64,2,"[1223, 1224]"
Normal-1.zip,0,Normal,1679,838,70,6,"[833, 834, 835, 836, 837, 838]"
CP-2.zip,1,CP,11,3161,244,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
Normal-19.zip,0,Normal,2239,694,89,1,[694]
NCP-7.zip,2,NCP,243,1631,145,3,"[1631, 1632, 1633]"
NCP-7.zip,2,NCP,243,1633,61,3,"[1631, 1632, 1633]"
CP-18.zip,1,CP,1780,3561,63,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-11.zip,1,CP,1429,3926,52,2,"[3926, 3927]"
NCP-7.zip,2,NCP,237,1619,146,2,"[1619, 1620]"
CP-7.zip,1,CP,1319,3674,61,2,"[3674, 3675]"
NCP-28.zip,2,NCP,829,2342,36,1,[2342]
Normal-18.zip,0,Normal,2186,641,84,1,[641]
Normal-16.zip,0,Normal,2127,582,84,1,[582]
CP-5.zip,1,CP,1197,3415,191,1,[3415]
CP-10.zip,1,CP,1414,3893,63,3,"[3891, 3892, 3893]"
NCP-14.zip,2,NCP,384,1920,127,2,"[1920, 1921]"
CP-7.zip,1,CP,1317,3671,116,3,"[3670, 3671, 3672]"
NCP-22.zip,2,NCP,81,1295,125,2,"[1295, 1296]"
CP-3.zip,1,CP,1156,3374,173,1,[3374]
Normal-2.zip,0,Normal,1761,1129,60,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-8.zip,2,NCP,252,1651,58,2,"[1650, 1651]"
NCP-25.zip,2,NCP,3959,5472,44,1,[5472]
Normal-11.zip,0,Normal,1988,443,90,1,[443]
CP-30.zip,1,CP,3833,5777,23,1,[5777]
NCP-26.zip,2,NCP,3985,5491,50,1,[5491]
CP-20.zip,1,CP,2668,3255,28,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-14.zip,0,Normal,2077,532,92,1,[532]
Normal-14.zip,0,Normal,2059,514,95,1,[514]
CP-29.zip,1,CP,3829,5773,26,1,[5773]
NCP-15.zip,2,NCP,402,1954,62,2,"[1953, 1954]"
CP-29.zip,1,CP,3800,5744,29,1,[5744]
CP-9.zip,1,CP,1383,3821,71,2,"[3821, 3822]"
NCP-6.zip,2,NCP,225,1594,135,2,"[1594, 1595]"
CP-27.zip,1,CP,3759,5703,23,1,[5703]
CP-4.zip,1,CP,1190,3408,173,1,[3408]
NCP-29.zip,2,NCP,889,2427,38,2,"[2427, 2428]"
NCP-14.zip,2,NCP,375,1902,40,3,"[1901, 1902, 1903]"
Normal-19.zip,0,Normal,2238,693,91,1,[693]
NCP-2.zip,2,NCP,1273,2714,56,1,[2714]
NCP-18.zip,2,NCP,497,2151,53,2,"[2150, 2151]"
CP-25.zip,1,CP,715,3077,609,1,[3077]
CP-7.zip,1,CP,1264,3482,126,1,[3482]
CP-1.zip,1,CP,10,3157,46,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
Normal-20.zip,0,Normal,2266,721,94,1,[721]
CP-11.zip,1,CP,1433,3935,62,2,"[3934, 3935]"
NCP-18.zip,2,NCP,511,2179,56,2,"[2178, 2179]"
CP-3.zip,1,CP,1138,3356,158,1,[3356]
Normal-20.zip,0,Normal,2249,704,66,1,[704]
Normal-6.zip,0,Normal,1809,264,94,1,[264]
CP-14.zip,1,CP,1547,4210,142,3,"[4210, 4211, 4212]"
CP-21.zip,1,CP,586,2948,174,1,[2948]
CP-23.zip,1,CP,650,3012,102,1,[3012]
CP-14.zip,1,CP,1522,4149,61,2,"[4148, 4149]"
NCP-8.zip,2,NCP,250,1646,144,2,"[1646, 1647]"
Normal-26.zip,0,Normal,3884,5397,298,2,"[5396, 5397]"
CP-28.zip,1,CP,3773,5717,20,1,[5717]
Normal-21.zip,0,Normal,2309,764,88,1,[764]
NCP-12.zip,2,NCP,326,1801,50,2,"[1800, 1801]"
Normal-1.zip,0,Normal,1729,1017,74,2,"[1017, 1018]"
Normal-1.zip,0,Normal,1684,871,68,5,"[870, 871, 873, 874, 875]"
CP-15.zip,1,CP,1567,4254,118,2,"[4254, 4255]"
NCP-4.zip,2,NCP,163,1470,154,2,"[1470, 1471]"
Normal-1.zip,0,Normal,1705,966,69,2,"[965, 966]"
CP-11.zip,1,CP,1446,3966,63,2,"[3965, 3966]"
NCP-6.zip,2,NCP,225,1595,57,2,"[1594, 1595]"
NCP-11.zip,2,NCP,293,1732,52,2,"[1731, 1732]"
NCP-28.zip,2,NCP,839,2354,209,1,[2354]
NCP-18.zip,2,NCP,513,2182,163,2,"[2182, 2183]"
Normal-8.zip,0,Normal,1889,344,87,1,[344]
CP-2.zip,1,CP,1112,3330,154,1,[3330]
Normal-26.zip,0,Normal,3874,5386,28,1,[5386]
CP-29.zip,1,CP,3813,5757,21,1,[5757]
CP-7.zip,1,CP,1317,3670,229,3,"[3670, 3671, 3672]"
NCP-20.zip,2,NCP,553,2264,58,2,"[2263, 2264]"
CP-29.zip,1,CP,3820,5764,31,1,[5764]
NCP-17.zip,2,NCP,482,2120,58,2,"[2119, 2120]"
NCP-7.zip,2,NCP,233,1610,86,2,"[1610, 1612]"
NCP-18.zip,2,NCP,500,2157,68,2,"[2156, 2157]"
Normal-4.zip,0,Normal,799,234,118,1,[234]
NCP-23.zip,2,NCP,94,1325,64,2,"[1324, 1325]"
CP-18.zip,1,CP,1780,3563,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-23.zip,2,NCP,902,2444,45,1,[2444]
CP-2.zip,1,CP,11,3162,260,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-3.zip,2,NCP,135,1415,58,2,"[1414, 1415]"
CP-8.zip,1,CP,1350,3745,55,1,[3745]
Normal-14.zip,0,Normal,2065,520,81,1,[520]
NCP-5.zip,2,NCP,188,1521,57,2,"[1520, 1521]"
Normal-2.zip,0,Normal,1745,1061,60,3,"[1060, 1061, 1062]"
NCP-15.zip,2,NCP,424,2002,64,2,"[2001, 2002]"
Normal-4.zip,0,Normal,790,225,126,1,[225]
NCP-4.zip,2,NCP,142,1429,59,2,"[1428, 1429]"
CP-7.zip,1,CP,1310,3653,51,2,"[3653, 3654]"
CP-14.zip,1,CP,1537,4182,53,3,"[4182, 4183, 4184]"
CP-17.zip,1,CP,1625,4313,26,1,[4313]
Normal-1.zip,0,Normal,1680,843,64,6,"[839, 840, 841, 842, 843, 844]"
NCP-11.zip,2,NCP,311,1769,134,2,"[1769, 1770]"
CP-1.zip,1,CP,1075,3118,553,2,"[3118, 3119]"
Normal-4.zip,0,Normal,770,205,116,1,[205]
CP-7.zip,1,CP,1311,3655,160,3,"[3655, 3656, 3657]"
Normal-1.zip,0,Normal,1724,1005,55,1,[1005]
NCP-20.zip,2,NCP,563,2285,59,2,"[2284, 2285]"
NCP-4.zip,2,NCP,163,1471,65,2,"[1470, 1471]"
Normal-15.zip,0,Normal,2114,569,101,1,[569]
Normal-12.zip,0,Normal,2016,471,89,1,[471]
CP-23.zip,1,CP,657,3019,343,1,[3019]
Normal-1.zip,0,Normal,1729,1018,74,2,"[1017, 1018]"
CP-18.zip,1,CP,1780,3558,73,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-5.zip,2,NCP,183,1511,52,2,"[1510, 1511]"
CP-1.zip,1,CP,1074,3117,61,1,[3117]
Normal-8.zip,0,Normal,1870,325,88,1,[325]
CP-6.zip,1,CP,1254,3472,125,1,[3472]
CP-21.zip,1,CP,2775,3306,43,1,[3306]
CP-16.zip,1,CP,1587,4275,20,1,[4275]
NCP-26.zip,2,NCP,3984,5490,54,1,[5490]
CP-27.zip,1,CP,3747,5691,20,1,[5691]
CP-13.zip,1,CP,1495,4088,48,4,"[4086, 4087, 4088, 4089]"
CP-9.zip,1,CP,1384,3823,66,2,"[3823, 3824]"
NCP-1.zip,2,NCP,100,1338,58,2,"[1337, 1338]"
NCP-27.zip,2,NCP,1025,2595,252,1,[2595]
NCP-18.zip,2,NCP,510,2177,43,2,"[2176, 2177]"
NCP-11.zip,2,NCP,298,1743,61,2,"[1742, 1743]"
Normal-17.zip,0,Normal,2174,629,88,1,[629]
CP-23.zip,1,CP,677,3039,309,1,[3039]
Normal-21.zip,0,Normal,2284,739,80,1,[739]
Normal-18.zip,0,Normal,2193,648,85,1,[648]
NCP-27.zip,2,NCP,1015,2579,39,1,[2579]
NCP-6.zip,2,NCP,214,1572,144,2,"[1572, 1573]"
CP-6.zip,1,CP,1248,3466,141,1,[3466]
Normal-27.zip,0,Normal,3901,5433,66,1,[5433]
CP-13.zip,1,CP,1519,4142,68,2,"[4141, 4142]"
NCP-14.zip,2,NCP,385,1922,64,1,[1922]
CP-7.zip,1,CP,1311,3657,67,3,"[3655, 3656, 3657]"
CP-14.zip,1,CP,1547,4212,58,3,"[4210, 4211, 4212]"
CP-4.zip,1,CP,1186,3404,204,1,[3404]
NCP-4.zip,2,NCP,165,1474,131,2,"[1474, 1475]"
CP-1.zip,1,CP,10,3160,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-3.zip,1,CP,1157,3375,204,1,[3375]
NCP-11.zip,2,NCP,307,1762,57,2,"[1761, 1762]"
CP-11.zip,1,CP,1441,3952,53,3,"[3951, 3952, 3953]"
NCP-21.zip,2,NCP,63,1259,139,2,"[1259, 1260]"
Normal-6.zip,0,Normal,1806,261,100,1,[261]
CP-17.zip,1,CP,1627,4315,26,1,[4315]
Normal-14.zip,0,Normal,2064,519,91,1,[519]
NCP-5.zip,2,NCP,180,1505,57,2,"[1504, 1505]"
Normal-16.zip,0,Normal,2134,589,72,1,[589]
Normal-14.zip,0,Normal,2063,518,99,1,[518]
CP-11.zip,1,CP,1451,3975,51,2,"[3975, 3976]"
Normal-24.zip,0,Normal,2647,157,34,1,[157]
NCP-21.zip,2,NCP,66,1265,58,1,[1265]
Normal-25.zip,0,Normal,3843,5355,180,1,[5355]
CP-25.zip,1,CP,729,3091,106,1,[3091]
CP-20.zip,1,CP,2668,3256,53,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-6.zip,2,NCP,200,1544,123,2,"[1544, 1545]"
Normal-1.zip,0,Normal,1685,879,65,4,"[877, 878, 879, 880]"
NCP-24.zip,2,NCP,972,2515,120,1,[2515]
CP-14.zip,1,CP,1547,4211,58,3,"[4210, 4211, 4212]"
CP-18.zip,1,CP,1775,3530,58,4,"[3530, 3531, 3532, 3533]"
CP-11.zip,1,CP,1427,3921,43,2,"[3921, 3922]"
CP-18.zip,1,CP,1776,3534,64,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-13.zip,2,NCP,368,1888,54,2,"[1887, 1888]"
CP-23.zip,1,CP,644,3006,134,1,[3006]
CP-7.zip,1,CP,1312,3659,65,2,"[3658, 3659]"
NCP-4.zip,2,NCP,139,1422,132,2,"[1422, 1423]"
NCP-15.zip,2,NCP,422,1998,63,2,"[1997, 1998]"
CP-10.zip,1,CP,1391,3842,59,4,"[3839, 3840, 3841, 3842]"
CP-11.zip,1,CP,1441,3953,53,3,"[3951, 3952, 3953]"
NCP-4.zip,2,NCP,154,1452,110,2,"[1452, 1453]"
NCP-6.zip,2,NCP,202,1549,67,2,"[1548, 1549]"
CP-11.zip,1,CP,1436,3941,45,2,"[3940, 3941]"
NCP-16.zip,2,NCP,431,2016,67,2,"[2015, 2016]"
Normal-26.zip,0,Normal,3870,5382,30,1,[5382]
Normal-17.zip,0,Normal,2159,614,89,1,[614]
CP-11.zip,1,CP,1427,3922,43,2,"[3921, 3922]"
CP-6.zip,1,CP,1228,3446,307,1,[3446]
NCP-15.zip,2,NCP,422,1997,156,2,"[1997, 1998]"
Normal-1.zip,0,Normal,1679,836,67,6,"[833, 834, 835, 836, 837, 838]"
CP-16.zip,1,CP,1604,4292,22,1,[4292]
CP-4.zip,1,CP,1179,3397,153,1,[3397]
NCP-6.zip,2,NCP,221,1586,125,2,"[1586, 1587]"
CP-18.zip,1,CP,1780,3564,41,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-4.zip,2,NCP,139,1423,56,2,"[1422, 1423]"
Normal-1.zip,0,Normal,1685,880,65,4,"[877, 878, 879, 880]"
CP-18.zip,1,CP,1780,3557,73,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-7.zip,1,CP,1269,3487,172,1,[3487]
Normal-1.zip,0,Normal,1680,841,69,6,"[839, 840, 841, 842, 843, 844]"
CP-13.zip,1,CP,1491,4074,113,3,"[4074, 4075, 4076]"
NCP-13.zip,2,NCP,344,1840,63,2,"[1839, 1840]"
NCP-17.zip,2,NCP,476,2108,53,2,"[2107, 2108]"
Normal-12.zip,0,Normal,1997,452,104,1,[452]
Normal-2.zip,0,Normal,1745,1062,60,3,"[1060, 1061, 1062]"
Normal-19.zip,0,Normal,2224,679,82,1,[679]
CP-2.zip,1,CP,1101,3319,187,1,[3319]
Normal-26.zip,0,Normal,3873,5385,25,1,[5385]
CP-15.zip,1,CP,1578,4266,22,1,[4266]
Normal-22.zip,0,Normal,2591,101,37,1,[101]
Normal-11.zip,0,Normal,1966,421,90,1,[421]
NCP-17.zip,2,NCP,480,2115,139,2,"[2115, 2116]"
CP-19.zip,1,CP,2,3503,34,1,[3503]
CP-16.zip,1,CP,1616,4304,29,1,[4304]
CP-10.zip,1,CP,1410,3883,51,2,"[3883, 3884]"
CP-24.zip,1,CP,701,3063,66,1,[3063]
NCP-6.zip,2,NCP,200,1545,52,2,"[1544, 1545]"
CP-1.zip,1,CP,10,3155,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-4.zip,2,NCP,160,1464,146,2,"[1464, 1465]"
Normal-8.zip,0,Normal,1890,345,99,1,[345]
NCP-9.zip,2,NCP,2694,2660,39,1,[2660]
CP-30.zip,1,CP,3930,5628,62,2,"[5628, 5629]"
CP-25.zip,1,CP,9,3149,290,4,"[3148, 3149, 3150, 3151]"
Normal-13.zip,0,Normal,2022,477,92,1,[477]
Normal-1.zip,0,Normal,1680,842,69,6,"[839, 840, 841, 842, 843, 844]"
NCP-7.zip,2,NCP,229,1603,65,2,"[1602, 1603]"
Normal-1.zip,0,Normal,1712,979,70,1,[979]
Normal-12.zip,0,Normal,2002,457,96,1,[457]
CP-6.zip,1,CP,1233,3451,150,1,[3451]
NCP-18.zip,2,NCP,489,2135,58,2,"[2134, 2135]"
CP-7.zip,1,CP,1310,3654,51,2,"[3653, 3654]"
CP-22.zip,1,CP,636,2998,102,1,[2998]
NCP-21.zip,2,NCP,70,1273,51,2,"[1272, 1273]"
Normal-23.zip,0,Normal,2603,113,41,1,[113]
CP-8.zip,1,CP,1323,3683,62,2,"[3682, 3683]"
Normal-20.zip,0,Normal,2274,729,85,1,[729]
NCP-29.zip,2,NCP,889,2428,121,2,"[2427, 2428]"
NCP-1.zip,2,NCP,1040,2611,113,1,[2611]
Normal-21.zip,0,Normal,2298,753,80,1,[753]
CP-19.zip,1,CP,1792,3215,71,2,"[3214, 3215]"
Normal-27.zip,0,Normal,3916,5459,77,1,[5459]
Normal-21.zip,0,Normal,2311,766,91,1,[766]
NCP-13.zip,2,NCP,343,1837,130,2,"[1837, 1838]"
NCP-26.zip,2,NCP,3989,5513,45,1,[5513]
CP-13.zip,1,CP,1495,4086,112,4,"[4086, 4087, 4088, 4089]"
Normal-5.zip,0,Normal,812,247,126,1,[247]
Normal-15.zip,0,Normal,2098,553,84,1,[553]
Normal-16.zip,0,Normal,2119,574,93,1,[574]
CP-25.zip,1,CP,731,3093,82,1,[3093]
CP-16.zip,1,CP,1597,4285,23,1,[4285]
CP-26.zip,1,CP,3726,5662,232,1,[5662]
CP-4.zip,1,CP,1183,3401,294,1,[3401]
CP-10.zip,1,CP,1391,3839,59,4,"[3839, 3840, 3841, 3842]"
NCP-23.zip,2,NCP,901,2443,320,1,[2443]
Normal-11.zip,0,Normal,1957,412,78,1,[412]
NCP-17.zip,2,NCP,474,2104,48,2,"[2103, 2104]"
NCP-9.zip,2,NCP,2698,2664,57,1,[2664]
NCP-7.zip,2,NCP,233,1612,45,2,"[1610, 1612]"
NCP-9.zip,2,NCP,2686,2699,48,1,[2699]
CP-18.zip,1,CP,1776,3537,75,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
CP-3.zip,1,CP,1158,3376,193,1,[3376]
CP-27.zip,1,CP,3755,5699,23,1,[5699]
CP-13.zip,1,CP,1509,4120,59,3,"[4118, 4119, 4120]"
NCP-29.zip,2,NCP,910,2452,76,1,[2452]
CP-2.zip,1,CP,11,3166,274,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-16.zip,2,NCP,433,2020,51,2,"[2019, 2020]"
Normal-26.zip,0,Normal,3863,5375,231,1,[5375]
Normal-7.zip,0,Normal,1851,306,102,1,[306]
NCP-23.zip,2,NCP,917,2459,272,1,[2459]
NCP-26.zip,2,NCP,3986,5492,42,1,[5492]
CP-12.zip,1,CP,1478,4037,53,2,"[4037, 4038]"
NCP-2.zip,2,NCP,115,1372,50,2,"[1371, 1372]"
NCP-13.zip,2,NCP,362,1875,151,2,"[1875, 1876]"
Normal-22.zip,0,Normal,2592,102,39,1,[102]
CP-9.zip,1,CP,1357,3758,61,3,"[3758, 3759, 3760]"
Normal-6.zip,0,Normal,1825,280,81,1,[280]
Normal-4.zip,0,Normal,775,210,134,1,[210]
NCP-13.zip,2,NCP,365,1881,117,2,"[1881, 1882]"
CP-24.zip,1,CP,709,3071,302,1,[3071]
CP-17.zip,1,CP,1630,4318,23,1,[4318]
CP-15.zip,1,CP,1557,4232,43,2,"[4232, 4233]"
NCP-23.zip,2,NCP,956,2499,156,1,[2499]
CP-2.zip,1,CP,1106,3324,164,1,[3324]
Normal-9.zip,0,Normal,1895,350,92,1,[350]
CP-21.zip,1,CP,599,2961,68,1,[2961]
NCP-16.zip,2,NCP,448,2051,58,2,"[2050, 2051]"
CP-5.zip,1,CP,1206,3424,176,1,[3424]
CP-26.zip,1,CP,3648,5540,170,1,[5540]
CP-1.zip,1,CP,1091,3309,354,1,[3309]
NCP-10.zip,2,NCP,2713,2706,39,1,[2706]
NCP-30.zip,2,NCP,949,2492,42,1,[2492]
NCP-17.zip,2,NCP,480,2116,58,2,"[2115, 2116]"
CP-7.zip,1,CP,1306,3643,48,3,"[3642, 3643, 3644]"
Normal-7.zip,0,Normal,1840,295,108,1,[295]
CP-18.zip,1,CP,1780,3562,63,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-1.zip,2,NCP,1011,2575,111,2,"[2574, 2575]"
Normal-16.zip,0,Normal,2132,587,97,1,[587]
CP-29.zip,1,CP,3814,5758,29,1,[5758]
CP-18.zip,1,CP,1768,3175,175,1,[3175]
Normal-13.zip,0,Normal,2028,483,89,1,[483]
NCP-16.zip,2,NCP,454,2062,139,2,"[2062, 2063]"
CP-8.zip,1,CP,1333,3706,52,2,"[3705, 3706]"
CP-25.zip,1,CP,737,3099,84,1,[3099]
NCP-9.zip,2,NCP,2683,2653,46,1,[2653]
Normal-11.zip,0,Normal,1958,413,90,1,[413]
Normal-7.zip,0,Normal,1855,310,86,1,[310]
NCP-10.zip,2,NCP,282,1710,120,2,"[1710, 1711]"
NCP-8.zip,2,NCP,252,1650,139,2,"[1650, 1651]"
NCP-3.zip,2,NCP,133,1411,41,2,"[1410, 1411]"
CP-21.zip,1,CP,588,2950,116,1,[2950]
Normal-15.zip,0,Normal,2094,549,78,1,[549]
NCP-20.zip,2,NCP,562,2282,113,2,"[2282, 2283]"
Normal-5.zip,0,Normal,806,241,104,1,[241]
CP-3.zip,1,CP,1145,3363,169,1,[3363]
NCP-28.zip,2,NCP,847,2365,53,1,[2365]
NCP-4.zip,2,NCP,143,1431,54,2,"[1430, 1431]"
NCP-15.zip,2,NCP,407,1963,124,2,"[1963, 1964]"
Normal-6.zip,0,Normal,1817,272,85,1,[272]
CP-32.zip,1,CP,1089,3224,90,1,[3224]
NCP-22.zip,2,NCP,834,2347,194,2,"[2347, 2348]"
CP-9.zip,1,CP,1381,3816,66,3,"[3815, 3816, 3817]"
Normal-8.zip,0,Normal,1866,321,75,1,[321]
NCP-22.zip,2,NCP,86,1306,50,2,"[1305, 1306]"
CP-26.zip,1,CP,3725,5660,251,2,"[5660, 5661]"
NCP-18.zip,2,NCP,497,2150,126,2,"[2150, 2151]"
NCP-27.zip,2,NCP,1043,2615,45,1,[2615]
CP-4.zip,1,CP,1167,3385,149,1,[3385]
Normal-4.zip,0,Normal,782,217,340,1,[217]
NCP-15.zip,2,NCP,421,1995,161,2,"[1995, 1996]"
Normal-9.zip,0,Normal,1897,352,88,1,[352]
NCP-13.zip,2,NCP,365,1882,50,2,"[1881, 1882]"
CP-1.zip,1,CP,1067,3106,62,1,[3106]
CP-22.zip,1,CP,642,3004,128,1,[3004]
CP-20.zip,1,CP,2668,3258,52,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-10.zip,1,CP,1406,3875,60,2,"[3874, 3875]"
CP-1.zip,1,CP,10,3158,285,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-21.zip,2,NCP,60,1254,59,2,"[1253, 1254]"
Normal-26.zip,0,Normal,3884,5396,62,2,"[5396, 5397]"
NCP-25.zip,2,NCP,3710,5537,66,1,[5537]
CP-9.zip,1,CP,1371,3795,60,3,"[3794, 3795, 3796]"
CP-20.zip,1,CP,2450,2928,92,2,"[2928, 2929]"
NCP-4.zip,2,NCP,166,1476,139,2,"[1476, 1477]"
NCP-20.zip,2,NCP,554,2266,54,2,"[2265, 2266]"
NCP-18.zip,2,NCP,491,2139,62,2,"[2138, 2139]"
CP-2.zip,1,CP,1098,3316,171,1,[3316]
CP-12.zip,1,CP,1465,4010,67,2,"[4009, 4010]"
NCP-20.zip,2,NCP,548,2254,61,2,"[2253, 2254]"
Normal-16.zip,0,Normal,2150,605,88,1,[605]
NCP-16.zip,2,NCP,451,2056,51,3,"[2056, 2057, 2058]"
Normal-11.zip,0,Normal,1965,420,88,1,[420]
NCP-1.zip,2,NCP,101,1339,136,2,"[1339, 1340]"
Normal-12.zip,0,Normal,2008,463,92,1,[463]
CP-10.zip,1,CP,1402,3867,55,3,"[3865, 3866, 3867]"
NCP-2.zip,2,NCP,122,1386,62,2,"[1385, 1386]"
CP-20.zip,1,CP,2457,2941,108,1,[2941]
NCP-14.zip,2,NCP,38,1208,137,2,"[1208, 1209]"
Normal-10.zip,0,Normal,1933,388,103,1,[388]
CP-1.zip,1,CP,10,3152,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-20.zip,2,NCP,562,2283,48,2,"[2282, 2283]"
NCP-12.zip,2,NCP,335,1819,55,2,"[1818, 1819]"
NCP-21.zip,2,NCP,579,2316,63,2,"[2315, 2316]"
Normal-7.zip,0,Normal,1856,311,80,1,[311]
NCP-18.zip,2,NCP,506,2169,51,2,"[2168, 2169]"
CP-8.zip,1,CP,1339,3719,59,2,"[3718, 3719]"
CP-18.zip,1,CP,1652,4340,25,1,[4340]
NCP-11.zip,2,NCP,296,1737,139,2,"[1737, 1738]"
Normal-8.zip,0,Normal,1886,341,84,1,[341]
NCP-8.zip,2,NCP,250,1647,60,2,"[1646, 1647]"
CP-14.zip,1,CP,1537,4184,53,3,"[4182, 4183, 4184]"
NCP-17.zip,2,NCP,486,2128,64,2,"[2127, 2128]"
CP-8.zip,1,CP,1335,3711,62,3,"[3709, 3710, 3711]"
CP-27.zip,1,CP,3739,5683,19,1,[5683]
NCP-25.zip,2,NCP,3950,5464,41,1,[5464]
CP-12.zip,1,CP,1474,4029,62,2,"[4029, 4030]"
Normal-10.zip,0,Normal,1946,401,93,1,[401]
NCP-30.zip,2,NCP,947,2490,41,1,[2490]
NCP-14.zip,2,NCP,371,1893,141,2,"[1893, 1894]"
NCP-8.zip,2,NCP,2676,2694,54,1,[2694]
NCP-1.zip,2,NCP,1011,2574,117,2,"[2574, 2575]"
Normal-9.zip,0,Normal,1906,361,93,1,[361]
NCP-4.zip,2,NCP,147,1439,72,2,"[1438, 1439]"
CP-12.zip,1,CP,1485,4058,49,3,"[4056, 4057, 4058]"
Normal-7.zip,0,Normal,1838,293,86,1,[293]
CP-25.zip,1,CP,9,3150,72,4,"[3148, 3149, 3150, 3151]"
NCP-12.zip,2,NCP,330,1809,64,2,"[1808, 1809]"
NCP-8.zip,2,NCP,267,1681,54,2,"[1680, 1681]"
NCP-20.zip,2,NCP,553,2263,137,2,"[2263, 2264]"
NCP-29.zip,2,NCP,893,2433,24,2,"[2432, 2433]"
NCP-21.zip,2,NCP,582,2321,128,2,"[2321, 2322]"
Normal-24.zip,0,Normal,2642,152,38,1,[152]
CP-25.zip,1,CP,726,3088,183,1,[3088]
NCP-5.zip,2,NCP,171,1487,60,2,"[1486, 1487]"
CP-22.zip,1,CP,632,2994,132,1,[2994]
Normal-7.zip,0,Normal,1850,305,99,1,[305]
NCP-30.zip,2,NCP,945,2488,45,1,[2488]
Normal-19.zip,0,Normal,2244,699,98,1,[699]
CP-1.zip,1,CP,1073,3116,52,1,[3116]
Normal-21.zip,0,Normal,2310,765,91,1,[765]
CP-1.zip,1,CP,10,3153,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-1.zip,1,CP,1075,3119,70,2,"[3118, 3119]"
CP-12.zip,1,CP,1470,4020,54,2,"[4020, 4021]"
NCP-26.zip,2,NCP,3997,5519,56,1,[5519]
NCP-10.zip,2,NCP,274,1694,160,2,"[1694, 1695]"
Normal-15.zip,0,Normal,2089,544,98,1,[544]
CP-24.zip,1,CP,681,3043,102,1,[3043]
NCP-20.zip,2,NCP,573,2305,63,2,"[2304, 2305]"
CP-15.zip,1,CP,1557,4233,43,2,"[4232, 4233]"
NCP-30.zip,2,NCP,990,2543,59,1,[2543]
CP-7.zip,1,CP,1305,3640,20,2,"[3640, 3641]"
NCP-5.zip,2,NCP,183,1510,123,2,"[1510, 1511]"
CP-15.zip,1,CP,1582,4270,20,1,[4270]
CP-29.zip,1,CP,3817,5761,25,1,[5761]
NCP-20.zip,2,NCP,56,1245,164,2,"[1245, 1246]"
NCP-21.zip,2,NCP,58,1250,55,2,"[1249, 1250]"
CP-8.zip,1,CP,1335,3710,62,3,"[3709, 3710, 3711]"
Normal-3.zip,0,Normal,1766,1149,60,3,"[1149, 1150, 1151]"
NCP-10.zip,2,NCP,2716,2709,49,1,[2709]
CP-10.zip,1,CP,1402,3865,131,3,"[3865, 3866, 3867]"
CP-10.zip,1,CP,1391,3841,59,4,"[3839, 3840, 3841, 3842]"
Normal-22.zip,0,Normal,2594,104,42,1,[104]
CP-26.zip,1,CP,3733,5675,174,3,"[5673, 5674, 5675]"
Normal-25.zip,0,Normal,3715,5345,30,1,[5345]
Normal-3.zip,0,Normal,762,197,363,1,[197]
NCP-15.zip,2,NCP,420,1994,71,2,"[1993, 1994]"
Normal-12.zip,0,Normal,1996,451,90,1,[451]
NCP-22.zip,2,NCP,885,2423,195,2,"[2422, 2423]"
NCP-29.zip,2,NCP,921,2463,36,1,[2463]
Normal-25.zip,0,Normal,3848,5360,192,1,[5360]
CP-28.zip,1,CP,3776,5720,30,1,[5720]
NCP-15.zip,2,NCP,402,1953,148,2,"[1953, 1954]"
Normal-19.zip,0,Normal,2232,687,99,1,[687]
CP-11.zip,1,CP,1447,3968,63,2,"[3967, 3968]"
Normal-17.zip,0,Normal,2176,631,91,1,[631]
NCP-12.zip,2,NCP,315,1778,46,2,"[1777, 1778]"
CP-2.zip,1,CP,1102,3320,182,1,[3320]
NCP-14.zip,2,NCP,373,1897,122,2,"[1897, 1898]"
CP-4.zip,1,CP,1175,3393,189,1,[3393]
NCP-14.zip,2,NCP,392,1934,143,2,"[1934, 1935]"
CP-8.zip,1,CP,1321,3679,58,2,"[3678, 3679]"
NCP-16.zip,2,NCP,430,2013,152,2,"[2013, 2014]"
NCP-26.zip,2,NCP,3988,5512,53,1,[5512]
Normal-22.zip,0,Normal,2316,771,92,1,[771]
CP-14.zip,1,CP,1531,4170,59,2,"[4169, 4170]"
Normal-3.zip,0,Normal,748,183,261,1,[183]
NCP-23.zip,2,NCP,943,2486,334,1,[2486]
Normal-18.zip,0,Normal,2202,657,82,1,[657]
CP-27.zip,1,CP,3735,5679,26,1,[5679]
NCP-15.zip,2,NCP,409,1967,153,2,"[1967, 1968]"
CP-4.zip,1,CP,1171,3389,180,1,[3389]
CP-11.zip,1,CP,1452,3977,56,2,"[3977, 3978]"
Normal-1.zip,0,Normal,1684,875,71,5,"[870, 871, 873, 874, 875]"
CP-8.zip,1,CP,1333,3705,52,2,"[3705, 3706]"
NCP-3.zip,2,NCP,135,1414,138,2,"[1414, 1415]"
NCP-25.zip,2,NCP,3965,5506,53,1,[5506]
NCP-8.zip,2,NCP,258,1662,135,2,"[1662, 1663]"
Normal-10.zip,0,Normal,1926,381,87,1,[381]
CP-16.zip,1,CP,1596,4284,22,1,[4284]
CP-14.zip,1,CP,1554,4226,41,2,"[4226, 4227]"
CP-26.zip,1,CP,3645,5605,38,1,[5605]
CP-2.zip,1,CP,1110,3328,143,1,[3328]
NCP-22.zip,2,NCP,81,1296,53,2,"[1295, 1296]"
Normal-1.zip,0,Normal,1685,877,65,4,"[877, 878, 879, 880]"
NCP-29.zip,2,NCP,923,2465,19,1,[2465]
NCP-14.zip,2,NCP,399,1948,149,2,"[1948, 1949]"
NCP-18.zip,2,NCP,510,2176,102,2,"[2176, 2177]"
NCP-20.zip,2,NCP,558,2274,51,2,"[2273, 2274]"
Normal-2.zip,0,Normal,1762,1131,70,2,"[1130, 1131]"
CP-19.zip,1,CP,2434,2898,102,3,"[2898, 2899, 2900]"
Normal-19.zip,0,Normal,2219,674,106,1,[674]
Normal-8.zip,0,Normal,1869,324,94,1,[324]
NCP-21.zip,2,NCP,70,1272,120,2,"[1272, 1273]"
NCP-10.zip,2,NCP,2710,2703,48,1,[2703]
Normal-9.zip,0,Normal,1904,359,94,1,[359]
NCP-20.zip,2,NCP,564,2287,60,2,"[2286, 2287]"
NCP-15.zip,2,NCP,424,2001,161,2,"[2001, 2002]"
CP-14.zip,1,CP,1529,4166,42,3,"[4165, 4166, 4167]"
Normal-16.zip,0,Normal,2138,593,72,1,[593]
CP-16.zip,1,CP,1613,4301,27,1,[4301]
CP-24.zip,1,CP,697,3059,114,1,[3059]
CP-10.zip,1,CP,1390,3836,215,3,"[3836, 3837, 3838]"
Normal-6.zip,0,Normal,1805,260,79,1,[260]
CP-10.zip,1,CP,1390,3837,56,3,"[3836, 3837, 3838]"
CP-3.zip,1,CP,1150,3368,214,1,[3368]
CP-2.zip,1,CP,1116,3334,183,1,[3334]
Normal-14.zip,0,Normal,2057,512,78,1,[512]
NCP-19.zip,2,NCP,532,2223,58,2,"[2222, 2223]"
CP-29.zip,1,CP,3810,5754,24,1,[5754]
CP-14.zip,1,CP,1539,4188,131,3,"[4188, 4189, 4190]"
CP-10.zip,1,CP,1385,3826,64,2,"[3825, 3826]"
NCP-29.zip,2,NCP,929,2471,21,1,[2471]
NCP-28.zip,2,NCP,856,2377,229,2,"[2376, 2377]"
NCP-15.zip,2,NCP,408,1966,55,2,"[1965, 1966]"
CP-7.zip,1,CP,1319,3675,61,2,"[3674, 3675]"
NCP-1.zip,2,NCP,1022,2591,48,1,[2591]
Normal-20.zip,0,Normal,2254,709,75,1,[709]
NCP-22.zip,2,NCP,862,2385,33,1,[2385]
CP-29.zip,1,CP,3812,5756,27,1,[5756]
CP-11.zip,1,CP,1447,3967,63,2,"[3967, 3968]"
CP-15.zip,1,CP,1556,4230,40,2,"[4230, 4231]"
CP-1.zip,1,CP,1080,3125,64,1,[3125]
Normal-4.zip,0,Normal,778,213,114,1,[213]
CP-14.zip,1,CP,1529,4167,42,3,"[4165, 4166, 4167]"
CP-2.zip,1,CP,11,3167,283,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-20.zip,2,NCP,549,2256,36,2,"[2255, 2256]"
NCP-3.zip,2,NCP,1292,2733,66,1,[2733]
Normal-13.zip,0,Normal,2047,502,93,1,[502]
NCP-20.zip,2,NCP,549,2255,83,2,"[2255, 2256]"
CP-15.zip,1,CP,1563,4246,122,3,"[4245, 4246, 4247]"
NCP-25.zip,2,NCP,3956,5469,49,1,[5469]
NCP-22.zip,2,NCP,833,2346,484,1,[2346]
CP-12.zip,1,CP,1487,4061,163,3,"[4061, 4062, 4063]"
CP-7.zip,1,CP,1306,3642,52,3,"[3642, 3643, 3644]"
NCP-17.zip,2,NCP,47,1228,58,2,"[1227, 1228]"
CP-8.zip,1,CP,1338,3716,67,2,"[3716, 3717]"
Normal-25.zip,0,Normal,3711,5341,27,1,[5341]
NCP-16.zip,2,NCP,452,2059,63,1,[2059]
Normal-23.zip,0,Normal,2604,114,36,1,[114]
NCP-28.zip,2,NCP,849,2368,224,1,[2368]
NCP-29.zip,2,NCP,886,2424,52,1,[2424]
NCP-28.zip,2,NCP,875,2408,218,1,[2408]
NCP-20.zip,2,NCP,573,2304,151,2,"[2304, 2305]"
NCP-22.zip,2,NCP,83,1300,70,2,"[1299, 1300]"
Normal-14.zip,0,Normal,2056,511,84,1,[511]
Normal-7.zip,0,Normal,1844,299,93,1,[299]
CP-13.zip,1,CP,1494,4083,154,3,"[4083, 4084, 4085]"
CP-5.zip,1,CP,1201,3419,171,1,[3419]
NCP-23.zip,2,NCP,897,2438,40,1,[2438]
Normal-27.zip,0,Normal,3914,5456,55,2,"[5456, 5457]"
CP-9.zip,1,CP,1354,3751,181,3,"[3751, 3752, 3753]"
NCP-29.zip,2,NCP,899,2440,34,2,"[2440, 2441]"
CP-10.zip,1,CP,1414,3891,151,3,"[3891, 3892, 3893]"
CP-14.zip,1,CP,1543,4202,57,3,"[4200, 4201, 4202]"
Normal-25.zip,0,Normal,3837,5349,208,1,[5349]
NCP-10.zip,2,NCP,272,1691,64,2,"[1690, 1691]"
Normal-9.zip,0,Normal,1905,360,93,1,[360]
CP-8.zip,1,CP,1340,3721,64,2,"[3720, 3721]"
NCP-5.zip,2,NCP,19,1170,146,2,"[1170, 1171]"
Normal-2.zip,0,Normal,1738,1041,75,1,[1041]
NCP-2.zip,2,NCP,108,1354,58,2,"[1353, 1354]"
Normal-25.zip,0,Normal,3844,5356,201,1,[5356]
CP-20.zip,1,CP,2459,2945,108,1,[2945]
CP-10.zip,1,CP,1414,3892,63,3,"[3891, 3892, 3893]"
Normal-18.zip,0,Normal,2201,656,66,1,[656]
NCP-21.zip,2,NCP,78,1289,166,2,"[1289, 1290]"
CP-18.zip,1,CP,1776,3539,76,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-1.zip,2,NCP,1010,2572,126,2,"[2572, 2573]"
CP-11.zip,1,CP,1441,3951,203,3,"[3951, 3952, 3953]"
CP-13.zip,1,CP,1512,4125,50,2,"[4125, 4126]"
CP-30.zip,1,CP,3934,5640,53,3,"[5638, 5639, 5640]"
NCP-4.zip,2,NCP,143,1430,128,2,"[1430, 1431]"
Normal-17.zip,0,Normal,2166,621,93,1,[621]
NCP-22.zip,2,NCP,83,1299,167,2,"[1299, 1300]"
CP-29.zip,1,CP,3804,5748,29,1,[5748]
CP-22.zip,1,CP,624,2986,90,1,[2986]
NCP-7.zip,2,NCP,231,1607,58,2,"[1606, 1607]"
NCP-8.zip,2,NCP,258,1663,57,2,"[1662, 1663]"
Normal-10.zip,0,Normal,1956,411,89,1,[411]
NCP-4.zip,2,NCP,165,1475,55,2,"[1474, 1475]"
Normal-2.zip,0,Normal,1753,1091,60,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
CP-6.zip,1,CP,1247,3465,218,1,[3465]
CP-17.zip,1,CP,1644,4332,23,1,[4332]
NCP-5.zip,2,NCP,188,1520,134,2,"[1520, 1521]"
CP-13.zip,1,CP,1509,4118,233,3,"[4118, 4119, 4120]"
CP-19.zip,1,CP,2434,2899,102,3,"[2898, 2899, 2900]"
Normal-27.zip,0,Normal,3914,5457,55,2,"[5456, 5457]"
NCP-3.zip,2,NCP,133,1410,100,2,"[1410, 1411]"
CP-24.zip,1,CP,690,3052,134,1,[3052]
NCP-6.zip,2,NCP,208,1560,134,2,"[1560, 1561]"
Normal-26.zip,0,Normal,3872,5384,29,1,[5384]
CP-7.zip,1,CP,1258,3476,202,1,[3476]
NCP-4.zip,2,NCP,154,1453,47,2,"[1452, 1453]"
CP-8.zip,1,CP,1335,3709,207,3,"[3709, 3710, 3711]"
CP-7.zip,1,CP,1305,3641,50,2,"[3640, 3641]"
CP-25.zip,1,CP,716,3078,640,1,[3078]
Normal-2.zip,0,Normal,1761,1125,45,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-14.zip,2,NCP,38,1209,57,2,"[1208, 1209]"
Normal-1.zip,0,Normal,1685,878,65,4,"[877, 878, 879, 880]"
NCP-17.zip,2,NCP,467,2090,58,2,"[2089, 2090]"
CP-14.zip,1,CP,1539,4189,54,3,"[4188, 4189, 4190]"
NCP-16.zip,2,NCP,454,2063,58,2,"[2062, 2063]"
CP-13.zip,1,CP,1491,4076,48,3,"[4074, 4075, 4076]"
Normal-4.zip,0,Normal,794,229,341,1,[229]
NCP-19.zip,2,NCP,521,2199,58,2,"[2198, 2199]"
CP-7.zip,1,CP,1311,3656,67,3,"[3655, 3656, 3657]"
Normal-22.zip,0,Normal,2584,94,44,1,[94]
CP-23.zip,1,CP,678,3040,46,1,[3040]
CP-14.zip,1,CP,1539,4190,54,3,"[4188, 4189, 4190]"
CP-30.zip,1,CP,3937,5644,55,2,"[5643, 5644]"
NCP-15.zip,2,NCP,427,2007,132,2,"[2007, 2008]"
NCP-28.zip,2,NCP,843,2358,279,1,[2358]
NCP-14.zip,2,NCP,375,1903,49,3,"[1901, 1902, 1903]"
NCP-11.zip,2,NCP,306,1759,153,2,"[1759, 1760]"
NCP-16.zip,2,NCP,44,1221,124,2,"[1221, 1222]"
NCP-8.zip,2,NCP,256,1659,58,2,"[1658, 1659]"
CP-8.zip,1,CP,1338,3717,67,2,"[3716, 3717]"
CP-18.zip,1,CP,1780,3553,67,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-7.zip,1,CP,1267,3485,151,1,[3485]
CP-13.zip,1,CP,1509,4119,118,3,"[4118, 4119, 4120]"
Normal-3.zip,0,Normal,1766,1151,62,3,"[1149, 1150, 1151]"
CP-10.zip,1,CP,1405,3873,60,2,"[3872, 3873]"
CP-1.zip,1,CP,1079,3124,63,1,[3124]
CP-18.zip,1,CP,1780,3559,69,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
Normal-7.zip,0,Normal,1852,307,94,1,[307]
CP-5.zip,1,CP,1195,3413,247,1,[3413]
NCP-20.zip,2,NCP,556,2270,53,2,"[2269, 2270]"
NCP-2.zip,2,NCP,108,1353,139,2,"[1353, 1354]"
NCP-16.zip,2,NCP,445,2045,58,2,"[2044, 2045]"
CP-13.zip,1,CP,1512,4126,50,2,"[4125, 4126]"
NCP-21.zip,2,NCP,64,1262,55,2,"[1261, 1262]"
CP-5.zip,1,CP,1211,3429,143,1,[3429]
NCP-1.zip,2,NCP,1042,2614,143,2,"[2613, 2614]"
NCP-21.zip,2,NCP,73,1280,55,3,"[1278, 1279, 1280]"
CP-9.zip,1,CP,1364,3776,133,3,"[3776, 3777, 3778]"
NCP-21.zip,2,NCP,58,1249,131,2,"[1249, 1250]"
CP-20.zip,1,CP,2668,3250,44,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-21.zip,2,NCP,73,1279,57,3,"[1278, 1279, 1280]"
CP-26.zip,1,CP,3733,5674,159,3,"[5673, 5674, 5675]"
Normal-19.zip,0,Normal,2247,702,86,1,[702]
NCP-28.zip,2,NCP,867,2394,161,1,[2394]
CP-22.zip,1,CP,633,2995,114,1,[2995]
CP-9.zip,1,CP,1371,3796,60,3,"[3794, 3795, 3796]"
NCP-22.zip,2,NCP,86,1305,117,2,"[1305, 1306]"
NCP-14.zip,2,NCP,40,1213,63,2,"[1212, 1213]"
Normal-26.zip,0,Normal,3892,5415,72,1,[5415]
CP-7.zip,1,CP,1306,3644,237,3,"[3642, 3643, 3644]"
CP-24.zip,1,CP,702,3064,78,1,[3064]
NCP-26.zip,2,NCP,3975,5483,44,1,[5483]
CP-4.zip,1,CP,1164,3382,193,1,[3382]
Normal-11.zip,0,Normal,1960,415,98,1,[415]
CP-5.zip,1,CP,1203,3421,231,1,[3421]
CP-19.zip,1,CP,2434,2900,104,3,"[2898, 2899, 2900]"
NCP-29.zip,2,NCP,890,2429,203,1,[2429]
NCP-16.zip,2,NCP,448,2050,139,2,"[2050, 2051]"
CP-18.zip,1,CP,1780,3555,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-12.zip,1,CP,1457,3991,69,1,[3991]
Normal-3.zip,0,Normal,756,191,106,1,[191]
NCP-29.zip,2,NCP,900,2442,506,1,[2442]
NCP-17.zip,2,NCP,476,2107,127,2,"[2107, 2108]"
CP-28.zip,1,CP,3794,5738,26,1,[5738]
CP-23.zip,1,CP,669,3031,70,1,[3031]
Normal-9.zip,0,Normal,1911,366,96,1,[366]
Normal-9.zip,0,Normal,1919,374,99,1,[374]
NCP-12.zip,2,NCP,335,1818,129,2,"[1818, 1819]"
CP-18.zip,1,CP,1651,4339,31,1,[4339]
Normal-4.zip,0,Normal,798,233,122,1,[233]
NCP-18.zip,2,NCP,508,2173,61,2,"[2172, 2173]"
NCP-21.zip,2,NCP,67,1266,168,2,"[1266, 1267]"
NCP-6.zip,2,NCP,214,1573,60,2,"[1572, 1573]"
CP-10.zip,1,CP,1405,3872,60,2,"[3872, 3873]"
NCP-6.zip,2,NCP,208,1561,56,2,"[1560, 1561]"
NCP-14.zip,2,NCP,373,1898,52,2,"[1897, 1898]"
NCP-3.zip,2,NCP,1281,2722,65,1,[2722]
CP-24.zip,1,CP,707,3069,72,1,[3069]
NCP-28.zip,2,NCP,831,2344,278,1,[2344]
Normal-17.zip,0,Normal,2179,634,101,1,[634]
NCP-21.zip,2,NCP,60,1253,141,2,"[1253, 1254]"
NCP-8.zip,2,NCP,259,1665,65,2,"[1664, 1665]"
NCP-11.zip,2,NCP,311,1770,55,2,"[1769, 1770]"
NCP-27.zip,2,NCP,1050,2623,46,2,"[2623, 2624]"
NCP-18.zip,2,NCP,490,2137,62,2,"[2136, 2137]"
Normal-27.zip,0,Normal,3900,5431,64,2,"[5431, 5432]"
Normal-15.zip,0,Normal,2110,565,83,1,[565]
NCP-13.zip,2,NCP,368,1887,129,2,"[1887, 1888]"
NCP-27.zip,2,NCP,817,2326,120,1,[2326]
CP-15.zip,1,CP,1567,4255,59,2,"[4254, 4255]"
NCP-5.zip,2,NCP,178,1500,124,2,"[1500, 1501]"
NCP-13.zip,2,NCP,345,1841,147,2,"[1841, 1842]"
Normal-2.zip,0,Normal,1761,1128,60,5,"[1125, 1126, 1127, 1128, 1129]"
CP-8.zip,1,CP,1343,3727,56,2,"[3726, 3727]"
NCP-30.zip,2,NCP,936,2478,21,1,[2478]
NCP-11.zip,2,NCP,306,1760,64,2,"[1759, 1760]"
NCP-17.zip,2,NCP,487,2129,167,2,"[2129, 2130]"
CP-30.zip,1,CP,3930,5629,62,2,"[5628, 5629]"
NCP-9.zip,2,NCP,2692,2700,48,1,[2700]
NCP-20.zip,2,NCP,556,2269,125,2,"[2269, 2270]"
CP-18.zip,1,CP,1775,3531,58,4,"[3530, 3531, 3532, 3533]"
NCP-23.zip,2,NCP,896,2437,39,1,[2437]
CP-21.zip,1,CP,5,3509,275,1,[3509]
Normal-19.zip,0,Normal,2217,672,71,1,[672]
NCP-1.zip,2,NCP,1010,2573,126,2,"[2572, 2573]"
NCP-1.zip,2,NCP,100,1337,139,2,"[1337, 1338]"
NCP-26.zip,2,NCP,3998,5495,41,1,[5495]
CP-25.zip,1,CP,711,3073,112,1,[3073]
CP-24.zip,1,CP,699,3061,64,1,[3061]
CP-4.zip,1,CP,1173,3391,201,1,[3391]
CP-27.zip,1,CP,3740,5684,23,1,[5684]
CP-16.zip,1,CP,1590,4278,20,1,[4278]
Normal-2.zip,0,Normal,1762,1130,70,2,"[1130, 1131]"
Normal-1.zip,0,Normal,1679,833,66,6,"[833, 834, 835, 836, 837, 838]"
NCP-29.zip,2,NCP,928,2470,25,1,[2470]
CP-18.zip,1,CP,1775,3533,57,4,"[3530, 3531, 3532, 3533]"
Normal-3.zip,0,Normal,766,201,94,1,[201]
Normal-11.zip,0,Normal,1964,419,100,1,[419]
NCP-9.zip,2,NCP,2690,2657,48,1,[2657]
NCP-21.zip,2,NCP,78,1290,69,2,"[1289, 1290]"
Normal-16.zip,0,Normal,2147,602,95,1,[602]
NCP-19.zip,2,NCP,544,2246,62,2,"[2245, 2246]"
Normal-27.zip,0,Normal,3900,5432,64,2,"[5431, 5432]"
Normal-8.zip,0,Normal,1860,315,92,1,[315]
CP-21.zip,1,CP,601,2963,104,1,[2963]
CP-2.zip,1,CP,11,3164,287,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
CP-15.zip,1,CP,1563,4245,241,3,"[4245, 4246, 4247]"
CP-19.zip,1,CP,1789,3205,59,4,"[3204, 3205, 3206, 3207]"
CP-4.zip,1,CP,1176,3394,161,1,[3394]
CP-10.zip,1,CP,1397,3855,60,2,"[3854, 3855]"
CP-16.zip,1,CP,1594,4282,26,1,[4282]
CP-1.zip,1,CP,1077,3121,74,2,"[3121, 3122]"
CP-29.zip,1,CP,3819,5763,31,1,[5763]
CP-12.zip,1,CP,1468,4016,54,3,"[4015, 4016, 4017]"
CP-3.zip,1,CP,1139,3357,332,1,[3357]
Normal-14.zip,0,Normal,2070,525,104,1,[525]
Normal-1.zip,0,Normal,1672,798,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-11.zip,1,CP,1435,3939,46,2,"[3938, 3939]"
CP-30.zip,1,CP,4019,5568,38,1,[5568]
CP-18.zip,1,CP,1777,3540,67,5,"[3540, 3541, 3542, 3543, 3544]"
CP-23.zip,1,CP,666,3028,192,1,[3028]
Normal-1.zip,0,Normal,1703,959,70,2,"[959, 960]"
CP-3.zip,1,CP,1133,3351,213,1,[3351]
CP-13.zip,1,CP,1504,4107,64,1,[4107]
Normal-3.zip,0,Normal,745,180,105,1,[180]
Normal-26.zip,0,Normal,3869,5381,27,1,[5381]
CP-18.zip,1,CP,1774,3528,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-21.zip,0,Normal,2301,756,88,1,[756]
CP-18.zip,1,CP,1771,3519,51,4,"[3518, 3519, 3520, 3521]"
CP-22.zip,1,CP,643,3005,126,1,[3005]
CP-26.zip,1,CP,3723,5658,43,1,[5658]
Normal-8.zip,0,Normal,1884,339,82,1,[339]
CP-15.zip,1,CP,1586,4274,23,1,[4274]
CP-8.zip,1,CP,1349,3743,58,3,"[3742, 3743, 3744]"
Normal-22.zip,0,Normal,2586,96,30,1,[96]
Normal-4.zip,0,Normal,785,220,292,1,[220]
CP-19.zip,1,CP,2428,2887,124,1,[2887]
NCP-13.zip,2,NCP,352,1856,58,2,"[1855, 1856]"
NCP-2.zip,2,NCP,109,1355,143,2,"[1355, 1356]"
CP-13.zip,1,CP,1493,4080,125,3,"[4080, 4081, 4082]"
CP-4.zip,1,CP,1191,3409,220,1,[3409]
CP-17.zip,1,CP,1642,4330,25,1,[4330]
CP-7.zip,1,CP,1304,3635,232,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-27.zip,2,NCP,1058,2635,46,1,[2635]
Normal-14.zip,0,Normal,2071,526,103,1,[526]
CP-26.zip,1,CP,3719,5650,55,3,"[5649, 5650, 5651]"
Normal-24.zip,0,Normal,2663,173,48,1,[173]
NCP-3.zip,2,NCP,1298,2739,60,1,[2739]
CP-19.zip,1,CP,2430,2891,102,2,"[2891, 2892]"
CP-12.zip,1,CP,1458,3993,69,3,"[3992, 3993, 3994]"
Normal-1.zip,0,Normal,1677,823,64,4,"[823, 824, 825, 826]"
CP-12.zip,1,CP,1469,4018,47,2,"[4018, 4019]"
CP-7.zip,1,CP,1268,3486,336,1,[3486]
Normal-18.zip,0,Normal,2203,658,75,1,[658]
CP-21.zip,1,CP,593,2955,100,1,[2955]
Normal-16.zip,0,Normal,2143,598,87,1,[598]
NCP-20.zip,2,NCP,552,2261,146,2,"[2261, 2262]"
NCP-11.zip,2,NCP,309,1766,69,2,"[1766, 1765]"
NCP-19.zip,2,NCP,520,2197,55,2,"[2196, 2197]"
CP-14.zip,1,CP,1550,4217,64,2,"[4217, 4218]"
NCP-26.zip,2,NCP,3976,5484,32,1,[5484]
NCP-31.zip,2,NCP,998,2555,44,1,[2555]
NCP-2.zip,2,NCP,107,1351,146,2,"[1351, 1352]"
Normal-16.zip,0,Normal,2136,591,83,1,[591]
CP-12.zip,1,CP,1463,4006,49,2,"[4005, 4006]"
NCP-4.zip,2,NCP,156,1457,58,2,"[1456, 1457]"
NCP-1.zip,2,NCP,1002,2561,58,1,[2561]
Normal-1.zip,0,Normal,1672,801,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
Normal-14.zip,0,Normal,2078,533,73,1,[533]
NCP-5.zip,2,NCP,185,1514,121,2,"[1514, 1515]"
CP-14.zip,1,CP,1530,4168,60,1,[4168]
NCP-15.zip,2,NCP,413,1976,128,4,"[1975, 1976, 1977, 1979]"
CP-5.zip,1,CP,1224,3442,204,1,[3442]
CP-5.zip,1,CP,1215,3433,165,1,[3433]
Normal-26.zip,0,Normal,3886,5399,76,1,[5399]
Normal-24.zip,0,Normal,2640,150,41,1,[150]
NCP-28.zip,2,NCP,836,2351,52,1,[2351]
NCP-4.zip,2,NCP,146,1436,123,2,"[1436, 1437]"
Normal-17.zip,0,Normal,2155,610,89,1,[610]
CP-30.zip,1,CP,3939,5547,38,1,[5547]
CP-19.zip,1,CP,1784,3590,112,4,"[3590, 3591, 3592, 3593]"
CP-10.zip,1,CP,1399,3859,45,2,"[3858, 3859]"
NCP-19.zip,2,NCP,519,2194,126,2,"[2194, 2195]"
NCP-11.zip,2,NCP,297,1739,144,2,"[1739, 1741]"
NCP-22.zip,2,NCP,88,1309,170,2,"[1309, 1310]"
CP-18.zip,1,CP,1778,3547,65,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-30.zip,2,NCP,968,2511,61,1,[2511]
CP-9.zip,1,CP,1360,3769,67,3,"[3767, 3768, 3769]"
CP-26.zip,1,CP,3638,5597,285,1,[5597]
NCP-13.zip,2,NCP,353,1857,167,2,"[1857, 1858]"
CP-30.zip,1,CP,3932,5634,71,2,"[5634, 5635]"
NCP-21.zip,2,NCP,62,1257,144,2,"[1257, 1258]"
CP-2.zip,1,CP,1127,3345,278,1,[3345]
NCP-12.zip,2,NCP,337,1823,58,2,"[1822, 1823]"
NCP-14.zip,2,NCP,390,1931,53,2,"[1930, 1931]"
NCP-15.zip,2,NCP,417,1988,58,2,"[1987, 1988]"
CP-24.zip,1,CP,689,3051,58,1,[3051]
CP-9.zip,1,CP,1377,3808,58,2,"[3808, 3809]"
CP-13.zip,1,CP,1505,4110,54,3,"[4108, 4109, 4110]"
CP-13.zip,1,CP,1492,4078,58,3,"[4077, 4078, 4079]"
NCP-4.zip,2,NCP,159,1463,61,2,"[1462, 1463]"
NCP-6.zip,2,NCP,220,1585,67,2,"[1584, 1585]"
NCP-29.zip,2,NCP,884,2421,23,1,[2421]
Normal-3.zip,0,Normal,757,192,110,1,[192]
CP-21.zip,1,CP,4,3505,298,4,"[3505, 3506, 3507, 3508]"
CP-16.zip,1,CP,1608,4296,23,1,[4296]
CP-4.zip,1,CP,1169,3387,171,1,[3387]
Normal-4.zip,0,Normal,797,232,112,1,[232]
NCP-19.zip,2,NCP,540,2238,54,2,"[2237, 2238]"
Normal-14.zip,0,Normal,2068,523,81,1,[523]
Normal-11.zip,0,Normal,1985,440,96,1,[440]
CP-9.zip,1,CP,1353,3748,140,3,"[3748, 3749, 3750]"
NCP-6.zip,2,NCP,224,1592,136,2,"[1592, 1593]"
CP-10.zip,1,CP,1397,3854,60,2,"[3854, 3855]"
NCP-12.zip,2,NCP,318,1784,63,2,"[1783, 1784]"
NCP-21.zip,2,NCP,59,1251,122,2,"[1251, 1252]"
Normal-17.zip,0,Normal,2184,639,86,1,[639]
NCP-18.zip,2,NCP,493,2143,56,2,"[2142, 2143]"
NCP-25.zip,2,NCP,3954,5467,42,1,[5467]
Normal-2.zip,0,Normal,1763,1137,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
CP-23.zip,1,CP,675,3037,124,1,[3037]
CP-9.zip,1,CP,1365,3780,60,3,"[3779, 3780, 3781]"
CP-6.zip,1,CP,1256,3474,140,1,[3474]
NCP-16.zip,2,NCP,441,2037,49,2,"[2036, 2037]"
NCP-7.zip,2,NCP,2484,2643,46,1,[2643]
CP-20.zip,1,CP,2771,3302,37,1,[3302]
NCP-10.zip,2,NCP,2714,2707,53,1,[2707]
Normal-4.zip,0,Normal,772,207,363,1,[207]
NCP-16.zip,2,NCP,440,2035,53,2,"[2034, 2035]"
CP-17.zip,1,CP,1646,4334,26,1,[4334]
NCP-11.zip,2,NCP,284,1713,139,2,"[1713, 1714]"
CP-23.zip,1,CP,656,3018,575,1,[3018]
CP-2.zip,1,CP,1104,3322,164,1,[3322]
NCP-22.zip,2,NCP,85,1303,139,2,"[1303, 1304]"
CP-30.zip,1,CP,3933,5637,38,2,"[5636, 5637]"
Normal-7.zip,0,Normal,1839,294,94,1,[294]
NCP-6.zip,2,NCP,223,1590,132,2,"[1590, 1591]"
CP-2.zip,1,CP,1119,3337,157,1,[3337]
CP-11.zip,1,CP,1431,3931,61,2,"[3930, 3931]"
CP-7.zip,1,CP,1304,3634,47,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-11.zip,2,NCP,299,1745,58,2,"[1744, 1745]"
NCP-15.zip,2,NCP,405,1960,60,2,"[1959, 1960]"
NCP-20.zip,2,NCP,574,2307,58,2,"[2306, 2307]"
CP-10.zip,1,CP,1412,3887,66,2,"[3887, 3888]"
NCP-4.zip,2,NCP,167,1479,60,2,"[1478, 1479]"
NCP-4.zip,2,NCP,157,1459,49,2,"[1458, 1459]"
NCP-13.zip,2,NCP,349,1849,135,2,"[1849, 1850]"
CP-18.zip,1,CP,1771,3520,51,4,"[3518, 3519, 3520, 3521]"
NCP-14.zip,2,NCP,372,1895,109,2,"[1895, 1896]"
NCP-18.zip,2,NCP,503,2162,146,2,"[2162, 2163]"
NCP-6.zip,2,NCP,199,1543,58,2,"[1542, 1543]"
CP-18.zip,1,CP,1662,4350,19,1,[4350]
CP-9.zip,1,CP,1377,3809,57,2,"[3808, 3809]"
Normal-1.zip,0,Normal,1727,1009,63,4,"[1009, 1010, 1011, 1012]"
NCP-20.zip,2,NCP,566,2290,160,2,"[2290, 2291]"
CP-29.zip,1,CP,3821,5765,29,1,[5765]
NCP-5.zip,2,NCP,190,1525,64,2,"[1524, 1525]"
Normal-2.zip,0,Normal,1746,1064,68,2,"[1063, 1064]"
CP-27.zip,1,CP,3744,5688,17,1,[5688]
CP-2.zip,1,CP,1111,3329,204,1,[3329]
Normal-10.zip,0,Normal,1948,403,98,1,[403]
NCP-12.zip,2,NCP,338,1824,150,2,"[1824, 1825]"
NCP-13.zip,2,NCP,348,1847,112,2,"[1847, 1848]"
CP-24.zip,1,CP,700,3062,86,1,[3062]
CP-18.zip,1,CP,1655,4343,23,1,[4343]
CP-27.zip,1,CP,3736,5680,16,1,[5680]
Normal-24.zip,0,Normal,2654,164,31,1,[164]
NCP-13.zip,2,NCP,359,1869,145,2,"[1869, 1870]"
NCP-16.zip,2,NCP,437,2027,142,2,"[2027, 2028]"
CP-27.zip,1,CP,3741,5685,17,1,[5685]
CP-24.zip,1,CP,693,3055,273,1,[3055]
CP-24.zip,1,CP,682,3044,149,1,[3044]
Normal-17.zip,0,Normal,2175,630,80,1,[630]
NCP-6.zip,2,NCP,223,1591,56,2,"[1590, 1591]"
NCP-2.zip,2,NCP,1051,2626,178,2,"[2625, 2626]"
CP-11.zip,1,CP,1454,3982,125,3,"[3982, 3983, 3984]"
Normal-20.zip,0,Normal,2253,708,70,1,[708]
Normal-20.zip,0,Normal,2252,707,84,1,[707]
Normal-21.zip,0,Normal,2308,763,85,1,[763]
NCP-18.zip,2,NCP,516,2189,57,2,"[2188, 2189]"
NCP-12.zip,2,NCP,313,1774,62,2,"[1773, 1774]"
CP-2.zip,1,CP,1126,3344,204,1,[3344]
Normal-20.zip,0,Normal,2257,712,83,1,[712]
NCP-6.zip,2,NCP,203,1551,59,2,"[1550, 1551]"
CP-13.zip,1,CP,1503,4106,64,3,"[4104, 4105, 4106]"
Normal-20.zip,0,Normal,2280,735,82,1,[735]
CP-19.zip,1,CP,2443,2915,112,3,"[2915, 2916, 2917]"
CP-20.zip,1,CP,2451,2930,136,1,[2930]
CP-1.zip,1,CP,1093,3311,173,1,[3311]
CP-13.zip,1,CP,1518,4138,160,3,"[4138, 4139, 4140]"
CP-20.zip,1,CP,2773,3304,30,1,[3304]
NCP-15.zip,2,NCP,414,1981,51,2,"[1980, 1981]"
NCP-23.zip,2,NCP,96,1328,145,2,"[1328, 1329]"
CP-11.zip,1,CP,1422,3909,59,3,"[3908, 3909, 3910]"
Normal-20.zip,0,Normal,2258,713,74,1,[713]
NCP-29.zip,2,NCP,882,2417,52,2,"[2417, 2418]"
Normal-2.zip,0,Normal,1737,1038,79,4,"[1037, 1038, 1039, 1040]"
Normal-13.zip,0,Normal,2025,480,101,1,[480]
NCP-5.zip,2,NCP,173,1490,139,2,"[1490, 1491]"
CP-6.zip,1,CP,1257,3475,155,1,[3475]
NCP-23.zip,2,NCP,952,2495,379,1,[2495]
Normal-1.zip,0,Normal,1700,954,64,2,"[953, 954]"
NCP-17.zip,2,NCP,465,2085,31,3,"[2084, 2085, 2086]"
Normal-16.zip,0,Normal,2122,577,85,1,[577]
CP-13.zip,1,CP,1502,4102,73,2,"[4102, 4103]"
Normal-17.zip,0,Normal,2153,608,82,1,[608]
Normal-24.zip,0,Normal,2650,160,40,1,[160]
NCP-27.zip,2,NCP,1031,2602,231,2,"[2601, 2602]"
NCP-14.zip,2,NCP,393,1937,62,2,"[1936, 1937]"
CP-5.zip,1,CP,12,3169,233,2,"[3168, 3169]"
Normal-11.zip,0,Normal,1986,441,88,1,[441]
CP-19.zip,1,CP,2433,2897,108,1,[2897]
NCP-4.zip,2,NCP,151,1447,54,2,"[1446, 1447]"
NCP-13.zip,2,NCP,370,1891,128,2,"[1891, 1892]"
Normal-17.zip,0,Normal,2168,623,89,1,[623]
NCP-29.zip,2,NCP,880,2415,312,1,[2415]
NCP-12.zip,2,NCP,338,1825,63,2,"[1824, 1825]"
Normal-23.zip,0,Normal,2634,144,37,1,[144]
NCP-14.zip,2,NCP,396,1942,170,2,"[1942, 1943]"
NCP-16.zip,2,NCP,439,2032,162,2,"[2032, 2033]"
NCP-8.zip,2,NCP,266,1678,137,2,"[1678, 1679]"
CP-11.zip,1,CP,1423,3911,204,3,"[3911, 3912, 3913]"
CP-11.zip,1,CP,1454,3984,53,3,"[3982, 3983, 3984]"
CP-28.zip,1,CP,3792,5736,20,1,[5736]
Normal-1.zip,0,Normal,1727,1011,66,4,"[1009, 1010, 1011, 1012]"
Normal-19.zip,0,Normal,2234,689,89,1,[689]
NCP-13.zip,2,NCP,35,1203,58,2,"[1202, 1203]"
NCP-18.zip,2,NCP,51,1236,59,2,"[1235, 1236]"
NCP-2.zip,2,NCP,113,1368,58,2,"[1367, 1368]"
Normal-2.zip,0,Normal,1757,1107,68,4,"[1105, 1106, 1107, 1108]"
NCP-12.zip,2,NCP,319,1785,158,2,"[1785, 1787]"
Normal-22.zip,0,Normal,2322,777,88,1,[777]
CP-21.zip,1,CP,584,2946,116,1,[2946]
CP-9.zip,1,CP,1365,3781,60,3,"[3779, 3780, 3781]"
NCP-12.zip,2,NCP,322,1792,120,2,"[1792, 1793]"
Normal-2.zip,0,Normal,1763,1140,75,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-21.zip,2,NCP,59,1252,52,2,"[1251, 1252]"
NCP-5.zip,2,NCP,170,1485,59,2,"[1484, 1485]"
NCP-21.zip,2,NCP,72,1276,129,2,"[1276, 1277]"
NCP-22.zip,2,NCP,887,2425,38,1,[2425]
CP-2.zip,1,CP,1117,3335,155,1,[3335]
Normal-2.zip,0,Normal,1763,1134,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
CP-18.zip,1,CP,1778,3550,64,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
CP-23.zip,1,CP,664,3026,78,1,[3026]
CP-23.zip,1,CP,668,3030,102,1,[3030]
NCP-13.zip,2,NCP,355,1862,53,2,"[1861, 1862]"
NCP-13.zip,2,NCP,358,1867,160,2,"[1867, 1868]"
CP-14.zip,1,CP,1550,4218,64,2,"[4217, 4218]"
CP-26.zip,1,CP,3729,5667,207,3,"[5665, 5666, 5667]"
CP-21.zip,1,CP,603,2965,88,1,[2965]
NCP-13.zip,2,NCP,370,1892,54,2,"[1891, 1892]"
NCP-13.zip,2,NCP,35,1202,139,2,"[1202, 1203]"
CP-3.zip,1,CP,1155,3373,171,1,[3373]
Normal-10.zip,0,Normal,1927,382,99,1,[382]
CP-15.zip,1,CP,1574,4262,26,1,[4262]
CP-13.zip,1,CP,1498,4096,60,2,"[4095, 4096]"
NCP-6.zip,2,NCP,205,1555,53,2,"[1554, 1555]"
NCP-11.zip,2,NCP,301,1748,147,2,"[1748, 1749]"
NCP-11.zip,2,NCP,303,1752,139,2,"[1752, 1753]"
CP-12.zip,1,CP,1468,4017,54,3,"[4015, 4016, 4017]"
Normal-14.zip,0,Normal,2081,536,93,1,[536]
Normal-2.zip,0,Normal,1763,1141,75,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-22.zip,2,NCP,859,2380,299,2,"[2380, 2381]"
Normal-26.zip,0,Normal,3885,5398,63,1,[5398]
CP-13.zip,1,CP,1505,4109,54,3,"[4108, 4109, 4110]"
NCP-1.zip,2,NCP,103,1343,150,2,"[1343, 1344]"
NCP-14.zip,2,NCP,396,1943,71,2,"[1942, 1943]"
NCP-22.zip,2,NCP,871,2402,293,2,"[2401, 2402]"
Normal-10.zip,0,Normal,1951,406,105,1,[406]
CP-11.zip,1,CP,1434,3936,63,2,"[3936, 3937]"
CP-26.zip,1,CP,3724,5659,51,1,[5659]
CP-12.zip,1,CP,1471,4022,56,2,"[4022, 4023]"
Normal-21.zip,0,Normal,2304,759,110,1,[759]
CP-28.zip,1,CP,3777,5721,26,1,[5721]
NCP-28.zip,2,NCP,837,2352,57,1,[2352]
Normal-2.zip,0,Normal,1763,1133,72,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-8.zip,0,Normal,1873,328,104,1,[328]
CP-12.zip,1,CP,1458,3992,165,3,"[3992, 3993, 3994]"
NCP-7.zip,2,NCP,230,1604,139,2,"[1604, 1605]"
CP-30.zip,1,CP,4042,5591,37,1,[5591]
Normal-4.zip,0,Normal,774,209,134,1,[209]
Normal-19.zip,0,Normal,2228,683,85,1,[683]
Normal-18.zip,0,Normal,2206,661,77,1,[661]
CP-17.zip,1,CP,1628,4316,23,1,[4316]
Normal-11.zip,0,Normal,1969,424,90,1,[424]
Normal-20.zip,0,Normal,2259,714,97,1,[714]
CP-17.zip,1,CP,1640,4328,25,1,[4328]
NCP-8.zip,2,NCP,254,1654,139,2,"[1654, 1655]"
Normal-16.zip,0,Normal,2140,595,88,1,[595]
CP-6.zip,1,CP,1249,3467,144,1,[3467]
NCP-23.zip,2,NCP,92,1321,37,2,"[1320, 1321]"
CP-18.zip,1,CP,1657,4345,24,1,[4345]
NCP-17.zip,2,NCP,484,2124,58,2,"[2123, 2124]"
Normal-2.zip,0,Normal,1743,1057,73,2,"[1056, 1057]"
CP-18.zip,1,CP,1778,3545,66,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-30.zip,2,NCP,966,2509,279,1,[2509]
CP-9.zip,1,CP,1376,3807,60,2,"[3806, 3807]"
Normal-1.zip,0,Normal,1716,987,71,2,"[987, 988]"
CP-7.zip,1,CP,1302,3602,42,4,"[3602, 3603, 3604, 3605]"
NCP-18.zip,2,NCP,50,1233,141,2,"[1233, 1234]"
CP-32.zip,1,CP,1781,3572,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-5.zip,2,NCP,192,1528,135,2,"[1528, 1529]"
NCP-7.zip,2,NCP,2489,2646,40,1,[2646]
CP-11.zip,1,CP,1434,3937,63,2,"[3936, 3937]"
CP-23.zip,1,CP,645,3007,124,1,[3007]
Normal-10.zip,0,Normal,1941,396,91,1,[396]
Normal-12.zip,0,Normal,2001,456,86,1,[456]
Normal-3.zip,0,Normal,761,196,120,1,[196]
CP-7.zip,1,CP,1265,3483,166,1,[3483]
NCP-3.zip,2,NCP,1287,2728,66,1,[2728]
NCP-28.zip,2,NCP,835,2350,52,2,"[2349, 2350]"
NCP-19.zip,2,NCP,543,2243,128,2,"[2243, 2244]"
CP-21.zip,1,CP,4,3507,259,4,"[3505, 3506, 3507, 3508]"
CP-17.zip,1,CP,1633,4321,26,1,[4321]
NCP-20.zip,2,NCP,565,2289,57,2,"[2288, 2289]"
NCP-22.zip,2,NCP,878,2412,46,2,"[2412, 2413]"
CP-14.zip,1,CP,1520,4144,57,3,"[4143, 4144, 4145]"
Normal-23.zip,0,Normal,2620,130,36,1,[130]
NCP-23.zip,2,NCP,958,2501,133,1,[2501]
CP-13.zip,1,CP,1513,4128,60,2,"[4127, 4128]"
NCP-24.zip,2,NCP,98,1332,139,2,"[1332, 1333]"
CP-9.zip,1,CP,1375,3804,60,2,"[3804, 3805]"
NCP-2.zip,2,NCP,1051,2625,88,2,"[2625, 2626]"
NCP-31.zip,2,NCP,999,2556,41,1,[2556]
CP-18.zip,1,CP,1781,3575,78,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,278,1703,57,2,"[1702, 1703]"
NCP-12.zip,2,NCP,313,1773,147,2,"[1773, 1774]"
NCP-14.zip,2,NCP,381,1915,60,2,"[1914, 1915]"
NCP-11.zip,2,NCP,295,1735,236,2,"[1735, 1736]"
CP-11.zip,1,CP,1440,3948,196,3,"[3948, 3949, 3950]"
CP-19.zip,1,CP,1795,3597,41,2,"[3596, 3597]"
CP-12.zip,1,CP,1467,4013,60,2,"[4013, 4014]"
NCP-12.zip,2,NCP,322,1793,51,2,"[1792, 1793]"
CP-9.zip,1,CP,1353,3750,59,3,"[3748, 3749, 3750]"
CP-19.zip,1,CP,1784,3591,50,4,"[3590, 3591, 3592, 3593]"
NCP-9.zip,2,NCP,2699,2665,51,1,[2665]
NCP-12.zip,2,NCP,331,1810,158,2,"[1810, 1811]"
NCP-12.zip,2,NCP,334,1817,59,2,"[1816, 1817]"
NCP-1.zip,2,NCP,1009,2571,29,2,"[2570, 2571]"
CP-30.zip,1,CP,4041,5590,31,1,[5590]
CP-24.zip,1,CP,705,3067,168,1,[3067]
Normal-24.zip,0,Normal,2665,175,33,1,[175]
NCP-12.zip,2,NCP,332,1813,70,2,"[1812, 1813]"
CP-11.zip,1,CP,1444,3962,58,3,"[3960, 3961, 3962]"
CP-22.zip,1,CP,614,2976,100,1,[2976]
Normal-23.zip,0,Normal,2630,140,38,1,[140]
Normal-8.zip,0,Normal,1876,331,97,1,[331]
NCP-1.zip,2,NCP,1001,2559,141,1,[2559]
NCP-22.zip,2,NCP,845,2361,148,4,"[2360, 2361, 2362, 2363]"
CP-26.zip,1,CP,3646,5606,36,1,[5606]
Normal-9.zip,0,Normal,1907,362,92,1,[362]
Normal-1.zip,0,Normal,1672,800,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-12.zip,2,NCP,333,1815,68,2,"[1814, 1815]"
CP-17.zip,1,CP,1634,4322,23,1,[4322]
Normal-12.zip,0,Normal,2009,464,93,1,[464]
CP-26.zip,1,CP,3731,5670,215,1,[5670]
Normal-25.zip,0,Normal,3714,5344,22,1,[5344]
Normal-19.zip,0,Normal,2231,686,85,1,[686]
NCP-23.zip,2,NCP,940,2483,22,1,[2483]
Normal-25.zip,0,Normal,3851,5363,201,1,[5363]
NCP-6.zip,2,NCP,209,1562,139,2,"[1562, 1563]"
NCP-13.zip,2,NCP,347,1846,53,2,"[1845, 1846]"
NCP-11.zip,2,NCP,312,1772,62,2,"[1771, 1772]"
CP-5.zip,1,CP,1196,3414,186,1,[3414]
NCP-21.zip,2,NCP,74,1282,54,2,"[1281, 1282]"
CP-23.zip,1,CP,662,3024,114,1,[3024]
NCP-7.zip,2,NCP,23,1177,151,2,"[1177, 1178]"
CP-16.zip,1,CP,1591,4279,23,1,[4279]
Normal-12.zip,0,Normal,1995,450,95,1,[450]
Normal-20.zip,0,Normal,2264,719,82,1,[719]
NCP-30.zip,2,NCP,948,2491,365,1,[2491]
Normal-12.zip,0,Normal,1998,453,99,1,[453]
NCP-19.zip,2,NCP,522,2201,58,2,"[2200, 2201]"
CP-13.zip,1,CP,1510,4121,60,2,"[4121, 4122]"
NCP-15.zip,2,NCP,406,1962,61,2,"[1961, 1962]"
NCP-4.zip,2,NCP,162,1468,148,2,"[1468, 1469]"
CP-11.zip,1,CP,1431,3930,61,2,"[3930, 3931]"
CP-15.zip,1,CP,1569,4257,20,1,[4257]
CP-9.zip,1,CP,1379,3813,52,2,"[3812, 3813]"
NCP-30.zip,2,NCP,981,2525,40,2,"[2525, 2526]"
NCP-8.zip,2,NCP,2679,2650,42,1,[2650]
NCP-25.zip,2,NCP,3951,5465,43,1,[5465]
NCP-7.zip,2,NCP,2460,2684,36,1,[2684]
CP-25.zip,1,CP,734,3096,106,1,[3096]
NCP-6.zip,2,NCP,209,1563,58,2,"[1562, 1563]"
Normal-22.zip,0,Normal,2593,103,38,1,[103]
NCP-16.zip,2,NCP,438,2029,149,2,"[2029, 2030]"
CP-7.zip,1,CP,1304,3638,43,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
Normal-8.zip,0,Normal,1885,340,101,1,[340]
NCP-17.zip,2,NCP,484,2123,137,2,"[2123, 2124]"
NCP-20.zip,2,NCP,565,2288,135,2,"[2288, 2289]"
NCP-5.zip,2,NCP,185,1515,51,2,"[1514, 1515]"
NCP-29.zip,2,NCP,877,2411,65,1,[2411]
NCP-6.zip,2,NCP,216,1577,58,2,"[1576, 1577]"
Normal-24.zip,0,Normal,2658,168,37,1,[168]
CP-28.zip,1,CP,3779,5723,26,1,[5723]
Normal-15.zip,0,Normal,2090,545,83,1,[545]
Normal-2.zip,0,Normal,1750,1077,69,3,"[1074, 1077, 1078]"
NCP-24.zip,2,NCP,98,1333,58,2,"[1332, 1333]"
CP-5.zip,1,CP,1199,3417,180,1,[3417]
CP-3.zip,1,CP,1146,3364,161,1,[3364]
CP-11.zip,1,CP,1449,3971,50,2,"[3971, 3972]"
Normal-3.zip,0,Normal,1767,1154,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-22.zip,0,Normal,2585,95,41,1,[95]
CP-29.zip,1,CP,3816,5760,29,1,[5760]
NCP-21.zip,2,NCP,62,1258,60,2,"[1257, 1258]"
NCP-2.zip,2,NCP,1056,2632,473,1,[2632]
NCP-19.zip,2,NCP,525,2206,144,2,"[2206, 2207]"
Normal-22.zip,0,Normal,2600,110,41,1,[110]
CP-3.zip,1,CP,1161,3379,310,1,[3379]
NCP-12.zip,2,NCP,316,1779,139,2,"[1779, 1780]"
NCP-28.zip,2,NCP,868,2396,200,2,"[2395, 2396]"
CP-7.zip,1,CP,1301,3600,52,4,"[3598, 3599, 3600, 3601]"
NCP-11.zip,2,NCP,301,1749,62,2,"[1748, 1749]"
Normal-9.zip,0,Normal,1917,372,96,1,[372]
NCP-20.zip,2,NCP,571,2300,163,2,"[2300, 2301]"
Normal-3.zip,0,Normal,1767,1152,68,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-1.zip,0,Normal,1716,988,71,2,"[987, 988]"
NCP-28.zip,2,NCP,842,2357,42,1,[2357]
NCP-27.zip,2,NCP,309,1765,162,2,"[1766, 1765]"
CP-12.zip,1,CP,1479,4040,60,3,"[4039, 4040, 4041]"
NCP-6.zip,2,NCP,22,1175,163,2,"[1175, 1176]"
NCP-28.zip,2,NCP,868,2395,51,2,"[2395, 2396]"
CP-14.zip,1,CP,1532,4171,50,2,"[4171, 4172]"
Normal-11.zip,0,Normal,1984,439,86,1,[439]
Normal-24.zip,0,Normal,2643,153,39,1,[153]
CP-20.zip,1,CP,2765,3296,42,1,[3296]
Normal-2.zip,0,Normal,1763,1132,72,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-2.zip,2,NCP,109,1356,60,2,"[1355, 1356]"
NCP-7.zip,2,NCP,241,1628,55,2,"[1627, 1628]"
Normal-22.zip,0,Normal,2587,97,44,1,[97]
CP-20.zip,1,CP,2753,3284,37,1,[3284]
Normal-1.zip,0,Normal,1670,790,63,6,"[787, 788, 789, 790, 791, 792]"
Normal-15.zip,0,Normal,2103,558,88,1,[558]
CP-13.zip,1,CP,1503,4104,64,3,"[4104, 4105, 4106]"
Normal-21.zip,0,Normal,2313,768,94,1,[768]
CP-9.zip,1,CP,1382,3818,200,3,"[3818, 3819, 3820]"
Normal-2.zip,0,Normal,1756,1102,64,4,"[1101, 1102, 1103, 1104]"
NCP-12.zip,2,NCP,334,1816,140,2,"[1816, 1817]"
CP-13.zip,1,CP,1518,4140,67,3,"[4138, 4139, 4140]"
CP-13.zip,1,CP,1492,4077,139,3,"[4077, 4078, 4079]"
Normal-11.zip,0,Normal,1982,437,99,1,[437]
NCP-6.zip,2,NCP,213,1570,159,2,"[1570, 1571]"
CP-18.zip,1,CP,1779,3551,59,2,"[3551, 3552]"
NCP-12.zip,2,NCP,321,1790,122,2,"[1790, 1791]"
NCP-4.zip,2,NCP,159,1462,144,2,"[1462, 1463]"
CP-24.zip,1,CP,684,3046,161,1,[3046]
CP-29.zip,1,CP,3828,5772,26,1,[5772]
CP-12.zip,1,CP,1462,4004,51,3,"[4002, 4003, 4004]"
Normal-1.zip,0,Normal,1707,969,65,2,"[969, 970]"
CP-24.zip,1,CP,685,3047,168,1,[3047]
NCP-16.zip,2,NCP,444,2043,61,2,"[2042, 2043]"
CP-19.zip,1,CP,2430,2892,106,2,"[2891, 2892]"
Normal-25.zip,0,Normal,3857,5369,222,1,[5369]
CP-28.zip,1,CP,3774,5718,20,1,[5718]
CP-21.zip,1,CP,591,2953,124,1,[2953]
Normal-1.zip,0,Normal,1670,792,66,6,"[787, 788, 789, 790, 791, 792]"
NCP-14.zip,2,NCP,387,1925,54,2,"[1924, 1925]"
CP-10.zip,1,CP,14,3515,115,1,[3515]
NCP-4.zip,2,NCP,16,1164,113,2,"[1164, 1165]"
Normal-17.zip,0,Normal,2162,617,96,1,[617]
CP-13.zip,1,CP,1513,4127,60,2,"[4127, 4128]"
NCP-11.zip,2,NCP,300,1746,139,2,"[1746, 1747]"
NCP-21.zip,2,NCP,577,2312,61,2,"[2311, 2312]"
Normal-8.zip,0,Normal,1875,330,93,1,[330]
Normal-27.zip,0,Normal,3906,5439,62,1,[5439]
NCP-7.zip,2,NCP,249,1645,58,2,"[1644, 1645]"
NCP-20.zip,2,NCP,552,2262,61,2,"[2261, 2262]"
NCP-9.zip,2,NCP,2701,2667,56,1,[2667]
NCP-15.zip,2,NCP,417,1987,139,2,"[1987, 1988]"
NCP-9.zip,2,NCP,2705,2671,56,1,[2671]
Normal-3.zip,0,Normal,1767,1160,71,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-15.zip,1,CP,1585,4273,23,1,[4273]
CP-27.zip,1,CP,3742,5686,17,1,[5686]
CP-14.zip,1,CP,1521,4146,57,2,"[4146, 4147]"
Normal-1.zip,0,Normal,1703,960,70,2,"[959, 960]"
CP-21.zip,1,CP,6,3510,36,1,[3510]
NCP-19.zip,2,NCP,54,1242,62,2,"[1241, 1242]"
NCP-5.zip,2,NCP,17,1166,143,2,"[1166, 1167]"
NCP-15.zip,2,NCP,413,1977,47,4,"[1975, 1976, 1977, 1979]"
NCP-22.zip,2,NCP,845,2360,53,4,"[2360, 2361, 2362, 2363]"
NCP-2.zip,2,NCP,120,1381,139,2,"[1381, 1382]"
CP-5.zip,1,CP,1207,3425,189,1,[3425]
CP-27.zip,1,CP,3758,5702,23,1,[5702]
CP-16.zip,1,CP,1592,4280,25,1,[4280]
CP-21.zip,1,CP,4,3506,275,4,"[3505, 3506, 3507, 3508]"
NCP-21.zip,2,NCP,72,1277,55,2,"[1276, 1277]"
NCP-17.zip,2,NCP,475,2105,156,2,"[2105, 2106]"
NCP-13.zip,2,NCP,358,1868,67,2,"[1867, 1868]"
Normal-3.zip,0,Normal,764,199,130,1,[199]
CP-9.zip,1,CP,1358,3763,63,3,"[3761, 3762, 3763]"
NCP-4.zip,2,NCP,169,1483,56,2,"[1482, 1483]"
Normal-1.zip,0,Normal,1707,970,65,2,"[969, 970]"
NCP-18.zip,2,NCP,502,2160,140,2,"[2160, 2161]"
CP-18.zip,1,CP,1781,3568,67,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,2727,2683,44,1,[2683]
CP-26.zip,1,CP,3719,5651,277,3,"[5649, 5650, 5651]"
CP-11.zip,1,CP,1422,3910,58,3,"[3908, 3909, 3910]"
NCP-4.zip,2,NCP,168,1480,139,2,"[1480, 1481]"
CP-8.zip,1,CP,1329,3695,89,3,"[3695, 3696, 3697]"
CP-12.zip,1,CP,1463,4005,49,2,"[4005, 4006]"
Normal-27.zip,0,Normal,3915,5458,70,1,[5458]
Normal-18.zip,0,Normal,2209,664,82,1,[664]
CP-13.zip,1,CP,1492,4079,58,3,"[4077, 4078, 4079]"
CP-30.zip,1,CP,3830,5774,29,1,[5774]
CP-8.zip,1,CP,1329,3696,45,3,"[3695, 3696, 3697]"
Normal-16.zip,0,Normal,2139,594,87,1,[594]
NCP-14.zip,2,NCP,393,1936,149,2,"[1936, 1937]"
CP-21.zip,1,CP,4,3508,290,4,"[3505, 3506, 3507, 3508]"
Normal-2.zip,0,Normal,1737,1037,79,4,"[1037, 1038, 1039, 1040]"
NCP-25.zip,2,NCP,3708,5535,59,1,[5535]
CP-7.zip,1,CP,1301,3601,276,4,"[3598, 3599, 3600, 3601]"
NCP-7.zip,2,NCP,249,1644,139,2,"[1644, 1645]"
NCP-12.zip,2,NCP,339,1827,51,2,"[1826, 1827]"
NCP-2.zip,2,NCP,1275,2716,68,1,[2716]
NCP-13.zip,2,NCP,354,1860,73,2,"[1859, 1860]"
Normal-2.zip,0,Normal,1757,1105,71,4,"[1105, 1106, 1107, 1108]"
NCP-27.zip,2,NCP,1016,2582,108,3,"[2580, 2581, 2582]"
CP-18.zip,1,CP,1777,3541,62,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-1.zip,2,NCP,1008,2569,387,1,[2569]
CP-7.zip,1,CP,1315,3665,59,2,"[3665, 3666]"
CP-27.zip,1,CP,3737,5681,17,1,[5681]
Normal-9.zip,0,Normal,1914,369,88,1,[369]
Normal-1.zip,0,Normal,1672,802,75,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-8.zip,2,NCP,25,1181,129,2,"[1181, 1183]"
CP-19.zip,1,CP,1789,3207,64,4,"[3204, 3205, 3206, 3207]"
CP-11.zip,1,CP,1444,3960,139,3,"[3960, 3961, 3962]"
NCP-4.zip,2,NCP,145,1435,58,2,"[1434, 1435]"
CP-23.zip,1,CP,659,3021,594,1,[3021]
Normal-25.zip,0,Normal,3716,5346,31,1,[5346]
Normal-10.zip,0,Normal,1936,391,82,1,[391]
NCP-22.zip,2,NCP,821,2331,30,1,[2331]
CP-13.zip,1,CP,1505,4108,54,3,"[4108, 4109, 4110]"
NCP-15.zip,2,NCP,411,1972,62,2,"[1971, 1972]"
CP-7.zip,1,CP,1304,3633,18,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-27.zip,2,NCP,1048,2621,44,2,"[2620, 2621]"
CP-21.zip,1,CP,595,2957,306,1,[2957]
NCP-22.zip,2,NCP,861,2384,197,1,[2384]
CP-7.zip,1,CP,1302,3604,39,4,"[3602, 3603, 3604, 3605]"
NCP-17.zip,2,NCP,472,2099,151,2,"[2099, 2100]"
NCP-8.zip,2,NCP,26,1185,36,2,"[1184, 1185]"
Normal-27.zip,0,Normal,3903,5435,75,1,[5435]
Normal-25.zip,0,Normal,3840,5352,210,1,[5352]
NCP-8.zip,2,NCP,266,1679,58,2,"[1678, 1679]"
Normal-16.zip,0,Normal,2120,575,84,1,[575]
Normal-16.zip,0,Normal,2128,583,76,1,[583]
CP-11.zip,1,CP,1449,3972,50,2,"[3971, 3972]"
CP-7.zip,1,CP,1304,3636,47,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
Normal-22.zip,0,Normal,2597,107,41,1,[107]
NCP-10.zip,2,NCP,2726,2682,50,1,[2682]
Normal-7.zip,0,Normal,1849,304,87,1,[304]
Normal-13.zip,0,Normal,2040,495,95,1,[495]
Normal-16.zip,0,Normal,2125,580,83,1,[580]
CP-25.zip,1,CP,740,3102,193,1,[3102]
NCP-22.zip,2,NCP,871,2401,281,2,"[2401, 2402]"
NCP-9.zip,2,NCP,2704,2670,56,1,[2670]
NCP-12.zip,2,NCP,33,1198,147,2,"[1198, 1199]"
CP-18.zip,1,CP,1663,4351,26,1,[4351]
Normal-3.zip,0,Normal,1767,1157,28,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-2.zip,0,Normal,1735,1031,76,2,"[1030, 1031]"
Normal-10.zip,0,Normal,1938,393,66,1,[393]
NCP-24.zip,2,NCP,975,2518,484,1,[2518]
CP-18.zip,1,CP,1774,3523,65,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-14.zip,2,NCP,381,1914,143,2,"[1914, 1915]"
NCP-12.zip,2,NCP,33,1199,62,2,"[1198, 1199]"
NCP-13.zip,2,NCP,352,1855,138,2,"[1855, 1856]"
NCP-12.zip,2,NCP,333,1814,162,2,"[1814, 1815]"
NCP-23.zip,2,NCP,904,2446,667,1,[2446]
NCP-24.zip,2,NCP,985,2531,508,1,[2531]
NCP-6.zip,2,NCP,228,1600,161,2,"[1600, 1601]"
NCP-15.zip,2,NCP,414,1980,121,2,"[1980, 1981]"
NCP-1.zip,2,NCP,103,1344,63,2,"[1343, 1344]"
Normal-3.zip,0,Normal,1767,1155,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-8.zip,1,CP,1349,3744,58,3,"[3742, 3743, 3744]"
NCP-8.zip,2,NCP,261,1669,65,2,"[1668, 1669]"
Normal-21.zip,0,Normal,2300,755,98,1,[755]
NCP-13.zip,2,NCP,354,1859,177,2,"[1859, 1860]"
CP-23.zip,1,CP,665,3027,116,1,[3027]
CP-15.zip,1,CP,1561,4242,49,2,"[4241, 4242]"
CP-9.zip,1,CP,1376,3806,60,2,"[3806, 3807]"
Normal-1.zip,0,Normal,1727,1012,66,4,"[1009, 1010, 1011, 1012]"
NCP-28.zip,2,NCP,835,2349,46,2,"[2349, 2350]"
CP-8.zip,1,CP,1349,3742,142,3,"[3742, 3743, 3744]"
Normal-20.zip,0,Normal,2277,732,95,1,[732]
NCP-28.zip,2,NCP,876,2409,52,1,[2409]
Normal-15.zip,0,Normal,2101,556,85,1,[556]
CP-11.zip,1,CP,1444,3961,58,3,"[3960, 3961, 3962]"
NCP-2.zip,2,NCP,1276,2717,61,1,[2717]
Normal-3.zip,0,Normal,1767,1153,68,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-13.zip,0,Normal,2051,506,86,1,[506]
Normal-2.zip,0,Normal,1734,1029,66,2,"[1028, 1029]"
Normal-26.zip,0,Normal,3871,5383,22,1,[5383]
NCP-1.zip,2,NCP,1009,2570,39,2,"[2570, 2571]"
Normal-2.zip,0,Normal,1763,1139,65,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-13.zip,2,NCP,359,1870,61,2,"[1869, 1870]"
Normal-19.zip,0,Normal,2220,675,78,1,[675]
CP-9.zip,1,CP,1382,3819,60,3,"[3818, 3819, 3820]"
CP-20.zip,1,CP,2752,3283,26,1,[3283]
CP-13.zip,1,CP,1510,4122,60,2,"[4121, 4122]"
NCP-16.zip,2,NCP,440,2034,125,2,"[2034, 2035]"
CP-12.zip,1,CP,1458,3994,69,3,"[3992, 3993, 3994]"
NCP-11.zip,2,NCP,284,1714,58,2,"[1713, 1714]"
NCP-11.zip,2,NCP,303,1753,58,2,"[1752, 1753]"
NCP-6.zip,2,NCP,205,1554,126,2,"[1554, 1555]"
CP-14.zip,1,CP,1535,4179,53,2,"[4178, 4179]"
Normal-27.zip,0,Normal,3910,5446,66,2,"[5445, 5446]"
Normal-3.zip,0,Normal,742,177,107,1,[177]
Normal-22.zip,0,Normal,2589,99,37,1,[99]
NCP-22.zip,2,NCP,88,1310,71,2,"[1309, 1310]"
CP-14.zip,1,CP,1521,4147,57,2,"[4146, 4147]"
CP-26.zip,1,CP,3729,5666,179,3,"[5665, 5666, 5667]"
CP-28.zip,1,CP,3793,5737,29,1,[5737]
Normal-3.zip,0,Normal,767,202,358,1,[202]
NCP-5.zip,2,NCP,198,1540,144,2,"[1540, 1541]"
CP-27.zip,1,CP,3738,5682,19,1,[5682]
CP-27.zip,1,CP,3750,5694,28,1,[5694]
CP-10.zip,1,CP,1416,3898,58,2,"[3897, 3898]"
CP-8.zip,1,CP,1322,3680,56,2,"[3680, 3681]"
Normal-23.zip,0,Normal,2607,117,38,1,[117]
NCP-3.zip,2,NCP,138,1420,124,2,"[1420, 1421]"
CP-11.zip,1,CP,1425,3916,185,3,"[3916, 3917, 3918]"
CP-15.zip,1,CP,1581,4269,19,1,[4269]
CP-24.zip,1,CP,706,3068,124,1,[3068]
CP-18.zip,1,CP,1666,4354,23,1,[4354]
NCP-4.zip,2,NCP,161,1466,135,2,"[1466, 1467]"
Normal-7.zip,0,Normal,1847,302,102,1,[302]
CP-19.zip,1,CP,1784,3593,69,4,"[3590, 3591, 3592, 3593]"
CP-21.zip,1,CP,605,2967,157,1,[2967]
CP-5.zip,1,CP,12,3168,291,2,"[3168, 3169]"
Normal-9.zip,0,Normal,1909,364,102,1,[364]
NCP-22.zip,2,NCP,850,2369,52,1,[2369]
CP-24.zip,1,CP,687,3049,135,1,[3049]
NCP-1.zip,2,NCP,1033,2604,39,1,[2604]
Normal-2.zip,0,Normal,1750,1074,65,3,"[1074, 1077, 1078]"
CP-9.zip,1,CP,1365,3779,200,3,"[3779, 3780, 3781]"
NCP-18.zip,2,NCP,502,2161,59,2,"[2160, 2161]"
Normal-3.zip,0,Normal,1767,1162,76,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-1.zip,0,Normal,1672,799,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
Normal-3.zip,0,Normal,747,182,100,1,[182]
NCP-12.zip,2,NCP,319,1787,66,2,"[1785, 1787]"
NCP-15.zip,2,NCP,405,1959,143,2,"[1959, 1960]"
CP-18.zip,1,CP,1781,3574,64,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-21.zip,1,CP,600,2962,202,1,[2962]
CP-12.zip,1,CP,1479,4039,60,3,"[4039, 4040, 4041]"
NCP-27.zip,2,NCP,827,2340,173,1,[2340]
NCP-24.zip,2,NCP,983,2528,67,1,[2528]
CP-11.zip,1,CP,1424,3915,60,2,"[3914, 3915]"
CP-2.zip,1,CP,1105,3323,220,1,[3323]
CP-10.zip,1,CP,1412,3888,66,2,"[3887, 3888]"
NCP-18.zip,2,NCP,495,2147,65,2,"[2146, 2147]"
NCP-3.zip,2,NCP,134,1412,128,2,"[1412, 1413]"
Normal-10.zip,0,Normal,1940,395,74,1,[395]
Normal-17.zip,0,Normal,2163,618,89,1,[618]
CP-9.zip,1,CP,1358,3761,249,3,"[3761, 3762, 3763]"
CP-23.zip,1,CP,658,3020,273,1,[3020]
NCP-12.zip,2,NCP,341,1830,129,3,"[1830, 1832, 1834]"
CP-14.zip,1,CP,1520,4145,57,3,"[4143, 4144, 4145]"
CP-19.zip,1,CP,1783,3588,62,2,"[3588, 3589]"
Normal-3.zip,0,Normal,1767,1158,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-7.zip,1,CP,1301,3598,55,4,"[3598, 3599, 3600, 3601]"
Normal-6.zip,0,Normal,1810,265,85,1,[265]
NCP-12.zip,2,NCP,321,1791,51,2,"[1790, 1791]"
NCP-12.zip,2,NCP,341,1834,54,3,"[1830, 1832, 1834]"
CP-11.zip,1,CP,1435,3938,46,2,"[3938, 3939]"
Normal-26.zip,0,Normal,3876,5388,30,1,[5388]
Normal-16.zip,0,Normal,2123,578,90,1,[578]
Normal-6.zip,0,Normal,1816,271,76,1,[271]
NCP-26.zip,2,NCP,3992,5516,48,1,[5516]
CP-18.zip,1,CP,1777,3544,66,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-5.zip,2,NCP,173,1491,58,2,"[1490, 1491]"
NCP-11.zip,2,NCP,312,1771,148,2,"[1771, 1772]"
NCP-19.zip,2,NCP,525,2207,61,2,"[2206, 2207]"
Normal-3.zip,0,Normal,752,187,103,1,[187]
NCP-7.zip,2,NCP,23,1178,63,2,"[1177, 1178]"
CP-27.zip,1,CP,3762,5706,26,1,[5706]
CP-18.zip,1,CP,1659,4347,26,1,[4347]
CP-20.zip,1,CP,2667,3248,46,3,"[3246, 3247, 3248]"
Normal-24.zip,0,Normal,2653,163,39,1,[163]
Normal-4.zip,0,Normal,801,236,107,1,[236]
Normal-20.zip,0,Normal,2272,727,79,1,[727]
NCP-30.zip,2,NCP,988,2539,56,2,"[2538, 2539]"
CP-18.zip,1,CP,1774,3527,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-17.zip,0,Normal,2165,620,95,1,[620]
CP-12.zip,1,CP,1479,4041,60,3,"[4039, 4040, 4041]"
Normal-21.zip,0,Normal,2299,754,90,1,[754]
CP-22.zip,1,CP,637,2999,118,1,[2999]
NCP-6.zip,2,NCP,217,1578,139,2,"[1578, 1579]"
CP-30.zip,1,CP,3919,5544,73,4,"[5543, 5544, 5545, 5546]"
CP-13.zip,1,CP,1511,4123,57,2,"[4123, 4124]"
Normal-13.zip,0,Normal,2035,490,82,1,[490]
CP-10.zip,1,CP,1417,3899,59,1,[3899]
NCP-8.zip,2,NCP,261,1668,155,2,"[1668, 1669]"
CP-20.zip,1,CP,2667,3247,92,3,"[3246, 3247, 3248]"
CP-26.zip,1,CP,3636,5595,290,1,[5595]
Normal-2.zip,0,Normal,1763,1136,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-9.zip,0,Normal,1913,368,88,1,[368]
CP-9.zip,1,CP,1375,3805,58,2,"[3804, 3805]"
CP-16.zip,1,CP,1606,4294,26,1,[4294]
CP-18.zip,1,CP,1777,3543,68,5,"[3540, 3541, 3542, 3543, 3544]"
Normal-21.zip,0,Normal,2287,742,77,1,[742]
CP-11.zip,1,CP,1422,3908,140,3,"[3908, 3909, 3910]"
NCP-22.zip,2,NCP,859,2381,268,2,"[2380, 2381]"
Normal-24.zip,0,Normal,2645,155,38,1,[155]
CP-7.zip,1,CP,1302,3605,201,4,"[3602, 3603, 3604, 3605]"
CP-23.zip,1,CP,646,3008,128,1,[3008]
CP-11.zip,1,CP,1425,3918,49,3,"[3916, 3917, 3918]"
CP-18.zip,1,CP,1781,3569,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-16.zip,2,NCP,436,2025,146,2,"[2025, 2026]"
NCP-18.zip,2,NCP,503,2163,61,2,"[2162, 2163]"
NCP-4.zip,2,NCP,167,1478,143,2,"[1478, 1479]"
Normal-26.zip,0,Normal,3880,5392,32,1,[5392]
NCP-25.zip,2,NCP,3709,5536,65,1,[5536]
Normal-2.zip,0,Normal,1734,1028,66,2,"[1028, 1029]"
Normal-17.zip,0,Normal,2169,624,92,1,[624]
NCP-20.zip,2,NCP,546,2249,134,2,"[2249, 2250]"
NCP-4.zip,2,NCP,146,1437,52,2,"[1436, 1437]"
NCP-26.zip,2,NCP,3995,5493,47,1,[5493]
CP-20.zip,1,CP,2763,3294,119,1,[3294]
NCP-13.zip,2,NCP,349,1850,57,2,"[1849, 1850]"
CP-26.zip,1,CP,3644,5604,284,1,[5604]
CP-8.zip,1,CP,1327,3690,253,3,"[3690, 3691, 3692]"
CP-20.zip,1,CP,2770,3301,38,1,[3301]
CP-12.zip,1,CP,1471,4023,55,2,"[4022, 4023]"
Normal-27.zip,0,Normal,3912,5453,68,1,[5453]
NCP-23.zip,2,NCP,93,1322,157,2,"[1322, 1323]"
CP-18.zip,1,CP,1781,3576,64,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-13.zip,2,NCP,347,1845,126,2,"[1845, 1846]"
CP-20.zip,1,CP,2454,2935,120,2,"[2935, 2936]"
Normal-1.zip,0,Normal,1670,788,58,6,"[787, 788, 789, 790, 791, 792]"
Normal-8.zip,0,Normal,1880,335,83,1,[335]
Normal-10.zip,0,Normal,1937,392,90,1,[392]
CP-20.zip,1,CP,2768,3299,38,1,[3299]
Normal-18.zip,0,Normal,2212,667,89,1,[667]
Normal-1.zip,0,Normal,1677,826,65,4,"[823, 824, 825, 826]"
CP-26.zip,1,CP,3721,5654,43,2,"[5654, 5655]"
NCP-16.zip,2,NCP,439,2033,66,2,"[2032, 2033]"
Normal-13.zip,0,Normal,2031,486,81,1,[486]
CP-19.zip,1,CP,1783,3589,62,2,"[3588, 3589]"
CP-2.zip,1,CP,1121,3339,156,1,[3339]
CP-22.zip,1,CP,612,2974,84,1,[2974]
Normal-26.zip,0,Normal,3867,5379,29,1,[5379]
NCP-1.zip,2,NCP,102,1342,56,2,"[1341, 1342]"
NCP-18.zip,2,NCP,493,2142,133,2,"[2142, 2143]"
NCP-12.zip,2,NCP,339,1826,120,2,"[1826, 1827]"
Normal-14.zip,0,Normal,2085,540,95,1,[540]
NCP-27.zip,2,NCP,238,1622,57,2,"[1621, 1622]"
Normal-2.zip,0,Normal,1737,1039,80,4,"[1037, 1038, 1039, 1040]"
CP-30.zip,1,CP,3919,5546,70,4,"[5543, 5544, 5545, 5546]"
NCP-1.zip,2,NCP,1012,2576,249,1,[2576]
NCP-17.zip,2,NCP,463,2080,144,2,"[2080, 2081]"
NCP-2.zip,2,NCP,127,1400,58,2,"[1399, 1400]"
Normal-21.zip,0,Normal,2291,746,96,1,[746]
NCP-8.zip,2,NCP,25,1183,45,2,"[1181, 1183]"
CP-9.zip,1,CP,1382,3820,60,3,"[3818, 3819, 3820]"
NCP-30.zip,2,NCP,967,2510,168,1,[2510]
Normal-27.zip,0,Normal,3910,5445,66,2,"[5445, 5446]"
NCP-4.zip,2,NCP,156,1456,138,2,"[1456, 1457]"
CP-12.zip,1,CP,1464,4007,63,2,"[4007, 4008]"
NCP-4.zip,2,NCP,162,1469,62,2,"[1468, 1469]"
CP-13.zip,1,CP,1493,4081,53,3,"[4080, 4081, 4082]"
CP-16.zip,1,CP,1602,4290,17,1,[4290]
NCP-6.zip,2,NCP,216,1576,139,2,"[1576, 1577]"
CP-25.zip,1,CP,723,3085,104,1,[3085]
NCP-15.zip,2,NCP,411,1971,149,2,"[1971, 1972]"
NCP-15.zip,2,NCP,425,2003,139,2,"[2003, 2004]"
CP-24.zip,1,CP,688,3050,127,1,[3050]
Normal-13.zip,0,Normal,2033,488,77,1,[488]
NCP-23.zip,2,NCP,96,1329,61,2,"[1328, 1329]"
Normal-5.zip,0,Normal,803,238,343,1,[238]
CP-16.zip,1,CP,1595,4283,23,1,[4283]
NCP-27.zip,2,NCP,238,1621,134,2,"[1621, 1622]"
NCP-19.zip,2,NCP,529,2214,141,3,"[2214, 2215, 2217]"
CP-25.zip,1,CP,710,3072,78,1,[3072]
Normal-19.zip,0,Normal,2243,698,86,1,[698]
CP-11.zip,1,CP,1440,3949,51,3,"[3948, 3949, 3950]"
CP-7.zip,1,CP,1260,3478,235,1,[3478]
Normal-1.zip,0,Normal,1672,797,76,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-26.zip,1,CP,3719,5649,52,3,"[5649, 5650, 5651]"
NCP-23.zip,2,NCP,969,2512,68,1,[2512]
NCP-5.zip,2,NCP,186,1516,113,2,"[1516, 1517]"
CP-13.zip,1,CP,1507,4114,62,2,"[4113, 4114]"
CP-19.zip,1,CP,2443,2916,310,3,"[2915, 2916, 2917]"
CP-13.zip,1,CP,1503,4105,64,3,"[4104, 4105, 4106]"
Normal-10.zip,0,Normal,1934,389,85,1,[389]
CP-20.zip,1,CP,2760,3291,281,1,[3291]
Normal-19.zip,0,Normal,2242,697,86,1,[697]
NCP-22.zip,2,NCP,864,2388,214,2,"[2388, 2389]"
NCP-14.zip,2,NCP,377,1906,147,2,"[1906, 1907]"
CP-29.zip,1,CP,3818,5762,29,1,[5762]
CP-23.zip,1,CP,676,3038,291,1,[3038]
NCP-14.zip,2,NCP,389,1928,150,2,"[1928, 1929]"
CP-27.zip,1,CP,3761,5705,16,1,[5705]
NCP-27.zip,2,NCP,1016,2581,179,3,"[2580, 2581, 2582]"
Normal-22.zip,0,Normal,2321,776,90,1,[776]
CP-7.zip,1,CP,1304,3639,212,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-16.zip,2,NCP,438,2030,62,2,"[2029, 2030]"
NCP-2.zip,2,NCP,107,1352,61,2,"[1351, 1352]"
NCP-11.zip,2,NCP,295,1736,97,2,"[1735, 1736]"
CP-2.zip,1,CP,1122,3340,229,1,[3340]
Normal-25.zip,0,Normal,3849,5361,205,1,[5361]
CP-4.zip,1,CP,1189,3407,284,1,[3407]
NCP-4.zip,2,NCP,152,1449,61,2,"[1448, 1449]"
Normal-13.zip,0,Normal,2044,499,103,1,[499]
Normal-2.zip,0,Normal,1756,1103,65,4,"[1101, 1102, 1103, 1104]"
CP-9.zip,1,CP,1379,3812,52,2,"[3812, 3813]"
CP-20.zip,1,CP,2454,2936,116,2,"[2935, 2936]"
NCP-3.zip,2,NCP,1294,2735,62,1,[2735]
CP-6.zip,1,CP,1230,3448,37,1,[3448]
Normal-5.zip,0,Normal,815,250,120,1,[250]
CP-13.zip,1,CP,1488,4066,66,3,"[4064, 4065, 4066]"
NCP-7.zip,2,NCP,241,1627,131,2,"[1627, 1628]"
NCP-6.zip,2,NCP,220,1584,160,2,"[1584, 1585]"
NCP-30.zip,2,NCP,982,2527,242,1,[2527]
Normal-2.zip,0,Normal,1735,1030,76,2,"[1030, 1031]"
CP-18.zip,1,CP,1781,3573,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-26.zip,1,CP,3642,5601,29,1,[5601]
NCP-5.zip,2,NCP,186,1517,48,2,"[1516, 1517]"
Normal-7.zip,0,Normal,1846,301,105,1,[301]
CP-6.zip,1,CP,1252,3470,180,1,[3470]
NCP-8.zip,2,NCP,254,1655,58,2,"[1654, 1655]"
NCP-17.zip,2,NCP,460,2075,45,2,"[2074, 2075]"
NCP-3.zip,2,NCP,138,1421,52,2,"[1420, 1421]"
CP-29.zip,1,CP,3798,5742,21,1,[5742]
NCP-14.zip,2,NCP,389,1929,63,2,"[1928, 1929]"
NCP-22.zip,2,NCP,858,2379,52,1,[2379]
NCP-10.zip,2,NCP,2721,2677,37,1,[2677]
NCP-29.zip,2,NCP,882,2418,257,2,"[2417, 2418]"
NCP-18.zip,2,NCP,495,2146,156,2,"[2146, 2147]"
Normal-18.zip,0,Normal,2210,665,88,1,[665]
CP-7.zip,1,CP,1304,3632,18,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-18.zip,2,NCP,512,2180,149,2,"[2180, 2181]"
Normal-1.zip,0,Normal,1672,803,75,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-21.zip,1,CP,2774,3305,31,1,[3305]
CP-9.zip,1,CP,1372,3797,193,3,"[3797, 3798, 3799]"
CP-22.zip,1,CP,615,2977,104,1,[2977]
CP-12.zip,1,CP,1469,4019,47,2,"[4018, 4019]"
CP-18.zip,1,CP,1774,3522,65,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-17.zip,2,NCP,472,2100,63,2,"[2099, 2100]"
Normal-14.zip,0,Normal,2069,524,81,1,[524]
CP-18.zip,1,CP,1774,3529,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-27.zip,2,NCP,1031,2601,216,2,"[2601, 2602]"
NCP-22.zip,2,NCP,857,2378,53,1,[2378]
Normal-3.zip,0,Normal,1767,1156,139,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-24.zip,0,Normal,2641,151,41,1,[151]
NCP-9.zip,2,NCP,2696,2662,44,1,[2662]
CP-17.zip,1,CP,1620,4308,24,1,[4308]
NCP-4.zip,2,NCP,149,1443,66,2,"[1442, 1443]"
CP-13.zip,1,CP,1488,4064,158,3,"[4064, 4065, 4066]"
Normal-22.zip,0,Normal,2315,770,82,1,[770]
NCP-12.zip,2,NCP,316,1780,58,2,"[1779, 1780]"
CP-9.zip,1,CP,1360,3767,67,3,"[3767, 3768, 3769]"
NCP-18.zip,2,NCP,512,2181,62,2,"[2180, 2181]"
NCP-20.zip,2,NCP,547,2252,66,2,"[2251, 2252]"
Normal-10.zip,0,Normal,1942,397,81,1,[397]
NCP-5.zip,2,NCP,198,1541,60,2,"[1540, 1541]"
NCP-6.zip,2,NCP,199,1542,138,2,"[1542, 1543]"
CP-17.zip,1,CP,1631,4319,23,1,[4319]
NCP-13.zip,2,NCP,353,1858,69,2,"[1857, 1858]"
NCP-17.zip,2,NCP,463,2081,60,2,"[2080, 2081]"
NCP-1.zip,2,NCP,1019,2585,363,1,[2585]
NCP-22.zip,2,NCP,845,2362,48,4,"[2360, 2361, 2362, 2363]"
NCP-15.zip,2,NCP,425,2004,58,2,"[2003, 2004]"
NCP-28.zip,2,NCP,873,2405,52,2,"[2405, 2406]"
NCP-4.zip,2,NCP,152,1448,145,2,"[1448, 1449]"
NCP-19.zip,2,NCP,543,2244,54,2,"[2243, 2244]"
Normal-14.zip,0,Normal,2062,517,84,1,[517]
NCP-17.zip,2,NCP,465,2086,61,3,"[2084, 2085, 2086]"
Normal-25.zip,0,Normal,3717,5347,25,1,[5347]
CP-4.zip,1,CP,1178,3396,133,1,[3396]
CP-22.zip,1,CP,620,2982,64,1,[2982]
Normal-1.zip,0,Normal,1677,825,65,4,"[823, 824, 825, 826]"
Normal-9.zip,0,Normal,1908,363,81,1,[363]
CP-30.zip,1,CP,3940,5646,33,1,[5646]
NCP-30.zip,2,NCP,942,2485,45,1,[2485]
CP-18.zip,1,CP,1781,3578,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-9.zip,1,CP,1358,3762,126,3,"[3761, 3762, 3763]"
CP-27.zip,1,CP,3764,5708,23,1,[5708]
NCP-8.zip,2,NCP,2673,2692,48,1,[2692]
NCP-19.zip,2,NCP,534,2226,49,2,"[2225, 2226]"
CP-11.zip,1,CP,1440,3950,51,3,"[3948, 3949, 3950]"
NCP-17.zip,2,NCP,465,2084,145,3,"[2084, 2085, 2086]"
NCP-19.zip,2,NCP,522,2200,137,2,"[2200, 2201]"
CP-12.zip,1,CP,1468,4015,54,3,"[4015, 4016, 4017]"
CP-13.zip,1,CP,1498,4095,60,2,"[4095, 4096]"
CP-18.zip,1,CP,1778,3548,65,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
Normal-1.zip,0,Normal,1670,791,66,6,"[787, 788, 789, 790, 791, 792]"
CP-16.zip,1,CP,1611,4299,19,1,[4299]
Normal-14.zip,0,Normal,2080,535,100,1,[535]
NCP-25.zip,2,NCP,3968,5477,44,1,[5477]
Normal-3.zip,0,Normal,755,190,107,1,[190]
Normal-16.zip,0,Normal,2151,606,93,1,[606]
NCP-4.zip,2,NCP,168,1481,58,2,"[1480, 1481]"
Normal-21.zip,0,Normal,2289,744,77,1,[744]
NCP-6.zip,2,NCP,224,1593,57,2,"[1592, 1593]"
CP-13.zip,1,CP,1502,4103,73,2,"[4102, 4103]"
NCP-22.zip,2,NCP,865,2390,34,2,"[2390, 2391]"
CP-28.zip,1,CP,3787,5731,27,1,[5731]
NCP-5.zip,2,NCP,170,1484,141,2,"[1484, 1485]"
Normal-20.zip,0,Normal,2271,726,81,1,[726]
NCP-7.zip,2,NCP,2485,2644,46,1,[2644]
NCP-17.zip,2,NCP,475,2106,63,2,"[2105, 2106]"
NCP-21.zip,2,NCP,74,1281,127,2,"[1281, 1282]"
CP-13.zip,1,CP,1507,4113,62,2,"[4113, 4114]"
CP-18.zip,1,CP,1781,3570,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-12.zip,1,CP,1462,4002,193,3,"[4002, 4003, 4004]"
Normal-1.zip,0,Normal,1672,796,76,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-12.zip,2,NCP,337,1822,139,2,"[1822, 1823]"
CP-9.zip,1,CP,1353,3749,60,3,"[3748, 3749, 3750]"
Normal-15.zip,0,Normal,2087,542,83,1,[542]
NCP-12.zip,2,NCP,331,1811,66,2,"[1810, 1811]"
CP-22.zip,1,CP,617,2979,110,1,[2979]
CP-18.zip,1,CP,1771,3518,51,4,"[3518, 3519, 3520, 3521]"
CP-26.zip,1,CP,3730,5668,212,2,"[5668, 5669]"
Normal-24.zip,0,Normal,2660,170,38,1,[170]
Normal-11.zip,0,Normal,1967,422,97,1,[422]
NCP-4.zip,2,NCP,149,1442,159,2,"[1442, 1443]"
CP-30.zip,1,CP,3834,5778,26,1,[5778]
NCP-19.zip,2,NCP,540,2237,127,2,"[2237, 2238]"
Normal-26.zip,0,Normal,3862,5374,188,1,[5374]
Normal-7.zip,0,Normal,1842,297,77,1,[297]
Normal-26.zip,0,Normal,3868,5380,30,1,[5380]
Normal-12.zip,0,Normal,2003,458,85,1,[458]
NCP-5.zip,2,NCP,17,1167,58,2,"[1166, 1167]"
NCP-2.zip,2,NCP,117,1375,130,2,"[1375, 1376]"
CP-13.zip,1,CP,1511,4124,57,2,"[4123, 4124]"
CP-18.zip,1,CP,1778,3546,66,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-19.zip,2,NCP,529,2217,58,3,"[2214, 2215, 2217]"
CP-14.zip,1,CP,1520,4143,57,3,"[4143, 4144, 4145]"
Normal-16.zip,0,Normal,2131,586,95,1,[586]
NCP-28.zip,2,NCP,873,2406,228,2,"[2405, 2406]"
NCP-3.zip,2,NCP,137,1418,126,2,"[1418, 1419]"
NCP-10.zip,2,NCP,279,1705,58,2,"[1704, 1705]"
CP-28.zip,1,CP,3796,5740,28,1,[5740]
NCP-19.zip,2,NCP,54,1241,147,2,"[1241, 1242]"
CP-28.zip,1,CP,3768,5712,19,1,[5712]
NCP-2.zip,2,NCP,120,1382,58,2,"[1381, 1382]"
CP-16.zip,1,CP,1603,4291,22,1,[4291]
CP-2.zip,1,CP,1118,3336,173,1,[3336]
NCP-30.zip,2,NCP,939,2482,49,1,[2482]
Normal-8.zip,0,Normal,1874,329,90,1,[329]
Normal-3.zip,0,Normal,746,181,110,1,[181]
CP-21.zip,1,CP,608,2970,86,1,[2970]
Normal-22.zip,0,Normal,2601,111,37,1,[111]
NCP-4.zip,2,NCP,16,1165,48,2,"[1164, 1165]"
NCP-1.zip,2,NCP,1036,2607,441,1,[2607]
NCP-19.zip,2,NCP,528,2213,59,2,"[2212, 2213]"
NCP-6.zip,2,NCP,217,1579,58,2,"[1578, 1579]"
CP-10.zip,1,CP,1416,3897,58,2,"[3897, 3898]"
CP-30.zip,1,CP,4043,5592,41,1,[5592]
CP-30.zip,1,CP,3933,5636,69,2,"[5636, 5637]"
CP-20.zip,1,CP,2667,3246,24,3,"[3246, 3247, 3248]"
Normal-1.zip,0,Normal,1677,824,64,4,"[823, 824, 825, 826]"
CP-18.zip,1,CP,1779,3552,59,2,"[3551, 3552]"
Normal-25.zip,0,Normal,3855,5367,209,1,[5367]
CP-24.zip,1,CP,691,3053,72,1,[3053]
CP-6.zip,1,CP,1239,3457,134,1,[3457]
CP-21.zip,1,CP,602,2964,84,1,[2964]
NCP-1.zip,2,NCP,105,1348,61,2,"[1347, 1348]"
CP-3.zip,1,CP,1151,3369,158,1,[3369]
NCP-15.zip,2,NCP,413,1975,110,4,"[1975, 1976, 1977, 1979]"
CP-8.zip,1,CP,1327,3691,64,3,"[3690, 3691, 3692]"
CP-6.zip,1,CP,1237,3455,178,1,[3455]
Normal-11.zip,0,Normal,1959,414,97,1,[414]
Normal-25.zip,0,Normal,3713,5343,27,1,[5343]
CP-21.zip,1,CP,597,2959,305,1,[2959]
CP-9.zip,1,CP,1356,3757,60,2,"[3756, 3757]"
NCP-7.zip,2,NCP,2483,2686,40,1,[2686]
NCP-27.zip,2,NCP,1048,2620,58,2,"[2620, 2621]"
Normal-3.zip,0,Normal,1767,1159,28,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-5.zip,1,CP,1219,3437,179,1,[3437]
NCP-4.zip,2,NCP,145,1434,139,2,"[1434, 1435]"
CP-15.zip,1,CP,1575,4263,20,1,[4263]
NCP-18.zip,2,NCP,516,2188,135,2,"[2188, 2189]"
CP-9.zip,1,CP,1360,3768,67,3,"[3767, 3768, 3769]"
CP-13.zip,1,CP,1488,4065,66,3,"[4064, 4065, 4066]"
CP-1.zip,1,CP,1077,3122,74,2,"[3121, 3122]"
Normal-14.zip,0,Normal,2084,539,92,1,[539]
Normal-3.zip,0,Normal,1767,1163,76,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-2.zip,0,Normal,1746,1063,68,2,"[1063, 1064]"
NCP-12.zip,2,NCP,332,1812,167,2,"[1812, 1813]"
Normal-12.zip,0,Normal,1990,445,97,1,[445]
CP-7.zip,1,CP,1301,3599,294,4,"[3598, 3599, 3600, 3601]"
CP-1.zip,1,CP,1070,3112,104,1,[3112]
CP-13.zip,1,CP,1493,4082,53,3,"[4080, 4081, 4082]"
NCP-19.zip,2,NCP,520,2196,129,2,"[2196, 2197]"
NCP-3.zip,2,NCP,137,1419,53,2,"[1418, 1419]"
NCP-30.zip,2,NCP,937,2479,22,1,[2479]
NCP-22.zip,2,NCP,865,2391,260,2,"[2390, 2391]"
NCP-7.zip,2,NCP,230,1605,58,2,"[1604, 1605]"
CP-7.zip,1,CP,1302,3603,207,4,"[3602, 3603, 3604, 3605]"
CP-16.zip,1,CP,1588,4276,20,1,[4276]
Normal-18.zip,0,Normal,2195,650,79,1,[650]
Normal-17.zip,0,Normal,2173,628,96,1,[628]
NCP-22.zip,2,NCP,878,2413,117,2,"[2412, 2413]"
Normal-18.zip,0,Normal,2188,643,88,1,[643]
CP-18.zip,1,CP,1774,3526,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-6.zip,0,Normal,1815,270,91,1,[270]
CP-5.zip,1,CP,1208,3426,321,1,[3426]
NCP-6.zip,2,NCP,22,1176,68,2,"[1175, 1176]"
NCP-15.zip,2,NCP,413,1979,54,4,"[1975, 1976, 1977, 1979]"
CP-18.zip,1,CP,1771,3521,51,4,"[3518, 3519, 3520, 3521]"
CP-4.zip,1,CP,1172,3390,195,1,[3390]
CP-26.zip,1,CP,3721,5655,206,2,"[5654, 5655]"
CP-27.zip,1,CP,3754,5698,21,1,[5698]
CP-19.zip,1,CP,1784,3592,69,4,"[3590, 3591, 3592, 3593]"
CP-9.zip,1,CP,1372,3799,49,3,"[3797, 3798, 3799]"
NCP-2.zip,2,NCP,113,1367,137,2,"[1367, 1368]"
Normal-22.zip,0,Normal,2318,773,105,1,[773]
CP-18.zip,1,CP,1770,3517,57,1,[3517]
Normal-21.zip,0,Normal,2293,748,88,1,[748]
Normal-22.zip,0,Normal,2595,105,43,1,[105]
NCP-18.zip,2,NCP,50,1234,59,2,"[1233, 1234]"
Normal-2.zip,0,Normal,1757,1106,71,4,"[1105, 1106, 1107, 1108]"
CP-8.zip,1,CP,1327,3692,64,3,"[3690, 3691, 3692]"
CP-18.zip,1,CP,1781,3577,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-30.zip,1,CP,3932,5635,67,2,"[5634, 5635]"
NCP-20.zip,2,NCP,566,2291,67,2,"[2290, 2291]"
NCP-10.zip,2,NCP,2715,2708,51,1,[2708]
CP-23.zip,1,CP,660,3022,82,1,[3022]
Normal-9.zip,0,Normal,1916,371,106,1,[371]
CP-20.zip,1,CP,2757,3288,211,1,[3288]
Normal-7.zip,0,Normal,1845,300,99,1,[300]
Normal-13.zip,0,Normal,2050,505,74,1,[505]
CP-1.zip,1,CP,1092,3310,216,1,[3310]
Normal-2.zip,0,Normal,1763,1135,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-9.zip,0,Normal,1898,353,72,1,[353]
NCP-21.zip,2,NCP,576,2310,124,1,[2310]
Normal-1.zip,0,Normal,1701,956,70,2,"[955, 956]"
Normal-17.zip,0,Normal,2178,633,85,1,[633]
CP-8.zip,1,CP,1322,3681,56,2,"[3680, 3681]"
Normal-6.zip,0,Normal,1802,257,107,1,[257]
NCP-20.zip,2,NCP,547,2251,159,2,"[2251, 2252]"
NCP-3.zip,2,NCP,1285,2726,66,1,[2726]
Normal-7.zip,0,Normal,1828,283,96,1,[283]
NCP-20.zip,2,NCP,546,2250,57,2,"[2249, 2250]"
Normal-2.zip,0,Normal,1750,1078,69,3,"[1074, 1077, 1078]"
Normal-9.zip,0,Normal,1892,347,77,1,[347]
NCP-19.zip,2,NCP,534,2225,115,2,"[2225, 2226]"
CP-29.zip,1,CP,3806,5750,20,1,[5750]
NCP-13.zip,2,NCP,355,1861,125,2,"[1861, 1862]"
Normal-6.zip,0,Normal,1813,268,80,1,[268]
Normal-2.zip,0,Normal,1756,1101,66,4,"[1101, 1102, 1103, 1104]"
CP-20.zip,1,CP,2759,3290,36,1,[3290]
Normal-17.zip,0,Normal,2183,638,110,1,[638]
NCP-6.zip,2,NCP,228,1601,67,2,"[1600, 1601]"
NCP-5.zip,2,NCP,197,1539,53,2,"[1538, 1539]"
CP-28.zip,1,CP,3766,5710,24,1,[5710]
CP-10.zip,1,CP,1399,3858,45,2,"[3858, 3859]"
Normal-14.zip,0,Normal,2074,529,82,1,[529]
Normal-2.zip,0,Normal,1733,1026,71,2,"[1026, 1027]"
NCP-11.zip,2,NCP,300,1747,58,2,"[1746, 1747]"
CP-17.zip,1,CP,1650,4338,31,1,[4338]
CP-20.zip,1,CP,2455,2937,116,1,[2937]
Normal-20.zip,0,Normal,2279,734,78,1,[734]
CP-8.zip,1,CP,1329,3697,45,3,"[3695, 3696, 3697]"
NCP-16.zip,2,NCP,444,2042,146,2,"[2042, 2043]"
Normal-12.zip,0,Normal,1999,454,78,1,[454]
CP-17.zip,1,CP,1624,4312,20,1,[4312]
NCP-10.zip,2,NCP,2720,2676,45,1,[2676]
CP-2.zip,1,CP,1107,3325,183,1,[3325]
CP-18.zip,1,CP,1777,3542,62,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-15.zip,2,NCP,403,1955,110,2,"[1955, 1956]"
NCP-3.zip,2,NCP,134,1413,54,2,"[1412, 1413]"
CP-13.zip,1,CP,1500,4099,97,1,[4099]
CP-25.zip,1,CP,712,3074,118,1,[3074]
CP-23.zip,1,CP,648,3010,104,1,[3010]
CP-19.zip,1,CP,2443,2917,98,3,"[2915, 2916, 2917]"
NCP-16.zip,2,NCP,441,2036,115,2,"[2036, 2037]"
Normal-23.zip,0,Normal,2628,138,34,1,[138]
CP-19.zip,1,CP,1795,3596,41,2,"[3596, 3597]"
NCP-27.zip,2,NCP,1016,2580,20,3,"[2580, 2581, 2582]"
Normal-24.zip,0,Normal,2659,169,39,1,[169]
CP-17.zip,1,CP,1619,4307,29,1,[4307]
Normal-26.zip,0,Normal,3861,5373,211,1,[5373]
NCP-19.zip,2,NCP,519,2195,53,2,"[2194, 2195]"
NCP-6.zip,2,NCP,213,1571,66,2,"[1570, 1571]"
Normal-25.zip,0,Normal,3860,5372,212,1,[5372]
NCP-5.zip,2,NCP,192,1529,57,2,"[1528, 1529]"
CP-3.zip,1,CP,1153,3371,179,1,[3371]
CP-3.zip,1,CP,1159,3377,287,1,[3377]
NCP-30.zip,2,NCP,931,2473,21,1,[2473]
CP-6.zip,1,CP,1255,3473,107,1,[3473]
NCP-4.zip,2,NCP,169,1482,133,2,"[1482, 1483]"
NCP-12.zip,2,NCP,340,1828,128,2,"[1828, 1829]"
CP-26.zip,1,CP,3729,5665,36,3,"[5665, 5666, 5667]"
Normal-11.zip,0,Normal,1976,431,74,1,[431]
CP-9.zip,1,CP,1372,3798,49,3,"[3797, 3798, 3799]"
NCP-4.zip,2,NCP,161,1467,57,2,"[1466, 1467]"
CP-22.zip,1,CP,613,2975,78,1,[2975]
NCP-17.zip,2,NCP,460,2074,106,2,"[2074, 2075]"
NCP-21.zip,2,NCP,577,2311,145,2,"[2311, 2312]"
CP-25.zip,1,CP,741,3103,523,1,[3103]
CP-14.zip,1,CP,1532,4172,50,2,"[4171, 4172]"
NCP-11.zip,2,NCP,299,1744,139,2,"[1744, 1745]"
NCP-14.zip,2,NCP,372,1896,45,2,"[1895, 1896]"
CP-9.zip,1,CP,1356,3756,60,2,"[3756, 3757]"
Normal-11.zip,0,Normal,1968,423,96,1,[423]
CP-14.zip,1,CP,1525,4156,60,2,"[4155, 4156]"
CP-22.zip,1,CP,618,2980,166,1,[2980]
CP-17.zip,1,CP,1639,4327,26,1,[4327]
Normal-19.zip,0,Normal,2245,700,83,1,[700]
CP-13.zip,1,CP,1518,4139,67,3,"[4138, 4139, 4140]"
NCP-11.zip,2,NCP,29,1190,132,2,"[1190, 1191]"
CP-16.zip,1,CP,1615,4303,29,1,[4303]
CP-29.zip,1,CP,3823,5767,26,1,[5767]
NCP-20.zip,2,NCP,574,2306,139,2,"[2306, 2307]"
NCP-12.zip,2,NCP,340,1829,54,2,"[1828, 1829]"
Normal-21.zip,0,Normal,2285,740,68,1,[740]
NCP-16.zip,2,NCP,455,2065,56,2,"[2064, 2065]"
NCP-16.zip,2,NCP,436,2026,61,2,"[2025, 2026]"
NCP-14.zip,2,NCP,383,1918,139,2,"[1918, 1919]"
NCP-30.zip,2,NCP,988,2538,287,2,"[2538, 2539]"
NCP-7.zip,2,NCP,247,1641,66,2,"[1640, 1641]"
CP-15.zip,1,CP,1571,4259,16,1,[4259]
Normal-16.zip,0,Normal,2137,592,94,1,[592]
CP-7.zip,1,CP,1304,3637,218,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
CP-6.zip,1,CP,1235,3453,155,1,[3453]
Normal-4.zip,0,Normal,776,211,353,1,[211]
Normal-18.zip,0,Normal,2189,644,82,1,[644]
Normal-6.zip,0,Normal,1799,254,97,1,[254]
Normal-15.zip,0,Normal,2113,568,93,1,[568]
CP-3.zip,1,CP,1131,3349,157,1,[3349]
Normal-6.zip,0,Normal,1819,274,91,1,[274]
CP-18.zip,1,CP,1781,3571,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-16.zip,2,NCP,455,2064,132,2,"[2064, 2065]"
Normal-8.zip,0,Normal,1888,343,99,1,[343]
NCP-20.zip,2,NCP,571,2301,68,2,"[2300, 2301]"
NCP-7.zip,2,NCP,247,1640,159,2,"[1640, 1641]"
CP-3.zip,1,CP,1137,3355,147,1,[3355]
CP-11.zip,1,CP,1423,3913,53,3,"[3911, 3912, 3913]"
NCP-1.zip,2,NCP,105,1347,145,2,"[1347, 1348]"
NCP-14.zip,2,NCP,377,1907,62,2,"[1906, 1907]"
CP-14.zip,1,CP,1535,4178,53,2,"[4178, 4179]"
Normal-9.zip,0,Normal,1900,355,93,1,[355]
CP-2.zip,1,CP,1125,3343,115,1,[3343]
CP-6.zip,1,CP,1243,3461,176,1,[3461]
NCP-6.zip,2,NCP,203,1550,140,2,"[1550, 1551]"
Normal-1.zip,0,Normal,1670,789,63,6,"[787, 788, 789, 790, 791, 792]"
NCP-29.zip,2,NCP,909,2451,401,1,[2451]
NCP-25.zip,2,NCP,3949,5463,35,1,[5463]
Normal-26.zip,0,Normal,3879,5391,28,1,[5391]
NCP-11.zip,2,NCP,29,1191,56,2,"[1190, 1191]"
CP-3.zip,1,CP,1129,3347,158,1,[3347]
NCP-15.zip,2,NCP,406,1961,146,2,"[1961, 1962]"
NCP-4.zip,2,NCP,151,1446,129,2,"[1446, 1447]"
CP-19.zip,1,CP,1789,3206,64,4,"[3204, 3205, 3206, 3207]"
NCP-6.zip,2,NCP,227,1599,61,2,"[1598, 1599]"
CP-12.zip,1,CP,1462,4003,51,3,"[4002, 4003, 4004]"
CP-3.zip,1,CP,1147,3365,164,1,[3365]
Normal-23.zip,0,Normal,2629,139,36,1,[139]
Normal-1.zip,0,Normal,1700,953,64,2,"[953, 954]"
CP-15.zip,1,CP,1561,4241,49,2,"[4241, 4242]"
NCP-16.zip,2,NCP,437,2028,60,2,"[2027, 2028]"
CP-18.zip,1,CP,1654,4342,23,1,[4342]
Normal-20.zip,0,Normal,2273,728,75,1,[728]
Normal-14.zip,0,Normal,2067,522,94,1,[522]
NCP-29.zip,2,NCP,911,2453,48,1,[2453]
Normal-2.zip,0,Normal,1756,1104,65,4,"[1101, 1102, 1103, 1104]"
Normal-11.zip,0,Normal,1989,444,105,1,[444]
NCP-15.zip,2,NCP,403,1956,47,2,"[1955, 1956]"
NCP-13.zip,2,NCP,348,1848,48,2,"[1847, 1848]"
NCP-28.zip,2,NCP,844,2359,594,1,[2359]
NCP-18.zip,2,NCP,51,1235,141,2,"[1235, 1236]"
CP-28.zip,1,CP,3789,5733,26,1,[5733]
Normal-2.zip,0,Normal,1763,1138,65,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-10.zip,2,NCP,278,1702,137,2,"[1702, 1703]"
CP-28.zip,1,CP,3770,5714,23,1,[5714]
NCP-23.zip,2,NCP,93,1323,66,2,"[1322, 1323]"
NCP-14.zip,2,NCP,390,1930,126,2,"[1930, 1931]"
NCP-8.zip,2,NCP,26,1184,82,2,"[1184, 1185]"
Normal-2.zip,0,Normal,1763,1142,71,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-6.zip,2,NCP,201,1547,62,2,"[1546, 1547]"
Normal-23.zip,0,Normal,2626,136,33,1,[136]
NCP-25.zip,2,NCP,3707,5534,50,1,[5534]
Normal-21.zip,0,Normal,2305,760,104,1,[760]
Normal-6.zip,0,Normal,1818,273,87,1,[273]
CP-22.zip,1,CP,641,3003,136,1,[3003]
Normal-7.zip,0,Normal,1836,291,104,1,[291]
Normal-27.zip,0,Normal,3894,5417,287,1,[5417]
NCP-30.zip,2,NCP,981,2526,23,2,"[2525, 2526]"
NCP-1.zip,2,NCP,102,1341,132,2,"[1341, 1342]"
NCP-14.zip,2,NCP,387,1924,128,2,"[1924, 1925]"
NCP-2.zip,2,NCP,117,1376,55,2,"[1375, 1376]"
NCP-5.zip,2,NCP,190,1524,152,2,"[1524, 1525]"
CP-26.zip,1,CP,3639,5598,241,1,[5598]
Normal-1.zip,0,Normal,1670,787,58,6,"[787, 788, 789, 790, 791, 792]"
Normal-2.zip,0,Normal,1757,1108,68,4,"[1105, 1106, 1107, 1108]"
Normal-13.zip,0,Normal,2043,498,84,1,[498]
CP-2.zip,1,CP,1099,3317,198,1,[3317]
CP-7.zip,1,CP,1318,3673,56,1,[3673]
Normal-9.zip,0,Normal,1899,354,88,1,[354]
CP-12.zip,1,CP,1467,4014,60,2,"[4013, 4014]"
NCP-5.zip,2,NCP,197,1538,124,2,"[1538, 1539]"
CP-26.zip,1,CP,3730,5669,202,2,"[5668, 5669]"
NCP-22.zip,2,NCP,845,2363,428,4,"[2360, 2361, 2362, 2363]"
NCP-2.zip,2,NCP,127,1399,139,2,"[1399, 1400]"
Normal-26.zip,0,Normal,3893,5416,63,1,[5416]
NCP-8.zip,2,NCP,2669,2689,37,1,[2689]
CP-18.zip,1,CP,1778,3549,64,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
CP-25.zip,1,CP,722,3084,70,1,[3084]
NCP-4.zip,2,NCP,157,1458,114,2,"[1458, 1459]"
NCP-23.zip,2,NCP,92,1320,87,2,"[1320, 1321]"
CP-11.zip,1,CP,1424,3914,60,2,"[3914, 3915]"
NCP-19.zip,2,NCP,529,2215,33,3,"[2214, 2215, 2217]"
CP-24.zip,1,CP,704,3066,417,1,[3066]
NCP-6.zip,2,NCP,201,1546,149,2,"[1546, 1547]"
Normal-17.zip,0,Normal,2177,632,88,1,[632]
NCP-14.zip,2,NCP,383,1919,58,2,"[1918, 1919]"
Normal-2.zip,0,Normal,1737,1040,80,4,"[1037, 1038, 1039, 1040]"
Normal-26.zip,0,Normal,3881,5393,22,1,[5393]
Normal-3.zip,0,Normal,1767,1161,71,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-14.zip,1,CP,1525,4155,60,2,"[4155, 4156]"
NCP-12.zip,2,NCP,341,1832,55,3,"[1830, 1832, 1834]"
Normal-13.zip,0,Normal,2034,489,91,1,[489]
NCP-26.zip,2,NCP,3978,5485,49,1,[5485]
NCP-22.zip,2,NCP,864,2389,221,2,"[2388, 2389]"
NCP-9.zip,2,NCP,2682,2652,47,1,[2652]
NCP-7.zip,2,NCP,2461,2642,42,1,[2642]
Normal-21.zip,0,Normal,2303,758,110,1,[758]
NCP-8.zip,2,NCP,2670,2690,41,1,[2690]
CP-7.zip,1,CP,1315,3666,59,2,"[3665, 3666]"
CP-19.zip,1,CP,2449,2927,118,1,[2927]
CP-19.zip,1,CP,1789,3204,59,4,"[3204, 3205, 3206, 3207]"
Normal-6.zip,0,Normal,1803,258,100,1,[258]
Normal-1.zip,0,Normal,1675,812,73,1,[812]
NCP-25.zip,2,NCP,3705,5532,63,1,[5532]
Normal-1.zip,0,Normal,1727,1010,63,4,"[1009, 1010, 1011, 1012]"
NCP-3.zip,2,NCP,1283,2724,70,1,[2724]
CP-18.zip,1,CP,1774,3524,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
CP-18.zip,1,CP,1774,3525,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
CP-30.zip,1,CP,3919,5543,66,4,"[5543, 5544, 5545, 5546]"
NCP-22.zip,2,NCP,85,1304,58,2,"[1303, 1304]"
Normal-18.zip,0,Normal,2192,647,79,1,[647]
CP-30.zip,1,CP,3935,5641,70,1,[5641]
NCP-6.zip,2,NCP,227,1598,146,2,"[1598, 1599]"
Normal-20.zip,0,Normal,2250,705,76,1,[705]
CP-12.zip,1,CP,1464,4008,63,2,"[4007, 4008]"
CP-29.zip,1,CP,3807,5751,20,1,[5751]
Normal-12.zip,0,Normal,1993,448,97,1,[448]
NCP-19.zip,2,NCP,528,2212,140,2,"[2212, 2213]"
NCP-26.zip,2,NCP,3987,5511,60,1,[5511]
NCP-25.zip,2,NCP,3969,5478,50,1,[5478]
CP-17.zip,1,CP,1638,4326,25,1,[4326]
CP-17.zip,1,CP,1643,4331,24,1,[4331]
CP-17.zip,1,CP,1629,4317,23,1,[4317]
CP-11.zip,1,CP,1423,3912,53,3,"[3911, 3912, 3913]"
Normal-2.zip,0,Normal,1743,1056,73,2,"[1056, 1057]"
Normal-9.zip,0,Normal,1915,370,91,1,[370]
Normal-22.zip,0,Normal,2590,100,41,1,[100]
NCP-11.zip,2,NCP,297,1741,60,2,"[1739, 1741]"
CP-30.zip,1,CP,3919,5545,70,4,"[5543, 5544, 5545, 5546]"
NCP-25.zip,2,NCP,3971,5480,50,1,[5480]
CP-11.zip,1,CP,1454,3983,53,3,"[3982, 3983, 3984]"
Normal-21.zip,0,Normal,2282,737,69,1,[737]
NCP-12.zip,2,NCP,318,1783,150,2,"[1783, 1784]"
NCP-10.zip,2,NCP,279,1704,139,2,"[1704, 1705]"
CP-2.zip,1,CP,1108,3326,135,1,[3326]
Normal-2.zip,0,Normal,1733,1027,71,2,"[1026, 1027]"
CP-18.zip,1,CP,1781,3567,67,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,2725,2681,51,1,[2681]
CP-11.zip,1,CP,1425,3917,49,3,"[3916, 3917, 3918]"
Normal-1.zip,0,Normal,1701,955,70,2,"[955, 956]"
CP-19.zip,1,CP,1787,3195,59,1,[3195]
================================================
FILE: Finetune/CC-CCII/csv/CC_CCII_fold0_valid.csv
================================================
zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids
Normal-12.zip,0,Normal,2015,470,94,1,[470]
NCP-6.zip,2,NCP,206,1557,58,2,"[1556, 1557]"
CP-1.zip,1,CP,1096,3314,196,1,[3314]
NCP-16.zip,2,NCP,43,1220,65,2,"[1219, 1220]"
NCP-18.zip,2,NCP,499,2155,58,2,"[2154, 2155]"
CP-10.zip,1,CP,1409,3881,66,2,"[3881, 3882]"
Normal-4.zip,0,Normal,777,212,83,1,[212]
NCP-9.zip,2,NCP,2708,2701,59,1,[2701]
CP-11.zip,1,CP,1432,3933,60,2,"[3932, 3933]"
NCP-4.zip,2,NCP,141,1426,129,2,"[1426, 1427]"
CP-23.zip,1,CP,673,3035,76,1,[3035]
NCP-29.zip,2,NCP,879,2414,173,1,[2414]
NCP-19.zip,2,NCP,536,2229,145,2,"[2229, 2230]"
NCP-18.zip,2,NCP,504,2165,65,2,"[2164, 2165]"
Normal-1.zip,0,Normal,1678,829,34,6,"[827, 828, 829, 830, 831, 832]"
NCP-8.zip,2,NCP,264,1674,179,2,"[1674, 1675]"
NCP-4.zip,2,NCP,155,1454,139,2,"[1454, 1455]"
CP-11.zip,1,CP,1418,3900,180,3,"[3900, 3901, 3902]"
NCP-5.zip,2,NCP,194,1532,133,2,"[1532, 1533]"
NCP-13.zip,2,NCP,361,1873,143,2,"[1873, 1874]"
Normal-1.zip,0,Normal,1710,976,78,2,"[975, 976]"
Normal-15.zip,0,Normal,2091,546,106,1,[546]
NCP-19.zip,2,NCP,518,2192,135,2,"[2192, 2193]"
Normal-18.zip,0,Normal,2190,645,90,1,[645]
Normal-12.zip,0,Normal,2013,468,87,1,[468]
NCP-11.zip,2,NCP,302,1751,62,2,"[1750, 1751]"
Normal-15.zip,0,Normal,2109,564,103,1,[564]
NCP-8.zip,2,NCP,264,1675,75,2,"[1674, 1675]"
CP-23.zip,1,CP,653,3015,285,1,[3015]
NCP-7.zip,2,NCP,235,1615,139,2,"[1615, 1616]"
CP-19.zip,1,CP,1786,3194,77,3,"[3192, 3193, 3194]"
CP-1.zip,1,CP,0,3137,37,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-15.zip,2,NCP,423,1999,133,2,"[1999, 2000]"
CP-6.zip,1,CP,1232,3450,91,1,[3450]
CP-14.zip,1,CP,1526,4158,51,3,"[4157, 4158, 4159]"
CP-4.zip,1,CP,1184,3402,193,1,[3402]
NCP-17.zip,2,NCP,483,2122,56,2,"[2121, 2122]"
CP-12.zip,1,CP,1459,3996,69,3,"[3995, 3996, 3997]"
CP-17.zip,1,CP,1637,4325,20,1,[4325]
CP-10.zip,1,CP,1411,3885,66,2,"[3885, 3886]"
NCP-9.zip,2,NCP,2707,2673,44,1,[2673]
NCP-29.zip,2,NCP,892,2431,20,1,[2431]
CP-26.zip,1,CP,3720,5653,243,2,"[5652, 5653]"
Normal-13.zip,0,Normal,2023,478,96,1,[478]
CP-11.zip,1,CP,1439,3947,62,2,"[3946, 3947]"
Normal-6.zip,0,Normal,1801,256,89,1,[256]
NCP-16.zip,2,NCP,442,2038,131,2,"[2038, 2039]"
Normal-9.zip,0,Normal,1920,375,100,1,[375]
CP-13.zip,1,CP,1489,4067,457,4,"[4067, 4068, 4069, 4070]"
CP-9.zip,1,CP,1378,3811,50,2,"[3810, 3811]"
NCP-12.zip,2,NCP,336,1821,50,2,"[1820, 1821]"
NCP-3.zip,2,NCP,1295,2736,61,1,[2736]
Normal-20.zip,0,Normal,2268,723,85,1,[723]
Normal-20.zip,0,Normal,2281,736,84,1,[736]
CP-1.zip,1,CP,1083,3128,71,2,"[3128, 3129]"
CP-14.zip,1,CP,1545,4207,65,2,"[4206, 4207]"
Normal-21.zip,0,Normal,2306,761,103,1,[761]
NCP-13.zip,2,NCP,350,1852,47,2,"[1851, 1852]"
CP-8.zip,1,CP,1326,3688,53,2,"[3688, 3689]"
NCP-7.zip,2,NCP,236,1617,283,2,"[1617, 1618]"
Normal-1.zip,0,Normal,1722,1001,73,2,"[1001, 1002]"
NCP-5.zip,2,NCP,177,1498,139,2,"[1498, 1499]"
Normal-1.zip,0,Normal,1708,971,74,2,"[971, 972]"
NCP-8.zip,2,NCP,2680,2651,46,1,[2651]
NCP-20.zip,2,NCP,570,2298,139,2,"[2298, 2299]"
Normal-1.zip,0,Normal,1723,1004,77,2,"[1003, 1004]"
NCP-10.zip,2,NCP,2723,2679,40,1,[2679]
Normal-21.zip,0,Normal,2302,757,96,1,[757]
Normal-18.zip,0,Normal,2199,654,85,1,[654]
Normal-25.zip,0,Normal,3858,5370,234,1,[5370]
Normal-21.zip,0,Normal,2286,741,84,1,[741]
Normal-1.zip,0,Normal,1720,995,74,2,"[995, 996]"
Normal-3.zip,0,Normal,769,204,138,1,[204]
NCP-9.zip,2,NCP,2687,2654,51,1,[2654]
Normal-16.zip,0,Normal,2124,579,101,1,[579]
NCP-6.zip,2,NCP,206,1556,139,2,"[1556, 1557]"
Normal-20.zip,0,Normal,2256,711,86,1,[711]
CP-10.zip,1,CP,1411,3886,66,2,"[3885, 3886]"
CP-11.zip,1,CP,1418,3901,54,3,"[3900, 3901, 3902]"
NCP-4.zip,2,NCP,155,1455,58,2,"[1454, 1455]"
NCP-19.zip,2,NCP,536,2230,61,2,"[2229, 2230]"
CP-13.zip,1,CP,1489,4068,229,4,"[4067, 4068, 4069, 4070]"
Normal-1.zip,0,Normal,1722,1002,73,2,"[1001, 1002]"
CP-14.zip,1,CP,1526,4157,124,3,"[4157, 4158, 4159]"
CP-13.zip,1,CP,1489,4069,58,4,"[4067, 4068, 4069, 4070]"
CP-1.zip,1,CP,0,3134,37,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-5.zip,2,NCP,177,1499,58,2,"[1498, 1499]"
NCP-13.zip,2,NCP,350,1851,109,2,"[1851, 1852]"
Normal-1.zip,0,Normal,1678,827,58,6,"[827, 828, 829, 830, 831, 832]"
CP-1.zip,1,CP,1083,3129,71,2,"[3128, 3129]"
CP-1.zip,1,CP,0,3140,269,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-9.zip,1,CP,1378,3810,50,2,"[3810, 3811]"
CP-8.zip,1,CP,1326,3689,53,2,"[3688, 3689]"
CP-1.zip,1,CP,0,3133,290,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-4.zip,2,NCP,141,1427,54,2,"[1426, 1427]"
Normal-1.zip,0,Normal,1723,1003,77,2,"[1003, 1004]"
NCP-15.zip,2,NCP,423,2000,56,2,"[1999, 2000]"
NCP-11.zip,2,NCP,302,1750,152,2,"[1750, 1751]"
NCP-20.zip,2,NCP,570,2299,58,2,"[2298, 2299]"
CP-12.zip,1,CP,1459,3995,164,3,"[3995, 3996, 3997]"
NCP-16.zip,2,NCP,442,2039,53,2,"[2038, 2039]"
CP-1.zip,1,CP,0,3136,290,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-1.zip,1,CP,0,3135,269,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-14.zip,1,CP,1526,4159,51,3,"[4157, 4158, 4159]"
CP-1.zip,1,CP,0,3131,285,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-12.zip,2,NCP,336,1820,117,2,"[1820, 1821]"
NCP-7.zip,2,NCP,235,1616,58,2,"[1615, 1616]"
CP-11.zip,1,CP,1418,3902,54,3,"[3900, 3901, 3902]"
NCP-7.zip,2,NCP,236,1618,119,2,"[1617, 1618]"
CP-11.zip,1,CP,1439,3946,62,2,"[3946, 3947]"
CP-1.zip,1,CP,0,3139,39,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-1.zip,1,CP,0,3132,42,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
Normal-1.zip,0,Normal,1708,972,74,2,"[971, 972]"
CP-12.zip,1,CP,1459,3997,69,3,"[3995, 3996, 3997]"
NCP-18.zip,2,NCP,504,2164,155,2,"[2164, 2165]"
Normal-1.zip,0,Normal,1720,996,74,2,"[995, 996]"
CP-19.zip,1,CP,1786,3192,81,3,"[3192, 3193, 3194]"
Normal-1.zip,0,Normal,1678,830,34,6,"[827, 828, 829, 830, 831, 832]"
CP-26.zip,1,CP,3720,5652,48,2,"[5652, 5653]"
CP-19.zip,1,CP,1786,3193,81,3,"[3192, 3193, 3194]"
CP-13.zip,1,CP,1489,4070,58,4,"[4067, 4068, 4069, 4070]"
NCP-13.zip,2,NCP,361,1874,60,2,"[1873, 1874]"
NCP-17.zip,2,NCP,483,2121,137,2,"[2121, 2122]"
Normal-1.zip,0,Normal,1678,832,62,6,"[827, 828, 829, 830, 831, 832]"
NCP-16.zip,2,NCP,43,1219,156,2,"[1219, 1220]"
NCP-18.zip,2,NCP,499,2154,139,2,"[2154, 2155]"
CP-10.zip,1,CP,1409,3882,66,2,"[3881, 3882]"
NCP-5.zip,2,NCP,194,1533,56,2,"[1532, 1533]"
NCP-19.zip,2,NCP,518,2193,57,2,"[2192, 2193]"
CP-11.zip,1,CP,1432,3932,60,2,"[3932, 3933]"
Normal-1.zip,0,Normal,1678,828,58,6,"[827, 828, 829, 830, 831, 832]"
Normal-1.zip,0,Normal,1678,831,62,6,"[827, 828, 829, 830, 831, 832]"
CP-1.zip,1,CP,0,3138,245,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
Normal-1.zip,0,Normal,1710,975,78,2,"[975, 976]"
CP-14.zip,1,CP,1545,4206,65,2,"[4206, 4207]"
NCP-5.zip,2,NCP,18,1169,57,2,"[1168, 1169]"
Normal-15.zip,0,Normal,2096,551,93,1,[551]
CP-21.zip,1,CP,2776,3307,31,1,[3307]
NCP-16.zip,2,NCP,449,2053,61,2,"[2052, 2053]"
NCP-15.zip,2,NCP,404,1958,46,2,"[1957, 1958]"
NCP-6.zip,2,NCP,210,1565,55,2,"[1564, 1565]"
CP-3.zip,1,CP,1144,3362,159,1,[3362]
Normal-8.zip,0,Normal,1879,334,88,1,[334]
Normal-1.zip,0,Normal,1721,1000,75,4,"[1000, 997, 998, 999]"
NCP-21.zip,2,NCP,583,2323,147,2,"[2323, 2324]"
NCP-1.zip,2,NCP,1039,2610,45,1,[2610]
Normal-8.zip,0,Normal,1882,337,86,1,[337]
Normal-21.zip,0,Normal,2307,762,80,1,[762]
CP-14.zip,1,CP,1528,4163,61,2,"[4163, 4164]"
CP-11.zip,1,CP,1443,3958,58,3,"[3957, 3958, 3959]"
NCP-18.zip,2,NCP,496,2149,70,2,"[2148, 2149]"
CP-7.zip,1,CP,1270,3489,204,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-7.zip,0,Normal,1834,289,82,1,[289]
NCP-13.zip,2,NCP,351,1853,145,2,"[1853, 1854]"
CP-18.zip,1,CP,1782,3584,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-1.zip,0,Normal,1676,816,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-11.zip,1,CP,1428,3923,221,3,"[3923, 3924, 3925]"
CP-8.zip,1,CP,1330,3699,58,3,"[3698, 3699, 3700]"
Normal-19.zip,0,Normal,2233,688,76,1,[688]
NCP-18.zip,2,NCP,514,2184,160,2,"[2184, 2185]"
Normal-6.zip,0,Normal,1804,259,102,1,[259]
Normal-22.zip,0,Normal,2598,108,38,1,[108]
CP-14.zip,1,CP,1534,4176,58,2,"[4176, 4177]"
CP-5.zip,1,CP,1217,3435,320,1,[3435]
NCP-14.zip,2,NCP,378,1908,168,2,"[1908, 1909]"
CP-18.zip,1,CP,1782,3582,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-25.zip,2,NCP,3963,5474,56,1,[5474]
NCP-22.zip,2,NCP,82,1298,55,2,"[1297, 1298]"
NCP-2.zip,2,NCP,1274,2715,55,1,[2715]
CP-22.zip,1,CP,619,2981,102,1,[2981]
Normal-24.zip,0,Normal,2661,171,31,1,[171]
CP-14.zip,1,CP,1540,4192,58,3,"[4191, 4192, 4193]"
NCP-10.zip,2,NCP,2724,2680,43,1,[2680]
Normal-2.zip,0,Normal,1742,1055,60,1,[1055]
CP-12.zip,1,CP,1486,4060,63,2,"[4059, 4060]"
NCP-19.zip,2,NCP,527,2211,48,2,"[2210, 2211]"
CP-10.zip,1,CP,1393,3846,60,2,"[3845, 3846]"
Normal-1.zip,0,Normal,1721,997,68,4,"[1000, 997, 998, 999]"
Normal-25.zip,0,Normal,3839,5351,220,1,[5351]
Normal-12.zip,0,Normal,1991,446,306,1,[446]
CP-19.zip,1,CP,1794,3595,38,2,"[3594, 3595]"
Normal-1.zip,0,Normal,1669,785,54,5,"[782, 783, 784, 785, 786]"
CP-18.zip,1,CP,1782,3580,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-11.zip,0,Normal,1963,418,95,1,[418]
CP-11.zip,1,CP,1428,3924,56,3,"[3923, 3924, 3925]"
Normal-9.zip,0,Normal,1918,373,85,1,[373]
Normal-16.zip,0,Normal,2118,573,89,1,[573]
NCP-4.zip,2,NCP,140,1424,128,2,"[1424, 1425]"
Normal-16.zip,0,Normal,2142,597,84,1,[597]
NCP-15.zip,2,NCP,410,1969,143,2,"[1969, 1970]"
Normal-3.zip,0,Normal,749,184,89,1,[184]
Normal-1.zip,0,Normal,1718,991,66,2,"[991, 992]"
NCP-5.zip,2,NCP,176,1497,53,2,"[1496, 1497]"
NCP-8.zip,2,NCP,265,1677,50,2,"[1676, 1677]"
CP-7.zip,1,CP,1270,3495,148,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-26.zip,2,NCP,3982,5489,34,1,[5489]
NCP-8.zip,2,NCP,2677,2695,51,1,[2695]
NCP-13.zip,2,NCP,357,1866,63,2,"[1865, 1866]"
NCP-13.zip,2,NCP,346,1843,139,2,"[1843, 1844]"
Normal-1.zip,0,Normal,1676,820,72,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-14.zip,2,NCP,379,1911,62,2,"[1910, 1911]"
NCP-1.zip,2,NCP,104,1345,139,2,"[1345, 1346]"
NCP-2.zip,2,NCP,116,1373,127,2,"[1373, 1374]"
NCP-17.zip,2,NCP,466,2087,145,2,"[2087, 2088]"
CP-11.zip,1,CP,1443,3957,139,3,"[3957, 3958, 3959]"
NCP-5.zip,2,NCP,181,1507,58,2,"[1506, 1507]"
NCP-18.zip,2,NCP,496,2148,168,2,"[2148, 2149]"
NCP-8.zip,2,NCP,265,1676,119,2,"[1676, 1677]"
Normal-1.zip,0,Normal,1669,782,62,5,"[782, 783, 784, 785, 786]"
CP-7.zip,1,CP,1270,3501,420,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-1.zip,0,Normal,1676,822,69,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-21.zip,2,NCP,583,2324,62,2,"[2323, 2324]"
NCP-19.zip,2,NCP,527,2210,114,2,"[2210, 2211]"
NCP-15.zip,2,NCP,404,1957,108,2,"[1957, 1958]"
NCP-17.zip,2,NCP,466,2088,61,2,"[2087, 2088]"
NCP-4.zip,2,NCP,140,1425,54,2,"[1424, 1425]"
NCP-13.zip,2,NCP,346,1844,58,2,"[1843, 1844]"
CP-7.zip,1,CP,1270,3494,129,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3497,133,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-18.zip,1,CP,1782,3579,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-1.zip,0,Normal,1676,818,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-11.zip,1,CP,1428,3925,56,3,"[3923, 3924, 3925]"
CP-7.zip,1,CP,1270,3488,287,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3500,160,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-1.zip,0,Normal,1669,784,196,5,"[782, 783, 784, 785, 786]"
Normal-1.zip,0,Normal,1669,783,62,5,"[782, 783, 784, 785, 786]"
CP-18.zip,1,CP,1782,3586,69,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-1.zip,0,Normal,1721,998,68,4,"[1000, 997, 998, 999]"
Normal-1.zip,0,Normal,1676,817,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-14.zip,1,CP,1540,4193,58,3,"[4191, 4192, 4193]"
Normal-1.zip,0,Normal,1676,821,72,7,"[816, 817, 818, 819, 820, 821, 822]"
Normal-1.zip,0,Normal,1676,819,65,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-22.zip,2,NCP,82,1297,129,2,"[1297, 1298]"
Normal-1.zip,0,Normal,1718,992,66,2,"[991, 992]"
CP-7.zip,1,CP,1270,3496,154,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-5.zip,2,NCP,181,1506,139,2,"[1506, 1507]"
CP-7.zip,1,CP,1270,3492,137,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-13.zip,2,NCP,357,1865,150,2,"[1865, 1866]"
CP-11.zip,1,CP,1443,3959,58,3,"[3957, 3958, 3959]"
Normal-1.zip,0,Normal,1669,786,54,5,"[782, 783, 784, 785, 786]"
NCP-6.zip,2,NCP,210,1564,131,2,"[1564, 1565]"
CP-19.zip,1,CP,1794,3594,38,2,"[3594, 3595]"
NCP-15.zip,2,NCP,410,1970,60,2,"[1969, 1970]"
NCP-14.zip,2,NCP,379,1910,147,2,"[1910, 1911]"
CP-7.zip,1,CP,1270,3491,142,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-1.zip,0,Normal,1721,999,75,4,"[1000, 997, 998, 999]"
CP-14.zip,1,CP,1540,4191,221,3,"[4191, 4192, 4193]"
CP-12.zip,1,CP,1486,4059,63,2,"[4059, 4060]"
CP-14.zip,1,CP,1528,4164,61,2,"[4163, 4164]"
NCP-16.zip,2,NCP,449,2052,145,2,"[2052, 2053]"
NCP-13.zip,2,NCP,351,1854,61,2,"[1853, 1854]"
CP-7.zip,1,CP,1270,3498,247,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-8.zip,1,CP,1330,3698,58,3,"[3698, 3699, 3700]"
NCP-2.zip,2,NCP,116,1374,54,2,"[1373, 1374]"
NCP-18.zip,2,NCP,514,2185,67,2,"[2184, 2185]"
CP-18.zip,1,CP,1782,3587,69,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-8.zip,1,CP,1330,3700,58,3,"[3698, 3699, 3700]"
NCP-14.zip,2,NCP,378,1909,69,2,"[1908, 1909]"
NCP-1.zip,2,NCP,104,1346,58,2,"[1345, 1346]"
CP-14.zip,1,CP,1534,4177,58,2,"[4176, 4177]"
CP-7.zip,1,CP,1270,3490,237,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3493,193,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-18.zip,1,CP,1782,3583,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-7.zip,1,CP,1270,3502,21,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3499,363,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-5.zip,2,NCP,18,1168,135,2,"[1168, 1169]"
CP-18.zip,1,CP,1782,3585,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-5.zip,2,NCP,176,1496,126,2,"[1496, 1497]"
CP-18.zip,1,CP,1782,3581,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-10.zip,1,CP,1393,3845,60,2,"[3845, 3846]"
Normal-2.zip,0,Normal,1740,1050,21,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-10.zip,1,CP,1387,3830,51,2,"[3829, 3830]"
NCP-10.zip,2,NCP,2719,2675,44,1,[2675]
CP-1.zip,1,CP,1065,3104,58,1,[3104]
CP-10.zip,1,CP,1392,3843,62,2,"[3843, 3844]"
CP-13.zip,1,CP,1508,4117,57,3,"[4115, 4116, 4117]"
NCP-22.zip,2,NCP,863,2387,282,2,"[2386, 2387]"
Normal-3.zip,0,Normal,763,198,102,1,[198]
Normal-23.zip,0,Normal,2635,145,27,1,[145]
NCP-20.zip,2,NCP,572,2303,58,2,"[2302, 2303]"
Normal-1.zip,0,Normal,1683,862,65,6,"[861, 862, 864, 865, 868, 869]"
CP-10.zip,1,CP,1398,3856,44,2,"[3856, 3857]"
CP-15.zip,1,CP,1566,4252,54,2,"[4252, 4253]"
NCP-10.zip,2,NCP,280,1707,51,2,"[1706, 1707]"
CP-19.zip,1,CP,1785,3187,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-15.zip,1,CP,1570,4258,22,1,[4258]
CP-10.zip,1,CP,1413,3890,66,2,"[3889, 3890]"
CP-7.zip,1,CP,1303,3618,42,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-19.zip,1,CP,2435,2903,295,3,"[2901, 2902, 2903]"
CP-6.zip,1,CP,1229,3447,144,1,[3447]
CP-26.zip,1,CP,3718,5647,51,2,"[5647, 5648]"
NCP-22.zip,2,NCP,860,2382,212,2,"[2382, 2383]"
NCP-22.zip,2,NCP,883,2419,52,2,"[2419, 2420]"
Normal-2.zip,0,Normal,1751,1079,61,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
CP-3.zip,1,CP,1148,3366,158,1,[3366]
CP-5.zip,1,CP,1200,3418,309,1,[3418]
Normal-25.zip,0,Normal,3852,5364,195,1,[5364]
CP-1.zip,1,CP,1088,3221,54,4,"[3220, 3221, 3222, 3223]"
CP-21.zip,1,CP,585,2947,94,1,[2947]
CP-18.zip,1,CP,1772,3178,72,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
NCP-20.zip,2,NCP,559,2275,127,2,"[2275, 2276]"
NCP-18.zip,2,NCP,498,2153,58,2,"[2152, 2153]"
Normal-27.zip,0,Normal,3911,5448,64,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
CP-1.zip,1,CP,1078,3123,68,1,[3123]
CP-12.zip,1,CP,1473,4028,51,3,"[4026, 4027, 4028]"
Normal-17.zip,0,Normal,2158,613,100,1,[613]
NCP-7.zip,2,NCP,246,1639,58,2,"[1638, 1639]"
NCP-17.zip,2,NCP,473,2102,61,2,"[2101, 2102]"
Normal-2.zip,0,Normal,1732,1025,73,1,[1025]
CP-15.zip,1,CP,1559,4237,53,2,"[4237, 4238]"
CP-7.zip,1,CP,1259,3477,162,1,[3477]
NCP-10.zip,2,NCP,271,1688,146,2,"[1688, 1689]"
CP-7.zip,1,CP,1303,3627,252,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-11.zip,2,NCP,286,1717,121,2,"[1717, 1718]"
CP-14.zip,1,CP,1541,4194,142,3,"[4194, 4195, 4196]"
Normal-19.zip,0,Normal,2223,678,95,1,[678]
CP-18.zip,1,CP,1658,4346,29,1,[4346]
NCP-22.zip,2,NCP,822,2333,31,2,"[2332, 2333]"
NCP-28.zip,2,NCP,870,2400,47,2,"[2399, 2400]"
CP-9.zip,1,CP,1373,3801,55,2,"[3800, 3801]"
CP-12.zip,1,CP,1456,3990,52,3,"[3988, 3989, 3990]"
NCP-21.zip,2,NCP,75,1284,54,2,"[1283, 1284]"
CP-9.zip,1,CP,1367,3787,58,3,"[3785, 3786, 3787]"
NCP-17.zip,2,NCP,469,2094,66,2,"[2093, 2094]"
CP-1.zip,1,CP,1097,3315,119,1,[3315]
NCP-8.zip,2,NCP,255,1656,139,2,"[1656, 1657]"
CP-11.zip,1,CP,1438,3944,46,2,"[3944, 3945]"
NCP-6.zip,2,NCP,211,1566,137,2,"[1566, 1567]"
Normal-2.zip,0,Normal,1759,1115,64,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-25.zip,2,NCP,3966,5476,43,1,[5476]
NCP-21.zip,2,NCP,575,2309,61,2,"[2308, 2309]"
CP-14.zip,1,CP,1523,4150,65,2,"[4150, 4151]"
CP-19.zip,1,CP,2447,2923,83,2,"[2923, 2924]"
CP-19.zip,1,CP,1788,3203,57,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
Normal-2.zip,0,Normal,1740,1045,102,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
Normal-1.zip,0,Normal,1681,845,69,1,[845]
NCP-11.zip,2,NCP,310,1768,70,2,"[1767, 1768]"
CP-5.zip,1,CP,1220,3438,200,1,[3438]
NCP-22.zip,2,NCP,87,1307,145,2,"[1307, 1308]"
Normal-4.zip,0,Normal,786,221,124,1,[221]
Normal-20.zip,0,Normal,2270,725,86,1,[725]
CP-16.zip,1,CP,1593,4281,22,1,[4281]
Normal-18.zip,0,Normal,2200,655,94,1,[655]
NCP-18.zip,2,NCP,515,2187,58,2,"[2186, 2187]"
CP-28.zip,1,CP,3784,5728,29,1,[5728]
NCP-5.zip,2,NCP,172,1488,139,2,"[1488, 1489]"
CP-2.zip,1,CP,1109,3327,210,1,[3327]
NCP-20.zip,2,NCP,551,2260,65,2,"[2259, 2260]"
CP-19.zip,1,CP,2444,2918,124,2,"[2918, 2919]"
Normal-2.zip,0,Normal,1760,1122,137,4,"[1121, 1122, 1123, 1124]"
CP-12.zip,1,CP,1476,4033,106,2,"[4033, 4034]"
CP-14.zip,1,CP,1538,4186,66,3,"[4185, 4186, 4187]"
NCP-21.zip,2,NCP,61,1256,60,2,"[1255, 1256]"
Normal-13.zip,0,Normal,2046,501,79,1,[501]
CP-15.zip,1,CP,1565,4250,66,2,"[4250, 4251]"
CP-10.zip,1,CP,1407,3876,58,2,"[3876, 3877]"
CP-13.zip,1,CP,1508,4116,57,3,"[4115, 4116, 4117]"
Normal-27.zip,0,Normal,3905,5437,288,2,"[5437, 5438]"
NCP-13.zip,2,NCP,36,1204,141,2,"[1204, 1205]"
NCP-30.zip,2,NCP,941,2484,169,1,[2484]
Normal-2.zip,0,Normal,1758,1109,291,2,"[1109, 1110]"
CP-8.zip,1,CP,1342,3723,139,3,"[3723, 3724, 3725]"
CP-3.zip,1,CP,1132,3350,180,1,[3350]
CP-18.zip,1,CP,1773,3184,67,4,"[3182, 3183, 3184, 3185]"
NCP-17.zip,2,NCP,464,2083,60,2,"[2082, 2083]"
NCP-16.zip,2,NCP,447,2048,139,2,"[2048, 2049]"
NCP-3.zip,2,NCP,136,1416,126,2,"[1416, 1417]"
NCP-18.zip,2,NCP,501,2158,146,2,"[2158, 2159]"
CP-19.zip,1,CP,2439,2909,409,1,[2909]
NCP-19.zip,2,NCP,538,2233,142,2,"[2233, 2234]"
Normal-27.zip,0,Normal,3907,5440,63,2,"[5440, 5441]"
CP-18.zip,1,CP,1773,3182,61,4,"[3182, 3183, 3184, 3185]"
CP-8.zip,1,CP,1320,3677,62,2,"[3676, 3677]"
CP-9.zip,1,CP,1366,3782,138,3,"[3782, 3783, 3784]"
CP-7.zip,1,CP,1309,3651,49,2,"[3651, 3652]"
NCP-22.zip,2,NCP,863,2386,228,2,"[2386, 2387]"
NCP-18.zip,2,NCP,492,2140,139,2,"[2140, 2141]"
NCP-21.zip,2,NCP,69,1271,48,2,"[1270, 1271]"
CP-13.zip,1,CP,1515,4131,137,3,"[4131, 4132, 4133]"
Normal-11.zip,0,Normal,1980,435,83,1,[435]
Normal-14.zip,0,Normal,2073,528,87,1,[528]
CP-3.zip,1,CP,1149,3367,157,1,[3367]
NCP-14.zip,2,NCP,376,1905,60,2,"[1904, 1905]"
NCP-8.zip,2,NCP,253,1653,58,2,"[1652, 1653]"
CP-10.zip,1,CP,1413,3889,67,2,"[3889, 3890]"
NCP-27.zip,2,NCP,1061,2638,75,1,[2638]
Normal-9.zip,0,Normal,1921,376,80,1,[376]
NCP-16.zip,2,NCP,453,2061,51,2,"[2060, 2061]"
NCP-10.zip,2,NCP,275,1697,64,2,"[1696, 1697]"
CP-24.zip,1,CP,708,3070,80,1,[3070]
NCP-20.zip,2,NCP,560,2277,124,2,"[2277, 2279]"
NCP-6.zip,2,NCP,207,1558,109,2,"[1558, 1559]"
NCP-2.zip,2,NCP,114,1370,53,2,"[1369, 1370]"
CP-10.zip,1,CP,1407,3877,58,2,"[3876, 3877]"
Normal-1.zip,0,Normal,1682,858,70,6,"[847, 848, 852, 853, 857, 858]"
CP-14.zip,1,CP,1548,4214,51,2,"[4213, 4214]"
Normal-2.zip,0,Normal,1760,1124,74,4,"[1121, 1122, 1123, 1124]"
NCP-14.zip,2,NCP,374,1900,58,2,"[1899, 1900]"
NCP-7.zip,2,NCP,2486,2645,50,1,[2645]
NCP-19.zip,2,NCP,542,2242,55,2,"[2241, 2242]"
Normal-25.zip,0,Normal,3836,5348,202,1,[5348]
Normal-11.zip,0,Normal,1961,416,91,1,[416]
NCP-27.zip,2,NCP,819,2329,33,1,[2329]
NCP-5.zip,2,NCP,184,1512,112,2,"[1512, 1513]"
NCP-15.zip,2,NCP,416,1984,139,2,"[1984, 1986]"
Normal-2.zip,0,Normal,1740,1047,60,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-14.zip,1,CP,1538,4187,65,3,"[4185, 4186, 4187]"
CP-8.zip,1,CP,1351,3746,56,1,[3746]
NCP-10.zip,2,NCP,281,1709,51,2,"[1708, 1709]"
CP-10.zip,1,CP,1415,3895,65,3,"[3894, 3895, 3896]"
Normal-1.zip,0,Normal,1682,848,67,6,"[847, 848, 852, 853, 857, 858]"
NCP-17.zip,2,NCP,485,2126,64,2,"[2125, 2126]"
NCP-18.zip,2,NCP,501,2159,61,2,"[2158, 2159]"
Normal-8.zip,0,Normal,1863,318,82,1,[318]
CP-18.zip,1,CP,1772,3176,81,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-26.zip,1,CP,3652,5551,53,2,"[5551, 5552]"
Normal-5.zip,0,Normal,808,243,134,1,[243]
CP-28.zip,1,CP,3771,5715,23,1,[5715]
CP-19.zip,1,CP,1785,3188,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-26.zip,1,CP,3637,5596,35,1,[5596]
CP-12.zip,1,CP,1455,3987,58,3,"[3985, 3986, 3987]"
CP-8.zip,1,CP,1336,3712,60,2,"[3712, 3713]"
CP-7.zip,1,CP,1303,3624,224,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-30.zip,1,CP,4015,5564,226,1,[5564]
Normal-8.zip,0,Normal,1883,338,91,1,[338]
Normal-3.zip,0,Normal,1764,1145,62,4,"[1143, 1144, 1145, 1146]"
NCP-15.zip,2,NCP,42,1218,61,2,"[1216, 1218]"
NCP-7.zip,2,NCP,245,1636,149,2,"[1636, 1637]"
Normal-14.zip,0,Normal,2066,521,74,1,[521]
Normal-20.zip,0,Normal,2275,730,85,1,[730]
NCP-8.zip,2,NCP,268,1682,126,2,"[1682, 1683]"
CP-7.zip,1,CP,1307,3647,49,4,"[3645, 3646, 3647, 3648]"
Normal-15.zip,0,Normal,2106,561,93,1,[561]
CP-20.zip,1,CP,2772,3303,261,1,[3303]
NCP-25.zip,2,NCP,3970,5479,48,1,[5479]
CP-28.zip,1,CP,3772,5716,23,1,[5716]
Normal-1.zip,0,Normal,1683,868,64,6,"[861, 862, 864, 865, 868, 869]"
NCP-5.zip,2,NCP,175,1494,131,2,"[1494, 1495]"
NCP-18.zip,2,NCP,507,2171,58,2,"[2170, 2171]"
CP-7.zip,1,CP,1303,3611,257,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-19.zip,2,NCP,537,2231,143,2,"[2231, 2232]"
Normal-1.zip,0,Normal,1728,1014,66,4,"[1013, 1014, 1015, 1016]"
Normal-23.zip,0,Normal,2608,118,25,1,[118]
NCP-23.zip,2,NCP,90,1317,43,2,"[1316, 1317]"
NCP-2.zip,2,NCP,123,1388,62,2,"[1387, 1388]"
NCP-18.zip,2,NCP,507,2170,138,2,"[2170, 2171]"
NCP-14.zip,2,NCP,395,1940,171,2,"[1940, 1941]"
NCP-23.zip,2,NCP,946,2489,26,1,[2489]
CP-7.zip,1,CP,1308,3649,43,2,"[3649, 3650]"
NCP-17.zip,2,NCP,462,2078,161,2,"[2078, 2079]"
Normal-16.zip,0,Normal,2145,600,86,1,[600]
NCP-20.zip,2,NCP,560,2279,51,2,"[2277, 2279]"
CP-30.zip,1,CP,3931,5630,82,4,"[5630, 5631, 5632, 5633]"
CP-13.zip,1,CP,1501,4101,55,2,"[4100, 4101]"
CP-1.zip,1,CP,1,3144,248,5,"[3143, 3144, 3145, 3146, 3147]"
CP-25.zip,1,CP,713,3075,120,1,[3075]
CP-15.zip,1,CP,1562,4244,55,2,"[4243, 4244]"
CP-26.zip,1,CP,3643,5602,298,2,"[5602, 5603]"
CP-27.zip,1,CP,3748,5692,17,1,[5692]
CP-7.zip,1,CP,1303,3610,51,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-14.zip,1,CP,1524,4152,229,3,"[4152, 4153, 4154]"
Normal-6.zip,0,Normal,1800,255,92,1,[255]
Normal-1.zip,0,Normal,1711,978,63,2,"[977, 978]"
Normal-17.zip,0,Normal,2157,612,78,1,[612]
CP-8.zip,1,CP,1334,3707,133,2,"[3707, 3708]"
NCP-19.zip,2,NCP,545,2247,135,2,"[2247, 2248]"
CP-28.zip,1,CP,3790,5734,23,1,[5734]
NCP-21.zip,2,NCP,61,1255,142,2,"[1255, 1256]"
NCP-30.zip,2,NCP,993,2546,203,1,[2546]
NCP-9.zip,2,NCP,2689,2656,47,1,[2656]
Normal-27.zip,0,Normal,3907,5441,66,2,"[5440, 5441]"
CP-26.zip,1,CP,3652,5552,52,2,"[5551, 5552]"
NCP-11.zip,2,NCP,287,1719,142,2,"[1719, 1720]"
NCP-2.zip,2,NCP,114,1369,125,2,"[1369, 1370]"
NCP-21.zip,2,NCP,581,2320,58,2,"[2319, 2320]"
Normal-26.zip,0,Normal,3887,5404,78,3,"[5400, 5401, 5404]"
NCP-12.zip,2,NCP,325,1799,50,2,"[1798, 1799]"
NCP-27.zip,2,NCP,1060,2637,81,1,[2637]
CP-13.zip,1,CP,1516,4135,62,2,"[4134, 4135]"
CP-15.zip,1,CP,1580,4268,21,1,[4268]
NCP-15.zip,2,NCP,428,2009,125,2,"[2009, 2010]"
NCP-19.zip,2,NCP,52,1237,135,2,"[1237, 1238]"
NCP-9.zip,2,NCP,2691,2658,44,1,[2658]
NCP-12.zip,2,NCP,34,1200,156,2,"[1200, 1201]"
NCP-19.zip,2,NCP,539,2235,131,2,"[2235, 2236]"
Normal-1.zip,0,Normal,1728,1015,72,4,"[1013, 1014, 1015, 1016]"
NCP-6.zip,2,NCP,222,1588,122,2,"[1588, 1589]"
NCP-10.zip,2,NCP,273,1693,54,2,"[1692, 1693]"
CP-29.zip,1,CP,3822,5766,20,1,[5766]
CP-10.zip,1,CP,1401,3864,51,3,"[3862, 3863, 3864]"
Normal-13.zip,0,Normal,2030,485,66,1,[485]
NCP-4.zip,2,NCP,164,1473,63,2,"[1472, 1473]"
CP-21.zip,1,CP,3,3504,35,1,[3504]
CP-9.zip,1,CP,1368,3788,69,2,"[3788, 3789]"
Normal-1.zip,0,Normal,1683,865,72,6,"[861, 862, 864, 865, 868, 869]"
Normal-1.zip,0,Normal,1704,963,69,4,"[961, 962, 963, 964]"
CP-12.zip,1,CP,1466,4012,52,2,"[4011, 4012]"
Normal-11.zip,0,Normal,1971,426,100,1,[426]
NCP-16.zip,2,NCP,450,2055,34,2,"[2054, 2055]"
NCP-30.zip,2,NCP,962,2505,38,1,[2505]
NCP-8.zip,2,NCP,2675,2648,44,1,[2648]
CP-7.zip,1,CP,1303,3630,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-25.zip,2,NCP,3955,5468,46,1,[5468]
NCP-18.zip,2,NCP,488,2131,139,2,"[2131, 2133]"
CP-12.zip,1,CP,1484,4053,181,3,"[4053, 4054, 4055]"
CP-9.zip,1,CP,1368,3789,69,2,"[3788, 3789]"
NCP-10.zip,2,NCP,28,1188,145,2,"[1188, 1189]"
CP-30.zip,1,CP,3931,5631,82,4,"[5630, 5631, 5632, 5633]"
NCP-10.zip,2,NCP,277,1701,64,2,"[1700, 1701]"
NCP-4.zip,2,NCP,148,1441,63,2,"[1440, 1441]"
CP-12.zip,1,CP,1481,4044,139,3,"[4044, 4045, 4046]"
Normal-21.zip,0,Normal,2288,743,96,1,[743]
CP-30.zip,1,CP,4017,5566,41,1,[5566]
CP-13.zip,1,CP,1499,4098,53,2,"[4097, 4098]"
CP-13.zip,1,CP,1516,4134,62,2,"[4134, 4135]"
Normal-13.zip,0,Normal,2049,504,88,1,[504]
CP-18.zip,1,CP,1772,3179,72,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
NCP-20.zip,2,NCP,57,1248,56,2,"[1247, 1248]"
Normal-1.zip,0,Normal,1704,961,71,4,"[961, 962, 963, 964]"
CP-9.zip,1,CP,1366,3783,57,3,"[3782, 3783, 3784]"
CP-32.zip,1,CP,2464,3228,66,1,[3228]
CP-15.zip,1,CP,1555,4228,62,2,"[4228, 4229]"
Normal-3.zip,0,Normal,758,193,122,1,[193]
NCP-12.zip,2,NCP,329,1806,157,2,"[1806, 1807]"
CP-7.zip,1,CP,1307,3646,259,4,"[3645, 3646, 3647, 3648]"
CP-26.zip,1,CP,3722,5657,205,2,"[5656, 5657]"
NCP-14.zip,2,NCP,382,1916,139,2,"[1916, 1917]"
CP-27.zip,1,CP,3752,5696,20,1,[5696]
Normal-16.zip,0,Normal,2129,584,75,1,[584]
NCP-13.zip,2,NCP,367,1885,158,2,"[1885, 1886]"
NCP-6.zip,2,NCP,204,1553,58,2,"[1552, 1553]"
CP-30.zip,1,CP,3918,5542,71,1,[5542]
CP-10.zip,1,CP,1392,3844,62,2,"[3843, 3844]"
Normal-11.zip,0,Normal,1979,434,87,1,[434]
Normal-2.zip,0,Normal,1741,1053,61,2,"[1053, 1054]"
Normal-10.zip,0,Normal,1945,400,87,1,[400]
Normal-26.zip,0,Normal,3882,5394,27,1,[5394]
CP-20.zip,1,CP,2456,2940,126,1,[2940]
NCP-5.zip,2,NCP,184,1513,48,2,"[1512, 1513]"
NCP-9.zip,2,NCP,2693,2659,49,1,[2659]
CP-8.zip,1,CP,1348,3739,197,3,"[3739, 3740, 3741]"
Normal-18.zip,0,Normal,2214,669,102,1,[669]
CP-10.zip,1,CP,1415,3896,65,3,"[3894, 3895, 3896]"
NCP-3.zip,2,NCP,1290,2731,66,1,[2731]
Normal-2.zip,0,Normal,1759,1111,62,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-15.zip,1,CP,1566,4253,54,2,"[4252, 4253]"
Normal-27.zip,0,Normal,3911,5447,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
NCP-15.zip,2,NCP,401,1951,139,2,"[1951, 1952]"
CP-7.zip,1,CP,1309,3652,49,2,"[3651, 3652]"
Normal-4.zip,0,Normal,787,222,320,1,[222]
NCP-20.zip,2,NCP,550,2258,60,2,"[2257, 2258]"
NCP-5.zip,2,NCP,195,1534,143,2,"[1534, 1535]"
NCP-13.zip,2,NCP,367,1886,66,2,"[1885, 1886]"
NCP-19.zip,2,NCP,530,2218,132,1,[2218]
Normal-6.zip,0,Normal,1811,266,95,1,[266]
NCP-30.zip,2,NCP,963,2506,21,1,[2506]
Normal-2.zip,0,Normal,1759,1112,62,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-13.zip,2,NCP,369,1890,58,2,"[1889, 1890]"
NCP-16.zip,2,NCP,457,2068,134,2,"[2068, 2069]"
NCP-26.zip,2,NCP,3981,5488,45,1,[5488]
NCP-22.zip,2,NCP,816,2325,50,1,[2325]
Normal-1.zip,0,Normal,1730,1019,63,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-15.zip,2,NCP,419,1991,130,2,"[1991, 1992]"
CP-30.zip,1,CP,4016,5565,37,1,[5565]
CP-24.zip,1,CP,694,3056,135,1,[3056]
NCP-17.zip,2,NCP,470,2095,154,2,"[2095, 2096]"
Normal-4.zip,0,Normal,781,216,118,1,[216]
Normal-27.zip,0,Normal,3911,5449,64,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
CP-13.zip,1,CP,1497,4093,68,3,"[4092, 4093, 4094]"
NCP-26.zip,2,NCP,3991,5515,43,1,[5515]
CP-8.zip,1,CP,1331,3701,62,2,"[3701, 3702]"
Normal-9.zip,0,Normal,1910,365,91,1,[365]
NCP-27.zip,2,NCP,820,2330,34,1,[2330]
CP-7.zip,1,CP,13,3171,65,4,"[3170, 3171, 3172, 3173]"
CP-20.zip,1,CP,2764,3295,39,1,[3295]
Normal-1.zip,0,Normal,1714,984,71,3,"[982, 983, 984]"
CP-13.zip,1,CP,1501,4100,55,2,"[4100, 4101]"
Normal-15.zip,0,Normal,2117,572,87,1,[572]
NCP-22.zip,2,NCP,87,1308,61,2,"[1307, 1308]"
CP-30.zip,1,CP,3929,5627,70,2,"[5626, 5627]"
NCP-4.zip,2,NCP,158,1461,52,2,"[1460, 1461]"
CP-10.zip,1,CP,1389,3834,52,3,"[3833, 3834, 3835]"
CP-13.zip,1,CP,1497,4094,68,3,"[4092, 4093, 4094]"
CP-10.zip,1,CP,1415,3894,155,3,"[3894, 3895, 3896]"
CP-30.zip,1,CP,4014,5563,35,1,[5563]
NCP-17.zip,2,NCP,462,2079,67,2,"[2078, 2079]"
CP-29.zip,1,CP,3803,5747,23,1,[5747]
CP-1.zip,1,CP,1,3143,300,5,"[3143, 3144, 3145, 3146, 3147]"
NCP-11.zip,2,NCP,305,1756,157,2,"[1756, 1758]"
Normal-1.zip,0,Normal,1668,781,63,4,"[778, 779, 780, 781]"
CP-4.zip,1,CP,1174,3392,175,1,[3392]
Normal-14.zip,0,Normal,2060,515,77,1,[515]
Normal-22.zip,0,Normal,2602,112,32,1,[112]
CP-14.zip,1,CP,1541,4196,58,3,"[4194, 4195, 4196]"
Normal-12.zip,0,Normal,2019,474,87,1,[474]
CP-25.zip,1,CP,733,3095,84,1,[3095]
CP-13.zip,1,CP,1499,4097,53,2,"[4097, 4098]"
Normal-1.zip,0,Normal,1683,861,65,6,"[861, 862, 864, 865, 868, 869]"
CP-19.zip,1,CP,1788,3201,55,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
Normal-1.zip,0,Normal,1704,964,69,4,"[961, 962, 963, 964]"
NCP-3.zip,2,NCP,1289,2730,62,1,[2730]
NCP-20.zip,2,NCP,567,2292,148,2,"[2292, 2293]"
CP-7.zip,1,CP,1303,3613,232,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-13.zip,0,Normal,2027,482,89,1,[482]
Normal-2.zip,0,Normal,1759,1114,59,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
Normal-27.zip,0,Normal,3897,5424,75,4,"[5423, 5424, 5426, 5427]"
Normal-22.zip,0,Normal,2317,772,77,1,[772]
Normal-2.zip,0,Normal,1758,1110,59,2,"[1109, 1110]"
NCP-2.zip,2,NCP,121,1384,43,2,"[1383, 1384]"
NCP-13.zip,2,NCP,356,1864,53,2,"[1863, 1864]"
Normal-2.zip,0,Normal,1760,1121,85,4,"[1121, 1122, 1123, 1124]"
CP-29.zip,1,CP,3825,5769,25,1,[5769]
NCP-17.zip,2,NCP,46,1226,52,2,"[1225, 1226]"
NCP-19.zip,2,NCP,53,1240,60,2,"[1239, 1240]"
NCP-12.zip,2,NCP,314,1776,58,2,"[1775, 1776]"
Normal-21.zip,0,Normal,2290,745,88,1,[745]
Normal-2.zip,0,Normal,1760,1123,74,4,"[1121, 1122, 1123, 1124]"
Normal-24.zip,0,Normal,2666,176,35,1,[176]
CP-8.zip,1,CP,1346,3735,53,3,"[3733, 3734, 3735]"
Normal-17.zip,0,Normal,2164,619,84,1,[619]
NCP-8.zip,2,NCP,2672,2647,47,1,[2647]
NCP-2.zip,2,NCP,1277,2718,57,1,[2718]
CP-9.zip,1,CP,1370,3793,62,2,"[3792, 3793]"
NCP-20.zip,2,NCP,551,2259,154,2,"[2259, 2260]"
Normal-10.zip,0,Normal,1928,383,87,1,[383]
CP-21.zip,1,CP,598,2960,646,1,[2960]
CP-20.zip,1,CP,2755,3286,34,1,[3286]
Normal-16.zip,0,Normal,2141,596,100,1,[596]
CP-14.zip,1,CP,1544,4205,50,3,"[4203, 4204, 4205]"
NCP-9.zip,2,NCP,270,1687,62,2,"[1686, 1687]"
CP-5.zip,1,CP,1222,3440,157,1,[3440]
CP-19.zip,1,CP,1791,3210,100,4,"[3210, 3211, 3212, 3213]"
NCP-16.zip,2,NCP,450,2054,78,2,"[2054, 2055]"
NCP-20.zip,2,NCP,557,2272,56,2,"[2271, 2272]"
NCP-3.zip,2,NCP,1284,2725,50,1,[2725]
CP-5.zip,1,CP,1205,3423,146,1,[3423]
CP-19.zip,1,CP,1785,3191,79,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-1.zip,2,NCP,1017,2583,452,1,[2583]
Normal-2.zip,0,Normal,1736,1033,25,5,"[1032, 1033, 1034, 1035, 1036]"
NCP-1.zip,2,NCP,1021,2589,183,4,"[2587, 2588, 2589, 2590]"
NCP-7.zip,2,NCP,232,1608,146,2,"[1608, 1609]"
NCP-23.zip,2,NCP,950,2493,34,1,[2493]
CP-6.zip,1,CP,1246,3464,175,1,[3464]
Normal-1.zip,0,Normal,1671,794,67,3,"[793, 794, 795]"
CP-1.zip,1,CP,1095,3313,161,1,[3313]
CP-25.zip,1,CP,714,3076,98,1,[3076]
NCP-17.zip,2,NCP,479,2114,58,2,"[2113, 2114]"
NCP-18.zip,2,NCP,515,2186,139,2,"[2186, 2187]"
Normal-24.zip,0,Normal,2652,162,35,1,[162]
Normal-2.zip,0,Normal,1752,1085,66,1,[1085]
Normal-24.zip,0,Normal,2638,148,38,1,[148]
NCP-8.zip,2,NCP,260,1666,163,2,"[1666, 1667]"
NCP-18.zip,2,NCP,509,2175,58,2,"[2174, 2175]"
Normal-14.zip,0,Normal,2079,534,92,1,[534]
Normal-3.zip,0,Normal,751,186,119,1,[186]
NCP-8.zip,2,NCP,263,1673,74,2,"[1672, 1673]"
CP-22.zip,1,CP,626,2988,174,1,[2988]
Normal-23.zip,0,Normal,2619,129,43,1,[129]
CP-1.zip,1,CP,1069,3109,77,4,"[3108, 3109, 3110, 3111]"
NCP-13.zip,2,NCP,360,1872,51,2,"[1871, 1872]"
NCP-23.zip,2,NCP,915,2457,31,1,[2457]
Normal-2.zip,0,Normal,1740,1048,60,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
NCP-3.zip,2,NCP,131,1407,117,2,"[1407, 1408]"
NCP-21.zip,2,NCP,79,1292,55,2,"[1291, 1292]"
Normal-4.zip,0,Normal,779,214,290,1,[214]
CP-27.zip,1,CP,3734,5676,32,3,"[5676, 5677, 5678]"
Normal-15.zip,0,Normal,2104,559,101,1,[559]
CP-5.zip,1,CP,1218,3436,213,1,[3436]
NCP-3.zip,2,NCP,1291,2732,55,1,[2732]
NCP-19.zip,2,NCP,537,2232,60,2,"[2231, 2232]"
NCP-21.zip,2,NCP,71,1274,126,2,"[1274, 1275]"
NCP-5.zip,2,NCP,195,1535,60,2,"[1534, 1535]"
CP-9.zip,1,CP,1359,3766,46,3,"[3764, 3765, 3766]"
NCP-2.zip,2,NCP,119,1380,62,2,"[1379, 1380]"
Normal-19.zip,0,Normal,2241,696,86,1,[696]
CP-7.zip,1,CP,1303,3626,51,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-15.zip,0,Normal,2112,567,84,1,[567]
NCP-20.zip,2,NCP,569,2296,142,2,"[2296, 2297]"
CP-7.zip,1,CP,1303,3606,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-9.zip,2,NCP,2700,2666,43,1,[2666]
NCP-9.zip,2,NCP,2697,2663,46,1,[2663]
CP-19.zip,1,CP,2435,2902,100,3,"[2901, 2902, 2903]"
CP-29.zip,1,CP,3809,5753,19,1,[5753]
NCP-10.zip,2,NCP,2718,2674,42,1,[2674]
Normal-1.zip,0,Normal,1668,778,60,4,"[778, 779, 780, 781]"
NCP-18.zip,2,NCP,509,2174,138,2,"[2174, 2175]"
NCP-16.zip,2,NCP,456,2066,135,2,"[2066, 2067]"
NCP-5.zip,2,NCP,187,1519,57,2,"[1518, 1519]"
CP-7.zip,1,CP,1303,3612,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-20.zip,2,NCP,57,1247,132,2,"[1247, 1248]"
Normal-1.zip,0,Normal,1715,986,71,2,"[985, 986]"
Normal-2.zip,0,Normal,1749,1069,61,4,"[1069, 1070, 1071, 1072]"
NCP-24.zip,2,NCP,984,2530,241,2,"[2529, 2530]"
Normal-1.zip,0,Normal,1682,847,67,6,"[847, 848, 852, 853, 857, 858]"
CP-7.zip,1,CP,1303,3619,213,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-1.zip,1,CP,1069,3110,77,4,"[3108, 3109, 3110, 3111]"
Normal-12.zip,0,Normal,2017,472,99,1,[472]
CP-10.zip,1,CP,1400,3861,54,2,"[3860, 3861]"
NCP-22.zip,2,NCP,881,2416,225,1,[2416]
CP-11.zip,1,CP,1420,3906,59,2,"[3905, 3906]"
NCP-5.zip,2,NCP,172,1489,59,2,"[1488, 1489]"
NCP-6.zip,2,NCP,20,1172,127,2,"[1172, 1173]"
NCP-28.zip,2,NCP,846,2364,269,1,[2364]
Normal-14.zip,0,Normal,2075,530,93,1,[530]
CP-6.zip,1,CP,1238,3456,191,1,[3456]
CP-7.zip,1,CP,1263,3481,120,1,[3481]
CP-7.zip,1,CP,1303,3617,27,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-10.zip,1,CP,1398,3857,44,2,"[3856, 3857]"
CP-1.zip,1,CP,1088,3220,54,4,"[3220, 3221, 3222, 3223]"
CP-8.zip,1,CP,1320,3676,62,2,"[3676, 3677]"
CP-7.zip,1,CP,1303,3608,55,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-15.zip,2,NCP,426,2005,139,2,"[2005, 2006]"
NCP-28.zip,2,NCP,869,2397,58,1,[2397]
NCP-11.zip,2,NCP,288,1721,114,2,"[1721, 1722]"
NCP-21.zip,2,NCP,581,2319,139,2,"[2319, 2320]"
Normal-26.zip,0,Normal,3878,5390,24,1,[5390]
Normal-13.zip,0,Normal,2041,496,95,1,[496]
Normal-25.zip,0,Normal,3845,5357,182,1,[5357]
Normal-22.zip,0,Normal,2599,109,39,1,[109]
Normal-4.zip,0,Normal,789,224,120,1,[224]
Normal-1.zip,0,Normal,1714,982,40,3,"[982, 983, 984]"
NCP-16.zip,2,NCP,434,2022,51,2,"[2021, 2022]"
NCP-28.zip,2,NCP,830,2343,120,1,[2343]
Normal-1.zip,0,Normal,1704,962,71,4,"[961, 962, 963, 964]"
NCP-5.zip,2,NCP,196,1537,55,2,"[1536, 1537]"
CP-8.zip,1,CP,1336,3713,60,2,"[3712, 3713]"
NCP-29.zip,2,NCP,895,2436,140,2,"[2435, 2436]"
NCP-29.zip,2,NCP,930,2472,23,1,[2472]
CP-12.zip,1,CP,1482,4047,181,3,"[4047, 4048, 4049]"
CP-10.zip,1,CP,1401,3862,201,3,"[3862, 3863, 3864]"
NCP-5.zip,2,NCP,182,1509,55,2,"[1508, 1509]"
CP-12.zip,1,CP,1483,4050,148,3,"[4050, 4051, 4052]"
NCP-28.zip,2,NCP,870,2399,247,2,"[2399, 2400]"
Normal-2.zip,0,Normal,1741,1054,61,2,"[1053, 1054]"
CP-8.zip,1,CP,1324,3684,58,2,"[3684, 3685]"
NCP-9.zip,2,NCP,2681,2696,58,1,[2696]
CP-9.zip,1,CP,1367,3786,58,3,"[3785, 3786, 3787]"
CP-19.zip,1,CP,1790,3209,69,2,"[3208, 3209]"
CP-11.zip,1,CP,1430,3928,77,2,"[3928, 3929]"
Normal-18.zip,0,Normal,2207,662,99,1,[662]
Normal-11.zip,0,Normal,1972,427,97,1,[427]
CP-5.zip,1,CP,1221,3439,295,1,[3439]
NCP-15.zip,2,NCP,42,1216,146,2,"[1216, 1218]"
CP-22.zip,1,CP,640,3002,136,1,[3002]
NCP-7.zip,2,NCP,245,1637,62,2,"[1636, 1637]"
NCP-6.zip,2,NCP,215,1574,155,2,"[1574, 1575]"
NCP-29.zip,2,NCP,903,2445,87,1,[2445]
NCP-7.zip,2,NCP,232,1609,61,2,"[1608, 1609]"
NCP-2.zip,2,NCP,119,1379,147,2,"[1379, 1380]"
Normal-2.zip,0,Normal,1739,1042,278,3,"[1042, 1043, 1044]"
CP-28.zip,1,CP,3791,5735,26,1,[5735]
NCP-27.zip,2,NCP,828,2341,45,1,[2341]
NCP-12.zip,2,NCP,314,1775,139,2,"[1775, 1776]"
NCP-6.zip,2,NCP,20,1173,54,2,"[1172, 1173]"
CP-13.zip,1,CP,1490,4073,69,3,"[4071, 4072, 4073]"
NCP-20.zip,2,NCP,569,2297,60,2,"[2296, 2297]"
Normal-2.zip,0,Normal,1759,1113,59,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-30.zip,2,NCP,987,2536,71,2,"[2536, 2537]"
CP-15.zip,1,CP,1579,4267,20,1,[4267]
CP-19.zip,1,CP,1790,3208,69,2,"[3208, 3209]"
NCP-22.zip,2,NCP,883,2420,200,2,"[2419, 2420]"
NCP-20.zip,2,NCP,568,2295,61,2,"[2294, 2295]"
Normal-13.zip,0,Normal,2036,491,102,1,[491]
NCP-26.zip,2,NCP,3973,5482,48,1,[5482]
CP-7.zip,1,CP,1303,3609,271,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-27.zip,1,CP,3743,5687,22,1,[5687]
Normal-11.zip,0,Normal,1981,436,91,1,[436]
NCP-2.zip,2,NCP,125,1391,127,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-9.zip,2,NCP,270,1686,147,2,"[1686, 1687]"
Normal-23.zip,0,Normal,2636,146,42,1,[146]
NCP-3.zip,2,NCP,1286,2727,64,1,[2727]
CP-10.zip,1,CP,1386,3828,66,2,"[3827, 3828]"
Normal-19.zip,0,Normal,2230,685,91,1,[685]
Normal-5.zip,0,Normal,805,240,327,1,[240]
Normal-26.zip,0,Normal,3891,5412,62,2,"[5411, 5412]"
NCP-27.zip,2,NCP,2671,2691,51,1,[2691]
NCP-27.zip,2,NCP,1059,2636,52,1,[2636]
CP-8.zip,1,CP,1344,3730,58,3,"[3728, 3729, 3730]"
Normal-24.zip,0,Normal,2662,172,41,1,[172]
Normal-3.zip,0,Normal,744,179,278,1,[179]
CP-5.zip,1,CP,1202,3420,207,1,[3420]
NCP-27.zip,2,NCP,1006,2567,19,2,"[2566, 2567]"
Normal-19.zip,0,Normal,2248,703,87,1,[703]
Normal-2.zip,0,Normal,1736,1034,25,5,"[1032, 1033, 1034, 1035, 1036]"
Normal-8.zip,0,Normal,1867,322,87,1,[322]
Normal-23.zip,0,Normal,2609,119,40,1,[119]
CP-11.zip,1,CP,1453,3980,56,3,"[3979, 3980, 3981]"
Normal-26.zip,0,Normal,3875,5387,24,1,[5387]
CP-26.zip,1,CP,3647,5607,32,1,[5607]
Normal-12.zip,0,Normal,2006,461,77,1,[461]
Normal-6.zip,0,Normal,1827,282,99,1,[282]
NCP-19.zip,2,NCP,533,2224,156,1,[2224]
NCP-11.zip,2,NCP,287,1720,60,2,"[1719, 1720]"
NCP-7.zip,2,NCP,2487,2687,38,1,[2687]
CP-3.zip,1,CP,1160,3378,318,1,[3378]
Normal-7.zip,0,Normal,1858,313,95,1,[313]
CP-13.zip,1,CP,1514,4129,61,2,"[4129, 4130]"
NCP-20.zip,2,NCP,561,2280,139,2,"[2280, 2281]"
CP-14.zip,1,CP,1527,4161,58,3,"[4160, 4161, 4162]"
CP-25.zip,1,CP,721,3083,86,1,[3083]
CP-13.zip,1,CP,1496,4091,55,2,"[4090, 4091]"
Normal-1.zip,0,Normal,1728,1013,66,4,"[1013, 1014, 1015, 1016]"
NCP-12.zip,2,NCP,317,1781,117,2,"[1781, 1782]"
CP-19.zip,1,CP,2437,2906,132,3,"[2905, 2906, 2907]"
NCP-5.zip,2,NCP,196,1536,131,2,"[1536, 1537]"
CP-11.zip,1,CP,1437,3942,57,2,"[3942, 3943]"
NCP-5.zip,2,NCP,182,1508,130,2,"[1508, 1509]"
CP-9.zip,1,CP,1363,3774,64,2,"[3774, 3775]"
CP-10.zip,1,CP,1401,3863,51,3,"[3862, 3863, 3864]"
NCP-10.zip,2,NCP,275,1696,153,2,"[1696, 1697]"
CP-10.zip,1,CP,1387,3829,51,2,"[3829, 3830]"
CP-30.zip,1,CP,3931,5633,68,4,"[5630, 5631, 5632, 5633]"
NCP-7.zip,2,NCP,234,1614,58,2,"[1613, 1614]"
NCP-3.zip,2,NCP,1296,2737,66,1,[2737]
NCP-11.zip,2,NCP,283,1712,62,1,[1712]
CP-9.zip,1,CP,1363,3775,64,2,"[3774, 3775]"
NCP-8.zip,2,NCP,255,1657,58,2,"[1656, 1657]"
NCP-17.zip,2,NCP,464,2082,144,2,"[2082, 2083]"
CP-12.zip,1,CP,1473,4027,51,3,"[4026, 4027, 4028]"
CP-28.zip,1,CP,3781,5725,20,1,[5725]
NCP-14.zip,2,NCP,391,1933,55,2,"[1932, 1933]"
Normal-13.zip,0,Normal,2032,487,85,1,[487]
NCP-28.zip,2,NCP,872,2403,183,2,"[2403, 2404]"
NCP-17.zip,2,NCP,479,2113,139,2,"[2113, 2114]"
NCP-11.zip,2,NCP,305,1758,65,2,"[1756, 1758]"
NCP-1.zip,2,NCP,1021,2587,201,4,"[2587, 2588, 2589, 2590]"
NCP-30.zip,2,NCP,957,2500,50,1,[2500]
Normal-17.zip,0,Normal,2172,627,91,1,[627]
CP-7.zip,1,CP,1316,3667,147,3,"[3667, 3668, 3669]"
NCP-24.zip,2,NCP,971,2514,74,1,[2514]
Normal-2.zip,0,Normal,1740,1051,59,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
NCP-18.zip,2,NCP,494,2145,65,2,"[2144, 2145]"
NCP-30.zip,2,NCP,987,2537,368,2,"[2536, 2537]"
Normal-13.zip,0,Normal,2048,503,94,1,[503]
CP-8.zip,1,CP,1347,3736,265,3,"[3736, 3737, 3738]"
Normal-2.zip,0,Normal,1751,1081,62,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-15.zip,2,NCP,41,1215,63,2,"[1214, 1215]"
CP-12.zip,1,CP,1456,3989,52,3,"[3988, 3989, 3990]"
NCP-21.zip,2,NCP,80,1294,54,2,"[1293, 1294]"
CP-29.zip,1,CP,3808,5752,23,1,[5752]
CP-26.zip,1,CP,3732,5671,53,2,"[5671, 5672]"
NCP-8.zip,2,NCP,251,1648,131,2,"[1648, 1649]"
Normal-2.zip,0,Normal,1755,1099,71,4,"[1097, 1098, 1099, 1100]"
Normal-2.zip,0,Normal,1759,1120,66,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-28.zip,2,NCP,874,2407,341,1,[2407]
NCP-17.zip,2,NCP,469,2093,159,2,"[2093, 2094]"
Normal-1.zip,0,Normal,1730,1023,59,5,"[1019, 1020, 1021, 1022, 1023]"
Normal-8.zip,0,Normal,1891,346,96,1,[346]
CP-7.zip,1,CP,1303,3621,230,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3607,247,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-4.zip,1,CP,1170,3388,180,1,[3388]
CP-10.zip,1,CP,1395,3849,63,2,"[3849, 3850]"
NCP-2.zip,2,NCP,123,1387,148,2,"[1387, 1388]"
NCP-16.zip,2,NCP,446,2047,61,2,"[2046, 2047]"
NCP-2.zip,2,NCP,111,1364,56,2,"[1363, 1364]"
CP-15.zip,1,CP,1568,4256,22,1,[4256]
NCP-21.zip,2,NCP,79,1291,131,2,"[1291, 1292]"
CP-10.zip,1,CP,1395,3850,63,2,"[3849, 3850]"
CP-14.zip,1,CP,1542,4199,54,3,"[4197, 4198, 4199]"
CP-15.zip,1,CP,1555,4229,62,2,"[4228, 4229]"
CP-21.zip,1,CP,606,2968,255,1,[2968]
CP-12.zip,1,CP,1480,4042,54,2,"[4042, 4043]"
NCP-27.zip,2,NCP,1063,2640,82,1,[2640]
Normal-7.zip,0,Normal,1831,286,99,1,[286]
CP-14.zip,1,CP,1552,4221,62,2,"[4221, 4222]"
NCP-19.zip,2,NCP,541,2240,51,2,"[2239, 2240]"
NCP-23.zip,2,NCP,91,1318,100,2,"[1318, 1319]"
Normal-2.zip,0,Normal,1739,1043,56,3,"[1042, 1043, 1044]"
CP-19.zip,1,CP,2437,2907,183,3,"[2905, 2906, 2907]"
Normal-27.zip,0,Normal,3911,5452,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
CP-10.zip,1,CP,1396,3851,139,3,"[3851, 3852, 3853]"
CP-13.zip,1,CP,1490,4072,69,3,"[4071, 4072, 4073]"
CP-6.zip,1,CP,1242,3460,229,1,[3460]
NCP-17.zip,2,NCP,471,2098,59,2,"[2097, 2098]"
NCP-16.zip,2,NCP,434,2021,119,2,"[2021, 2022]"
NCP-16.zip,2,NCP,446,2046,146,2,"[2046, 2047]"
NCP-17.zip,2,NCP,473,2101,145,2,"[2101, 2102]"
NCP-21.zip,2,NCP,69,1270,113,2,"[1270, 1271]"
Normal-9.zip,0,Normal,1896,351,98,1,[351]
NCP-9.zip,2,NCP,2709,2702,44,1,[2702]
NCP-29.zip,2,NCP,907,2449,287,1,[2449]
NCP-2.zip,2,NCP,106,1349,150,2,"[1349, 1350]"
NCP-17.zip,2,NCP,477,2109,139,2,"[2109, 2110]"
CP-27.zip,1,CP,3734,5677,163,3,"[5676, 5677, 5678]"
Normal-8.zip,0,Normal,1877,332,88,1,[332]
Normal-7.zip,0,Normal,1853,308,94,1,[308]
NCP-2.zip,2,NCP,1272,2713,62,1,[2713]
CP-13.zip,1,CP,1515,4132,57,3,"[4131, 4132, 4133]"
NCP-21.zip,2,NCP,68,1269,49,2,"[1268, 1269]"
CP-25.zip,1,CP,719,3081,128,1,[3081]
NCP-10.zip,2,NCP,276,1698,139,2,"[1698, 1699]"
NCP-11.zip,2,NCP,294,1734,57,2,"[1733, 1734]"
CP-8.zip,1,CP,1342,3724,58,3,"[3723, 3724, 3725]"
Normal-4.zip,0,Normal,783,218,118,1,[218]
Normal-11.zip,0,Normal,1977,432,96,1,[432]
CP-12.zip,1,CP,1460,3998,60,2,"[3998, 3999]"
NCP-12.zip,2,NCP,32,1197,61,2,"[1196, 1197]"
Normal-2.zip,0,Normal,1736,1035,55,5,"[1032, 1033, 1034, 1035, 1036]"
CP-7.zip,1,CP,1308,3650,219,2,"[3649, 3650]"
NCP-17.zip,2,NCP,485,2125,153,2,"[2125, 2126]"
Normal-24.zip,0,Normal,2649,159,26,1,[159]
CP-1.zip,1,CP,1082,3127,74,1,[3127]
CP-28.zip,1,CP,3788,5732,26,1,[5732]
Normal-3.zip,0,Normal,1764,1143,66,4,"[1143, 1144, 1145, 1146]"
NCP-2.zip,2,NCP,125,1392,132,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-1.zip,2,NCP,1013,2577,524,1,[2577]
CP-22.zip,1,CP,630,2992,118,1,[2992]
Normal-27.zip,0,Normal,3897,5423,70,4,"[5423, 5424, 5426, 5427]"
CP-1.zip,1,CP,1088,3222,50,4,"[3220, 3221, 3222, 3223]"
NCP-11.zip,2,NCP,294,1733,136,2,"[1733, 1734]"
CP-3.zip,1,CP,1135,3353,202,1,[3353]
CP-10.zip,1,CP,1408,3879,59,3,"[3878, 3879, 3880]"
CP-19.zip,1,CP,1791,3213,71,4,"[3210, 3211, 3212, 3213]"
Normal-1.zip,0,Normal,1709,974,61,2,"[973, 974]"
CP-11.zip,1,CP,1438,3945,46,2,"[3944, 3945]"
CP-8.zip,1,CP,1325,3687,64,2,"[3686, 3687]"
CP-20.zip,1,CP,2761,3292,38,1,[3292]
NCP-17.zip,2,NCP,470,2096,64,2,"[2095, 2096]"
NCP-4.zip,2,NCP,164,1472,150,2,"[1472, 1473]"
NCP-14.zip,2,NCP,380,1912,148,2,"[1912, 1913]"
CP-7.zip,1,CP,1266,3484,134,1,[3484]
CP-10.zip,1,CP,1400,3860,54,2,"[3860, 3861]"
NCP-10.zip,2,NCP,281,1708,121,2,"[1708, 1709]"
NCP-14.zip,2,NCP,397,1944,158,2,"[1944, 1945]"
CP-27.zip,1,CP,3734,5678,32,3,"[5676, 5677, 5678]"
CP-15.zip,1,CP,1559,4238,53,2,"[4237, 4238]"
Normal-26.zip,0,Normal,3888,5406,63,1,[5406]
NCP-11.zip,2,NCP,308,1764,49,2,"[1763, 1764]"
NCP-16.zip,2,NCP,435,2024,62,2,"[2023, 2024]"
NCP-11.zip,2,NCP,285,1715,149,2,"[1715, 1716]"
NCP-20.zip,2,NCP,568,2294,144,2,"[2294, 2295]"
NCP-20.zip,2,NCP,550,2257,143,2,"[2257, 2258]"
NCP-6.zip,2,NCP,218,1581,58,2,"[1580, 1581]"
Normal-15.zip,0,Normal,2092,547,87,1,[547]
CP-10.zip,1,CP,1396,3853,58,3,"[3851, 3852, 3853]"
Normal-12.zip,0,Normal,2010,465,91,1,[465]
Normal-18.zip,0,Normal,2194,649,89,1,[649]
NCP-10.zip,2,NCP,276,1699,58,2,"[1698, 1699]"
CP-27.zip,1,CP,3746,5690,17,1,[5690]
Normal-24.zip,0,Normal,2656,166,34,1,[166]
CP-29.zip,1,CP,3802,5746,26,1,[5746]
CP-17.zip,1,CP,1641,4329,26,1,[4329]
Normal-2.zip,0,Normal,1749,1072,66,4,"[1069, 1070, 1071, 1072]"
CP-9.zip,1,CP,1373,3800,55,2,"[3800, 3801]"
Normal-22.zip,0,Normal,2596,106,44,1,[106]
Normal-14.zip,0,Normal,2072,527,77,1,[527]
Normal-20.zip,0,Normal,2251,706,89,1,[706]
CP-19.zip,1,CP,2435,2901,104,3,"[2901, 2902, 2903]"
CP-12.zip,1,CP,1482,4049,75,3,"[4047, 4048, 4049]"
CP-6.zip,1,CP,1231,3449,375,1,[3449]
CP-28.zip,1,CP,3797,5741,28,1,[5741]
CP-7.zip,1,CP,1307,3648,242,4,"[3645, 3646, 3647, 3648]"
NCP-1.zip,2,NCP,1030,2600,279,1,[2600]
CP-11.zip,1,CP,1448,3970,62,2,"[3969, 3970]"
Normal-20.zip,0,Normal,2255,710,95,1,[710]
CP-2.zip,1,CP,1124,3342,215,1,[3342]
NCP-28.zip,2,NCP,872,2404,46,2,"[2403, 2404]"
Normal-3.zip,0,Normal,1765,1147,60,2,"[1147, 1148]"
NCP-11.zip,2,NCP,289,1724,47,2,"[1723, 1724]"
CP-11.zip,1,CP,1442,3956,58,3,"[3954, 3955, 3956]"
CP-1.zip,1,CP,1081,3126,68,1,[3126]
Normal-20.zip,0,Normal,2263,718,108,1,[718]
NCP-19.zip,2,NCP,524,2204,191,1,[2204]
Normal-4.zip,0,Normal,784,219,105,1,[219]
CP-8.zip,1,CP,1337,3715,60,2,"[3714, 3715]"
NCP-28.zip,2,NCP,841,2356,282,1,[2356]
NCP-26.zip,2,NCP,3983,5510,40,1,[5510]
CP-20.zip,1,CP,2767,3298,35,1,[3298]
Normal-19.zip,0,Normal,2229,684,87,1,[684]
NCP-15.zip,2,NCP,429,2012,55,2,"[2011, 2012]"
CP-19.zip,1,CP,1788,3197,52,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-15.zip,1,CP,1558,4235,62,3,"[4234, 4235, 4236]"
NCP-3.zip,2,NCP,1280,2721,50,1,[2721]
NCP-4.zip,2,NCP,148,1440,150,2,"[1440, 1441]"
Normal-22.zip,0,Normal,2582,92,39,1,[92]
Normal-23.zip,0,Normal,2623,133,35,1,[133]
CP-13.zip,1,CP,1496,4090,55,2,"[4090, 4091]"
Normal-2.zip,0,Normal,1740,1049,21,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-30.zip,1,CP,3835,5779,23,1,[5779]
CP-11.zip,1,CP,1442,3954,139,3,"[3954, 3955, 3956]"
NCP-15.zip,2,NCP,429,2011,131,2,"[2011, 2012]"
CP-17.zip,1,CP,1621,4309,29,1,[4309]
CP-6.zip,1,CP,1244,3462,87,1,[3462]
NCP-1.zip,2,NCP,1021,2590,181,4,"[2587, 2588, 2589, 2590]"
NCP-9.zip,2,NCP,2706,2672,51,1,[2672]
NCP-14.zip,2,NCP,391,1932,131,2,"[1932, 1933]"
CP-19.zip,1,CP,1785,3189,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-3.zip,1,CP,1134,3352,330,1,[3352]
CP-8.zip,1,CP,1346,3734,53,3,"[3733, 3734, 3735]"
NCP-12.zip,2,NCP,320,1789,58,2,"[1788, 1789]"
NCP-21.zip,2,NCP,77,1287,126,2,"[1287, 1288]"
CP-17.zip,1,CP,1647,4335,23,1,[4335]
CP-11.zip,1,CP,1453,3979,221,3,"[3979, 3980, 3981]"
Normal-2.zip,0,Normal,1759,1117,65,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-17.zip,2,NCP,481,2118,68,2,"[2117, 2118]"
NCP-3.zip,2,NCP,1279,2720,66,1,[2720]
CP-8.zip,1,CP,1346,3733,53,3,"[3733, 3734, 3735]"
Normal-10.zip,0,Normal,1954,409,88,1,[409]
CP-17.zip,1,CP,1648,4336,29,1,[4336]
CP-14.zip,1,CP,1524,4154,58,3,"[4152, 4153, 4154]"
Normal-18.zip,0,Normal,2216,671,97,1,[671]
NCP-27.zip,2,NCP,179,1503,43,2,"[1503, 1502]"
CP-19.zip,1,CP,1788,3202,55,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-12.zip,1,CP,1482,4048,75,3,"[4047, 4048, 4049]"
NCP-29.zip,2,NCP,913,2455,268,1,[2455]
CP-19.zip,1,CP,2444,2919,112,2,"[2918, 2919]"
CP-22.zip,1,CP,639,3001,136,1,[3001]
NCP-2.zip,2,NCP,121,1383,100,2,"[1383, 1384]"
CP-8.zip,1,CP,1324,3685,58,2,"[3684, 3685]"
CP-11.zip,1,CP,1430,3929,77,2,"[3928, 3929]"
NCP-10.zip,2,NCP,271,1689,61,2,"[1688, 1689]"
NCP-15.zip,2,NCP,401,1952,58,2,"[1951, 1952]"
Normal-4.zip,0,Normal,788,223,336,1,[223]
Normal-27.zip,0,Normal,3898,5428,74,1,[5428]
Normal-21.zip,0,Normal,2312,767,88,1,[767]
Normal-17.zip,0,Normal,2170,625,62,1,[625]
NCP-3.zip,2,NCP,130,1406,59,2,"[1405, 1406]"
CP-3.zip,1,CP,1154,3372,169,1,[3372]
Normal-3.zip,0,Normal,1765,1148,60,2,"[1147, 1148]"
Normal-11.zip,0,Normal,1962,417,78,1,[417]
CP-18.zip,1,CP,1667,4355,26,1,[4355]
CP-1.zip,1,CP,1066,3105,59,1,[3105]
NCP-1.zip,2,NCP,1047,2619,473,1,[2619]
NCP-10.zip,2,NCP,2711,2704,44,1,[2704]
Normal-19.zip,0,Normal,2237,692,85,1,[692]
NCP-11.zip,2,NCP,289,1723,110,2,"[1723, 1724]"
NCP-7.zip,2,NCP,240,1626,66,2,"[1625, 1626]"
Normal-11.zip,0,Normal,1974,429,96,1,[429]
Normal-26.zip,0,Normal,3887,5401,67,3,"[5400, 5401, 5404]"
Normal-26.zip,0,Normal,3891,5411,67,2,"[5411, 5412]"
Normal-18.zip,0,Normal,2191,646,106,1,[646]
NCP-28.zip,2,NCP,840,2355,55,1,[2355]
Normal-6.zip,0,Normal,1814,269,88,1,[269]
NCP-12.zip,2,NCP,329,1807,66,2,"[1806, 1807]"
CP-24.zip,1,CP,686,3048,133,1,[3048]
CP-19.zip,1,CP,2432,2894,124,1,[2894]
Normal-10.zip,0,Normal,1952,407,107,1,[407]
CP-13.zip,1,CP,1515,4133,57,3,"[4131, 4132, 4133]"
CP-8.zip,1,CP,1347,3737,34,3,"[3736, 3737, 3738]"
Normal-2.zip,0,Normal,1754,1095,69,4,"[1093, 1094, 1095, 1096]"
CP-22.zip,1,CP,622,2984,459,1,[2984]
CP-7.zip,1,CP,1303,3629,244,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-3.zip,1,CP,1141,3359,350,1,[3359]
CP-7.zip,1,CP,1303,3631,242,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-27.zip,0,Normal,3911,5451,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
CP-14.zip,1,CP,1533,4173,100,3,"[4173, 4174, 4175]"
Normal-10.zip,0,Normal,1935,390,91,1,[390]
NCP-22.zip,2,NCP,822,2332,36,2,"[2332, 2333]"
CP-7.zip,1,CP,1303,3622,28,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-22.zip,0,Normal,2320,775,91,1,[775]
Normal-25.zip,0,Normal,3859,5371,216,1,[5371]
Normal-12.zip,0,Normal,2018,473,93,1,[473]
CP-9.zip,1,CP,1359,3764,181,3,"[3764, 3765, 3766]"
CP-20.zip,1,CP,2452,2931,298,1,[2931]
NCP-23.zip,2,NCP,90,1316,100,2,"[1316, 1317]"
Normal-2.zip,0,Normal,1744,1058,71,2,"[1058, 1059]"
NCP-18.zip,2,NCP,492,2141,58,2,"[2140, 2141]"
Normal-13.zip,0,Normal,2053,508,81,1,[508]
Normal-17.zip,0,Normal,2156,611,82,1,[611]
CP-19.zip,1,CP,1785,3190,79,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-19.zip,2,NCP,541,2239,121,2,"[2239, 2240]"
NCP-19.zip,2,NCP,531,2221,58,2,"[2220, 2221]"
CP-19.zip,1,CP,2448,2925,104,2,"[2925, 2926]"
CP-31.zip,1,CP,4044,5593,276,1,[5593]
CP-8.zip,1,CP,1345,3732,55,2,"[3731, 3732]"
Normal-3.zip,0,Normal,743,178,340,1,[178]
Normal-23.zip,0,Normal,2613,123,40,1,[123]
Normal-1.zip,0,Normal,1714,983,71,3,"[982, 983, 984]"
NCP-8.zip,2,NCP,268,1683,53,2,"[1682, 1683]"
CP-8.zip,1,CP,1347,3738,34,3,"[3736, 3737, 3738]"
CP-25.zip,1,CP,718,3080,466,1,[3080]
Normal-13.zip,0,Normal,2024,479,86,1,[479]
NCP-11.zip,2,NCP,310,1767,169,2,"[1767, 1768]"
Normal-1.zip,0,Normal,1668,780,63,4,"[778, 779, 780, 781]"
CP-17.zip,1,CP,1636,4324,26,1,[4324]
NCP-20.zip,2,NCP,55,1244,63,2,"[1243, 1244]"
CP-32.zip,1,CP,2463,3227,77,1,[3227]
NCP-16.zip,2,NCP,435,2023,153,2,"[2023, 2024]"
NCP-2.zip,2,NCP,106,1350,63,2,"[1349, 1350]"
CP-27.zip,1,CP,3753,5697,20,1,[5697]
NCP-15.zip,2,NCP,415,1983,63,2,"[1982, 1983]"
NCP-5.zip,2,NCP,191,1527,54,2,"[1526, 1527]"
CP-3.zip,1,CP,1142,3360,138,1,[3360]
NCP-19.zip,2,NCP,531,2220,139,2,"[2220, 2221]"
CP-14.zip,1,CP,1524,4153,58,3,"[4152, 4153, 4154]"
CP-19.zip,1,CP,1788,3196,49,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-14.zip,1,CP,1533,4175,42,3,"[4173, 4174, 4175]"
NCP-7.zip,2,NCP,244,1635,69,2,"[1634, 1635]"
Normal-7.zip,0,Normal,1848,303,95,1,[303]
CP-24.zip,1,CP,703,3065,120,1,[3065]
Normal-1.zip,0,Normal,1731,1024,36,1,[1024]
CP-11.zip,1,CP,1448,3969,62,2,"[3969, 3970]"
Normal-25.zip,0,Normal,3850,5362,180,1,[5362]
CP-7.zip,1,CP,13,3172,255,4,"[3170, 3171, 3172, 3173]"
CP-14.zip,1,CP,1533,4174,42,3,"[4173, 4174, 4175]"
NCP-3.zip,2,NCP,130,1405,140,2,"[1405, 1406]"
CP-23.zip,1,CP,647,3009,384,1,[3009]
Normal-24.zip,0,Normal,2637,147,36,1,[147]
NCP-28.zip,2,NCP,848,2367,283,2,"[2366, 2367]"
Normal-9.zip,0,Normal,1903,358,86,1,[358]
Normal-26.zip,0,Normal,3889,5408,65,2,"[5407, 5408]"
NCP-20.zip,2,NCP,567,2293,60,2,"[2292, 2293]"
CP-22.zip,1,CP,621,2983,174,1,[2983]
CP-10.zip,1,CP,1389,3835,51,3,"[3833, 3834, 3835]"
CP-9.zip,1,CP,1362,3773,61,2,"[3772, 3773]"
Normal-27.zip,0,Normal,3897,5426,72,4,"[5423, 5424, 5426, 5427]"
NCP-15.zip,2,NCP,428,2010,53,2,"[2009, 2010]"
Normal-3.zip,0,Normal,759,194,297,1,[194]
CP-13.zip,1,CP,1497,4092,68,3,"[4092, 4093, 4094]"
Normal-19.zip,0,Normal,2246,701,87,1,[701]
CP-3.zip,1,CP,1130,3348,166,1,[3348]
CP-14.zip,1,CP,1552,4222,62,2,"[4221, 4222]"
NCP-26.zip,2,NCP,3994,5518,52,1,[5518]
NCP-27.zip,2,NCP,328,1805,43,2,"[1804, 1805]"
NCP-13.zip,2,NCP,369,1889,138,2,"[1889, 1890]"
CP-20.zip,1,CP,2756,3287,56,1,[3287]
CP-22.zip,1,CP,638,3000,116,1,[3000]
CP-6.zip,1,CP,1250,3468,451,1,[3468]
CP-19.zip,1,CP,2437,2905,316,3,"[2905, 2906, 2907]"
Normal-16.zip,0,Normal,2130,585,88,1,[585]
NCP-14.zip,2,NCP,376,1904,142,2,"[1904, 1905]"
Normal-10.zip,0,Normal,1932,387,91,1,[387]
NCP-16.zip,2,NCP,453,2060,121,2,"[2060, 2061]"
NCP-5.zip,2,NCP,191,1526,128,2,"[1526, 1527]"
CP-12.zip,1,CP,1476,4034,53,2,"[4033, 4034]"
NCP-5.zip,2,NCP,175,1495,55,2,"[1494, 1495]"
NCP-21.zip,2,NCP,71,1275,53,2,"[1274, 1275]"
Normal-10.zip,0,Normal,1925,380,90,1,[380]
NCP-30.zip,2,NCP,994,2548,226,2,"[2547, 2548]"
CP-4.zip,1,CP,1192,3410,184,1,[3410]
Normal-23.zip,0,Normal,2631,141,38,1,[141]
Normal-2.zip,0,Normal,1751,1080,61,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-9.zip,2,NCP,2684,2697,50,1,[2697]
CP-27.zip,1,CP,3757,5701,22,1,[5701]
NCP-3.zip,2,NCP,1288,2729,61,1,[2729]
NCP-18.zip,2,NCP,505,2166,157,2,"[2166, 2167]"
CP-8.zip,1,CP,1348,3741,59,3,"[3739, 3740, 3741]"
Normal-24.zip,0,Normal,2651,161,34,1,[161]
Normal-23.zip,0,Normal,2618,128,35,1,[128]
CP-8.zip,1,CP,1331,3702,62,2,"[3701, 3702]"
NCP-14.zip,2,NCP,398,1947,70,2,"[1946, 1947]"
NCP-4.zip,2,NCP,158,1460,122,2,"[1460, 1461]"
NCP-23.zip,2,NCP,89,1312,157,4,"[1311, 1312, 1313, 1315]"
Normal-2.zip,0,Normal,1759,1116,64,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-17.zip,1,CP,1645,4333,26,1,[4333]
CP-10.zip,1,CP,1408,3880,59,3,"[3878, 3879, 3880]"
CP-30.zip,1,CP,3917,5541,62,1,[5541]
NCP-30.zip,2,NCP,933,2475,23,1,[2475]
CP-8.zip,1,CP,1344,3728,142,3,"[3728, 3729, 3730]"
NCP-17.zip,2,NCP,459,2072,133,2,"[2072, 2073]"
NCP-4.zip,2,NCP,150,1445,75,2,"[1444, 1445]"
CP-12.zip,1,CP,1455,3986,58,3,"[3985, 3986, 3987]"
Normal-27.zip,0,Normal,3897,5427,72,4,"[5423, 5424, 5426, 5427]"
CP-18.zip,1,CP,1772,3177,81,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-27.zip,1,CP,3745,5689,23,1,[5689]
NCP-29.zip,2,NCP,920,2462,183,1,[2462]
NCP-9.zip,2,NCP,2688,2655,56,1,[2655]
Normal-8.zip,0,Normal,1887,342,94,1,[342]
CP-1.zip,1,CP,1076,3120,70,1,[3120]
NCP-21.zip,2,NCP,575,2308,144,2,"[2308, 2309]"
Normal-15.zip,0,Normal,2100,555,94,1,[555]
NCP-11.zip,2,NCP,285,1716,62,2,"[1715, 1716]"
CP-8.zip,1,CP,1344,3729,59,3,"[3728, 3729, 3730]"
Normal-12.zip,0,Normal,2021,476,85,1,[476]
Normal-15.zip,0,Normal,2105,560,87,1,[560]
CP-9.zip,1,CP,1366,3784,57,3,"[3782, 3783, 3784]"
CP-18.zip,1,CP,1772,3181,75,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
NCP-15.zip,2,NCP,426,2006,58,2,"[2005, 2006]"
NCP-1.zip,2,NCP,1020,2586,45,1,[2586]
NCP-13.zip,2,NCP,356,1863,124,2,"[1863, 1864]"
Normal-8.zip,0,Normal,1865,320,99,1,[320]
NCP-30.zip,2,NCP,994,2547,226,2,"[2547, 2548]"
Normal-12.zip,0,Normal,2011,466,93,1,[466]
CP-18.zip,1,CP,1773,3185,67,4,"[3182, 3183, 3184, 3185]"
NCP-18.zip,2,NCP,505,2167,66,2,"[2166, 2167]"
CP-8.zip,1,CP,1328,3694,69,2,"[3693, 3694]"
NCP-2.zip,2,NCP,1278,2719,61,1,[2719]
CP-25.zip,1,CP,736,3098,494,1,[3098]
CP-19.zip,1,CP,1785,3186,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-24.zip,1,CP,7,3512,299,2,"[3511, 3512]"
Normal-27.zip,0,Normal,3913,5455,71,2,"[5454, 5455]"
NCP-6.zip,2,NCP,218,1580,139,2,"[1580, 1581]"
Normal-4.zip,0,Normal,795,230,120,1,[230]
NCP-6.zip,2,NCP,207,1559,46,2,"[1558, 1559]"
NCP-5.zip,2,NCP,189,1523,58,2,"[1522, 1523]"
Normal-22.zip,0,Normal,2314,769,84,1,[769]
CP-14.zip,1,CP,1541,4195,58,3,"[4194, 4195, 4196]"
Normal-26.zip,0,Normal,3866,5378,27,1,[5378]
NCP-30.zip,2,NCP,938,2481,78,2,"[2480, 2481]"
NCP-1.zip,2,NCP,1041,2612,126,1,[2612]
Normal-24.zip,0,Normal,2664,174,28,1,[174]
NCP-21.zip,2,NCP,75,1283,128,2,"[1283, 1284]"
CP-14.zip,1,CP,1542,4198,54,3,"[4197, 4198, 4199]"
NCP-11.zip,2,NCP,286,1718,51,2,"[1717, 1718]"
CP-8.zip,1,CP,1332,3704,41,2,"[3703, 3704]"
CP-14.zip,1,CP,1527,4160,142,3,"[4160, 4161, 4162]"
Normal-2.zip,0,Normal,1749,1071,66,4,"[1069, 1070, 1071, 1072]"
CP-7.zip,1,CP,13,3170,271,4,"[3170, 3171, 3172, 3173]"
CP-20.zip,1,CP,2769,3300,36,1,[3300]
Normal-11.zip,0,Normal,1973,428,90,1,[428]
CP-28.zip,1,CP,3783,5727,26,1,[5727]
NCP-12.zip,2,NCP,320,1788,139,2,"[1788, 1789]"
Normal-10.zip,0,Normal,1929,384,91,1,[384]
Normal-7.zip,0,Normal,1841,296,79,1,[296]
Normal-8.zip,0,Normal,1881,336,91,1,[336]
NCP-25.zip,2,NCP,3964,5475,41,1,[5475]
CP-12.zip,1,CP,1480,4043,54,2,"[4042, 4043]"
NCP-23.zip,2,NCP,91,1319,43,2,"[1318, 1319]"
NCP-11.zip,2,NCP,30,1193,56,1,[1193]
NCP-29.zip,2,NCP,924,2466,18,1,[2466]
CP-16.zip,1,CP,1614,4302,23,1,[4302]
Normal-14.zip,0,Normal,2061,516,88,1,[516]
NCP-27.zip,2,NCP,826,2339,54,1,[2339]
Normal-13.zip,0,Normal,2038,493,80,1,[493]
Normal-1.zip,0,Normal,1715,985,71,2,"[985, 986]"
CP-28.zip,1,CP,3782,5726,25,1,[5726]
CP-21.zip,1,CP,2777,3308,22,1,[3308]
CP-8.zip,1,CP,1328,3693,69,2,"[3693, 3694]"
CP-7.zip,1,CP,1303,3628,50,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-17.zip,2,NCP,468,2091,154,2,"[2091, 2092]"
NCP-13.zip,2,NCP,36,1205,59,2,"[1204, 1205]"
Normal-12.zip,0,Normal,2000,455,93,1,[455]
CP-19.zip,1,CP,2448,2926,102,2,"[2925, 2926]"
NCP-10.zip,2,NCP,2728,2711,54,1,[2711]
NCP-8.zip,2,NCP,263,1672,177,2,"[1672, 1673]"
CP-30.zip,1,CP,3831,5775,25,1,[5775]
Normal-1.zip,0,Normal,1709,973,61,2,"[973, 974]"
Normal-2.zip,0,Normal,1740,1046,300,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-27.zip,1,CP,3751,5695,22,1,[5695]
CP-11.zip,1,CP,1453,3981,56,3,"[3979, 3980, 3981]"
CP-16.zip,1,CP,1617,4305,23,1,[4305]
CP-19.zip,1,CP,1788,3198,53,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-19.zip,1,CP,2447,2924,86,2,"[2923, 2924]"
NCP-20.zip,2,NCP,55,1243,150,2,"[1243, 1244]"
CP-1.zip,1,CP,1069,3108,77,4,"[3108, 3109, 3110, 3111]"
CP-29.zip,1,CP,3827,5771,26,1,[5771]
CP-16.zip,1,CP,1599,4287,17,1,[4287]
NCP-12.zip,2,NCP,34,1201,64,2,"[1200, 1201]"
NCP-19.zip,2,NCP,523,2202,148,2,"[2202, 2203]"
CP-19.zip,1,CP,2429,2890,100,1,[2890]
NCP-9.zip,2,NCP,2695,2661,45,1,[2661]
Normal-1.zip,0,Normal,1730,1022,59,5,"[1019, 1020, 1021, 1022, 1023]"
CP-24.zip,1,CP,7,3511,298,2,"[3511, 3512]"
NCP-27.zip,2,NCP,1045,2617,30,1,[2617]
Normal-15.zip,0,Normal,2088,543,75,1,[543]
Normal-25.zip,0,Normal,3853,5365,205,1,[5365]
Normal-14.zip,0,Normal,2076,531,77,1,[531]
NCP-22.zip,2,NCP,84,1301,127,2,"[1301, 1302]"
CP-18.zip,1,CP,1660,4348,23,1,[4348]
NCP-26.zip,2,NCP,3980,5487,38,1,[5487]
CP-7.zip,1,CP,1303,3620,45,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-20.zip,1,CP,2758,3289,35,1,[3289]
Normal-6.zip,0,Normal,1808,263,95,1,[263]
Normal-2.zip,0,Normal,1739,1044,56,3,"[1042, 1043, 1044]"
CP-1.zip,1,CP,1068,3107,62,1,[3107]
Normal-14.zip,0,Normal,2083,538,87,1,[538]
CP-12.zip,1,CP,1484,4054,46,3,"[4053, 4054, 4055]"
CP-29.zip,1,CP,3811,5755,23,1,[5755]
CP-14.zip,1,CP,1548,4213,51,2,"[4213, 4214]"
NCP-20.zip,2,NCP,561,2281,58,2,"[2280, 2281]"
CP-14.zip,1,CP,1544,4204,51,3,"[4203, 4204, 4205]"
NCP-27.zip,2,NCP,1062,2639,176,1,[2639]
CP-25.zip,1,CP,735,3097,110,1,[3097]
CP-7.zip,1,CP,1303,3614,27,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-2.zip,1,CP,1115,3333,180,1,[3333]
CP-27.zip,1,CP,3756,5700,20,1,[5700]
Normal-5.zip,0,Normal,813,248,136,1,[248]
Normal-19.zip,0,Normal,2221,676,103,1,[676]
Normal-27.zip,0,Normal,3902,5434,73,1,[5434]
CP-11.zip,1,CP,1437,3943,57,2,"[3942, 3943]"
NCP-2.zip,2,NCP,126,1398,64,2,"[1396, 1398]"
Normal-20.zip,0,Normal,2265,720,87,1,[720]
CP-16.zip,1,CP,1589,4277,23,1,[4277]
Normal-16.zip,0,Normal,2149,604,85,1,[604]
NCP-19.zip,2,NCP,523,2203,62,2,"[2202, 2203]"
CP-12.zip,1,CP,1455,3985,138,3,"[3985, 3986, 3987]"
Normal-27.zip,0,Normal,3911,5450,68,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
CP-30.zip,1,CP,4040,5589,38,1,[5589]
NCP-1.zip,2,NCP,1049,2622,205,1,[2622]
Normal-1.zip,0,Normal,1674,811,74,2,"[810, 811]"
NCP-19.zip,2,NCP,539,2236,55,2,"[2235, 2236]"
Normal-1.zip,0,Normal,1668,779,60,4,"[778, 779, 780, 781]"
NCP-19.zip,2,NCP,542,2241,130,2,"[2241, 2242]"
CP-25.zip,1,CP,739,3101,112,1,[3101]
CP-9.zip,1,CP,1367,3785,140,3,"[3785, 3786, 3787]"
CP-14.zip,1,CP,1549,4215,61,2,"[4215, 4216]"
NCP-19.zip,2,NCP,53,1239,144,2,"[1239, 1240]"
Normal-1.zip,0,Normal,1730,1021,294,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-29.zip,2,NCP,918,2460,213,1,[2460]
NCP-23.zip,2,NCP,89,1311,138,4,"[1311, 1312, 1313, 1315]"
Normal-2.zip,0,Normal,1759,1119,66,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-9.zip,1,CP,1359,3765,46,3,"[3764, 3765, 3766]"
NCP-25.zip,2,NCP,3706,5533,54,1,[5533]
CP-8.zip,1,CP,1325,3686,65,2,"[3686, 3687]"
Normal-1.zip,0,Normal,1683,864,72,6,"[861, 862, 864, 865, 868, 869]"
NCP-19.zip,2,NCP,545,2248,57,2,"[2247, 2248]"
NCP-15.zip,2,NCP,418,1989,143,2,"[1989, 1990]"
CP-7.zip,1,CP,1261,3479,198,1,[3479]
NCP-29.zip,2,NCP,895,2435,143,2,"[2435, 2436]"
CP-12.zip,1,CP,1483,4051,62,3,"[4050, 4051, 4052]"
CP-12.zip,1,CP,1460,3999,60,2,"[3998, 3999]"
CP-12.zip,1,CP,1456,3988,122,3,"[3988, 3989, 3990]"
Normal-12.zip,0,Normal,2014,469,98,1,[469]
CP-14.zip,1,CP,1542,4197,180,3,"[4197, 4198, 4199]"
Normal-2.zip,0,Normal,1755,1098,73,4,"[1097, 1098, 1099, 1100]"
NCP-14.zip,2,NCP,382,1917,58,2,"[1916, 1917]"
NCP-4.zip,2,NCP,153,1451,58,2,"[1450, 1451]"
Normal-27.zip,0,Normal,3913,5454,68,2,"[5454, 5455]"
Normal-1.zip,0,Normal,1674,810,74,2,"[810, 811]"
Normal-2.zip,0,Normal,1736,1036,55,5,"[1032, 1033, 1034, 1035, 1036]"
Normal-1.zip,0,Normal,1682,852,81,6,"[847, 848, 852, 853, 857, 858]"
Normal-4.zip,0,Normal,796,231,287,1,[231]
NCP-11.zip,2,NCP,292,1729,138,2,"[1729, 1730]"
NCP-12.zip,2,NCP,327,1803,55,2,"[1802, 1803]"
Normal-25.zip,0,Normal,3712,5342,28,1,[5342]
CP-4.zip,1,CP,1182,3400,130,1,[3400]
CP-2.zip,1,CP,1113,3331,197,1,[3331]
NCP-22.zip,2,NCP,888,2426,55,1,[2426]
Normal-25.zip,0,Normal,3846,5358,209,1,[5358]
CP-9.zip,1,CP,1362,3772,61,2,"[3772, 3773]"
NCP-20.zip,2,NCP,572,2302,138,2,"[2302, 2303]"
CP-4.zip,1,CP,1193,3411,190,1,[3411]
Normal-5.zip,0,Normal,802,237,298,1,[237]
CP-23.zip,1,CP,655,3017,511,1,[3017]
Normal-2.zip,0,Normal,1751,1084,67,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-13.zip,2,NCP,360,1871,121,2,"[1871, 1872]"
NCP-30.zip,2,NCP,977,2521,257,1,[2521]
NCP-26.zip,2,NCP,3990,5514,51,1,[5514]
Normal-3.zip,0,Normal,768,203,130,1,[203]
Normal-1.zip,0,Normal,1713,980,71,2,"[980, 981]"
CP-26.zip,1,CP,3732,5672,53,2,"[5671, 5672]"
CP-7.zip,1,CP,1303,3625,32,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-20.zip,1,CP,2762,3293,33,1,[3293]
Normal-2.zip,0,Normal,1740,1052,59,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
Normal-20.zip,0,Normal,2267,722,100,1,[722]
NCP-5.zip,2,NCP,189,1522,139,2,"[1522, 1523]"
NCP-28.zip,2,NCP,848,2366,57,2,"[2366, 2367]"
NCP-6.zip,2,NCP,215,1575,65,2,"[1574, 1575]"
Normal-27.zip,0,Normal,3905,5438,58,2,"[5437, 5438]"
CP-4.zip,1,CP,1163,3381,239,1,[3381]
CP-18.zip,1,CP,1665,4353,25,1,[4353]
Normal-25.zip,0,Normal,3842,5354,189,1,[5354]
Normal-22.zip,0,Normal,2583,93,46,1,[93]
NCP-11.zip,2,NCP,308,1763,116,2,"[1763, 1764]"
CP-4.zip,1,CP,1180,3398,150,1,[3398]
CP-7.zip,1,CP,1316,3668,63,3,"[3667, 3668, 3669]"
CP-5.zip,1,CP,1213,3431,159,1,[3431]
Normal-10.zip,0,Normal,1947,402,89,1,[402]
CP-24.zip,1,CP,698,3060,124,1,[3060]
CP-15.zip,1,CP,1562,4243,55,2,"[4243, 4244]"
NCP-10.zip,2,NCP,280,1706,121,2,"[1706, 1707]"
NCP-18.zip,2,NCP,498,2152,139,2,"[2152, 2153]"
NCP-25.zip,2,NCP,3962,5473,58,1,[5473]
CP-18.zip,1,CP,1772,3180,75,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
Normal-13.zip,0,Normal,2029,484,94,1,[484]
NCP-16.zip,2,NCP,443,2041,50,2,"[2040, 2041]"
NCP-24.zip,2,NCP,984,2529,259,2,"[2529, 2530]"
CP-18.zip,1,CP,1773,3183,61,4,"[3182, 3183, 3184, 3185]"
CP-5.zip,1,CP,1194,3412,158,1,[3412]
NCP-14.zip,2,NCP,39,1211,58,2,"[1210, 1211]"
CP-13.zip,1,CP,15,3174,98,1,[3174]
CP-28.zip,1,CP,3775,5719,29,1,[5719]
NCP-17.zip,2,NCP,477,2110,58,2,"[2109, 2110]"
Normal-16.zip,0,Normal,2133,588,73,1,[588]
NCP-4.zip,2,NCP,150,1444,181,2,"[1444, 1445]"
CP-4.zip,1,CP,1188,3406,308,1,[3406]
NCP-8.zip,2,NCP,251,1649,55,2,"[1648, 1649]"
CP-1.zip,1,CP,1094,3312,329,1,[3312]
NCP-12.zip,2,NCP,327,1802,130,2,"[1802, 1803]"
Normal-7.zip,0,Normal,1830,285,84,1,[285]
CP-12.zip,1,CP,1481,4045,58,3,"[4044, 4045, 4046]"
NCP-19.zip,2,NCP,52,1238,57,2,"[1237, 1238]"
NCP-20.zip,2,NCP,557,2271,132,2,"[2271, 2272]"
NCP-14.zip,2,NCP,398,1946,167,2,"[1946, 1947]"
NCP-8.zip,2,NCP,260,1667,68,2,"[1666, 1667]"
Normal-2.zip,0,Normal,1754,1094,73,4,"[1093, 1094, 1095, 1096]"
Normal-2.zip,0,Normal,1736,1032,124,5,"[1032, 1033, 1034, 1035, 1036]"
NCP-11.zip,2,NCP,292,1730,58,2,"[1729, 1730]"
CP-25.zip,1,CP,725,3087,80,1,[3087]
CP-15.zip,1,CP,1558,4234,62,3,"[4234, 4235, 4236]"
Normal-17.zip,0,Normal,2161,616,99,1,[616]
NCP-23.zip,2,NCP,970,2513,62,1,[2513]
NCP-10.zip,2,NCP,277,1700,152,2,"[1700, 1701]"
NCP-14.zip,2,NCP,395,1941,71,2,"[1940, 1941]"
Normal-2.zip,0,Normal,1755,1100,71,4,"[1097, 1098, 1099, 1100]"
CP-26.zip,1,CP,3718,5648,254,2,"[5647, 5648]"
Normal-25.zip,0,Normal,3841,5353,188,1,[5353]
Normal-23.zip,0,Normal,2621,131,41,1,[131]
CP-7.zip,1,CP,1303,3623,45,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-20.zip,2,NCP,555,2267,133,2,"[2267, 2268]"
NCP-7.zip,2,NCP,244,1634,165,2,"[1634, 1635]"
Normal-6.zip,0,Normal,1821,276,102,1,[276]
NCP-17.zip,2,NCP,459,2073,56,2,"[2072, 2073]"
NCP-2.zip,2,NCP,124,1390,58,2,"[1389, 1390]"
Normal-18.zip,0,Normal,2185,640,100,1,[640]
NCP-5.zip,2,NCP,193,1530,124,2,"[1530, 1531]"
NCP-8.zip,2,NCP,253,1652,139,2,"[1652, 1653]"
NCP-23.zip,2,NCP,89,1313,58,4,"[1311, 1312, 1313, 1315]"
CP-5.zip,1,CP,1216,3434,307,1,[3434]
NCP-30.zip,2,NCP,979,2523,345,1,[2523]
NCP-23.zip,2,NCP,97,1331,41,2,"[1330, 1331]"
NCP-20.zip,2,NCP,555,2268,56,2,"[2267, 2268]"
Normal-16.zip,0,Normal,2126,581,84,1,[581]
NCP-18.zip,2,NCP,488,2133,58,2,"[2131, 2133]"
NCP-10.zip,2,NCP,28,1189,61,2,"[1188, 1189]"
NCP-15.zip,2,NCP,41,1214,151,2,"[1214, 1215]"
NCP-12.zip,2,NCP,32,1196,145,2,"[1196, 1197]"
CP-26.zip,1,CP,3722,5656,50,2,"[5656, 5657]"
CP-15.zip,1,CP,1573,4261,22,1,[4261]
NCP-27.zip,2,NCP,1028,2598,147,1,[2598]
Normal-18.zip,0,Normal,2197,652,105,1,[652]
Normal-16.zip,0,Normal,2152,607,66,1,[607]
NCP-14.zip,2,NCP,380,1913,62,2,"[1912, 1913]"
Normal-15.zip,0,Normal,2093,548,72,1,[548]
NCP-3.zip,2,NCP,1299,2740,63,1,[2740]
CP-8.zip,1,CP,1348,3740,59,3,"[3739, 3740, 3741]"
Normal-6.zip,0,Normal,1822,277,101,1,[277]
Normal-4.zip,0,Normal,800,235,116,1,[235]
CP-10.zip,1,CP,1386,3827,66,2,"[3827, 3828]"
Normal-12.zip,0,Normal,2004,459,106,1,[459]
CP-7.zip,1,CP,1303,3615,44,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-25.zip,2,NCP,3957,5470,47,1,[5470]
CP-5.zip,1,CP,1204,3422,294,1,[3422]
CP-11.zip,1,CP,1420,3905,59,2,"[3905, 3906]"
CP-17.zip,1,CP,1649,4337,23,1,[4337]
CP-28.zip,1,CP,3769,5713,18,1,[5713]
Normal-8.zip,0,Normal,1868,323,91,1,[323]
CP-1.zip,1,CP,1087,3219,400,1,[3219]
CP-26.zip,1,CP,3640,5599,295,1,[5599]
NCP-7.zip,2,NCP,248,1642,139,2,"[1642, 1643]"
NCP-2.zip,2,NCP,125,1395,55,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-25.zip,2,NCP,3941,5538,38,1,[5538]
CP-15.zip,1,CP,1558,4236,62,3,"[4234, 4235, 4236]"
NCP-16.zip,2,NCP,443,2040,117,2,"[2040, 2041]"
Normal-15.zip,0,Normal,2102,557,100,1,[557]
Normal-2.zip,0,Normal,1755,1097,73,4,"[1097, 1098, 1099, 1100]"
Normal-9.zip,0,Normal,1924,379,98,1,[379]
CP-7.zip,1,CP,1303,3616,209,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-13.zip,1,CP,1517,4136,64,2,"[4136, 4137]"
CP-1.zip,1,CP,1,3146,70,5,"[3143, 3144, 3145, 3146, 3147]"
Normal-19.zip,0,Normal,2226,681,99,1,[681]
CP-13.zip,1,CP,1517,4137,64,2,"[4136, 4137]"
NCP-23.zip,2,NCP,95,1326,165,2,"[1326, 1327]"
NCP-19.zip,2,NCP,538,2234,60,2,"[2233, 2234]"
CP-6.zip,1,CP,1253,3471,130,1,[3471]
NCP-7.zip,2,NCP,242,1629,133,2,"[1629, 1630]"
CP-8.zip,1,CP,1337,3714,60,2,"[3714, 3715]"
NCP-23.zip,2,NCP,912,2454,373,1,[2454]
Normal-23.zip,0,Normal,2622,132,38,1,[132]
Normal-8.zip,0,Normal,1871,326,73,1,[326]
NCP-5.zip,2,NCP,193,1531,52,2,"[1530, 1531]"
Normal-24.zip,0,Normal,2646,156,41,1,[156]
CP-14.zip,1,CP,1538,4185,159,3,"[4185, 4186, 4187]"
CP-23.zip,1,CP,667,3029,226,1,[3029]
CP-1.zip,1,CP,1,3147,70,5,"[3143, 3144, 3145, 3146, 3147]"
NCP-27.zip,2,NCP,1006,2566,42,2,"[2566, 2567]"
Normal-1.zip,0,Normal,1711,977,63,2,"[977, 978]"
NCP-14.zip,2,NCP,374,1899,139,2,"[1899, 1900]"
NCP-16.zip,2,NCP,457,2069,57,2,"[2068, 2069]"
CP-22.zip,1,CP,634,2996,680,1,[2996]
NCP-23.zip,2,NCP,905,2447,26,1,[2447]
Normal-2.zip,0,Normal,1759,1118,65,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-11.zip,2,NCP,290,1725,103,2,"[1725, 1726]"
NCP-21.zip,2,NCP,77,1288,53,2,"[1287, 1288]"
CP-30.zip,1,CP,4018,5567,33,1,[5567]
CP-12.zip,1,CP,1483,4052,62,3,"[4050, 4051, 4052]"
CP-24.zip,1,CP,692,3054,74,1,[3054]
NCP-6.zip,2,NCP,204,1552,139,2,"[1552, 1553]"
NCP-7.zip,2,NCP,24,1179,146,2,"[1179, 1180]"
CP-6.zip,1,CP,1251,3469,133,1,[3469]
Normal-1.zip,0,Normal,1682,857,70,6,"[847, 848, 852, 853, 857, 858]"
NCP-10.zip,2,NCP,2712,2705,42,1,[2705]
CP-2.zip,1,CP,1100,3318,201,1,[3318]
Normal-1.zip,0,Normal,1671,795,67,3,"[793, 794, 795]"
NCP-17.zip,2,NCP,461,2077,67,2,"[2076, 2077]"
CP-15.zip,1,CP,1564,4249,51,2,"[4248, 4249]"
NCP-4.zip,2,NCP,153,1450,137,2,"[1450, 1451]"
CP-4.zip,1,CP,1166,3384,202,1,[3384]
NCP-28.zip,2,NCP,851,2370,145,1,[2370]
NCP-23.zip,2,NCP,95,1327,69,2,"[1326, 1327]"
Normal-18.zip,0,Normal,2196,651,95,1,[651]
CP-27.zip,1,CP,3749,5693,20,1,[5693]
Normal-6.zip,0,Normal,1797,252,85,1,[252]
CP-14.zip,1,CP,1544,4203,122,3,"[4203, 4204, 4205]"
CP-8.zip,1,CP,1345,3731,55,2,"[3731, 3732]"
NCP-8.zip,2,NCP,2678,2649,55,1,[2649]
NCP-23.zip,2,NCP,89,1315,66,4,"[1311, 1312, 1313, 1315]"
Normal-17.zip,0,Normal,2167,622,76,1,[622]
CP-22.zip,1,CP,631,2993,130,1,[2993]
CP-16.zip,1,CP,1618,4306,26,1,[4306]
NCP-17.zip,2,NCP,471,2097,139,2,"[2097, 2098]"
NCP-15.zip,2,NCP,416,1986,58,2,"[1984, 1986]"
CP-10.zip,1,CP,1389,3833,121,3,"[3833, 3834, 3835]"
CP-24.zip,1,CP,696,3058,74,1,[3058]
NCP-26.zip,2,NCP,3996,5494,37,1,[5494]
CP-15.zip,1,CP,1565,4251,66,2,"[4250, 4251]"
NCP-7.zip,2,NCP,248,1643,58,2,"[1642, 1643]"
NCP-30.zip,2,NCP,932,2474,20,1,[2474]
CP-8.zip,1,CP,1332,3703,41,2,"[3703, 3704]"
Normal-2.zip,0,Normal,1754,1093,73,4,"[1093, 1094, 1095, 1096]"
NCP-3.zip,2,NCP,131,1408,50,2,"[1407, 1408]"
NCP-13.zip,2,NCP,37,1206,147,2,"[1206, 1207]"
NCP-7.zip,2,NCP,242,1630,56,2,"[1629, 1630]"
CP-26.zip,1,CP,3643,5603,257,2,"[5602, 5603]"
NCP-6.zip,2,NCP,211,1567,58,2,"[1566, 1567]"
Normal-24.zip,0,Normal,2639,149,28,1,[149]
Normal-13.zip,0,Normal,2037,492,82,1,[492]
CP-16.zip,1,CP,1610,4298,22,1,[4298]
NCP-15.zip,2,NCP,415,1982,149,2,"[1982, 1983]"
NCP-2.zip,2,NCP,125,1394,55,5,"[1391, 1392, 1393, 1394, 1395]"
Normal-23.zip,0,Normal,2616,126,39,1,[126]
CP-26.zip,1,CP,3635,5594,291,1,[5594]
Normal-18.zip,0,Normal,2211,666,85,1,[666]
NCP-17.zip,2,NCP,481,2117,163,2,"[2117, 2118]"
NCP-13.zip,2,NCP,37,1207,62,2,"[1206, 1207]"
Normal-2.zip,0,Normal,1749,1070,61,4,"[1069, 1070, 1071, 1072]"
NCP-29.zip,2,NCP,927,2469,20,1,[2469]
CP-6.zip,1,CP,1226,3444,190,1,[3444]
NCP-14.zip,2,NCP,394,1938,147,2,"[1938, 1939]"
CP-19.zip,1,CP,1791,3212,71,4,"[3210, 3211, 3212, 3213]"
CP-8.zip,1,CP,1334,3708,56,2,"[3707, 3708]"
NCP-12.zip,2,NCP,324,1796,120,2,"[1796, 1797]"
CP-30.zip,1,CP,3929,5626,71,2,"[5626, 5627]"
Normal-7.zip,0,Normal,1832,287,91,1,[287]
Normal-1.zip,0,Normal,1713,981,71,2,"[980, 981]"
NCP-2.zip,2,NCP,111,1363,133,2,"[1363, 1364]"
Normal-3.zip,0,Normal,1764,1144,66,4,"[1143, 1144, 1145, 1146]"
CP-15.zip,1,CP,1560,4239,63,2,"[4239, 4240]"
NCP-22.zip,2,NCP,84,1302,54,2,"[1301, 1302]"
Normal-2.zip,0,Normal,1744,1059,71,2,"[1058, 1059]"
CP-21.zip,1,CP,590,2952,86,1,[2952]
Normal-9.zip,0,Normal,1901,356,83,1,[356]
NCP-17.zip,2,NCP,461,2076,160,2,"[2076, 2077]"
CP-24.zip,1,CP,683,3045,138,1,[3045]
Normal-11.zip,0,Normal,1983,438,105,1,[438]
NCP-14.zip,2,NCP,39,1210,139,2,"[1210, 1211]"
NCP-18.zip,2,NCP,494,2144,156,2,"[2144, 2145]"
NCP-14.zip,2,NCP,388,1927,68,2,"[1926, 1927]"
NCP-28.zip,2,NCP,853,2373,664,1,[2373]
Normal-22.zip,0,Normal,2588,98,33,1,[98]
NCP-17.zip,2,NCP,46,1225,124,2,"[1225, 1226]"
NCP-2.zip,2,NCP,126,1396,152,2,"[1396, 1398]"
NCP-15.zip,2,NCP,418,1990,58,2,"[1989, 1990]"
Normal-3.zip,0,Normal,765,200,136,1,[200]
CP-9.zip,1,CP,1370,3792,62,2,"[3792, 3793]"
CP-13.zip,1,CP,1490,4071,166,3,"[4071, 4072, 4073]"
CP-5.zip,1,CP,1212,3430,187,1,[3430]
NCP-29.zip,2,NCP,894,2434,16,1,[2434]
Normal-2.zip,0,Normal,1751,1083,67,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
CP-19.zip,1,CP,1788,3199,58,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-12.zip,1,CP,1466,4011,52,2,"[4011, 4012]"
CP-1.zip,1,CP,1088,3223,50,4,"[3220, 3221, 3222, 3223]"
NCP-25.zip,2,NCP,3947,5503,41,1,[5503]
CP-30.zip,1,CP,3931,5632,143,4,"[5630, 5631, 5632, 5633]"
NCP-2.zip,2,NCP,124,1389,139,2,"[1389, 1390]"
CP-7.zip,1,CP,1307,3645,53,4,"[3645, 3646, 3647, 3648]"
NCP-27.zip,2,NCP,823,2334,183,1,[2334]
Normal-1.zip,0,Normal,1728,1016,72,4,"[1013, 1014, 1015, 1016]"
NCP-22.zip,2,NCP,860,2383,183,2,"[2382, 2383]"
NCP-20.zip,2,NCP,559,2276,54,2,"[2275, 2276]"
Normal-2.zip,0,Normal,1754,1096,69,4,"[1093, 1094, 1095, 1096]"
CP-12.zip,1,CP,1473,4026,51,3,"[4026, 4027, 4028]"
Normal-3.zip,0,Normal,1764,1146,62,4,"[1143, 1144, 1145, 1146]"
CP-2.zip,1,CP,1103,3321,180,1,[3321]
CP-4.zip,1,CP,1181,3399,238,1,[3399]
CP-19.zip,1,CP,2436,2904,138,1,[2904]
CP-28.zip,1,CP,3795,5739,23,1,[5739]
CP-29.zip,1,CP,3805,5749,20,1,[5749]
NCP-3.zip,2,NCP,1300,2741,60,1,[2741]
NCP-23.zip,2,NCP,898,2439,48,1,[2439]
Normal-23.zip,0,Normal,2612,122,31,1,[122]
NCP-7.zip,2,NCP,24,1180,61,2,"[1179, 1180]"
Normal-6.zip,0,Normal,1807,262,95,1,[262]
NCP-30.zip,2,NCP,996,2551,189,2,"[2551, 2552]"
Normal-9.zip,0,Normal,1893,348,82,1,[348]
NCP-11.zip,2,NCP,290,1726,44,2,"[1725, 1726]"
NCP-21.zip,2,NCP,80,1293,129,2,"[1293, 1294]"
Normal-24.zip,0,Normal,2655,165,37,1,[165]
NCP-30.zip,2,NCP,996,2552,218,2,"[2551, 2552]"
CP-18.zip,1,CP,1653,4341,29,1,[4341]
NCP-5.zip,2,NCP,187,1518,136,2,"[1518, 1519]"
NCP-26.zip,2,NCP,3993,5517,39,1,[5517]
NCP-10.zip,2,NCP,273,1692,128,2,"[1692, 1693]"
Normal-2.zip,0,Normal,1751,1082,62,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-5.zip,2,NCP,179,1502,122,2,"[1503, 1502]"
Normal-26.zip,0,Normal,3887,5400,67,3,"[5400, 5401, 5404]"
NCP-7.zip,2,NCP,234,1613,139,2,"[1613, 1614]"
Normal-1.zip,0,Normal,1725,1006,60,1,[1006]
NCP-15.zip,2,NCP,419,1992,55,2,"[1991, 1992]"
CP-14.zip,1,CP,1523,4151,65,2,"[4150, 4151]"
NCP-23.zip,2,NCP,938,2480,195,2,"[2480, 2481]"
NCP-13.zip,2,NCP,342,1835,149,2,"[1835, 1836]"
CP-24.zip,1,CP,680,3042,86,1,[3042]
NCP-14.zip,2,NCP,394,1939,62,2,"[1938, 1939]"
NCP-11.zip,2,NCP,288,1722,49,2,"[1721, 1722]"
CP-14.zip,1,CP,1527,4162,58,3,"[4160, 4161, 4162]"
CP-6.zip,1,CP,1241,3459,132,1,[3459]
CP-10.zip,1,CP,1408,3878,198,3,"[3878, 3879, 3880]"
NCP-14.zip,2,NCP,397,1945,66,2,"[1944, 1945]"
CP-1.zip,1,CP,1,3145,248,5,"[3143, 3144, 3145, 3146, 3147]"
Normal-15.zip,0,Normal,2111,566,95,1,[566]
Normal-12.zip,0,Normal,2007,462,85,1,[462]
NCP-6.zip,2,NCP,222,1589,52,2,"[1588, 1589]"
Normal-25.zip,0,Normal,3856,5368,220,1,[5368]
CP-6.zip,1,CP,1245,3463,306,1,[3463]
CP-9.zip,1,CP,1380,3814,56,1,[3814]
CP-11.zip,1,CP,1442,3955,58,3,"[3954, 3955, 3956]"
Normal-26.zip,0,Normal,3889,5407,68,2,"[5407, 5408]"
Normal-4.zip,0,Normal,773,208,321,1,[208]
CP-23.zip,1,CP,671,3033,448,1,[3033]
CP-23.zip,1,CP,674,3036,126,1,[3036]
CP-19.zip,1,CP,1788,3200,54,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
NCP-12.zip,2,NCP,328,1804,112,2,"[1804, 1805]"
Normal-22.zip,0,Normal,2581,91,44,1,[91]
CP-7.zip,1,CP,1316,3669,62,3,"[3667, 3668, 3669]"
NCP-1.zip,2,NCP,1046,2618,70,1,[2618]
NCP-16.zip,2,NCP,456,2067,57,2,"[2066, 2067]"
CP-13.zip,1,CP,1508,4115,57,3,"[4115, 4116, 4117]"
Normal-1.zip,0,Normal,1730,1020,63,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-17.zip,2,NCP,468,2092,65,2,"[2091, 2092]"
NCP-7.zip,2,NCP,2488,2688,40,1,[2688]
CP-10.zip,1,CP,1396,3852,58,3,"[3851, 3852, 3853]"
NCP-16.zip,2,NCP,447,2049,58,2,"[2048, 2049]"
Normal-8.zip,0,Normal,1864,319,88,1,[319]
CP-15.zip,1,CP,1560,4240,63,2,"[4239, 4240]"
CP-12.zip,1,CP,1484,4055,46,3,"[4053, 4054, 4055]"
Normal-1.zip,0,Normal,1682,853,81,6,"[847, 848, 852, 853, 857, 858]"
Normal-22.zip,0,Normal,2580,90,37,1,[90]
CP-2.zip,1,CP,1128,3346,196,1,[3346]
NCP-7.zip,2,NCP,240,1625,158,2,"[1625, 1626]"
Normal-15.zip,0,Normal,2086,541,91,1,[541]
Normal-7.zip,0,Normal,1837,292,94,1,[292]
CP-1.zip,1,CP,1069,3111,77,4,"[3108, 3109, 3110, 3111]"
CP-14.zip,1,CP,1549,4216,61,2,"[4215, 4216]"
Normal-11.zip,0,Normal,1970,425,88,1,[425]
NCP-13.zip,2,NCP,342,1836,61,2,"[1835, 1836]"
CP-25.zip,1,CP,728,3090,86,1,[3090]
NCP-21.zip,2,NCP,68,1268,115,2,"[1268, 1269]"
CP-8.zip,1,CP,1342,3725,58,3,"[3723, 3724, 3725]"
CP-12.zip,1,CP,1481,4046,58,3,"[4044, 4045, 4046]"
CP-5.zip,1,CP,1210,3428,156,1,[3428]
NCP-3.zip,2,NCP,136,1417,53,2,"[1416, 1417]"
NCP-2.zip,2,NCP,125,1393,54,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-23.zip,2,NCP,97,1330,97,2,"[1330, 1331]"
NCP-1.zip,2,NCP,1021,2588,209,4,"[2587, 2588, 2589, 2590]"
NCP-12.zip,2,NCP,317,1782,50,2,"[1781, 1782]"
NCP-14.zip,2,NCP,388,1926,162,2,"[1926, 1927]"
CP-26.zip,1,CP,3641,5600,300,1,[5600]
Normal-3.zip,0,Normal,760,195,117,1,[195]
NCP-12.zip,2,NCP,325,1798,117,2,"[1798, 1799]"
Normal-1.zip,0,Normal,1671,793,72,3,"[793, 794, 795]"
Normal-1.zip,0,Normal,1683,869,64,6,"[861, 862, 864, 865, 868, 869]"
Normal-5.zip,0,Normal,807,242,132,1,[242]
CP-19.zip,1,CP,1791,3211,55,4,"[3210, 3211, 3212, 3213]"
Normal-4.zip,0,Normal,792,227,108,1,[227]
CP-15.zip,1,CP,1564,4248,51,2,"[4248, 4249]"
NCP-12.zip,2,NCP,324,1797,51,2,"[1796, 1797]"
CP-13.zip,1,CP,1514,4130,61,2,"[4129, 4130]"
CP-30.zip,1,CP,4013,5562,29,1,[5562]
CP-7.zip,1,CP,13,3173,255,4,"[3170, 3171, 3172, 3173]"
CP-5.zip,1,CP,1214,3432,282,1,[3432]
Normal-8.zip,0,Normal,1878,333,88,1,[333]
NCP-7.zip,2,NCP,246,1638,139,2,"[1638, 1639]"
Normal-21.zip,0,Normal,2297,752,83,1,[752]
================================================
FILE: Finetune/CC-CCII/csv/CC_CCII_fold1_train.csv
================================================
zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids
CP-6.zip,1,CP,1229,3447,144,1,[3447]
CP-26.zip,1,CP,3718,5647,51,2,"[5647, 5648]"
CP-3.zip,1,CP,1148,3366,158,1,[3366]
CP-5.zip,1,CP,1200,3418,309,1,[3418]
CP-1.zip,1,CP,1088,3221,54,4,"[3220, 3221, 3222, 3223]"
CP-21.zip,1,CP,585,2947,94,1,[2947]
CP-18.zip,1,CP,1772,3178,72,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-1.zip,1,CP,1078,3123,68,1,[3123]
CP-12.zip,1,CP,1473,4028,51,3,"[4026, 4027, 4028]"
CP-15.zip,1,CP,1559,4237,53,2,"[4237, 4238]"
CP-7.zip,1,CP,1259,3477,162,1,[3477]
CP-14.zip,1,CP,1541,4194,142,3,"[4194, 4195, 4196]"
CP-18.zip,1,CP,1658,4346,29,1,[4346]
CP-9.zip,1,CP,1373,3801,55,2,"[3800, 3801]"
CP-12.zip,1,CP,1456,3990,52,3,"[3988, 3989, 3990]"
CP-9.zip,1,CP,1367,3787,58,3,"[3785, 3786, 3787]"
CP-1.zip,1,CP,1097,3315,119,1,[3315]
CP-11.zip,1,CP,1438,3944,46,2,"[3944, 3945]"
Normal-2.zip,0,Normal,1759,1115,64,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-14.zip,1,CP,1523,4150,65,2,"[4150, 4151]"
CP-19.zip,1,CP,2447,2923,83,2,"[2923, 2924]"
CP-19.zip,1,CP,1788,3203,57,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-5.zip,1,CP,1220,3438,200,1,[3438]
CP-16.zip,1,CP,1593,4281,22,1,[4281]
Normal-18.zip,0,Normal,2200,655,94,1,[655]
CP-28.zip,1,CP,3784,5728,29,1,[5728]
CP-2.zip,1,CP,1109,3327,210,1,[3327]
CP-19.zip,1,CP,2444,2918,124,2,"[2918, 2919]"
Normal-2.zip,0,Normal,1760,1122,137,4,"[1121, 1122, 1123, 1124]"
CP-12.zip,1,CP,1476,4033,106,2,"[4033, 4034]"
CP-14.zip,1,CP,1538,4186,66,3,"[4185, 4186, 4187]"
Normal-13.zip,0,Normal,2046,501,79,1,[501]
CP-15.zip,1,CP,1565,4250,66,2,"[4250, 4251]"
CP-10.zip,1,CP,1407,3876,58,2,"[3876, 3877]"
Normal-27.zip,0,Normal,3905,5437,288,2,"[5437, 5438]"
NCP-13.zip,2,NCP,36,1204,141,2,"[1204, 1205]"
NCP-30.zip,2,NCP,941,2484,169,1,[2484]
Normal-2.zip,0,Normal,1758,1109,291,2,"[1109, 1110]"
CP-8.zip,1,CP,1342,3723,139,3,"[3723, 3724, 3725]"
CP-3.zip,1,CP,1132,3350,180,1,[3350]
CP-18.zip,1,CP,1773,3184,67,4,"[3182, 3183, 3184, 3185]"
NCP-17.zip,2,NCP,464,2083,60,2,"[2082, 2083]"
NCP-16.zip,2,NCP,447,2048,139,2,"[2048, 2049]"
NCP-3.zip,2,NCP,136,1416,126,2,"[1416, 1417]"
NCP-18.zip,2,NCP,501,2158,146,2,"[2158, 2159]"
CP-19.zip,1,CP,2439,2909,409,1,[2909]
NCP-19.zip,2,NCP,538,2233,142,2,"[2233, 2234]"
Normal-27.zip,0,Normal,3907,5440,63,2,"[5440, 5441]"
CP-18.zip,1,CP,1773,3182,61,4,"[3182, 3183, 3184, 3185]"
CP-8.zip,1,CP,1320,3677,62,2,"[3676, 3677]"
CP-9.zip,1,CP,1366,3782,138,3,"[3782, 3783, 3784]"
CP-7.zip,1,CP,1309,3651,49,2,"[3651, 3652]"
NCP-18.zip,2,NCP,492,2140,139,2,"[2140, 2141]"
NCP-21.zip,2,NCP,69,1271,48,2,"[1270, 1271]"
CP-13.zip,1,CP,1515,4131,137,3,"[4131, 4132, 4133]"
Normal-11.zip,0,Normal,1980,435,83,1,[435]
Normal-14.zip,0,Normal,2073,528,87,1,[528]
CP-3.zip,1,CP,1149,3367,157,1,[3367]
NCP-14.zip,2,NCP,376,1905,60,2,"[1904, 1905]"
NCP-8.zip,2,NCP,253,1653,58,2,"[1652, 1653]"
NCP-27.zip,2,NCP,1061,2638,75,1,[2638]
Normal-9.zip,0,Normal,1921,376,80,1,[376]
NCP-16.zip,2,NCP,453,2061,51,2,"[2060, 2061]"
NCP-10.zip,2,NCP,275,1697,64,2,"[1696, 1697]"
CP-24.zip,1,CP,708,3070,80,1,[3070]
NCP-20.zip,2,NCP,560,2277,124,2,"[2277, 2279]"
NCP-6.zip,2,NCP,207,1558,109,2,"[1558, 1559]"
NCP-2.zip,2,NCP,114,1370,53,2,"[1369, 1370]"
CP-10.zip,1,CP,1407,3877,58,2,"[3876, 3877]"
Normal-1.zip,0,Normal,1682,858,70,6,"[847, 848, 852, 853, 857, 858]"
CP-14.zip,1,CP,1548,4214,51,2,"[4213, 4214]"
Normal-2.zip,0,Normal,1760,1124,74,4,"[1121, 1122, 1123, 1124]"
NCP-14.zip,2,NCP,374,1900,58,2,"[1899, 1900]"
NCP-7.zip,2,NCP,2486,2645,50,1,[2645]
NCP-19.zip,2,NCP,542,2242,55,2,"[2241, 2242]"
Normal-25.zip,0,Normal,3836,5348,202,1,[5348]
Normal-11.zip,0,Normal,1961,416,91,1,[416]
NCP-27.zip,2,NCP,819,2329,33,1,[2329]
NCP-5.zip,2,NCP,184,1512,112,2,"[1512, 1513]"
NCP-15.zip,2,NCP,416,1984,139,2,"[1984, 1986]"
CP-14.zip,1,CP,1538,4187,65,3,"[4185, 4186, 4187]"
CP-8.zip,1,CP,1351,3746,56,1,[3746]
NCP-10.zip,2,NCP,281,1709,51,2,"[1708, 1709]"
CP-10.zip,1,CP,1415,3895,65,3,"[3894, 3895, 3896]"
Normal-1.zip,0,Normal,1682,848,67,6,"[847, 848, 852, 853, 857, 858]"
NCP-17.zip,2,NCP,485,2126,64,2,"[2125, 2126]"
NCP-18.zip,2,NCP,501,2159,61,2,"[2158, 2159]"
Normal-8.zip,0,Normal,1863,318,82,1,[318]
CP-18.zip,1,CP,1772,3176,81,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-26.zip,1,CP,3652,5551,53,2,"[5551, 5552]"
Normal-5.zip,0,Normal,808,243,134,1,[243]
CP-28.zip,1,CP,3771,5715,23,1,[5715]
CP-26.zip,1,CP,3637,5596,35,1,[5596]
CP-12.zip,1,CP,1455,3987,58,3,"[3985, 3986, 3987]"
CP-8.zip,1,CP,1336,3712,60,2,"[3712, 3713]"
CP-30.zip,1,CP,4015,5564,226,1,[5564]
Normal-8.zip,0,Normal,1883,338,91,1,[338]
Normal-3.zip,0,Normal,1764,1145,62,4,"[1143, 1144, 1145, 1146]"
NCP-15.zip,2,NCP,42,1218,61,2,"[1216, 1218]"
NCP-7.zip,2,NCP,245,1636,149,2,"[1636, 1637]"
Normal-14.zip,0,Normal,2066,521,74,1,[521]
Normal-20.zip,0,Normal,2275,730,85,1,[730]
NCP-8.zip,2,NCP,268,1682,126,2,"[1682, 1683]"
CP-7.zip,1,CP,1307,3647,49,4,"[3645, 3646, 3647, 3648]"
Normal-15.zip,0,Normal,2106,561,93,1,[561]
CP-20.zip,1,CP,2772,3303,261,1,[3303]
NCP-25.zip,2,NCP,3970,5479,48,1,[5479]
CP-28.zip,1,CP,3772,5716,23,1,[5716]
NCP-5.zip,2,NCP,175,1494,131,2,"[1494, 1495]"
NCP-18.zip,2,NCP,507,2171,58,2,"[2170, 2171]"
NCP-19.zip,2,NCP,537,2231,143,2,"[2231, 2232]"
Normal-1.zip,0,Normal,1728,1014,66,4,"[1013, 1014, 1015, 1016]"
Normal-23.zip,0,Normal,2608,118,25,1,[118]
NCP-23.zip,2,NCP,90,1317,43,2,"[1316, 1317]"
NCP-2.zip,2,NCP,123,1388,62,2,"[1387, 1388]"
NCP-18.zip,2,NCP,507,2170,138,2,"[2170, 2171]"
NCP-14.zip,2,NCP,395,1940,171,2,"[1940, 1941]"
NCP-23.zip,2,NCP,946,2489,26,1,[2489]
CP-7.zip,1,CP,1308,3649,43,2,"[3649, 3650]"
NCP-17.zip,2,NCP,462,2078,161,2,"[2078, 2079]"
Normal-16.zip,0,Normal,2145,600,86,1,[600]
NCP-20.zip,2,NCP,560,2279,51,2,"[2277, 2279]"
CP-30.zip,1,CP,3931,5630,82,4,"[5630, 5631, 5632, 5633]"
CP-13.zip,1,CP,1501,4101,55,2,"[4100, 4101]"
CP-1.zip,1,CP,1,3144,248,5,"[3143, 3144, 3145, 3146, 3147]"
CP-25.zip,1,CP,713,3075,120,1,[3075]
CP-15.zip,1,CP,1562,4244,55,2,"[4243, 4244]"
CP-26.zip,1,CP,3643,5602,298,2,"[5602, 5603]"
CP-27.zip,1,CP,3748,5692,17,1,[5692]
CP-14.zip,1,CP,1524,4152,229,3,"[4152, 4153, 4154]"
Normal-6.zip,0,Normal,1800,255,92,1,[255]
Normal-1.zip,0,Normal,1711,978,63,2,"[977, 978]"
Normal-17.zip,0,Normal,2157,612,78,1,[612]
CP-8.zip,1,CP,1334,3707,133,2,"[3707, 3708]"
NCP-19.zip,2,NCP,545,2247,135,2,"[2247, 2248]"
CP-28.zip,1,CP,3790,5734,23,1,[5734]
NCP-30.zip,2,NCP,993,2546,203,1,[2546]
NCP-9.zip,2,NCP,2689,2656,47,1,[2656]
Normal-27.zip,0,Normal,3907,5441,66,2,"[5440, 5441]"
CP-26.zip,1,CP,3652,5552,52,2,"[5551, 5552]"
NCP-11.zip,2,NCP,287,1719,142,2,"[1719, 1720]"
NCP-2.zip,2,NCP,114,1369,125,2,"[1369, 1370]"
NCP-21.zip,2,NCP,581,2320,58,2,"[2319, 2320]"
Normal-26.zip,0,Normal,3887,5404,78,3,"[5400, 5401, 5404]"
NCP-12.zip,2,NCP,325,1799,50,2,"[1798, 1799]"
NCP-27.zip,2,NCP,1060,2637,81,1,[2637]
CP-13.zip,1,CP,1516,4135,62,2,"[4134, 4135]"
CP-15.zip,1,CP,1580,4268,21,1,[4268]
NCP-15.zip,2,NCP,428,2009,125,2,"[2009, 2010]"
NCP-19.zip,2,NCP,52,1237,135,2,"[1237, 1238]"
NCP-9.zip,2,NCP,2691,2658,44,1,[2658]
NCP-12.zip,2,NCP,34,1200,156,2,"[1200, 1201]"
NCP-19.zip,2,NCP,539,2235,131,2,"[2235, 2236]"
Normal-1.zip,0,Normal,1728,1015,72,4,"[1013, 1014, 1015, 1016]"
NCP-6.zip,2,NCP,222,1588,122,2,"[1588, 1589]"
NCP-10.zip,2,NCP,273,1693,54,2,"[1692, 1693]"
CP-29.zip,1,CP,3822,5766,20,1,[5766]
CP-10.zip,1,CP,1401,3864,51,3,"[3862, 3863, 3864]"
Normal-13.zip,0,Normal,2030,485,66,1,[485]
NCP-4.zip,2,NCP,164,1473,63,2,"[1472, 1473]"
CP-21.zip,1,CP,3,3504,35,1,[3504]
CP-9.zip,1,CP,1368,3788,69,2,"[3788, 3789]"
Normal-1.zip,0,Normal,1704,963,69,4,"[961, 962, 963, 964]"
CP-12.zip,1,CP,1466,4012,52,2,"[4011, 4012]"
Normal-11.zip,0,Normal,1971,426,100,1,[426]
NCP-16.zip,2,NCP,450,2055,34,2,"[2054, 2055]"
NCP-30.zip,2,NCP,962,2505,38,1,[2505]
NCP-8.zip,2,NCP,2675,2648,44,1,[2648]
NCP-25.zip,2,NCP,3955,5468,46,1,[5468]
NCP-18.zip,2,NCP,488,2131,139,2,"[2131, 2133]"
CP-12.zip,1,CP,1484,4053,181,3,"[4053, 4054, 4055]"
CP-9.zip,1,CP,1368,3789,69,2,"[3788, 3789]"
NCP-10.zip,2,NCP,28,1188,145,2,"[1188, 1189]"
CP-30.zip,1,CP,3931,5631,82,4,"[5630, 5631, 5632, 5633]"
NCP-10.zip,2,NCP,277,1701,64,2,"[1700, 1701]"
NCP-4.zip,2,NCP,148,1441,63,2,"[1440, 1441]"
CP-12.zip,1,CP,1481,4044,139,3,"[4044, 4045, 4046]"
Normal-21.zip,0,Normal,2288,743,96,1,[743]
CP-30.zip,1,CP,4017,5566,41,1,[5566]
CP-13.zip,1,CP,1499,4098,53,2,"[4097, 4098]"
CP-13.zip,1,CP,1516,4134,62,2,"[4134, 4135]"
Normal-13.zip,0,Normal,2049,504,88,1,[504]
CP-18.zip,1,CP,1772,3179,72,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
NCP-20.zip,2,NCP,57,1248,56,2,"[1247, 1248]"
Normal-1.zip,0,Normal,1704,961,71,4,"[961, 962, 963, 964]"
CP-9.zip,1,CP,1366,3783,57,3,"[3782, 3783, 3784]"
CP-32.zip,1,CP,2464,3228,66,1,[3228]
CP-15.zip,1,CP,1555,4228,62,2,"[4228, 4229]"
Normal-3.zip,0,Normal,758,193,122,1,[193]
NCP-12.zip,2,NCP,329,1806,157,2,"[1806, 1807]"
CP-7.zip,1,CP,1307,3646,259,4,"[3645, 3646, 3647, 3648]"
CP-26.zip,1,CP,3722,5657,205,2,"[5656, 5657]"
NCP-14.zip,2,NCP,382,1916,139,2,"[1916, 1917]"
CP-27.zip,1,CP,3752,5696,20,1,[5696]
Normal-16.zip,0,Normal,2129,584,75,1,[584]
NCP-13.zip,2,NCP,367,1885,158,2,"[1885, 1886]"
NCP-6.zip,2,NCP,204,1553,58,2,"[1552, 1553]"
CP-30.zip,1,CP,3918,5542,71,1,[5542]
Normal-11.zip,0,Normal,1979,434,87,1,[434]
Normal-2.zip,0,Normal,1741,1053,61,2,"[1053, 1054]"
Normal-10.zip,0,Normal,1945,400,87,1,[400]
Normal-26.zip,0,Normal,3882,5394,27,1,[5394]
CP-20.zip,1,CP,2456,2940,126,1,[2940]
NCP-5.zip,2,NCP,184,1513,48,2,"[1512, 1513]"
NCP-9.zip,2,NCP,2693,2659,49,1,[2659]
CP-8.zip,1,CP,1348,3739,197,3,"[3739, 3740, 3741]"
Normal-18.zip,0,Normal,2214,669,102,1,[669]
CP-10.zip,1,CP,1415,3896,65,3,"[3894, 3895, 3896]"
NCP-3.zip,2,NCP,1290,2731,66,1,[2731]
Normal-2.zip,0,Normal,1759,1111,62,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-15.zip,2,NCP,401,1951,139,2,"[1951, 1952]"
CP-7.zip,1,CP,1309,3652,49,2,"[3651, 3652]"
Normal-4.zip,0,Normal,787,222,320,1,[222]
NCP-20.zip,2,NCP,550,2258,60,2,"[2257, 2258]"
NCP-5.zip,2,NCP,195,1534,143,2,"[1534, 1535]"
NCP-13.zip,2,NCP,367,1886,66,2,"[1885, 1886]"
NCP-19.zip,2,NCP,530,2218,132,1,[2218]
Normal-6.zip,0,Normal,1811,266,95,1,[266]
NCP-30.zip,2,NCP,963,2506,21,1,[2506]
Normal-2.zip,0,Normal,1759,1112,62,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-13.zip,2,NCP,369,1890,58,2,"[1889, 1890]"
NCP-16.zip,2,NCP,457,2068,134,2,"[2068, 2069]"
NCP-26.zip,2,NCP,3981,5488,45,1,[5488]
NCP-22.zip,2,NCP,816,2325,50,1,[2325]
Normal-1.zip,0,Normal,1730,1019,63,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-15.zip,2,NCP,419,1991,130,2,"[1991, 1992]"
CP-30.zip,1,CP,4016,5565,37,1,[5565]
CP-24.zip,1,CP,694,3056,135,1,[3056]
NCP-17.zip,2,NCP,470,2095,154,2,"[2095, 2096]"
Normal-4.zip,0,Normal,781,216,118,1,[216]
CP-13.zip,1,CP,1497,4093,68,3,"[4092, 4093, 4094]"
NCP-26.zip,2,NCP,3991,5515,43,1,[5515]
CP-8.zip,1,CP,1331,3701,62,2,"[3701, 3702]"
Normal-9.zip,0,Normal,1910,365,91,1,[365]
NCP-27.zip,2,NCP,820,2330,34,1,[2330]
CP-7.zip,1,CP,13,3171,65,4,"[3170, 3171, 3172, 3173]"
CP-20.zip,1,CP,2764,3295,39,1,[3295]
Normal-1.zip,0,Normal,1714,984,71,3,"[982, 983, 984]"
CP-13.zip,1,CP,1501,4100,55,2,"[4100, 4101]"
Normal-15.zip,0,Normal,2117,572,87,1,[572]
CP-30.zip,1,CP,3929,5627,70,2,"[5626, 5627]"
NCP-4.zip,2,NCP,158,1461,52,2,"[1460, 1461]"
CP-10.zip,1,CP,1389,3834,52,3,"[3833, 3834, 3835]"
CP-13.zip,1,CP,1497,4094,68,3,"[4092, 4093, 4094]"
CP-10.zip,1,CP,1415,3894,155,3,"[3894, 3895, 3896]"
CP-30.zip,1,CP,4014,5563,35,1,[5563]
NCP-17.zip,2,NCP,462,2079,67,2,"[2078, 2079]"
CP-29.zip,1,CP,3803,5747,23,1,[5747]
CP-1.zip,1,CP,1,3143,300,5,"[3143, 3144, 3145, 3146, 3147]"
NCP-11.zip,2,NCP,305,1756,157,2,"[1756, 1758]"
Normal-1.zip,0,Normal,1668,781,63,4,"[778, 779, 780, 781]"
CP-4.zip,1,CP,1174,3392,175,1,[3392]
Normal-14.zip,0,Normal,2060,515,77,1,[515]
Normal-22.zip,0,Normal,2602,112,32,1,[112]
CP-14.zip,1,CP,1541,4196,58,3,"[4194, 4195, 4196]"
Normal-12.zip,0,Normal,2019,474,87,1,[474]
CP-25.zip,1,CP,733,3095,84,1,[3095]
CP-13.zip,1,CP,1499,4097,53,2,"[4097, 4098]"
CP-19.zip,1,CP,1788,3201,55,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
Normal-1.zip,0,Normal,1704,964,69,4,"[961, 962, 963, 964]"
NCP-3.zip,2,NCP,1289,2730,62,1,[2730]
NCP-20.zip,2,NCP,567,2292,148,2,"[2292, 2293]"
Normal-13.zip,0,Normal,2027,482,89,1,[482]
Normal-2.zip,0,Normal,1759,1114,59,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
Normal-27.zip,0,Normal,3897,5424,75,4,"[5423, 5424, 5426, 5427]"
Normal-22.zip,0,Normal,2317,772,77,1,[772]
Normal-2.zip,0,Normal,1758,1110,59,2,"[1109, 1110]"
NCP-2.zip,2,NCP,121,1384,43,2,"[1383, 1384]"
NCP-13.zip,2,NCP,356,1864,53,2,"[1863, 1864]"
Normal-2.zip,0,Normal,1760,1121,85,4,"[1121, 1122, 1123, 1124]"
CP-29.zip,1,CP,3825,5769,25,1,[5769]
NCP-17.zip,2,NCP,46,1226,52,2,"[1225, 1226]"
NCP-19.zip,2,NCP,53,1240,60,2,"[1239, 1240]"
NCP-12.zip,2,NCP,314,1776,58,2,"[1775, 1776]"
Normal-21.zip,0,Normal,2290,745,88,1,[745]
Normal-2.zip,0,Normal,1760,1123,74,4,"[1121, 1122, 1123, 1124]"
Normal-24.zip,0,Normal,2666,176,35,1,[176]
CP-8.zip,1,CP,1346,3735,53,3,"[3733, 3734, 3735]"
Normal-17.zip,0,Normal,2164,619,84,1,[619]
NCP-8.zip,2,NCP,2672,2647,47,1,[2647]
NCP-2.zip,2,NCP,1277,2718,57,1,[2718]
CP-9.zip,1,CP,1370,3793,62,2,"[3792, 3793]"
Normal-10.zip,0,Normal,1928,383,87,1,[383]
CP-21.zip,1,CP,598,2960,646,1,[2960]
CP-20.zip,1,CP,2755,3286,34,1,[3286]
Normal-16.zip,0,Normal,2141,596,100,1,[596]
CP-14.zip,1,CP,1544,4205,50,3,"[4203, 4204, 4205]"
NCP-9.zip,2,NCP,270,1687,62,2,"[1686, 1687]"
CP-5.zip,1,CP,1222,3440,157,1,[3440]
CP-19.zip,1,CP,1791,3210,100,4,"[3210, 3211, 3212, 3213]"
NCP-16.zip,2,NCP,450,2054,78,2,"[2054, 2055]"
NCP-20.zip,2,NCP,557,2272,56,2,"[2271, 2272]"
NCP-3.zip,2,NCP,1284,2725,50,1,[2725]
CP-5.zip,1,CP,1205,3423,146,1,[3423]
NCP-1.zip,2,NCP,1017,2583,452,1,[2583]
Normal-2.zip,0,Normal,1736,1033,25,5,"[1032, 1033, 1034, 1035, 1036]"
NCP-1.zip,2,NCP,1021,2589,183,4,"[2587, 2588, 2589, 2590]"
NCP-7.zip,2,NCP,232,1608,146,2,"[1608, 1609]"
NCP-23.zip,2,NCP,950,2493,34,1,[2493]
CP-6.zip,1,CP,1246,3464,175,1,[3464]
Normal-1.zip,0,Normal,1671,794,67,3,"[793, 794, 795]"
CP-1.zip,1,CP,1095,3313,161,1,[3313]
CP-25.zip,1,CP,714,3076,98,1,[3076]
NCP-17.zip,2,NCP,479,2114,58,2,"[2113, 2114]"
Normal-24.zip,0,Normal,2652,162,35,1,[162]
Normal-2.zip,0,Normal,1752,1085,66,1,[1085]
Normal-24.zip,0,Normal,2638,148,38,1,[148]
NCP-8.zip,2,NCP,260,1666,163,2,"[1666, 1667]"
NCP-18.zip,2,NCP,509,2175,58,2,"[2174, 2175]"
Normal-14.zip,0,Normal,2079,534,92,1,[534]
Normal-3.zip,0,Normal,751,186,119,1,[186]
NCP-8.zip,2,NCP,263,1673,74,2,"[1672, 1673]"
CP-22.zip,1,CP,626,2988,174,1,[2988]
Normal-23.zip,0,Normal,2619,129,43,1,[129]
CP-1.zip,1,CP,1069,3109,77,4,"[3108, 3109, 3110, 3111]"
NCP-13.zip,2,NCP,360,1872,51,2,"[1871, 1872]"
NCP-23.zip,2,NCP,915,2457,31,1,[2457]
NCP-3.zip,2,NCP,131,1407,117,2,"[1407, 1408]"
NCP-21.zip,2,NCP,79,1292,55,2,"[1291, 1292]"
Normal-4.zip,0,Normal,779,214,290,1,[214]
CP-27.zip,1,CP,3734,5676,32,3,"[5676, 5677, 5678]"
Normal-15.zip,0,Normal,2104,559,101,1,[559]
CP-5.zip,1,CP,1218,3436,213,1,[3436]
NCP-3.zip,2,NCP,1291,2732,55,1,[2732]
NCP-19.zip,2,NCP,537,2232,60,2,"[2231, 2232]"
NCP-21.zip,2,NCP,71,1274,126,2,"[1274, 1275]"
NCP-5.zip,2,NCP,195,1535,60,2,"[1534, 1535]"
CP-9.zip,1,CP,1359,3766,46,3,"[3764, 3765, 3766]"
NCP-2.zip,2,NCP,119,1380,62,2,"[1379, 1380]"
Normal-19.zip,0,Normal,2241,696,86,1,[696]
Normal-15.zip,0,Normal,2112,567,84,1,[567]
NCP-20.zip,2,NCP,569,2296,142,2,"[2296, 2297]"
NCP-9.zip,2,NCP,2700,2666,43,1,[2666]
NCP-9.zip,2,NCP,2697,2663,46,1,[2663]
CP-29.zip,1,CP,3809,5753,19,1,[5753]
NCP-10.zip,2,NCP,2718,2674,42,1,[2674]
Normal-1.zip,0,Normal,1668,778,60,4,"[778, 779, 780, 781]"
NCP-18.zip,2,NCP,509,2174,138,2,"[2174, 2175]"
NCP-16.zip,2,NCP,456,2066,135,2,"[2066, 2067]"
NCP-5.zip,2,NCP,187,1519,57,2,"[1518, 1519]"
NCP-20.zip,2,NCP,57,1247,132,2,"[1247, 1248]"
Normal-1.zip,0,Normal,1715,986,71,2,"[985, 986]"
Normal-2.zip,0,Normal,1749,1069,61,4,"[1069, 1070, 1071, 1072]"
NCP-24.zip,2,NCP,984,2530,241,2,"[2529, 2530]"
Normal-1.zip,0,Normal,1682,847,67,6,"[847, 848, 852, 853, 857, 858]"
CP-1.zip,1,CP,1069,3110,77,4,"[3108, 3109, 3110, 3111]"
Normal-12.zip,0,Normal,2017,472,99,1,[472]
CP-10.zip,1,CP,1400,3861,54,2,"[3860, 3861]"
NCP-22.zip,2,NCP,881,2416,225,1,[2416]
CP-11.zip,1,CP,1420,3906,59,2,"[3905, 3906]"
NCP-6.zip,2,NCP,20,1172,127,2,"[1172, 1173]"
NCP-28.zip,2,NCP,846,2364,269,1,[2364]
Normal-14.zip,0,Normal,2075,530,93,1,[530]
CP-6.zip,1,CP,1238,3456,191,1,[3456]
CP-7.zip,1,CP,1263,3481,120,1,[3481]
CP-1.zip,1,CP,1088,3220,54,4,"[3220, 3221, 3222, 3223]"
CP-8.zip,1,CP,1320,3676,62,2,"[3676, 3677]"
NCP-15.zip,2,NCP,426,2005,139,2,"[2005, 2006]"
NCP-28.zip,2,NCP,869,2397,58,1,[2397]
NCP-11.zip,2,NCP,288,1721,114,2,"[1721, 1722]"
NCP-21.zip,2,NCP,581,2319,139,2,"[2319, 2320]"
Normal-26.zip,0,Normal,3878,5390,24,1,[5390]
Normal-13.zip,0,Normal,2041,496,95,1,[496]
Normal-25.zip,0,Normal,3845,5357,182,1,[5357]
Normal-22.zip,0,Normal,2599,109,39,1,[109]
Normal-4.zip,0,Normal,789,224,120,1,[224]
Normal-1.zip,0,Normal,1714,982,40,3,"[982, 983, 984]"
NCP-16.zip,2,NCP,434,2022,51,2,"[2021, 2022]"
NCP-28.zip,2,NCP,830,2343,120,1,[2343]
Normal-1.zip,0,Normal,1704,962,71,4,"[961, 962, 963, 964]"
NCP-5.zip,2,NCP,196,1537,55,2,"[1536, 1537]"
CP-8.zip,1,CP,1336,3713,60,2,"[3712, 3713]"
NCP-29.zip,2,NCP,895,2436,140,2,"[2435, 2436]"
NCP-29.zip,2,NCP,930,2472,23,1,[2472]
CP-12.zip,1,CP,1482,4047,181,3,"[4047, 4048, 4049]"
CP-10.zip,1,CP,1401,3862,201,3,"[3862, 3863, 3864]"
NCP-5.zip,2,NCP,182,1509,55,2,"[1508, 1509]"
CP-12.zip,1,CP,1483,4050,148,3,"[4050, 4051, 4052]"
Normal-2.zip,0,Normal,1741,1054,61,2,"[1053, 1054]"
CP-8.zip,1,CP,1324,3684,58,2,"[3684, 3685]"
NCP-9.zip,2,NCP,2681,2696,58,1,[2696]
CP-9.zip,1,CP,1367,3786,58,3,"[3785, 3786, 3787]"
CP-19.zip,1,CP,1790,3209,69,2,"[3208, 3209]"
CP-11.zip,1,CP,1430,3928,77,2,"[3928, 3929]"
Normal-18.zip,0,Normal,2207,662,99,1,[662]
Normal-11.zip,0,Normal,1972,427,97,1,[427]
CP-5.zip,1,CP,1221,3439,295,1,[3439]
NCP-15.zip,2,NCP,42,1216,146,2,"[1216, 1218]"
CP-22.zip,1,CP,640,3002,136,1,[3002]
NCP-7.zip,2,NCP,245,1637,62,2,"[1636, 1637]"
NCP-6.zip,2,NCP,215,1574,155,2,"[1574, 1575]"
NCP-29.zip,2,NCP,903,2445,87,1,[2445]
NCP-7.zip,2,NCP,232,1609,61,2,"[1608, 1609]"
NCP-2.zip,2,NCP,119,1379,147,2,"[1379, 1380]"
Normal-2.zip,0,Normal,1739,1042,278,3,"[1042, 1043, 1044]"
CP-28.zip,1,CP,3791,5735,26,1,[5735]
NCP-27.zip,2,NCP,828,2341,45,1,[2341]
NCP-12.zip,2,NCP,314,1775,139,2,"[1775, 1776]"
NCP-6.zip,2,NCP,20,1173,54,2,"[1172, 1173]"
CP-13.zip,1,CP,1490,4073,69,3,"[4071, 4072, 4073]"
NCP-20.zip,2,NCP,569,2297,60,2,"[2296, 2297]"
Normal-2.zip,0,Normal,1759,1113,59,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-30.zip,2,NCP,987,2536,71,2,"[2536, 2537]"
CP-15.zip,1,CP,1579,4267,20,1,[4267]
CP-19.zip,1,CP,1790,3208,69,2,"[3208, 3209]"
NCP-20.zip,2,NCP,568,2295,61,2,"[2294, 2295]"
Normal-13.zip,0,Normal,2036,491,102,1,[491]
NCP-26.zip,2,NCP,3973,5482,48,1,[5482]
CP-27.zip,1,CP,3743,5687,22,1,[5687]
Normal-11.zip,0,Normal,1981,436,91,1,[436]
NCP-2.zip,2,NCP,125,1391,127,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-9.zip,2,NCP,270,1686,147,2,"[1686, 1687]"
Normal-23.zip,0,Normal,2636,146,42,1,[146]
NCP-3.zip,2,NCP,1286,2727,64,1,[2727]
CP-10.zip,1,CP,1386,3828,66,2,"[3827, 3828]"
Normal-19.zip,0,Normal,2230,685,91,1,[685]
Normal-5.zip,0,Normal,805,240,327,1,[240]
Normal-26.zip,0,Normal,3891,5412,62,2,"[5411, 5412]"
NCP-27.zip,2,NCP,2671,2691,51,1,[2691]
NCP-27.zip,2,NCP,1059,2636,52,1,[2636]
CP-8.zip,1,CP,1344,3730,58,3,"[3728, 3729, 3730]"
Normal-24.zip,0,Normal,2662,172,41,1,[172]
Normal-3.zip,0,Normal,744,179,278,1,[179]
CP-5.zip,1,CP,1202,3420,207,1,[3420]
NCP-27.zip,2,NCP,1006,2567,19,2,"[2566, 2567]"
Normal-19.zip,0,Normal,2248,703,87,1,[703]
Normal-2.zip,0,Normal,1736,1034,25,5,"[1032, 1033, 1034, 1035, 1036]"
Normal-8.zip,0,Normal,1867,322,87,1,[322]
Normal-23.zip,0,Normal,2609,119,40,1,[119]
CP-11.zip,1,CP,1453,3980,56,3,"[3979, 3980, 3981]"
Normal-26.zip,0,Normal,3875,5387,24,1,[5387]
CP-26.zip,1,CP,3647,5607,32,1,[5607]
Normal-12.zip,0,Normal,2006,461,77,1,[461]
Normal-6.zip,0,Normal,1827,282,99,1,[282]
NCP-19.zip,2,NCP,533,2224,156,1,[2224]
NCP-11.zip,2,NCP,287,1720,60,2,"[1719, 1720]"
NCP-7.zip,2,NCP,2487,2687,38,1,[2687]
CP-3.zip,1,CP,1160,3378,318,1,[3378]
Normal-7.zip,0,Normal,1858,313,95,1,[313]
CP-13.zip,1,CP,1514,4129,61,2,"[4129, 4130]"
NCP-20.zip,2,NCP,561,2280,139,2,"[2280, 2281]"
CP-14.zip,1,CP,1527,4161,58,3,"[4160, 4161, 4162]"
CP-25.zip,1,CP,721,3083,86,1,[3083]
CP-13.zip,1,CP,1496,4091,55,2,"[4090, 4091]"
Normal-1.zip,0,Normal,1728,1013,66,4,"[1013, 1014, 1015, 1016]"
NCP-12.zip,2,NCP,317,1781,117,2,"[1781, 1782]"
CP-19.zip,1,CP,2437,2906,132,3,"[2905, 2906, 2907]"
NCP-5.zip,2,NCP,196,1536,131,2,"[1536, 1537]"
CP-11.zip,1,CP,1437,3942,57,2,"[3942, 3943]"
NCP-5.zip,2,NCP,182,1508,130,2,"[1508, 1509]"
CP-9.zip,1,CP,1363,3774,64,2,"[3774, 3775]"
CP-10.zip,1,CP,1401,3863,51,3,"[3862, 3863, 3864]"
NCP-10.zip,2,NCP,275,1696,153,2,"[1696, 1697]"
CP-30.zip,1,CP,3931,5633,68,4,"[5630, 5631, 5632, 5633]"
NCP-7.zip,2,NCP,234,1614,58,2,"[1613, 1614]"
NCP-3.zip,2,NCP,1296,2737,66,1,[2737]
NCP-11.zip,2,NCP,283,1712,62,1,[1712]
CP-9.zip,1,CP,1363,3775,64,2,"[3774, 3775]"
NCP-17.zip,2,NCP,464,2082,144,2,"[2082, 2083]"
CP-12.zip,1,CP,1473,4027,51,3,"[4026, 4027, 4028]"
CP-28.zip,1,CP,3781,5725,20,1,[5725]
NCP-14.zip,2,NCP,391,1933,55,2,"[1932, 1933]"
Normal-13.zip,0,Normal,2032,487,85,1,[487]
NCP-28.zip,2,NCP,872,2403,183,2,"[2403, 2404]"
NCP-17.zip,2,NCP,479,2113,139,2,"[2113, 2114]"
NCP-11.zip,2,NCP,305,1758,65,2,"[1756, 1758]"
NCP-1.zip,2,NCP,1021,2587,201,4,"[2587, 2588, 2589, 2590]"
NCP-30.zip,2,NCP,957,2500,50,1,[2500]
Normal-17.zip,0,Normal,2172,627,91,1,[627]
CP-7.zip,1,CP,1316,3667,147,3,"[3667, 3668, 3669]"
NCP-24.zip,2,NCP,971,2514,74,1,[2514]
NCP-18.zip,2,NCP,494,2145,65,2,"[2144, 2145]"
NCP-30.zip,2,NCP,987,2537,368,2,"[2536, 2537]"
Normal-13.zip,0,Normal,2048,503,94,1,[503]
CP-8.zip,1,CP,1347,3736,265,3,"[3736, 3737, 3738]"
NCP-15.zip,2,NCP,41,1215,63,2,"[1214, 1215]"
CP-12.zip,1,CP,1456,3989,52,3,"[3988, 3989, 3990]"
NCP-21.zip,2,NCP,80,1294,54,2,"[1293, 1294]"
CP-29.zip,1,CP,3808,5752,23,1,[5752]
CP-26.zip,1,CP,3732,5671,53,2,"[5671, 5672]"
NCP-8.zip,2,NCP,251,1648,131,2,"[1648, 1649]"
Normal-2.zip,0,Normal,1755,1099,71,4,"[1097, 1098, 1099, 1100]"
Normal-2.zip,0,Normal,1759,1120,66,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-28.zip,2,NCP,874,2407,341,1,[2407]
Normal-1.zip,0,Normal,1730,1023,59,5,"[1019, 1020, 1021, 1022, 1023]"
Normal-8.zip,0,Normal,1891,346,96,1,[346]
CP-4.zip,1,CP,1170,3388,180,1,[3388]
CP-10.zip,1,CP,1395,3849,63,2,"[3849, 3850]"
NCP-2.zip,2,NCP,123,1387,148,2,"[1387, 1388]"
NCP-16.zip,2,NCP,446,2047,61,2,"[2046, 2047]"
NCP-2.zip,2,NCP,111,1364,56,2,"[1363, 1364]"
CP-15.zip,1,CP,1568,4256,22,1,[4256]
NCP-21.zip,2,NCP,79,1291,131,2,"[1291, 1292]"
CP-10.zip,1,CP,1395,3850,63,2,"[3849, 3850]"
CP-14.zip,1,CP,1542,4199,54,3,"[4197, 4198, 4199]"
CP-15.zip,1,CP,1555,4229,62,2,"[4228, 4229]"
CP-21.zip,1,CP,606,2968,255,1,[2968]
CP-12.zip,1,CP,1480,4042,54,2,"[4042, 4043]"
NCP-27.zip,2,NCP,1063,2640,82,1,[2640]
Normal-7.zip,0,Normal,1831,286,99,1,[286]
CP-14.zip,1,CP,1552,4221,62,2,"[4221, 4222]"
NCP-19.zip,2,NCP,541,2240,51,2,"[2239, 2240]"
NCP-23.zip,2,NCP,91,1318,100,2,"[1318, 1319]"
Normal-2.zip,0,Normal,1739,1043,56,3,"[1042, 1043, 1044]"
CP-19.zip,1,CP,2437,2907,183,3,"[2905, 2906, 2907]"
CP-10.zip,1,CP,1396,3851,139,3,"[3851, 3852, 3853]"
CP-13.zip,1,CP,1490,4072,69,3,"[4071, 4072, 4073]"
CP-6.zip,1,CP,1242,3460,229,1,[3460]
NCP-17.zip,2,NCP,471,2098,59,2,"[2097, 2098]"
NCP-16.zip,2,NCP,434,2021,119,2,"[2021, 2022]"
NCP-16.zip,2,NCP,446,2046,146,2,"[2046, 2047]"
NCP-21.zip,2,NCP,69,1270,113,2,"[1270, 1271]"
Normal-9.zip,0,Normal,1896,351,98,1,[351]
NCP-9.zip,2,NCP,2709,2702,44,1,[2702]
NCP-29.zip,2,NCP,907,2449,287,1,[2449]
NCP-2.zip,2,NCP,106,1349,150,2,"[1349, 1350]"
NCP-17.zip,2,NCP,477,2109,139,2,"[2109, 2110]"
CP-27.zip,1,CP,3734,5677,163,3,"[5676, 5677, 5678]"
Normal-8.zip,0,Normal,1877,332,88,1,[332]
Normal-7.zip,0,Normal,1853,308,94,1,[308]
NCP-2.zip,2,NCP,1272,2713,62,1,[2713]
CP-13.zip,1,CP,1515,4132,57,3,"[4131, 4132, 4133]"
NCP-21.zip,2,NCP,68,1269,49,2,"[1268, 1269]"
CP-25.zip,1,CP,719,3081,128,1,[3081]
NCP-10.zip,2,NCP,276,1698,139,2,"[1698, 1699]"
NCP-11.zip,2,NCP,294,1734,57,2,"[1733, 1734]"
CP-8.zip,1,CP,1342,3724,58,3,"[3723, 3724, 3725]"
Normal-4.zip,0,Normal,783,218,118,1,[218]
Normal-11.zip,0,Normal,1977,432,96,1,[432]
CP-12.zip,1,CP,1460,3998,60,2,"[3998, 3999]"
NCP-12.zip,2,NCP,32,1197,61,2,"[1196, 1197]"
Normal-2.zip,0,Normal,1736,1035,55,5,"[1032, 1033, 1034, 1035, 1036]"
CP-7.zip,1,CP,1308,3650,219,2,"[3649, 3650]"
NCP-17.zip,2,NCP,485,2125,153,2,"[2125, 2126]"
Normal-24.zip,0,Normal,2649,159,26,1,[159]
CP-1.zip,1,CP,1082,3127,74,1,[3127]
CP-28.zip,1,CP,3788,5732,26,1,[5732]
Normal-3.zip,0,Normal,1764,1143,66,4,"[1143, 1144, 1145, 1146]"
NCP-2.zip,2,NCP,125,1392,132,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-1.zip,2,NCP,1013,2577,524,1,[2577]
CP-22.zip,1,CP,630,2992,118,1,[2992]
Normal-27.zip,0,Normal,3897,5423,70,4,"[5423, 5424, 5426, 5427]"
CP-1.zip,1,CP,1088,3222,50,4,"[3220, 3221, 3222, 3223]"
NCP-11.zip,2,NCP,294,1733,136,2,"[1733, 1734]"
CP-3.zip,1,CP,1135,3353,202,1,[3353]
CP-10.zip,1,CP,1408,3879,59,3,"[3878, 3879, 3880]"
CP-19.zip,1,CP,1791,3213,71,4,"[3210, 3211, 3212, 3213]"
Normal-1.zip,0,Normal,1709,974,61,2,"[973, 974]"
CP-11.zip,1,CP,1438,3945,46,2,"[3944, 3945]"
CP-8.zip,1,CP,1325,3687,64,2,"[3686, 3687]"
CP-20.zip,1,CP,2761,3292,38,1,[3292]
NCP-17.zip,2,NCP,470,2096,64,2,"[2095, 2096]"
NCP-4.zip,2,NCP,164,1472,150,2,"[1472, 1473]"
NCP-14.zip,2,NCP,380,1912,148,2,"[1912, 1913]"
CP-7.zip,1,CP,1266,3484,134,1,[3484]
CP-10.zip,1,CP,1400,3860,54,2,"[3860, 3861]"
NCP-10.zip,2,NCP,281,1708,121,2,"[1708, 1709]"
NCP-14.zip,2,NCP,397,1944,158,2,"[1944, 1945]"
CP-27.zip,1,CP,3734,5678,32,3,"[5676, 5677, 5678]"
CP-15.zip,1,CP,1559,4238,53,2,"[4237, 4238]"
Normal-26.zip,0,Normal,3888,5406,63,1,[5406]
NCP-11.zip,2,NCP,308,1764,49,2,"[1763, 1764]"
NCP-16.zip,2,NCP,435,2024,62,2,"[2023, 2024]"
NCP-11.zip,2,NCP,285,1715,149,2,"[1715, 1716]"
NCP-20.zip,2,NCP,568,2294,144,2,"[2294, 2295]"
NCP-20.zip,2,NCP,550,2257,143,2,"[2257, 2258]"
NCP-6.zip,2,NCP,218,1581,58,2,"[1580, 1581]"
Normal-15.zip,0,Normal,2092,547,87,1,[547]
CP-10.zip,1,CP,1396,3853,58,3,"[3851, 3852, 3853]"
Normal-12.zip,0,Normal,2010,465,91,1,[465]
Normal-18.zip,0,Normal,2194,649,89,1,[649]
NCP-10.zip,2,NCP,276,1699,58,2,"[1698, 1699]"
CP-27.zip,1,CP,3746,5690,17,1,[5690]
Normal-24.zip,0,Normal,2656,166,34,1,[166]
CP-29.zip,1,CP,3802,5746,26,1,[5746]
CP-17.zip,1,CP,1641,4329,26,1,[4329]
Normal-2.zip,0,Normal,1749,1072,66,4,"[1069, 1070, 1071, 1072]"
CP-9.zip,1,CP,1373,3800,55,2,"[3800, 3801]"
Normal-22.zip,0,Normal,2596,106,44,1,[106]
Normal-14.zip,0,Normal,2072,527,77,1,[527]
Normal-20.zip,0,Normal,2251,706,89,1,[706]
CP-12.zip,1,CP,1482,4049,75,3,"[4047, 4048, 4049]"
CP-6.zip,1,CP,1231,3449,375,1,[3449]
CP-28.zip,1,CP,3797,5741,28,1,[5741]
CP-7.zip,1,CP,1307,3648,242,4,"[3645, 3646, 3647, 3648]"
NCP-1.zip,2,NCP,1030,2600,279,1,[2600]
CP-11.zip,1,CP,1448,3970,62,2,"[3969, 3970]"
Normal-20.zip,0,Normal,2255,710,95,1,[710]
CP-2.zip,1,CP,1124,3342,215,1,[3342]
NCP-28.zip,2,NCP,872,2404,46,2,"[2403, 2404]"
Normal-3.zip,0,Normal,1765,1147,60,2,"[1147, 1148]"
NCP-11.zip,2,NCP,289,1724,47,2,"[1723, 1724]"
CP-11.zip,1,CP,1442,3956,58,3,"[3954, 3955, 3956]"
CP-1.zip,1,CP,1081,3126,68,1,[3126]
Normal-20.zip,0,Normal,2263,718,108,1,[718]
NCP-19.zip,2,NCP,524,2204,191,1,[2204]
Normal-4.zip,0,Normal,784,219,105,1,[219]
CP-8.zip,1,CP,1337,3715,60,2,"[3714, 3715]"
NCP-28.zip,2,NCP,841,2356,282,1,[2356]
NCP-26.zip,2,NCP,3983,5510,40,1,[5510]
CP-20.zip,1,CP,2767,3298,35,1,[3298]
Normal-19.zip,0,Normal,2229,684,87,1,[684]
NCP-15.zip,2,NCP,429,2012,55,2,"[2011, 2012]"
CP-19.zip,1,CP,1788,3197,52,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-15.zip,1,CP,1558,4235,62,3,"[4234, 4235, 4236]"
NCP-3.zip,2,NCP,1280,2721,50,1,[2721]
NCP-4.zip,2,NCP,148,1440,150,2,"[1440, 1441]"
Normal-22.zip,0,Normal,2582,92,39,1,[92]
Normal-23.zip,0,Normal,2623,133,35,1,[133]
CP-13.zip,1,CP,1496,4090,55,2,"[4090, 4091]"
CP-30.zip,1,CP,3835,5779,23,1,[5779]
CP-11.zip,1,CP,1442,3954,139,3,"[3954, 3955, 3956]"
NCP-15.zip,2,NCP,429,2011,131,2,"[2011, 2012]"
CP-17.zip,1,CP,1621,4309,29,1,[4309]
CP-6.zip,1,CP,1244,3462,87,1,[3462]
NCP-1.zip,2,NCP,1021,2590,181,4,"[2587, 2588, 2589, 2590]"
NCP-9.zip,2,NCP,2706,2672,51,1,[2672]
NCP-14.zip,2,NCP,391,1932,131,2,"[1932, 1933]"
CP-3.zip,1,CP,1134,3352,330,1,[3352]
CP-8.zip,1,CP,1346,3734,53,3,"[3733, 3734, 3735]"
NCP-12.zip,2,NCP,320,1789,58,2,"[1788, 1789]"
NCP-21.zip,2,NCP,77,1287,126,2,"[1287, 1288]"
CP-17.zip,1,CP,1647,4335,23,1,[4335]
CP-11.zip,1,CP,1453,3979,221,3,"[3979, 3980, 3981]"
Normal-2.zip,0,Normal,1759,1117,65,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-17.zip,2,NCP,481,2118,68,2,"[2117, 2118]"
NCP-3.zip,2,NCP,1279,2720,66,1,[2720]
CP-8.zip,1,CP,1346,3733,53,3,"[3733, 3734, 3735]"
Normal-10.zip,0,Normal,1954,409,88,1,[409]
CP-17.zip,1,CP,1648,4336,29,1,[4336]
CP-14.zip,1,CP,1524,4154,58,3,"[4152, 4153, 4154]"
Normal-18.zip,0,Normal,2216,671,97,1,[671]
NCP-27.zip,2,NCP,179,1503,43,2,"[1503, 1502]"
CP-19.zip,1,CP,1788,3202,55,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-12.zip,1,CP,1482,4048,75,3,"[4047, 4048, 4049]"
NCP-29.zip,2,NCP,913,2455,268,1,[2455]
CP-19.zip,1,CP,2444,2919,112,2,"[2918, 2919]"
CP-22.zip,1,CP,639,3001,136,1,[3001]
NCP-2.zip,2,NCP,121,1383,100,2,"[1383, 1384]"
CP-8.zip,1,CP,1324,3685,58,2,"[3684, 3685]"
CP-11.zip,1,CP,1430,3929,77,2,"[3928, 3929]"
NCP-15.zip,2,NCP,401,1952,58,2,"[1951, 1952]"
Normal-4.zip,0,Normal,788,223,336,1,[223]
Normal-27.zip,0,Normal,3898,5428,74,1,[5428]
Normal-21.zip,0,Normal,2312,767,88,1,[767]
Normal-17.zip,0,Normal,2170,625,62,1,[625]
NCP-3.zip,2,NCP,130,1406,59,2,"[1405, 1406]"
CP-3.zip,1,CP,1154,3372,169,1,[3372]
Normal-3.zip,0,Normal,1765,1148,60,2,"[1147, 1148]"
Normal-11.zip,0,Normal,1962,417,78,1,[417]
CP-18.zip,1,CP,1667,4355,26,1,[4355]
CP-1.zip,1,CP,1066,3105,59,1,[3105]
NCP-1.zip,2,NCP,1047,2619,473,1,[2619]
NCP-10.zip,2,NCP,2711,2704,44,1,[2704]
Normal-19.zip,0,Normal,2237,692,85,1,[692]
NCP-11.zip,2,NCP,289,1723,110,2,"[1723, 1724]"
NCP-7.zip,2,NCP,240,1626,66,2,"[1625, 1626]"
Normal-11.zip,0,Normal,1974,429,96,1,[429]
Normal-26.zip,0,Normal,3887,5401,67,3,"[5400, 5401, 5404]"
Normal-26.zip,0,Normal,3891,5411,67,2,"[5411, 5412]"
Normal-18.zip,0,Normal,2191,646,106,1,[646]
NCP-28.zip,2,NCP,840,2355,55,1,[2355]
Normal-6.zip,0,Normal,1814,269,88,1,[269]
NCP-12.zip,2,NCP,329,1807,66,2,"[1806, 1807]"
CP-24.zip,1,CP,686,3048,133,1,[3048]
CP-19.zip,1,CP,2432,2894,124,1,[2894]
Normal-10.zip,0,Normal,1952,407,107,1,[407]
CP-13.zip,1,CP,1515,4133,57,3,"[4131, 4132, 4133]"
CP-8.zip,1,CP,1347,3737,34,3,"[3736, 3737, 3738]"
Normal-2.zip,0,Normal,1754,1095,69,4,"[1093, 1094, 1095, 1096]"
CP-22.zip,1,CP,622,2984,459,1,[2984]
CP-3.zip,1,CP,1141,3359,350,1,[3359]
CP-14.zip,1,CP,1533,4173,100,3,"[4173, 4174, 4175]"
Normal-10.zip,0,Normal,1935,390,91,1,[390]
Normal-22.zip,0,Normal,2320,775,91,1,[775]
Normal-25.zip,0,Normal,3859,5371,216,1,[5371]
Normal-12.zip,0,Normal,2018,473,93,1,[473]
CP-9.zip,1,CP,1359,3764,181,3,"[3764, 3765, 3766]"
CP-20.zip,1,CP,2452,2931,298,1,[2931]
NCP-23.zip,2,NCP,90,1316,100,2,"[1316, 1317]"
Normal-2.zip,0,Normal,1744,1058,71,2,"[1058, 1059]"
NCP-18.zip,2,NCP,492,2141,58,2,"[2140, 2141]"
Normal-13.zip,0,Normal,2053,508,81,1,[508]
Normal-17.zip,0,Normal,2156,611,82,1,[611]
NCP-19.zip,2,NCP,541,2239,121,2,"[2239, 2240]"
NCP-19.zip,2,NCP,531,2221,58,2,"[2220, 2221]"
CP-19.zip,1,CP,2448,2925,104,2,"[2925, 2926]"
CP-31.zip,1,CP,4044,5593,276,1,[5593]
CP-8.zip,1,CP,1345,3732,55,2,"[3731, 3732]"
Normal-3.zip,0,Normal,743,178,340,1,[178]
Normal-23.zip,0,Normal,2613,123,40,1,[123]
Normal-1.zip,0,Normal,1714,983,71,3,"[982, 983, 984]"
NCP-8.zip,2,NCP,268,1683,53,2,"[1682, 1683]"
CP-8.zip,1,CP,1347,3738,34,3,"[3736, 3737, 3738]"
CP-25.zip,1,CP,718,3080,466,1,[3080]
Normal-13.zip,0,Normal,2024,479,86,1,[479]
Normal-1.zip,0,Normal,1668,780,63,4,"[778, 779, 780, 781]"
CP-17.zip,1,CP,1636,4324,26,1,[4324]
NCP-20.zip,2,NCP,55,1244,63,2,"[1243, 1244]"
CP-32.zip,1,CP,2463,3227,77,1,[3227]
NCP-16.zip,2,NCP,435,2023,153,2,"[2023, 2024]"
NCP-2.zip,2,NCP,106,1350,63,2,"[1349, 1350]"
CP-27.zip,1,CP,3753,5697,20,1,[5697]
NCP-15.zip,2,NCP,415,1983,63,2,"[1982, 1983]"
NCP-5.zip,2,NCP,191,1527,54,2,"[1526, 1527]"
CP-3.zip,1,CP,1142,3360,138,1,[3360]
NCP-19.zip,2,NCP,531,2220,139,2,"[2220, 2221]"
CP-14.zip,1,CP,1524,4153,58,3,"[4152, 4153, 4154]"
CP-19.zip,1,CP,1788,3196,49,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-14.zip,1,CP,1533,4175,42,3,"[4173, 4174, 4175]"
NCP-7.zip,2,NCP,244,1635,69,2,"[1634, 1635]"
Normal-7.zip,0,Normal,1848,303,95,1,[303]
CP-24.zip,1,CP,703,3065,120,1,[3065]
Normal-1.zip,0,Normal,1731,1024,36,1,[1024]
CP-11.zip,1,CP,1448,3969,62,2,"[3969, 3970]"
Normal-25.zip,0,Normal,3850,5362,180,1,[5362]
CP-7.zip,1,CP,13,3172,255,4,"[3170, 3171, 3172, 3173]"
CP-14.zip,1,CP,1533,4174,42,3,"[4173, 4174, 4175]"
NCP-3.zip,2,NCP,130,1405,140,2,"[1405, 1406]"
CP-23.zip,1,CP,647,3009,384,1,[3009]
Normal-24.zip,0,Normal,2637,147,36,1,[147]
NCP-28.zip,2,NCP,848,2367,283,2,"[2366, 2367]"
Normal-9.zip,0,Normal,1903,358,86,1,[358]
Normal-26.zip,0,Normal,3889,5408,65,2,"[5407, 5408]"
NCP-20.zip,2,NCP,567,2293,60,2,"[2292, 2293]"
CP-22.zip,1,CP,621,2983,174,1,[2983]
CP-10.zip,1,CP,1389,3835,51,3,"[3833, 3834, 3835]"
CP-9.zip,1,CP,1362,3773,61,2,"[3772, 3773]"
Normal-27.zip,0,Normal,3897,5426,72,4,"[5423, 5424, 5426, 5427]"
NCP-15.zip,2,NCP,428,2010,53,2,"[2009, 2010]"
Normal-3.zip,0,Normal,759,194,297,1,[194]
CP-13.zip,1,CP,1497,4092,68,3,"[4092, 4093, 4094]"
Normal-19.zip,0,Normal,2246,701,87,1,[701]
CP-3.zip,1,CP,1130,3348,166,1,[3348]
CP-14.zip,1,CP,1552,4222,62,2,"[4221, 4222]"
NCP-26.zip,2,NCP,3994,5518,52,1,[5518]
NCP-27.zip,2,NCP,328,1805,43,2,"[1804, 1805]"
NCP-13.zip,2,NCP,369,1889,138,2,"[1889, 1890]"
CP-20.zip,1,CP,2756,3287,56,1,[3287]
CP-22.zip,1,CP,638,3000,116,1,[3000]
CP-6.zip,1,CP,1250,3468,451,1,[3468]
CP-19.zip,1,CP,2437,2905,316,3,"[2905, 2906, 2907]"
Normal-16.zip,0,Normal,2130,585,88,1,[585]
NCP-14.zip,2,NCP,376,1904,142,2,"[1904, 1905]"
Normal-10.zip,0,Normal,1932,387,91,1,[387]
NCP-16.zip,2,NCP,453,2060,121,2,"[2060, 2061]"
NCP-5.zip,2,NCP,191,1526,128,2,"[1526, 1527]"
CP-12.zip,1,CP,1476,4034,53,2,"[4033, 4034]"
NCP-5.zip,2,NCP,175,1495,55,2,"[1494, 1495]"
NCP-21.zip,2,NCP,71,1275,53,2,"[1274, 1275]"
Normal-10.zip,0,Normal,1925,380,90,1,[380]
NCP-30.zip,2,NCP,994,2548,226,2,"[2547, 2548]"
CP-4.zip,1,CP,1192,3410,184,1,[3410]
Normal-23.zip,0,Normal,2631,141,38,1,[141]
NCP-9.zip,2,NCP,2684,2697,50,1,[2697]
CP-27.zip,1,CP,3757,5701,22,1,[5701]
NCP-3.zip,2,NCP,1288,2729,61,1,[2729]
NCP-18.zip,2,NCP,505,2166,157,2,"[2166, 2167]"
CP-8.zip,1,CP,1348,3741,59,3,"[3739, 3740, 3741]"
Normal-24.zip,0,Normal,2651,161,34,1,[161]
Normal-23.zip,0,Normal,2618,128,35,1,[128]
CP-8.zip,1,CP,1331,3702,62,2,"[3701, 3702]"
NCP-14.zip,2,NCP,398,1947,70,2,"[1946, 1947]"
NCP-4.zip,2,NCP,158,1460,122,2,"[1460, 1461]"
NCP-23.zip,2,NCP,89,1312,157,4,"[1311, 1312, 1313, 1315]"
Normal-2.zip,0,Normal,1759,1116,64,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-17.zip,1,CP,1645,4333,26,1,[4333]
CP-10.zip,1,CP,1408,3880,59,3,"[3878, 3879, 3880]"
CP-30.zip,1,CP,3917,5541,62,1,[5541]
NCP-30.zip,2,NCP,933,2475,23,1,[2475]
CP-8.zip,1,CP,1344,3728,142,3,"[3728, 3729, 3730]"
NCP-17.zip,2,NCP,459,2072,133,2,"[2072, 2073]"
NCP-4.zip,2,NCP,150,1445,75,2,"[1444, 1445]"
CP-12.zip,1,CP,1455,3986,58,3,"[3985, 3986, 3987]"
Normal-27.zip,0,Normal,3897,5427,72,4,"[5423, 5424, 5426, 5427]"
CP-18.zip,1,CP,1772,3177,81,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-27.zip,1,CP,3745,5689,23,1,[5689]
NCP-29.zip,2,NCP,920,2462,183,1,[2462]
NCP-9.zip,2,NCP,2688,2655,56,1,[2655]
Normal-8.zip,0,Normal,1887,342,94,1,[342]
CP-1.zip,1,CP,1076,3120,70,1,[3120]
Normal-15.zip,0,Normal,2100,555,94,1,[555]
NCP-11.zip,2,NCP,285,1716,62,2,"[1715, 1716]"
CP-8.zip,1,CP,1344,3729,59,3,"[3728, 3729, 3730]"
Normal-12.zip,0,Normal,2021,476,85,1,[476]
Normal-15.zip,0,Normal,2105,560,87,1,[560]
CP-9.zip,1,CP,1366,3784,57,3,"[3782, 3783, 3784]"
CP-18.zip,1,CP,1772,3181,75,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
NCP-15.zip,2,NCP,426,2006,58,2,"[2005, 2006]"
NCP-1.zip,2,NCP,1020,2586,45,1,[2586]
NCP-13.zip,2,NCP,356,1863,124,2,"[1863, 1864]"
Normal-8.zip,0,Normal,1865,320,99,1,[320]
NCP-30.zip,2,NCP,994,2547,226,2,"[2547, 2548]"
Normal-12.zip,0,Normal,2011,466,93,1,[466]
CP-18.zip,1,CP,1773,3185,67,4,"[3182, 3183, 3184, 3185]"
NCP-18.zip,2,NCP,505,2167,66,2,"[2166, 2167]"
CP-8.zip,1,CP,1328,3694,69,2,"[3693, 3694]"
NCP-2.zip,2,NCP,1278,2719,61,1,[2719]
CP-25.zip,1,CP,736,3098,494,1,[3098]
CP-24.zip,1,CP,7,3512,299,2,"[3511, 3512]"
Normal-27.zip,0,Normal,3913,5455,71,2,"[5454, 5455]"
NCP-6.zip,2,NCP,218,1580,139,2,"[1580, 1581]"
Normal-4.zip,0,Normal,795,230,120,1,[230]
NCP-6.zip,2,NCP,207,1559,46,2,"[1558, 1559]"
NCP-5.zip,2,NCP,189,1523,58,2,"[1522, 1523]"
Normal-22.zip,0,Normal,2314,769,84,1,[769]
CP-14.zip,1,CP,1541,4195,58,3,"[4194, 4195, 4196]"
Normal-26.zip,0,Normal,3866,5378,27,1,[5378]
NCP-30.zip,2,NCP,938,2481,78,2,"[2480, 2481]"
NCP-1.zip,2,NCP,1041,2612,126,1,[2612]
Normal-24.zip,0,Normal,2664,174,28,1,[174]
CP-14.zip,1,CP,1542,4198,54,3,"[4197, 4198, 4199]"
CP-8.zip,1,CP,1332,3704,41,2,"[3703, 3704]"
CP-14.zip,1,CP,1527,4160,142,3,"[4160, 4161, 4162]"
Normal-2.zip,0,Normal,1749,1071,66,4,"[1069, 1070, 1071, 1072]"
CP-7.zip,1,CP,13,3170,271,4,"[3170, 3171, 3172, 3173]"
CP-20.zip,1,CP,2769,3300,36,1,[3300]
Normal-11.zip,0,Normal,1973,428,90,1,[428]
CP-28.zip,1,CP,3783,5727,26,1,[5727]
NCP-12.zip,2,NCP,320,1788,139,2,"[1788, 1789]"
Normal-10.zip,0,Normal,1929,384,91,1,[384]
Normal-7.zip,0,Normal,1841,296,79,1,[296]
Normal-8.zip,0,Normal,1881,336,91,1,[336]
NCP-25.zip,2,NCP,3964,5475,41,1,[5475]
CP-12.zip,1,CP,1480,4043,54,2,"[4042, 4043]"
NCP-23.zip,2,NCP,91,1319,43,2,"[1318, 1319]"
NCP-11.zip,2,NCP,30,1193,56,1,[1193]
NCP-29.zip,2,NCP,924,2466,18,1,[2466]
CP-16.zip,1,CP,1614,4302,23,1,[4302]
Normal-14.zip,0,Normal,2061,516,88,1,[516]
NCP-27.zip,2,NCP,826,2339,54,1,[2339]
Normal-13.zip,0,Normal,2038,493,80,1,[493]
Normal-1.zip,0,Normal,1715,985,71,2,"[985, 986]"
CP-28.zip,1,CP,3782,5726,25,1,[5726]
CP-21.zip,1,CP,2777,3308,22,1,[3308]
CP-8.zip,1,CP,1328,3693,69,2,"[3693, 3694]"
NCP-17.zip,2,NCP,468,2091,154,2,"[2091, 2092]"
NCP-13.zip,2,NCP,36,1205,59,2,"[1204, 1205]"
Normal-12.zip,0,Normal,2000,455,93,1,[455]
CP-19.zip,1,CP,2448,2926,102,2,"[2925, 2926]"
NCP-10.zip,2,NCP,2728,2711,54,1,[2711]
NCP-8.zip,2,NCP,263,1672,177,2,"[1672, 1673]"
CP-30.zip,1,CP,3831,5775,25,1,[5775]
Normal-1.zip,0,Normal,1709,973,61,2,"[973, 974]"
CP-27.zip,1,CP,3751,5695,22,1,[5695]
CP-11.zip,1,CP,1453,3981,56,3,"[3979, 3980, 3981]"
CP-16.zip,1,CP,1617,4305,23,1,[4305]
CP-19.zip,1,CP,1788,3198,53,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-19.zip,1,CP,2447,2924,86,2,"[2923, 2924]"
NCP-20.zip,2,NCP,55,1243,150,2,"[1243, 1244]"
CP-1.zip,1,CP,1069,3108,77,4,"[3108, 3109, 3110, 3111]"
CP-29.zip,1,CP,3827,5771,26,1,[5771]
CP-16.zip,1,CP,1599,4287,17,1,[4287]
NCP-12.zip,2,NCP,34,1201,64,2,"[1200, 1201]"
NCP-19.zip,2,NCP,523,2202,148,2,"[2202, 2203]"
CP-19.zip,1,CP,2429,2890,100,1,[2890]
NCP-9.zip,2,NCP,2695,2661,45,1,[2661]
Normal-1.zip,0,Normal,1730,1022,59,5,"[1019, 1020, 1021, 1022, 1023]"
CP-24.zip,1,CP,7,3511,298,2,"[3511, 3512]"
NCP-27.zip,2,NCP,1045,2617,30,1,[2617]
Normal-15.zip,0,Normal,2088,543,75,1,[543]
Normal-25.zip,0,Normal,3853,5365,205,1,[5365]
Normal-14.zip,0,Normal,2076,531,77,1,[531]
NCP-22.zip,2,NCP,84,1301,127,2,"[1301, 1302]"
CP-18.zip,1,CP,1660,4348,23,1,[4348]
NCP-26.zip,2,NCP,3980,5487,38,1,[5487]
CP-20.zip,1,CP,2758,3289,35,1,[3289]
Normal-6.zip,0,Normal,1808,263,95,1,[263]
Normal-2.zip,0,Normal,1739,1044,56,3,"[1042, 1043, 1044]"
CP-1.zip,1,CP,1068,3107,62,1,[3107]
Normal-14.zip,0,Normal,2083,538,87,1,[538]
CP-12.zip,1,CP,1484,4054,46,3,"[4053, 4054, 4055]"
CP-29.zip,1,CP,3811,5755,23,1,[5755]
CP-14.zip,1,CP,1548,4213,51,2,"[4213, 4214]"
NCP-20.zip,2,NCP,561,2281,58,2,"[2280, 2281]"
CP-14.zip,1,CP,1544,4204,51,3,"[4203, 4204, 4205]"
NCP-27.zip,2,NCP,1062,2639,176,1,[2639]
CP-25.zip,1,CP,735,3097,110,1,[3097]
CP-2.zip,1,CP,1115,3333,180,1,[3333]
CP-27.zip,1,CP,3756,5700,20,1,[5700]
Normal-5.zip,0,Normal,813,248,136,1,[248]
Normal-19.zip,0,Normal,2221,676,103,1,[676]
Normal-27.zip,0,Normal,3902,5434,73,1,[5434]
CP-11.zip,1,CP,1437,3943,57,2,"[3942, 3943]"
NCP-2.zip,2,NCP,126,1398,64,2,"[1396, 1398]"
Normal-20.zip,0,Normal,2265,720,87,1,[720]
CP-16.zip,1,CP,1589,4277,23,1,[4277]
Normal-16.zip,0,Normal,2149,604,85,1,[604]
NCP-19.zip,2,NCP,523,2203,62,2,"[2202, 2203]"
CP-12.zip,1,CP,1455,3985,138,3,"[3985, 3986, 3987]"
CP-30.zip,1,CP,4040,5589,38,1,[5589]
NCP-1.zip,2,NCP,1049,2622,205,1,[2622]
Normal-1.zip,0,Normal,1674,811,74,2,"[810, 811]"
NCP-19.zip,2,NCP,539,2236,55,2,"[2235, 2236]"
Normal-1.zip,0,Normal,1668,779,60,4,"[778, 779, 780, 781]"
NCP-19.zip,2,NCP,542,2241,130,2,"[2241, 2242]"
CP-25.zip,1,CP,739,3101,112,1,[3101]
CP-9.zip,1,CP,1367,3785,140,3,"[3785, 3786, 3787]"
CP-14.zip,1,CP,1549,4215,61,2,"[4215, 4216]"
NCP-19.zip,2,NCP,53,1239,144,2,"[1239, 1240]"
Normal-1.zip,0,Normal,1730,1021,294,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-29.zip,2,NCP,918,2460,213,1,[2460]
NCP-23.zip,2,NCP,89,1311,138,4,"[1311, 1312, 1313, 1315]"
Normal-2.zip,0,Normal,1759,1119,66,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-9.zip,1,CP,1359,3765,46,3,"[3764, 3765, 3766]"
NCP-25.zip,2,NCP,3706,5533,54,1,[5533]
CP-8.zip,1,CP,1325,3686,65,2,"[3686, 3687]"
NCP-19.zip,2,NCP,545,2248,57,2,"[2247, 2248]"
NCP-15.zip,2,NCP,418,1989,143,2,"[1989, 1990]"
CP-7.zip,1,CP,1261,3479,198,1,[3479]
NCP-29.zip,2,NCP,895,2435,143,2,"[2435, 2436]"
CP-12.zip,1,CP,1483,4051,62,3,"[4050, 4051, 4052]"
CP-12.zip,1,CP,1460,3999,60,2,"[3998, 3999]"
CP-12.zip,1,CP,1456,3988,122,3,"[3988, 3989, 3990]"
Normal-12.zip,0,Normal,2014,469,98,1,[469]
CP-14.zip,1,CP,1542,4197,180,3,"[4197, 4198, 4199]"
Normal-2.zip,0,Normal,1755,1098,73,4,"[1097, 1098, 1099, 1100]"
NCP-14.zip,2,NCP,382,1917,58,2,"[1916, 1917]"
NCP-4.zip,2,NCP,153,1451,58,2,"[1450, 1451]"
Normal-27.zip,0,Normal,3913,5454,68,2,"[5454, 5455]"
Normal-1.zip,0,Normal,1674,810,74,2,"[810, 811]"
Normal-2.zip,0,Normal,1736,1036,55,5,"[1032, 1033, 1034, 1035, 1036]"
Normal-1.zip,0,Normal,1682,852,81,6,"[847, 848, 852, 853, 857, 858]"
Normal-4.zip,0,Normal,796,231,287,1,[231]
NCP-11.zip,2,NCP,292,1729,138,2,"[1729, 1730]"
NCP-12.zip,2,NCP,327,1803,55,2,"[1802, 1803]"
Normal-25.zip,0,Normal,3712,5342,28,1,[5342]
CP-4.zip,1,CP,1182,3400,130,1,[3400]
CP-2.zip,1,CP,1113,3331,197,1,[3331]
NCP-22.zip,2,NCP,888,2426,55,1,[2426]
Normal-25.zip,0,Normal,3846,5358,209,1,[5358]
CP-9.zip,1,CP,1362,3772,61,2,"[3772, 3773]"
CP-4.zip,1,CP,1193,3411,190,1,[3411]
Normal-5.zip,0,Normal,802,237,298,1,[237]
CP-23.zip,1,CP,655,3017,511,1,[3017]
NCP-13.zip,2,NCP,360,1871,121,2,"[1871, 1872]"
NCP-30.zip,2,NCP,977,2521,257,1,[2521]
NCP-26.zip,2,NCP,3990,5514,51,1,[5514]
Normal-3.zip,0,Normal,768,203,130,1,[203]
Normal-1.zip,0,Normal,1713,980,71,2,"[980, 981]"
CP-26.zip,1,CP,3732,5672,53,2,"[5671, 5672]"
CP-20.zip,1,CP,2762,3293,33,1,[3293]
Normal-20.zip,0,Normal,2267,722,100,1,[722]
NCP-5.zip,2,NCP,189,1522,139,2,"[1522, 1523]"
NCP-28.zip,2,NCP,848,2366,57,2,"[2366, 2367]"
NCP-6.zip,2,NCP,215,1575,65,2,"[1574, 1575]"
Normal-27.zip,0,Normal,3905,5438,58,2,"[5437, 5438]"
CP-4.zip,1,CP,1163,3381,239,1,[3381]
CP-18.zip,1,CP,1665,4353,25,1,[4353]
Normal-25.zip,0,Normal,3842,5354,189,1,[5354]
Normal-22.zip,0,Normal,2583,93,46,1,[93]
NCP-11.zip,2,NCP,308,1763,116,2,"[1763, 1764]"
CP-4.zip,1,CP,1180,3398,150,1,[3398]
CP-7.zip,1,CP,1316,3668,63,3,"[3667, 3668, 3669]"
CP-5.zip,1,CP,1213,3431,159,1,[3431]
Normal-10.zip,0,Normal,1947,402,89,1,[402]
CP-24.zip,1,CP,698,3060,124,1,[3060]
CP-15.zip,1,CP,1562,4243,55,2,"[4243, 4244]"
NCP-25.zip,2,NCP,3962,5473,58,1,[5473]
CP-18.zip,1,CP,1772,3180,75,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
Normal-13.zip,0,Normal,2029,484,94,1,[484]
NCP-16.zip,2,NCP,443,2041,50,2,"[2040, 2041]"
NCP-24.zip,2,NCP,984,2529,259,2,"[2529, 2530]"
CP-18.zip,1,CP,1773,3183,61,4,"[3182, 3183, 3184, 3185]"
CP-5.zip,1,CP,1194,3412,158,1,[3412]
NCP-14.zip,2,NCP,39,1211,58,2,"[1210, 1211]"
CP-13.zip,1,CP,15,3174,98,1,[3174]
CP-28.zip,1,CP,3775,5719,29,1,[5719]
NCP-17.zip,2,NCP,477,2110,58,2,"[2109, 2110]"
Normal-16.zip,0,Normal,2133,588,73,1,[588]
NCP-4.zip,2,NCP,150,1444,181,2,"[1444, 1445]"
CP-4.zip,1,CP,1188,3406,308,1,[3406]
NCP-8.zip,2,NCP,251,1649,55,2,"[1648, 1649]"
CP-1.zip,1,CP,1094,3312,329,1,[3312]
NCP-12.zip,2,NCP,327,1802,130,2,"[1802, 1803]"
Normal-7.zip,0,Normal,1830,285,84,1,[285]
CP-12.zip,1,CP,1481,4045,58,3,"[4044, 4045, 4046]"
NCP-19.zip,2,NCP,52,1238,57,2,"[1237, 1238]"
NCP-20.zip,2,NCP,557,2271,132,2,"[2271, 2272]"
NCP-14.zip,2,NCP,398,1946,167,2,"[1946, 1947]"
NCP-8.zip,2,NCP,260,1667,68,2,"[1666, 1667]"
Normal-2.zip,0,Normal,1754,1094,73,4,"[1093, 1094, 1095, 1096]"
Normal-2.zip,0,Normal,1736,1032,124,5,"[1032, 1033, 1034, 1035, 1036]"
NCP-11.zip,2,NCP,292,1730,58,2,"[1729, 1730]"
CP-25.zip,1,CP,725,3087,80,1,[3087]
CP-15.zip,1,CP,1558,4234,62,3,"[4234, 4235, 4236]"
Normal-17.zip,0,Normal,2161,616,99,1,[616]
NCP-23.zip,2,NCP,970,2513,62,1,[2513]
NCP-10.zip,2,NCP,277,1700,152,2,"[1700, 1701]"
NCP-14.zip,2,NCP,395,1941,71,2,"[1940, 1941]"
Normal-2.zip,0,Normal,1755,1100,71,4,"[1097, 1098, 1099, 1100]"
CP-26.zip,1,CP,3718,5648,254,2,"[5647, 5648]"
Normal-25.zip,0,Normal,3841,5353,188,1,[5353]
Normal-23.zip,0,Normal,2621,131,41,1,[131]
NCP-20.zip,2,NCP,555,2267,133,2,"[2267, 2268]"
NCP-7.zip,2,NCP,244,1634,165,2,"[1634, 1635]"
Normal-6.zip,0,Normal,1821,276,102,1,[276]
NCP-17.zip,2,NCP,459,2073,56,2,"[2072, 2073]"
NCP-2.zip,2,NCP,124,1390,58,2,"[1389, 1390]"
Normal-18.zip,0,Normal,2185,640,100,1,[640]
NCP-5.zip,2,NCP,193,1530,124,2,"[1530, 1531]"
NCP-8.zip,2,NCP,253,1652,139,2,"[1652, 1653]"
NCP-23.zip,2,NCP,89,1313,58,4,"[1311, 1312, 1313, 1315]"
CP-5.zip,1,CP,1216,3434,307,1,[3434]
NCP-30.zip,2,NCP,979,2523,345,1,[2523]
NCP-23.zip,2,NCP,97,1331,41,2,"[1330, 1331]"
NCP-20.zip,2,NCP,555,2268,56,2,"[2267, 2268]"
Normal-16.zip,0,Normal,2126,581,84,1,[581]
NCP-18.zip,2,NCP,488,2133,58,2,"[2131, 2133]"
NCP-10.zip,2,NCP,28,1189,61,2,"[1188, 1189]"
NCP-15.zip,2,NCP,41,1214,151,2,"[1214, 1215]"
NCP-12.zip,2,NCP,32,1196,145,2,"[1196, 1197]"
CP-26.zip,1,CP,3722,5656,50,2,"[5656, 5657]"
CP-15.zip,1,CP,1573,4261,22,1,[4261]
NCP-27.zip,2,NCP,1028,2598,147,1,[2598]
Normal-18.zip,0,Normal,2197,652,105,1,[652]
Normal-16.zip,0,Normal,2152,607,66,1,[607]
NCP-14.zip,2,NCP,380,1913,62,2,"[1912, 1913]"
Normal-15.zip,0,Normal,2093,548,72,1,[548]
NCP-3.zip,2,NCP,1299,2740,63,1,[2740]
CP-8.zip,1,CP,1348,3740,59,3,"[3739, 3740, 3741]"
Normal-6.zip,0,Normal,1822,277,101,1,[277]
Normal-4.zip,0,Normal,800,235,116,1,[235]
CP-10.zip,1,CP,1386,3827,66,2,"[3827, 3828]"
Normal-12.zip,0,Normal,2004,459,106,1,[459]
NCP-25.zip,2,NCP,3957,5470,47,1,[5470]
CP-5.zip,1,CP,1204,3422,294,1,[3422]
CP-11.zip,1,CP,1420,3905,59,2,"[3905, 3906]"
CP-17.zip,1,CP,1649,4337,23,1,[4337]
CP-28.zip,1,CP,3769,5713,18,1,[5713]
Normal-8.zip,0,Normal,1868,323,91,1,[323]
CP-1.zip,1,CP,1087,3219,400,1,[3219]
CP-26.zip,1,CP,3640,5599,295,1,[5599]
NCP-7.zip,2,NCP,248,1642,139,2,"[1642, 1643]"
NCP-2.zip,2,NCP,125,1395,55,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-25.zip,2,NCP,3941,5538,38,1,[5538]
CP-15.zip,1,CP,1558,4236,62,3,"[4234, 4235, 4236]"
NCP-16.zip,2,NCP,443,2040,117,2,"[2040, 2041]"
Normal-15.zip,0,Normal,2102,557,100,1,[557]
Normal-2.zip,0,Normal,1755,1097,73,4,"[1097, 1098, 1099, 1100]"
Normal-9.zip,0,Normal,1924,379,98,1,[379]
CP-13.zip,1,CP,1517,4136,64,2,"[4136, 4137]"
CP-1.zip,1,CP,1,3146,70,5,"[3143, 3144, 3145, 3146, 3147]"
Normal-19.zip,0,Normal,2226,681,99,1,[681]
CP-13.zip,1,CP,1517,4137,64,2,"[4136, 4137]"
NCP-23.zip,2,NCP,95,1326,165,2,"[1326, 1327]"
NCP-19.zip,2,NCP,538,2234,60,2,"[2233, 2234]"
CP-6.zip,1,CP,1253,3471,130,1,[3471]
NCP-7.zip,2,NCP,242,1629,133,2,"[1629, 1630]"
CP-8.zip,1,CP,1337,3714,60,2,"[3714, 3715]"
NCP-23.zip,2,NCP,912,2454,373,1,[2454]
Normal-23.zip,0,Normal,2622,132,38,1,[132]
Normal-8.zip,0,Normal,1871,326,73,1,[326]
NCP-5.zip,2,NCP,193,1531,52,2,"[1530, 1531]"
Normal-24.zip,0,Normal,2646,156,41,1,[156]
CP-14.zip,1,CP,1538,4185,159,3,"[4185, 4186, 4187]"
CP-23.zip,1,CP,667,3029,226,1,[3029]
CP-1.zip,1,CP,1,3147,70,5,"[3143, 3144, 3145, 3146, 3147]"
NCP-27.zip,2,NCP,1006,2566,42,2,"[2566, 2567]"
Normal-1.zip,0,Normal,1711,977,63,2,"[977, 978]"
NCP-14.zip,2,NCP,374,1899,139,2,"[1899, 1900]"
NCP-16.zip,2,NCP,457,2069,57,2,"[2068, 2069]"
CP-22.zip,1,CP,634,2996,680,1,[2996]
NCP-23.zip,2,NCP,905,2447,26,1,[2447]
Normal-2.zip,0,Normal,1759,1118,65,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-11.zip,2,NCP,290,1725,103,2,"[1725, 1726]"
NCP-21.zip,2,NCP,77,1288,53,2,"[1287, 1288]"
CP-30.zip,1,CP,4018,5567,33,1,[5567]
CP-12.zip,1,CP,1483,4052,62,3,"[4050, 4051, 4052]"
CP-24.zip,1,CP,692,3054,74,1,[3054]
NCP-6.zip,2,NCP,204,1552,139,2,"[1552, 1553]"
NCP-7.zip,2,NCP,24,1179,146,2,"[1179, 1180]"
CP-6.zip,1,CP,1251,3469,133,1,[3469]
Normal-1.zip,0,Normal,1682,857,70,6,"[847, 848, 852, 853, 857, 858]"
NCP-10.zip,2,NCP,2712,2705,42,1,[2705]
CP-2.zip,1,CP,1100,3318,201,1,[3318]
Normal-1.zip,0,Normal,1671,795,67,3,"[793, 794, 795]"
NCP-17.zip,2,NCP,461,2077,67,2,"[2076, 2077]"
CP-15.zip,1,CP,1564,4249,51,2,"[4248, 4249]"
NCP-4.zip,2,NCP,153,1450,137,2,"[1450, 1451]"
CP-4.zip,1,CP,1166,3384,202,1,[3384]
NCP-28.zip,2,NCP,851,2370,145,1,[2370]
NCP-23.zip,2,NCP,95,1327,69,2,"[1326, 1327]"
Normal-18.zip,0,Normal,2196,651,95,1,[651]
CP-27.zip,1,CP,3749,5693,20,1,[5693]
Normal-6.zip,0,Normal,1797,252,85,1,[252]
CP-14.zip,1,CP,1544,4203,122,3,"[4203, 4204, 4205]"
CP-8.zip,1,CP,1345,3731,55,2,"[3731, 3732]"
NCP-8.zip,2,NCP,2678,2649,55,1,[2649]
NCP-23.zip,2,NCP,89,1315,66,4,"[1311, 1312, 1313, 1315]"
Normal-17.zip,0,Normal,2167,622,76,1,[622]
CP-22.zip,1,CP,631,2993,130,1,[2993]
CP-16.zip,1,CP,1618,4306,26,1,[4306]
NCP-17.zip,2,NCP,471,2097,139,2,"[2097, 2098]"
NCP-15.zip,2,NCP,416,1986,58,2,"[1984, 1986]"
CP-10.zip,1,CP,1389,3833,121,3,"[3833, 3834, 3835]"
CP-24.zip,1,CP,696,3058,74,1,[3058]
NCP-26.zip,2,NCP,3996,5494,37,1,[5494]
CP-15.zip,1,CP,1565,4251,66,2,"[4250, 4251]"
NCP-7.zip,2,NCP,248,1643,58,2,"[1642, 1643]"
NCP-30.zip,2,NCP,932,2474,20,1,[2474]
CP-8.zip,1,CP,1332,3703,41,2,"[3703, 3704]"
Normal-2.zip,0,Normal,1754,1093,73,4,"[1093, 1094, 1095, 1096]"
NCP-3.zip,2,NCP,131,1408,50,2,"[1407, 1408]"
NCP-13.zip,2,NCP,37,1206,147,2,"[1206, 1207]"
NCP-7.zip,2,NCP,242,1630,56,2,"[1629, 1630]"
CP-26.zip,1,CP,3643,5603,257,2,"[5602, 5603]"
Normal-24.zip,0,Normal,2639,149,28,1,[149]
Normal-13.zip,0,Normal,2037,492,82,1,[492]
CP-16.zip,1,CP,1610,4298,22,1,[4298]
NCP-15.zip,2,NCP,415,1982,149,2,"[1982, 1983]"
NCP-2.zip,2,NCP,125,1394,55,5,"[1391, 1392, 1393, 1394, 1395]"
Normal-23.zip,0,Normal,2616,126,39,1,[126]
CP-26.zip,1,CP,3635,5594,291,1,[5594]
Normal-18.zip,0,Normal,2211,666,85,1,[666]
NCP-17.zip,2,NCP,481,2117,163,2,"[2117, 2118]"
NCP-13.zip,2,NCP,37,1207,62,2,"[1206, 1207]"
Normal-2.zip,0,Normal,1749,1070,61,4,"[1069, 1070, 1071, 1072]"
NCP-29.zip,2,NCP,927,2469,20,1,[2469]
CP-6.zip,1,CP,1226,3444,190,1,[3444]
NCP-14.zip,2,NCP,394,1938,147,2,"[1938, 1939]"
CP-19.zip,1,CP,1791,3212,71,4,"[3210, 3211, 3212, 3213]"
CP-8.zip,1,CP,1334,3708,56,2,"[3707, 3708]"
NCP-12.zip,2,NCP,324,1796,120,2,"[1796, 1797]"
CP-30.zip,1,CP,3929,5626,71,2,"[5626, 5627]"
Normal-7.zip,0,Normal,1832,287,91,1,[287]
Normal-1.zip,0,Normal,1713,981,71,2,"[980, 981]"
NCP-2.zip,2,NCP,111,1363,133,2,"[1363, 1364]"
Normal-3.zip,0,Normal,1764,1144,66,4,"[1143, 1144, 1145, 1146]"
CP-15.zip,1,CP,1560,4239,63,2,"[4239, 4240]"
NCP-22.zip,2,NCP,84,1302,54,2,"[1301, 1302]"
Normal-2.zip,0,Normal,1744,1059,71,2,"[1058, 1059]"
CP-21.zip,1,CP,590,2952,86,1,[2952]
Normal-9.zip,0,Normal,1901,356,83,1,[356]
NCP-17.zip,2,NCP,461,2076,160,2,"[2076, 2077]"
CP-24.zip,1,CP,683,3045,138,1,[3045]
Normal-11.zip,0,Normal,1983,438,105,1,[438]
NCP-14.zip,2,NCP,39,1210,139,2,"[1210, 1211]"
NCP-18.zip,2,NCP,494,2144,156,2,"[2144, 2145]"
NCP-14.zip,2,NCP,388,1927,68,2,"[1926, 1927]"
NCP-28.zip,2,NCP,853,2373,664,1,[2373]
Normal-22.zip,0,Normal,2588,98,33,1,[98]
NCP-17.zip,2,NCP,46,1225,124,2,"[1225, 1226]"
NCP-2.zip,2,NCP,126,1396,152,2,"[1396, 1398]"
NCP-15.zip,2,NCP,418,1990,58,2,"[1989, 1990]"
Normal-3.zip,0,Normal,765,200,136,1,[200]
CP-9.zip,1,CP,1370,3792,62,2,"[3792, 3793]"
CP-13.zip,1,CP,1490,4071,166,3,"[4071, 4072, 4073]"
CP-5.zip,1,CP,1212,3430,187,1,[3430]
NCP-29.zip,2,NCP,894,2434,16,1,[2434]
CP-19.zip,1,CP,1788,3199,58,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-12.zip,1,CP,1466,4011,52,2,"[4011, 4012]"
CP-1.zip,1,CP,1088,3223,50,4,"[3220, 3221, 3222, 3223]"
NCP-25.zip,2,NCP,3947,5503,41,1,[5503]
CP-30.zip,1,CP,3931,5632,143,4,"[5630, 5631, 5632, 5633]"
NCP-2.zip,2,NCP,124,1389,139,2,"[1389, 1390]"
CP-7.zip,1,CP,1307,3645,53,4,"[3645, 3646, 3647, 3648]"
NCP-27.zip,2,NCP,823,2334,183,1,[2334]
Normal-1.zip,0,Normal,1728,1016,72,4,"[1013, 1014, 1015, 1016]"
Normal-2.zip,0,Normal,1754,1096,69,4,"[1093, 1094, 1095, 1096]"
CP-12.zip,1,CP,1473,4026,51,3,"[4026, 4027, 4028]"
Normal-3.zip,0,Normal,1764,1146,62,4,"[1143, 1144, 1145, 1146]"
CP-2.zip,1,CP,1103,3321,180,1,[3321]
CP-4.zip,1,CP,1181,3399,238,1,[3399]
CP-19.zip,1,CP,2436,2904,138,1,[2904]
CP-28.zip,1,CP,3795,5739,23,1,[5739]
CP-29.zip,1,CP,3805,5749,20,1,[5749]
NCP-3.zip,2,NCP,1300,2741,60,1,[2741]
NCP-23.zip,2,NCP,898,2439,48,1,[2439]
Normal-23.zip,0,Normal,2612,122,31,1,[122]
NCP-7.zip,2,NCP,24,1180,61,2,"[1179, 1180]"
Normal-6.zip,0,Normal,1807,262,95,1,[262]
NCP-30.zip,2,NCP,996,2551,189,2,"[2551, 2552]"
Normal-9.zip,0,Normal,1893,348,82,1,[348]
NCP-11.zip,2,NCP,290,1726,44,2,"[1725, 1726]"
NCP-21.zip,2,NCP,80,1293,129,2,"[1293, 1294]"
Normal-24.zip,0,Normal,2655,165,37,1,[165]
NCP-30.zip,2,NCP,996,2552,218,2,"[2551, 2552]"
CP-18.zip,1,CP,1653,4341,29,1,[4341]
NCP-5.zip,2,NCP,187,1518,136,2,"[1518, 1519]"
NCP-26.zip,2,NCP,3993,5517,39,1,[5517]
NCP-10.zip,2,NCP,273,1692,128,2,"[1692, 1693]"
NCP-5.zip,2,NCP,179,1502,122,2,"[1503, 1502]"
Normal-26.zip,0,Normal,3887,5400,67,3,"[5400, 5401, 5404]"
NCP-7.zip,2,NCP,234,1613,139,2,"[1613, 1614]"
Normal-1.zip,0,Normal,1725,1006,60,1,[1006]
NCP-15.zip,2,NCP,419,1992,55,2,"[1991, 1992]"
CP-14.zip,1,CP,1523,4151,65,2,"[4150, 4151]"
NCP-23.zip,2,NCP,938,2480,195,2,"[2480, 2481]"
NCP-13.zip,2,NCP,342,1835,149,2,"[1835, 1836]"
CP-24.zip,1,CP,680,3042,86,1,[3042]
NCP-14.zip,2,NCP,394,1939,62,2,"[1938, 1939]"
NCP-11.zip,2,NCP,288,1722,49,2,"[1721, 1722]"
CP-14.zip,1,CP,1527,4162,58,3,"[4160, 4161, 4162]"
CP-6.zip,1,CP,1241,3459,132,1,[3459]
CP-10.zip,1,CP,1408,3878,198,3,"[3878, 3879, 3880]"
NCP-14.zip,2,NCP,397,1945,66,2,"[1944, 1945]"
CP-1.zip,1,CP,1,3145,248,5,"[3143, 3144, 3145, 3146, 3147]"
Normal-15.zip,0,Normal,2111,566,95,1,[566]
Normal-12.zip,0,Normal,2007,462,85,1,[462]
NCP-6.zip,2,NCP,222,1589,52,2,"[1588, 1589]"
Normal-25.zip,0,Normal,3856,5368,220,1,[5368]
CP-6.zip,1,CP,1245,3463,306,1,[3463]
CP-9.zip,1,CP,1380,3814,56,1,[3814]
CP-11.zip,1,CP,1442,3955,58,3,"[3954, 3955, 3956]"
Normal-26.zip,0,Normal,3889,5407,68,2,"[5407, 5408]"
Normal-4.zip,0,Normal,773,208,321,1,[208]
CP-23.zip,1,CP,671,3033,448,1,[3033]
CP-23.zip,1,CP,674,3036,126,1,[3036]
CP-19.zip,1,CP,1788,3200,54,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
NCP-12.zip,2,NCP,328,1804,112,2,"[1804, 1805]"
Normal-22.zip,0,Normal,2581,91,44,1,[91]
CP-7.zip,1,CP,1316,3669,62,3,"[3667, 3668, 3669]"
NCP-1.zip,2,NCP,1046,2618,70,1,[2618]
NCP-16.zip,2,NCP,456,2067,57,2,"[2066, 2067]"
Normal-1.zip,0,Normal,1730,1020,63,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-17.zip,2,NCP,468,2092,65,2,"[2091, 2092]"
NCP-7.zip,2,NCP,2488,2688,40,1,[2688]
CP-10.zip,1,CP,1396,3852,58,3,"[3851, 3852, 3853]"
NCP-16.zip,2,NCP,447,2049,58,2,"[2048, 2049]"
Normal-8.zip,0,Normal,1864,319,88,1,[319]
CP-15.zip,1,CP,1560,4240,63,2,"[4239, 4240]"
CP-12.zip,1,CP,1484,4055,46,3,"[4053, 4054, 4055]"
Normal-1.zip,0,Normal,1682,853,81,6,"[847, 848, 852, 853, 857, 858]"
Normal-22.zip,0,Normal,2580,90,37,1,[90]
CP-2.zip,1,CP,1128,3346,196,1,[3346]
NCP-7.zip,2,NCP,240,1625,158,2,"[1625, 1626]"
Normal-15.zip,0,Normal,2086,541,91,1,[541]
Normal-7.zip,0,Normal,1837,292,94,1,[292]
CP-1.zip,1,CP,1069,3111,77,4,"[3108, 3109, 3110, 3111]"
CP-14.zip,1,CP,1549,4216,61,2,"[4215, 4216]"
Normal-11.zip,0,Normal,1970,425,88,1,[425]
NCP-13.zip,2,NCP,342,1836,61,2,"[1835, 1836]"
CP-25.zip,1,CP,728,3090,86,1,[3090]
NCP-21.zip,2,NCP,68,1268,115,2,"[1268, 1269]"
CP-8.zip,1,CP,1342,3725,58,3,"[3723, 3724, 3725]"
CP-12.zip,1,CP,1481,4046,58,3,"[4044, 4045, 4046]"
CP-5.zip,1,CP,1210,3428,156,1,[3428]
NCP-3.zip,2,NCP,136,1417,53,2,"[1416, 1417]"
NCP-2.zip,2,NCP,125,1393,54,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-23.zip,2,NCP,97,1330,97,2,"[1330, 1331]"
NCP-1.zip,2,NCP,1021,2588,209,4,"[2587, 2588, 2589, 2590]"
NCP-12.zip,2,NCP,317,1782,50,2,"[1781, 1782]"
NCP-14.zip,2,NCP,388,1926,162,2,"[1926, 1927]"
CP-26.zip,1,CP,3641,5600,300,1,[5600]
Normal-3.zip,0,Normal,760,195,117,1,[195]
NCP-12.zip,2,NCP,325,1798,117,2,"[1798, 1799]"
Normal-1.zip,0,Normal,1671,793,72,3,"[793, 794, 795]"
Normal-5.zip,0,Normal,807,242,132,1,[242]
CP-19.zip,1,CP,1791,3211,55,4,"[3210, 3211, 3212, 3213]"
Normal-4.zip,0,Normal,792,227,108,1,[227]
CP-15.zip,1,CP,1564,4248,51,2,"[4248, 4249]"
NCP-12.zip,2,NCP,324,1797,51,2,"[1796, 1797]"
CP-13.zip,1,CP,1514,4130,61,2,"[4129, 4130]"
CP-30.zip,1,CP,4013,5562,29,1,[5562]
CP-7.zip,1,CP,13,3173,255,4,"[3170, 3171, 3172, 3173]"
CP-5.zip,1,CP,1214,3432,282,1,[3432]
Normal-8.zip,0,Normal,1878,333,88,1,[333]
Normal-21.zip,0,Normal,2297,752,83,1,[752]
CP-19.zip,1,CP,1789,3205,59,4,"[3204, 3205, 3206, 3207]"
CP-4.zip,1,CP,1176,3394,161,1,[3394]
CP-10.zip,1,CP,1397,3855,60,2,"[3854, 3855]"
CP-16.zip,1,CP,1594,4282,26,1,[4282]
CP-1.zip,1,CP,1077,3121,74,2,"[3121, 3122]"
CP-29.zip,1,CP,3819,5763,31,1,[5763]
CP-12.zip,1,CP,1468,4016,54,3,"[4015, 4016, 4017]"
CP-3.zip,1,CP,1139,3357,332,1,[3357]
Normal-14.zip,0,Normal,2070,525,104,1,[525]
Normal-1.zip,0,Normal,1672,798,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-11.zip,1,CP,1435,3939,46,2,"[3938, 3939]"
CP-30.zip,1,CP,4019,5568,38,1,[5568]
CP-18.zip,1,CP,1777,3540,67,5,"[3540, 3541, 3542, 3543, 3544]"
CP-23.zip,1,CP,666,3028,192,1,[3028]
Normal-1.zip,0,Normal,1703,959,70,2,"[959, 960]"
CP-3.zip,1,CP,1133,3351,213,1,[3351]
CP-13.zip,1,CP,1504,4107,64,1,[4107]
Normal-3.zip,0,Normal,745,180,105,1,[180]
Normal-26.zip,0,Normal,3869,5381,27,1,[5381]
CP-18.zip,1,CP,1774,3528,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-21.zip,0,Normal,2301,756,88,1,[756]
CP-18.zip,1,CP,1771,3519,51,4,"[3518, 3519, 3520, 3521]"
CP-22.zip,1,CP,643,3005,126,1,[3005]
CP-26.zip,1,CP,3723,5658,43,1,[5658]
Normal-8.zip,0,Normal,1884,339,82,1,[339]
CP-15.zip,1,CP,1586,4274,23,1,[4274]
CP-8.zip,1,CP,1349,3743,58,3,"[3742, 3743, 3744]"
Normal-22.zip,0,Normal,2586,96,30,1,[96]
Normal-4.zip,0,Normal,785,220,292,1,[220]
CP-19.zip,1,CP,2428,2887,124,1,[2887]
NCP-13.zip,2,NCP,352,1856,58,2,"[1855, 1856]"
NCP-2.zip,2,NCP,109,1355,143,2,"[1355, 1356]"
CP-13.zip,1,CP,1493,4080,125,3,"[4080, 4081, 4082]"
CP-4.zip,1,CP,1191,3409,220,1,[3409]
CP-17.zip,1,CP,1642,4330,25,1,[4330]
CP-7.zip,1,CP,1304,3635,232,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-27.zip,2,NCP,1058,2635,46,1,[2635]
Normal-14.zip,0,Normal,2071,526,103,1,[526]
CP-26.zip,1,CP,3719,5650,55,3,"[5649, 5650, 5651]"
Normal-24.zip,0,Normal,2663,173,48,1,[173]
NCP-3.zip,2,NCP,1298,2739,60,1,[2739]
CP-19.zip,1,CP,2430,2891,102,2,"[2891, 2892]"
CP-12.zip,1,CP,1458,3993,69,3,"[3992, 3993, 3994]"
Normal-1.zip,0,Normal,1677,823,64,4,"[823, 824, 825, 826]"
CP-12.zip,1,CP,1469,4018,47,2,"[4018, 4019]"
CP-7.zip,1,CP,1268,3486,336,1,[3486]
Normal-18.zip,0,Normal,2203,658,75,1,[658]
CP-21.zip,1,CP,593,2955,100,1,[2955]
Normal-16.zip,0,Normal,2143,598,87,1,[598]
NCP-20.zip,2,NCP,552,2261,146,2,"[2261, 2262]"
NCP-11.zip,2,NCP,309,1766,69,2,"[1766, 1765]"
NCP-19.zip,2,NCP,520,2197,55,2,"[2196, 2197]"
CP-14.zip,1,CP,1550,4217,64,2,"[4217, 4218]"
NCP-26.zip,2,NCP,3976,5484,32,1,[5484]
NCP-31.zip,2,NCP,998,2555,44,1,[2555]
NCP-2.zip,2,NCP,107,1351,146,2,"[1351, 1352]"
Normal-16.zip,0,Normal,2136,591,83,1,[591]
CP-12.zip,1,CP,1463,4006,49,2,"[4005, 4006]"
NCP-4.zip,2,NCP,156,1457,58,2,"[1456, 1457]"
NCP-1.zip,2,NCP,1002,2561,58,1,[2561]
Normal-1.zip,0,Normal,1672,801,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
Normal-14.zip,0,Normal,2078,533,73,1,[533]
NCP-5.zip,2,NCP,185,1514,121,2,"[1514, 1515]"
CP-14.zip,1,CP,1530,4168,60,1,[4168]
NCP-15.zip,2,NCP,413,1976,128,4,"[1975, 1976, 1977, 1979]"
CP-5.zip,1,CP,1224,3442,204,1,[3442]
CP-5.zip,1,CP,1215,3433,165,1,[3433]
Normal-26.zip,0,Normal,3886,5399,76,1,[5399]
Normal-24.zip,0,Normal,2640,150,41,1,[150]
NCP-28.zip,2,NCP,836,2351,52,1,[2351]
NCP-4.zip,2,NCP,146,1436,123,2,"[1436, 1437]"
Normal-17.zip,0,Normal,2155,610,89,1,[610]
CP-30.zip,1,CP,3939,5547,38,1,[5547]
CP-19.zip,1,CP,1784,3590,112,4,"[3590, 3591, 3592, 3593]"
CP-10.zip,1,CP,1399,3859,45,2,"[3858, 3859]"
NCP-19.zip,2,NCP,519,2194,126,2,"[2194, 2195]"
NCP-11.zip,2,NCP,297,1739,144,2,"[1739, 1741]"
NCP-22.zip,2,NCP,88,1309,170,2,"[1309, 1310]"
CP-18.zip,1,CP,1778,3547,65,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-30.zip,2,NCP,968,2511,61,1,[2511]
CP-9.zip,1,CP,1360,3769,67,3,"[3767, 3768, 3769]"
CP-26.zip,1,CP,3638,5597,285,1,[5597]
NCP-13.zip,2,NCP,353,1857,167,2,"[1857, 1858]"
CP-30.zip,1,CP,3932,5634,71,2,"[5634, 5635]"
NCP-21.zip,2,NCP,62,1257,144,2,"[1257, 1258]"
CP-2.zip,1,CP,1127,3345,278,1,[3345]
NCP-12.zip,2,NCP,337,1823,58,2,"[1822, 1823]"
NCP-14.zip,2,NCP,390,1931,53,2,"[1930, 1931]"
NCP-15.zip,2,NCP,417,1988,58,2,"[1987, 1988]"
CP-24.zip,1,CP,689,3051,58,1,[3051]
CP-9.zip,1,CP,1377,3808,58,2,"[3808, 3809]"
CP-13.zip,1,CP,1505,4110,54,3,"[4108, 4109, 4110]"
CP-13.zip,1,CP,1492,4078,58,3,"[4077, 4078, 4079]"
NCP-4.zip,2,NCP,159,1463,61,2,"[1462, 1463]"
NCP-6.zip,2,NCP,220,1585,67,2,"[1584, 1585]"
NCP-29.zip,2,NCP,884,2421,23,1,[2421]
Normal-3.zip,0,Normal,757,192,110,1,[192]
CP-21.zip,1,CP,4,3505,298,4,"[3505, 3506, 3507, 3508]"
CP-16.zip,1,CP,1608,4296,23,1,[4296]
CP-4.zip,1,CP,1169,3387,171,1,[3387]
Normal-4.zip,0,Normal,797,232,112,1,[232]
NCP-19.zip,2,NCP,540,2238,54,2,"[2237, 2238]"
Normal-14.zip,0,Normal,2068,523,81,1,[523]
Normal-11.zip,0,Normal,1985,440,96,1,[440]
CP-9.zip,1,CP,1353,3748,140,3,"[3748, 3749, 3750]"
NCP-6.zip,2,NCP,224,1592,136,2,"[1592, 1593]"
CP-10.zip,1,CP,1397,3854,60,2,"[3854, 3855]"
NCP-12.zip,2,NCP,318,1784,63,2,"[1783, 1784]"
NCP-21.zip,2,NCP,59,1251,122,2,"[1251, 1252]"
Normal-17.zip,0,Normal,2184,639,86,1,[639]
NCP-18.zip,2,NCP,493,2143,56,2,"[2142, 2143]"
NCP-25.zip,2,NCP,3954,5467,42,1,[5467]
Normal-2.zip,0,Normal,1763,1137,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
CP-23.zip,1,CP,675,3037,124,1,[3037]
CP-9.zip,1,CP,1365,3780,60,3,"[3779, 3780, 3781]"
CP-6.zip,1,CP,1256,3474,140,1,[3474]
NCP-16.zip,2,NCP,441,2037,49,2,"[2036, 2037]"
NCP-7.zip,2,NCP,2484,2643,46,1,[2643]
CP-20.zip,1,CP,2771,3302,37,1,[3302]
NCP-10.zip,2,NCP,2714,2707,53,1,[2707]
Normal-4.zip,0,Normal,772,207,363,1,[207]
NCP-16.zip,2,NCP,440,2035,53,2,"[2034, 2035]"
CP-17.zip,1,CP,1646,4334,26,1,[4334]
NCP-11.zip,2,NCP,284,1713,139,2,"[1713, 1714]"
CP-23.zip,1,CP,656,3018,575,1,[3018]
CP-2.zip,1,CP,1104,3322,164,1,[3322]
NCP-22.zip,2,NCP,85,1303,139,2,"[1303, 1304]"
CP-30.zip,1,CP,3933,5637,38,2,"[5636, 5637]"
Normal-7.zip,0,Normal,1839,294,94,1,[294]
NCP-6.zip,2,NCP,223,1590,132,2,"[1590, 1591]"
CP-2.zip,1,CP,1119,3337,157,1,[3337]
CP-11.zip,1,CP,1431,3931,61,2,"[3930, 3931]"
CP-7.zip,1,CP,1304,3634,47,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-11.zip,2,NCP,299,1745,58,2,"[1744, 1745]"
NCP-15.zip,2,NCP,405,1960,60,2,"[1959, 1960]"
NCP-20.zip,2,NCP,574,2307,58,2,"[2306, 2307]"
CP-10.zip,1,CP,1412,3887,66,2,"[3887, 3888]"
NCP-4.zip,2,NCP,167,1479,60,2,"[1478, 1479]"
NCP-4.zip,2,NCP,157,1459,49,2,"[1458, 1459]"
NCP-13.zip,2,NCP,349,1849,135,2,"[1849, 1850]"
CP-18.zip,1,CP,1771,3520,51,4,"[3518, 3519, 3520, 3521]"
NCP-14.zip,2,NCP,372,1895,109,2,"[1895, 1896]"
NCP-18.zip,2,NCP,503,2162,146,2,"[2162, 2163]"
NCP-6.zip,2,NCP,199,1543,58,2,"[1542, 1543]"
CP-18.zip,1,CP,1662,4350,19,1,[4350]
CP-9.zip,1,CP,1377,3809,57,2,"[3808, 3809]"
Normal-1.zip,0,Normal,1727,1009,63,4,"[1009, 1010, 1011, 1012]"
NCP-20.zip,2,NCP,566,2290,160,2,"[2290, 2291]"
CP-29.zip,1,CP,3821,5765,29,1,[5765]
NCP-5.zip,2,NCP,190,1525,64,2,"[1524, 1525]"
Normal-2.zip,0,Normal,1746,1064,68,2,"[1063, 1064]"
CP-27.zip,1,CP,3744,5688,17,1,[5688]
CP-2.zip,1,CP,1111,3329,204,1,[3329]
Normal-10.zip,0,Normal,1948,403,98,1,[403]
NCP-12.zip,2,NCP,338,1824,150,2,"[1824, 1825]"
NCP-13.zip,2,NCP,348,1847,112,2,"[1847, 1848]"
CP-24.zip,1,CP,700,3062,86,1,[3062]
CP-18.zip,1,CP,1655,4343,23,1,[4343]
CP-27.zip,1,CP,3736,5680,16,1,[5680]
Normal-24.zip,0,Normal,2654,164,31,1,[164]
NCP-13.zip,2,NCP,359,1869,145,2,"[1869, 1870]"
NCP-16.zip,2,NCP,437,2027,142,2,"[2027, 2028]"
CP-27.zip,1,CP,3741,5685,17,1,[5685]
CP-24.zip,1,CP,693,3055,273,1,[3055]
CP-24.zip,1,CP,682,3044,149,1,[3044]
Normal-17.zip,0,Normal,2175,630,80,1,[630]
NCP-6.zip,2,NCP,223,1591,56,2,"[1590, 1591]"
NCP-2.zip,2,NCP,1051,2626,178,2,"[2625, 2626]"
CP-11.zip,1,CP,1454,3982,125,3,"[3982, 3983, 3984]"
Normal-20.zip,0,Normal,2253,708,70,1,[708]
Normal-20.zip,0,Normal,2252,707,84,1,[707]
Normal-21.zip,0,Normal,2308,763,85,1,[763]
NCP-18.zip,2,NCP,516,2189,57,2,"[2188, 2189]"
NCP-12.zip,2,NCP,313,1774,62,2,"[1773, 1774]"
CP-2.zip,1,CP,1126,3344,204,1,[3344]
Normal-20.zip,0,Normal,2257,712,83,1,[712]
NCP-6.zip,2,NCP,203,1551,59,2,"[1550, 1551]"
CP-13.zip,1,CP,1503,4106,64,3,"[4104, 4105, 4106]"
Normal-20.zip,0,Normal,2280,735,82,1,[735]
CP-19.zip,1,CP,2443,2915,112,3,"[2915, 2916, 2917]"
CP-20.zip,1,CP,2451,2930,136,1,[2930]
CP-1.zip,1,CP,1093,3311,173,1,[3311]
CP-13.zip,1,CP,1518,4138,160,3,"[4138, 4139, 4140]"
CP-20.zip,1,CP,2773,3304,30,1,[3304]
NCP-15.zip,2,NCP,414,1981,51,2,"[1980, 1981]"
NCP-23.zip,2,NCP,96,1328,145,2,"[1328, 1329]"
CP-11.zip,1,CP,1422,3909,59,3,"[3908, 3909, 3910]"
Normal-20.zip,0,Normal,2258,713,74,1,[713]
NCP-29.zip,2,NCP,882,2417,52,2,"[2417, 2418]"
Normal-2.zip,0,Normal,1737,1038,79,4,"[1037, 1038, 1039, 1040]"
Normal-13.zip,0,Normal,2025,480,101,1,[480]
NCP-5.zip,2,NCP,173,1490,139,2,"[1490, 1491]"
CP-6.zip,1,CP,1257,3475,155,1,[3475]
NCP-23.zip,2,NCP,952,2495,379,1,[2495]
Normal-1.zip,0,Normal,1700,954,64,2,"[953, 954]"
NCP-17.zip,2,NCP,465,2085,31,3,"[2084, 2085, 2086]"
Normal-16.zip,0,Normal,2122,577,85,1,[577]
CP-13.zip,1,CP,1502,4102,73,2,"[4102, 4103]"
Normal-17.zip,0,Normal,2153,608,82,1,[608]
Normal-24.zip,0,Normal,2650,160,40,1,[160]
NCP-27.zip,2,NCP,1031,2602,231,2,"[2601, 2602]"
NCP-14.zip,2,NCP,393,1937,62,2,"[1936, 1937]"
CP-5.zip,1,CP,12,3169,233,2,"[3168, 3169]"
Normal-11.zip,0,Normal,1986,441,88,1,[441]
CP-19.zip,1,CP,2433,2897,108,1,[2897]
NCP-4.zip,2,NCP,151,1447,54,2,"[1446, 1447]"
NCP-13.zip,2,NCP,370,1891,128,2,"[1891, 1892]"
Normal-17.zip,0,Normal,2168,623,89,1,[623]
NCP-29.zip,2,NCP,880,2415,312,1,[2415]
NCP-12.zip,2,NCP,338,1825,63,2,"[1824, 1825]"
Normal-23.zip,0,Normal,2634,144,37,1,[144]
NCP-14.zip,2,NCP,396,1942,170,2,"[1942, 1943]"
NCP-16.zip,2,NCP,439,2032,162,2,"[2032, 2033]"
NCP-8.zip,2,NCP,266,1678,137,2,"[1678, 1679]"
CP-11.zip,1,CP,1423,3911,204,3,"[3911, 3912, 3913]"
CP-11.zip,1,CP,1454,3984,53,3,"[3982, 3983, 3984]"
CP-28.zip,1,CP,3792,5736,20,1,[5736]
Normal-1.zip,0,Normal,1727,1011,66,4,"[1009, 1010, 1011, 1012]"
Normal-19.zip,0,Normal,2234,689,89,1,[689]
NCP-13.zip,2,NCP,35,1203,58,2,"[1202, 1203]"
NCP-18.zip,2,NCP,51,1236,59,2,"[1235, 1236]"
NCP-2.zip,2,NCP,113,1368,58,2,"[1367, 1368]"
Normal-2.zip,0,Normal,1757,1107,68,4,"[1105, 1106, 1107, 1108]"
NCP-12.zip,2,NCP,319,1785,158,2,"[1785, 1787]"
Normal-22.zip,0,Normal,2322,777,88,1,[777]
CP-21.zip,1,CP,584,2946,116,1,[2946]
CP-9.zip,1,CP,1365,3781,60,3,"[3779, 3780, 3781]"
NCP-12.zip,2,NCP,322,1792,120,2,"[1792, 1793]"
Normal-2.zip,0,Normal,1763,1140,75,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-21.zip,2,NCP,59,1252,52,2,"[1251, 1252]"
NCP-5.zip,2,NCP,170,1485,59,2,"[1484, 1485]"
NCP-21.zip,2,NCP,72,1276,129,2,"[1276, 1277]"
NCP-22.zip,2,NCP,887,2425,38,1,[2425]
CP-2.zip,1,CP,1117,3335,155,1,[3335]
Normal-2.zip,0,Normal,1763,1134,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
CP-18.zip,1,CP,1778,3550,64,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
CP-23.zip,1,CP,664,3026,78,1,[3026]
CP-23.zip,1,CP,668,3030,102,1,[3030]
NCP-13.zip,2,NCP,355,1862,53,2,"[1861, 1862]"
NCP-13.zip,2,NCP,358,1867,160,2,"[1867, 1868]"
CP-14.zip,1,CP,1550,4218,64,2,"[4217, 4218]"
CP-26.zip,1,CP,3729,5667,207,3,"[5665, 5666, 5667]"
CP-21.zip,1,CP,603,2965,88,1,[2965]
NCP-13.zip,2,NCP,370,1892,54,2,"[1891, 1892]"
NCP-13.zip,2,NCP,35,1202,139,2,"[1202, 1203]"
CP-3.zip,1,CP,1155,3373,171,1,[3373]
Normal-10.zip,0,Normal,1927,382,99,1,[382]
CP-15.zip,1,CP,1574,4262,26,1,[4262]
CP-13.zip,1,CP,1498,4096,60,2,"[4095, 4096]"
NCP-6.zip,2,NCP,205,1555,53,2,"[1554, 1555]"
NCP-11.zip,2,NCP,301,1748,147,2,"[1748, 1749]"
NCP-11.zip,2,NCP,303,1752,139,2,"[1752, 1753]"
CP-12.zip,1,CP,1468,4017,54,3,"[4015, 4016, 4017]"
Normal-14.zip,0,Normal,2081,536,93,1,[536]
Normal-2.zip,0,Normal,1763,1141,75,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-22.zip,2,NCP,859,2380,299,2,"[2380, 2381]"
Normal-26.zip,0,Normal,3885,5398,63,1,[5398]
CP-13.zip,1,CP,1505,4109,54,3,"[4108, 4109, 4110]"
NCP-1.zip,2,NCP,103,1343,150,2,"[1343, 1344]"
NCP-14.zip,2,NCP,396,1943,71,2,"[1942, 1943]"
NCP-22.zip,2,NCP,871,2402,293,2,"[2401, 2402]"
Normal-10.zip,0,Normal,1951,406,105,1,[406]
CP-11.zip,1,CP,1434,3936,63,2,"[3936, 3937]"
CP-26.zip,1,CP,3724,5659,51,1,[5659]
CP-12.zip,1,CP,1471,4022,56,2,"[4022, 4023]"
Normal-21.zip,0,Normal,2304,759,110,1,[759]
CP-28.zip,1,CP,3777,5721,26,1,[5721]
NCP-28.zip,2,NCP,837,2352,57,1,[2352]
Normal-2.zip,0,Normal,1763,1133,72,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-8.zip,0,Normal,1873,328,104,1,[328]
CP-12.zip,1,CP,1458,3992,165,3,"[3992, 3993, 3994]"
NCP-7.zip,2,NCP,230,1604,139,2,"[1604, 1605]"
CP-30.zip,1,CP,4042,5591,37,1,[5591]
Normal-4.zip,0,Normal,774,209,134,1,[209]
Normal-19.zip,0,Normal,2228,683,85,1,[683]
Normal-18.zip,0,Normal,2206,661,77,1,[661]
CP-17.zip,1,CP,1628,4316,23,1,[4316]
Normal-11.zip,0,Normal,1969,424,90,1,[424]
Normal-20.zip,0,Normal,2259,714,97,1,[714]
CP-17.zip,1,CP,1640,4328,25,1,[4328]
NCP-8.zip,2,NCP,254,1654,139,2,"[1654, 1655]"
Normal-16.zip,0,Normal,2140,595,88,1,[595]
CP-6.zip,1,CP,1249,3467,144,1,[3467]
NCP-23.zip,2,NCP,92,1321,37,2,"[1320, 1321]"
CP-18.zip,1,CP,1657,4345,24,1,[4345]
NCP-17.zip,2,NCP,484,2124,58,2,"[2123, 2124]"
Normal-2.zip,0,Normal,1743,1057,73,2,"[1056, 1057]"
CP-18.zip,1,CP,1778,3545,66,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-30.zip,2,NCP,966,2509,279,1,[2509]
CP-9.zip,1,CP,1376,3807,60,2,"[3806, 3807]"
Normal-1.zip,0,Normal,1716,987,71,2,"[987, 988]"
CP-7.zip,1,CP,1302,3602,42,4,"[3602, 3603, 3604, 3605]"
NCP-18.zip,2,NCP,50,1233,141,2,"[1233, 1234]"
CP-32.zip,1,CP,1781,3572,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-5.zip,2,NCP,192,1528,135,2,"[1528, 1529]"
NCP-7.zip,2,NCP,2489,2646,40,1,[2646]
CP-11.zip,1,CP,1434,3937,63,2,"[3936, 3937]"
CP-23.zip,1,CP,645,3007,124,1,[3007]
Normal-10.zip,0,Normal,1941,396,91,1,[396]
Normal-12.zip,0,Normal,2001,456,86,1,[456]
Normal-3.zip,0,Normal,761,196,120,1,[196]
CP-7.zip,1,CP,1265,3483,166,1,[3483]
NCP-3.zip,2,NCP,1287,2728,66,1,[2728]
NCP-28.zip,2,NCP,835,2350,52,2,"[2349, 2350]"
NCP-19.zip,2,NCP,543,2243,128,2,"[2243, 2244]"
CP-21.zip,1,CP,4,3507,259,4,"[3505, 3506, 3507, 3508]"
CP-17.zip,1,CP,1633,4321,26,1,[4321]
NCP-20.zip,2,NCP,565,2289,57,2,"[2288, 2289]"
NCP-22.zip,2,NCP,878,2412,46,2,"[2412, 2413]"
CP-14.zip,1,CP,1520,4144,57,3,"[4143, 4144, 4145]"
Normal-23.zip,0,Normal,2620,130,36,1,[130]
NCP-23.zip,2,NCP,958,2501,133,1,[2501]
CP-13.zip,1,CP,1513,4128,60,2,"[4127, 4128]"
NCP-24.zip,2,NCP,98,1332,139,2,"[1332, 1333]"
CP-9.zip,1,CP,1375,3804,60,2,"[3804, 3805]"
NCP-2.zip,2,NCP,1051,2625,88,2,"[2625, 2626]"
NCP-31.zip,2,NCP,999,2556,41,1,[2556]
CP-18.zip,1,CP,1781,3575,78,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,278,1703,57,2,"[1702, 1703]"
NCP-12.zip,2,NCP,313,1773,147,2,"[1773, 1774]"
NCP-14.zip,2,NCP,381,1915,60,2,"[1914, 1915]"
NCP-11.zip,2,NCP,295,1735,236,2,"[1735, 1736]"
CP-11.zip,1,CP,1440,3948,196,3,"[3948, 3949, 3950]"
CP-19.zip,1,CP,1795,3597,41,2,"[3596, 3597]"
CP-12.zip,1,CP,1467,4013,60,2,"[4013, 4014]"
NCP-12.zip,2,NCP,322,1793,51,2,"[1792, 1793]"
CP-9.zip,1,CP,1353,3750,59,3,"[3748, 3749, 3750]"
CP-19.zip,1,CP,1784,3591,50,4,"[3590, 3591, 3592, 3593]"
NCP-9.zip,2,NCP,2699,2665,51,1,[2665]
NCP-12.zip,2,NCP,331,1810,158,2,"[1810, 1811]"
NCP-12.zip,2,NCP,334,1817,59,2,"[1816, 1817]"
NCP-1.zip,2,NCP,1009,2571,29,2,"[2570, 2571]"
CP-30.zip,1,CP,4041,5590,31,1,[5590]
CP-24.zip,1,CP,705,3067,168,1,[3067]
Normal-24.zip,0,Normal,2665,175,33,1,[175]
NCP-12.zip,2,NCP,332,1813,70,2,"[1812, 1813]"
CP-11.zip,1,CP,1444,3962,58,3,"[3960, 3961, 3962]"
CP-22.zip,1,CP,614,2976,100,1,[2976]
Normal-23.zip,0,Normal,2630,140,38,1,[140]
Normal-8.zip,0,Normal,1876,331,97,1,[331]
NCP-1.zip,2,NCP,1001,2559,141,1,[2559]
NCP-22.zip,2,NCP,845,2361,148,4,"[2360, 2361, 2362, 2363]"
CP-26.zip,1,CP,3646,5606,36,1,[5606]
Normal-9.zip,0,Normal,1907,362,92,1,[362]
Normal-1.zip,0,Normal,1672,800,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-12.zip,2,NCP,333,1815,68,2,"[1814, 1815]"
CP-17.zip,1,CP,1634,4322,23,1,[4322]
Normal-12.zip,0,Normal,2009,464,93,1,[464]
CP-26.zip,1,CP,3731,5670,215,1,[5670]
Normal-25.zip,0,Normal,3714,5344,22,1,[5344]
Normal-19.zip,0,Normal,2231,686,85,1,[686]
NCP-23.zip,2,NCP,940,2483,22,1,[2483]
Normal-25.zip,0,Normal,3851,5363,201,1,[5363]
NCP-6.zip,2,NCP,209,1562,139,2,"[1562, 1563]"
NCP-13.zip,2,NCP,347,1846,53,2,"[1845, 1846]"
NCP-11.zip,2,NCP,312,1772,62,2,"[1771, 1772]"
CP-5.zip,1,CP,1196,3414,186,1,[3414]
NCP-21.zip,2,NCP,74,1282,54,2,"[1281, 1282]"
CP-23.zip,1,CP,662,3024,114,1,[3024]
NCP-7.zip,2,NCP,23,1177,151,2,"[1177, 1178]"
CP-16.zip,1,CP,1591,4279,23,1,[4279]
Normal-12.zip,0,Normal,1995,450,95,1,[450]
Normal-20.zip,0,Normal,2264,719,82,1,[719]
NCP-30.zip,2,NCP,948,2491,365,1,[2491]
Normal-12.zip,0,Normal,1998,453,99,1,[453]
NCP-19.zip,2,NCP,522,2201,58,2,"[2200, 2201]"
CP-13.zip,1,CP,1510,4121,60,2,"[4121, 4122]"
NCP-15.zip,2,NCP,406,1962,61,2,"[1961, 1962]"
NCP-4.zip,2,NCP,162,1468,148,2,"[1468, 1469]"
CP-11.zip,1,CP,1431,3930,61,2,"[3930, 3931]"
CP-15.zip,1,CP,1569,4257,20,1,[4257]
CP-9.zip,1,CP,1379,3813,52,2,"[3812, 3813]"
NCP-30.zip,2,NCP,981,2525,40,2,"[2525, 2526]"
NCP-8.zip,2,NCP,2679,2650,42,1,[2650]
NCP-25.zip,2,NCP,3951,5465,43,1,[5465]
NCP-7.zip,2,NCP,2460,2684,36,1,[2684]
CP-25.zip,1,CP,734,3096,106,1,[3096]
NCP-6.zip,2,NCP,209,1563,58,2,"[1562, 1563]"
Normal-22.zip,0,Normal,2593,103,38,1,[103]
NCP-16.zip,2,NCP,438,2029,149,2,"[2029, 2030]"
CP-7.zip,1,CP,1304,3638,43,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
Normal-8.zip,0,Normal,1885,340,101,1,[340]
NCP-17.zip,2,NCP,484,2123,137,2,"[2123, 2124]"
NCP-20.zip,2,NCP,565,2288,135,2,"[2288, 2289]"
NCP-5.zip,2,NCP,185,1515,51,2,"[1514, 1515]"
NCP-29.zip,2,NCP,877,2411,65,1,[2411]
NCP-6.zip,2,NCP,216,1577,58,2,"[1576, 1577]"
Normal-24.zip,0,Normal,2658,168,37,1,[168]
CP-28.zip,1,CP,3779,5723,26,1,[5723]
Normal-15.zip,0,Normal,2090,545,83,1,[545]
Normal-2.zip,0,Normal,1750,1077,69,3,"[1074, 1077, 1078]"
NCP-24.zip,2,NCP,98,1333,58,2,"[1332, 1333]"
CP-5.zip,1,CP,1199,3417,180,1,[3417]
CP-3.zip,1,CP,1146,3364,161,1,[3364]
CP-11.zip,1,CP,1449,3971,50,2,"[3971, 3972]"
Normal-3.zip,0,Normal,1767,1154,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-22.zip,0,Normal,2585,95,41,1,[95]
CP-29.zip,1,CP,3816,5760,29,1,[5760]
NCP-21.zip,2,NCP,62,1258,60,2,"[1257, 1258]"
NCP-2.zip,2,NCP,1056,2632,473,1,[2632]
NCP-19.zip,2,NCP,525,2206,144,2,"[2206, 2207]"
Normal-22.zip,0,Normal,2600,110,41,1,[110]
CP-3.zip,1,CP,1161,3379,310,1,[3379]
NCP-12.zip,2,NCP,316,1779,139,2,"[1779, 1780]"
NCP-28.zip,2,NCP,868,2396,200,2,"[2395, 2396]"
CP-7.zip,1,CP,1301,3600,52,4,"[3598, 3599, 3600, 3601]"
NCP-11.zip,2,NCP,301,1749,62,2,"[1748, 1749]"
Normal-9.zip,0,Normal,1917,372,96,1,[372]
NCP-20.zip,2,NCP,571,2300,163,2,"[2300, 2301]"
Normal-3.zip,0,Normal,1767,1152,68,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-1.zip,0,Normal,1716,988,71,2,"[987, 988]"
NCP-28.zip,2,NCP,842,2357,42,1,[2357]
NCP-27.zip,2,NCP,309,1765,162,2,"[1766, 1765]"
CP-12.zip,1,CP,1479,4040,60,3,"[4039, 4040, 4041]"
NCP-6.zip,2,NCP,22,1175,163,2,"[1175, 1176]"
NCP-28.zip,2,NCP,868,2395,51,2,"[2395, 2396]"
CP-14.zip,1,CP,1532,4171,50,2,"[4171, 4172]"
Normal-11.zip,0,Normal,1984,439,86,1,[439]
Normal-24.zip,0,Normal,2643,153,39,1,[153]
CP-20.zip,1,CP,2765,3296,42,1,[3296]
Normal-2.zip,0,Normal,1763,1132,72,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-2.zip,2,NCP,109,1356,60,2,"[1355, 1356]"
NCP-7.zip,2,NCP,241,1628,55,2,"[1627, 1628]"
Normal-22.zip,0,Normal,2587,97,44,1,[97]
CP-20.zip,1,CP,2753,3284,37,1,[3284]
Normal-1.zip,0,Normal,1670,790,63,6,"[787, 788, 789, 790, 791, 792]"
Normal-15.zip,0,Normal,2103,558,88,1,[558]
CP-13.zip,1,CP,1503,4104,64,3,"[4104, 4105, 4106]"
Normal-21.zip,0,Normal,2313,768,94,1,[768]
CP-9.zip,1,CP,1382,3818,200,3,"[3818, 3819, 3820]"
Normal-2.zip,0,Normal,1756,1102,64,4,"[1101, 1102, 1103, 1104]"
NCP-12.zip,2,NCP,334,1816,140,2,"[1816, 1817]"
CP-13.zip,1,CP,1518,4140,67,3,"[4138, 4139, 4140]"
CP-13.zip,1,CP,1492,4077,139,3,"[4077, 4078, 4079]"
Normal-11.zip,0,Normal,1982,437,99,1,[437]
NCP-6.zip,2,NCP,213,1570,159,2,"[1570, 1571]"
CP-18.zip,1,CP,1779,3551,59,2,"[3551, 3552]"
NCP-12.zip,2,NCP,321,1790,122,2,"[1790, 1791]"
NCP-4.zip,2,NCP,159,1462,144,2,"[1462, 1463]"
CP-24.zip,1,CP,684,3046,161,1,[3046]
CP-29.zip,1,CP,3828,5772,26,1,[5772]
CP-12.zip,1,CP,1462,4004,51,3,"[4002, 4003, 4004]"
Normal-1.zip,0,Normal,1707,969,65,2,"[969, 970]"
CP-24.zip,1,CP,685,3047,168,1,[3047]
NCP-16.zip,2,NCP,444,2043,61,2,"[2042, 2043]"
CP-19.zip,1,CP,2430,2892,106,2,"[2891, 2892]"
Normal-25.zip,0,Normal,3857,5369,222,1,[5369]
CP-28.zip,1,CP,3774,5718,20,1,[5718]
CP-21.zip,1,CP,591,2953,124,1,[2953]
Normal-1.zip,0,Normal,1670,792,66,6,"[787, 788, 789, 790, 791, 792]"
NCP-14.zip,2,NCP,387,1925,54,2,"[1924, 1925]"
CP-10.zip,1,CP,14,3515,115,1,[3515]
NCP-4.zip,2,NCP,16,1164,113,2,"[1164, 1165]"
Normal-17.zip,0,Normal,2162,617,96,1,[617]
CP-13.zip,1,CP,1513,4127,60,2,"[4127, 4128]"
NCP-11.zip,2,NCP,300,1746,139,2,"[1746, 1747]"
NCP-21.zip,2,NCP,577,2312,61,2,"[2311, 2312]"
Normal-8.zip,0,Normal,1875,330,93,1,[330]
Normal-27.zip,0,Normal,3906,5439,62,1,[5439]
NCP-7.zip,2,NCP,249,1645,58,2,"[1644, 1645]"
NCP-20.zip,2,NCP,552,2262,61,2,"[2261, 2262]"
NCP-9.zip,2,NCP,2701,2667,56,1,[2667]
NCP-15.zip,2,NCP,417,1987,139,2,"[1987, 1988]"
NCP-9.zip,2,NCP,2705,2671,56,1,[2671]
Normal-3.zip,0,Normal,1767,1160,71,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-15.zip,1,CP,1585,4273,23,1,[4273]
CP-27.zip,1,CP,3742,5686,17,1,[5686]
CP-14.zip,1,CP,1521,4146,57,2,"[4146, 4147]"
Normal-1.zip,0,Normal,1703,960,70,2,"[959, 960]"
CP-21.zip,1,CP,6,3510,36,1,[3510]
NCP-19.zip,2,NCP,54,1242,62,2,"[1241, 1242]"
NCP-5.zip,2,NCP,17,1166,143,2,"[1166, 1167]"
NCP-15.zip,2,NCP,413,1977,47,4,"[1975, 1976, 1977, 1979]"
NCP-22.zip,2,NCP,845,2360,53,4,"[2360, 2361, 2362, 2363]"
NCP-2.zip,2,NCP,120,1381,139,2,"[1381, 1382]"
CP-5.zip,1,CP,1207,3425,189,1,[3425]
CP-27.zip,1,CP,3758,5702,23,1,[5702]
CP-16.zip,1,CP,1592,4280,25,1,[4280]
CP-21.zip,1,CP,4,3506,275,4,"[3505, 3506, 3507, 3508]"
NCP-21.zip,2,NCP,72,1277,55,2,"[1276, 1277]"
NCP-17.zip,2,NCP,475,2105,156,2,"[2105, 2106]"
NCP-13.zip,2,NCP,358,1868,67,2,"[1867, 1868]"
Normal-3.zip,0,Normal,764,199,130,1,[199]
CP-9.zip,1,CP,1358,3763,63,3,"[3761, 3762, 3763]"
NCP-4.zip,2,NCP,169,1483,56,2,"[1482, 1483]"
Normal-1.zip,0,Normal,1707,970,65,2,"[969, 970]"
NCP-18.zip,2,NCP,502,2160,140,2,"[2160, 2161]"
CP-18.zip,1,CP,1781,3568,67,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,2727,2683,44,1,[2683]
CP-26.zip,1,CP,3719,5651,277,3,"[5649, 5650, 5651]"
CP-11.zip,1,CP,1422,3910,58,3,"[3908, 3909, 3910]"
NCP-4.zip,2,NCP,168,1480,139,2,"[1480, 1481]"
CP-8.zip,1,CP,1329,3695,89,3,"[3695, 3696, 3697]"
CP-12.zip,1,CP,1463,4005,49,2,"[4005, 4006]"
Normal-27.zip,0,Normal,3915,5458,70,1,[5458]
Normal-18.zip,0,Normal,2209,664,82,1,[664]
CP-13.zip,1,CP,1492,4079,58,3,"[4077, 4078, 4079]"
CP-30.zip,1,CP,3830,5774,29,1,[5774]
CP-8.zip,1,CP,1329,3696,45,3,"[3695, 3696, 3697]"
Normal-16.zip,0,Normal,2139,594,87,1,[594]
NCP-14.zip,2,NCP,393,1936,149,2,"[1936, 1937]"
CP-21.zip,1,CP,4,3508,290,4,"[3505, 3506, 3507, 3508]"
Normal-2.zip,0,Normal,1737,1037,79,4,"[1037, 1038, 1039, 1040]"
NCP-25.zip,2,NCP,3708,5535,59,1,[5535]
CP-7.zip,1,CP,1301,3601,276,4,"[3598, 3599, 3600, 3601]"
NCP-7.zip,2,NCP,249,1644,139,2,"[1644, 1645]"
NCP-12.zip,2,NCP,339,1827,51,2,"[1826, 1827]"
NCP-2.zip,2,NCP,1275,2716,68,1,[2716]
NCP-13.zip,2,NCP,354,1860,73,2,"[1859, 1860]"
Normal-2.zip,0,Normal,1757,1105,71,4,"[1105, 1106, 1107, 1108]"
NCP-27.zip,2,NCP,1016,2582,108,3,"[2580, 2581, 2582]"
CP-18.zip,1,CP,1777,3541,62,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-1.zip,2,NCP,1008,2569,387,1,[2569]
CP-7.zip,1,CP,1315,3665,59,2,"[3665, 3666]"
CP-27.zip,1,CP,3737,5681,17,1,[5681]
Normal-9.zip,0,Normal,1914,369,88,1,[369]
Normal-1.zip,0,Normal,1672,802,75,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-8.zip,2,NCP,25,1181,129,2,"[1181, 1183]"
CP-19.zip,1,CP,1789,3207,64,4,"[3204, 3205, 3206, 3207]"
CP-11.zip,1,CP,1444,3960,139,3,"[3960, 3961, 3962]"
NCP-4.zip,2,NCP,145,1435,58,2,"[1434, 1435]"
CP-23.zip,1,CP,659,3021,594,1,[3021]
Normal-25.zip,0,Normal,3716,5346,31,1,[5346]
Normal-10.zip,0,Normal,1936,391,82,1,[391]
NCP-22.zip,2,NCP,821,2331,30,1,[2331]
CP-13.zip,1,CP,1505,4108,54,3,"[4108, 4109, 4110]"
NCP-15.zip,2,NCP,411,1972,62,2,"[1971, 1972]"
CP-7.zip,1,CP,1304,3633,18,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-27.zip,2,NCP,1048,2621,44,2,"[2620, 2621]"
CP-21.zip,1,CP,595,2957,306,1,[2957]
NCP-22.zip,2,NCP,861,2384,197,1,[2384]
CP-7.zip,1,CP,1302,3604,39,4,"[3602, 3603, 3604, 3605]"
NCP-17.zip,2,NCP,472,2099,151,2,"[2099, 2100]"
NCP-8.zip,2,NCP,26,1185,36,2,"[1184, 1185]"
Normal-27.zip,0,Normal,3903,5435,75,1,[5435]
Normal-25.zip,0,Normal,3840,5352,210,1,[5352]
NCP-8.zip,2,NCP,266,1679,58,2,"[1678, 1679]"
Normal-16.zip,0,Normal,2120,575,84,1,[575]
Normal-16.zip,0,Normal,2128,583,76,1,[583]
CP-11.zip,1,CP,1449,3972,50,2,"[3971, 3972]"
CP-7.zip,1,CP,1304,3636,47,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
Normal-22.zip,0,Normal,2597,107,41,1,[107]
NCP-10.zip,2,NCP,2726,2682,50,1,[2682]
Normal-7.zip,0,Normal,1849,304,87,1,[304]
Normal-13.zip,0,Normal,2040,495,95,1,[495]
Normal-16.zip,0,Normal,2125,580,83,1,[580]
CP-25.zip,1,CP,740,3102,193,1,[3102]
NCP-22.zip,2,NCP,871,2401,281,2,"[2401, 2402]"
NCP-9.zip,2,NCP,2704,2670,56,1,[2670]
NCP-12.zip,2,NCP,33,1198,147,2,"[1198, 1199]"
CP-18.zip,1,CP,1663,4351,26,1,[4351]
Normal-3.zip,0,Normal,1767,1157,28,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-2.zip,0,Normal,1735,1031,76,2,"[1030, 1031]"
Normal-10.zip,0,Normal,1938,393,66,1,[393]
NCP-24.zip,2,NCP,975,2518,484,1,[2518]
CP-18.zip,1,CP,1774,3523,65,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-14.zip,2,NCP,381,1914,143,2,"[1914, 1915]"
NCP-12.zip,2,NCP,33,1199,62,2,"[1198, 1199]"
NCP-13.zip,2,NCP,352,1855,138,2,"[1855, 1856]"
NCP-12.zip,2,NCP,333,1814,162,2,"[1814, 1815]"
NCP-23.zip,2,NCP,904,2446,667,1,[2446]
NCP-24.zip,2,NCP,985,2531,508,1,[2531]
NCP-6.zip,2,NCP,228,1600,161,2,"[1600, 1601]"
NCP-15.zip,2,NCP,414,1980,121,2,"[1980, 1981]"
NCP-1.zip,2,NCP,103,1344,63,2,"[1343, 1344]"
Normal-3.zip,0,Normal,1767,1155,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-8.zip,1,CP,1349,3744,58,3,"[3742, 3743, 3744]"
NCP-8.zip,2,NCP,261,1669,65,2,"[1668, 1669]"
Normal-21.zip,0,Normal,2300,755,98,1,[755]
NCP-13.zip,2,NCP,354,1859,177,2,"[1859, 1860]"
CP-23.zip,1,CP,665,3027,116,1,[3027]
CP-15.zip,1,CP,1561,4242,49,2,"[4241, 4242]"
CP-9.zip,1,CP,1376,3806,60,2,"[3806, 3807]"
Normal-1.zip,0,Normal,1727,1012,66,4,"[1009, 1010, 1011, 1012]"
NCP-28.zip,2,NCP,835,2349,46,2,"[2349, 2350]"
CP-8.zip,1,CP,1349,3742,142,3,"[3742, 3743, 3744]"
Normal-20.zip,0,Normal,2277,732,95,1,[732]
NCP-28.zip,2,NCP,876,2409,52,1,[2409]
Normal-15.zip,0,Normal,2101,556,85,1,[556]
CP-11.zip,1,CP,1444,3961,58,3,"[3960, 3961, 3962]"
NCP-2.zip,2,NCP,1276,2717,61,1,[2717]
Normal-3.zip,0,Normal,1767,1153,68,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-13.zip,0,Normal,2051,506,86,1,[506]
Normal-2.zip,0,Normal,1734,1029,66,2,"[1028, 1029]"
Normal-26.zip,0,Normal,3871,5383,22,1,[5383]
NCP-1.zip,2,NCP,1009,2570,39,2,"[2570, 2571]"
Normal-2.zip,0,Normal,1763,1139,65,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-13.zip,2,NCP,359,1870,61,2,"[1869, 1870]"
Normal-19.zip,0,Normal,2220,675,78,1,[675]
CP-9.zip,1,CP,1382,3819,60,3,"[3818, 3819, 3820]"
CP-20.zip,1,CP,2752,3283,26,1,[3283]
CP-13.zip,1,CP,1510,4122,60,2,"[4121, 4122]"
NCP-16.zip,2,NCP,440,2034,125,2,"[2034, 2035]"
CP-12.zip,1,CP,1458,3994,69,3,"[3992, 3993, 3994]"
NCP-11.zip,2,NCP,284,1714,58,2,"[1713, 1714]"
NCP-11.zip,2,NCP,303,1753,58,2,"[1752, 1753]"
NCP-6.zip,2,NCP,205,1554,126,2,"[1554, 1555]"
CP-14.zip,1,CP,1535,4179,53,2,"[4178, 4179]"
Normal-27.zip,0,Normal,3910,5446,66,2,"[5445, 5446]"
Normal-3.zip,0,Normal,742,177,107,1,[177]
Normal-22.zip,0,Normal,2589,99,37,1,[99]
NCP-22.zip,2,NCP,88,1310,71,2,"[1309, 1310]"
CP-14.zip,1,CP,1521,4147,57,2,"[4146, 4147]"
CP-26.zip,1,CP,3729,5666,179,3,"[5665, 5666, 5667]"
CP-28.zip,1,CP,3793,5737,29,1,[5737]
Normal-3.zip,0,Normal,767,202,358,1,[202]
NCP-5.zip,2,NCP,198,1540,144,2,"[1540, 1541]"
CP-27.zip,1,CP,3738,5682,19,1,[5682]
CP-27.zip,1,CP,3750,5694,28,1,[5694]
CP-10.zip,1,CP,1416,3898,58,2,"[3897, 3898]"
CP-8.zip,1,CP,1322,3680,56,2,"[3680, 3681]"
Normal-23.zip,0,Normal,2607,117,38,1,[117]
NCP-3.zip,2,NCP,138,1420,124,2,"[1420, 1421]"
CP-11.zip,1,CP,1425,3916,185,3,"[3916, 3917, 3918]"
CP-15.zip,1,CP,1581,4269,19,1,[4269]
CP-24.zip,1,CP,706,3068,124,1,[3068]
CP-18.zip,1,CP,1666,4354,23,1,[4354]
NCP-4.zip,2,NCP,161,1466,135,2,"[1466, 1467]"
Normal-7.zip,0,Normal,1847,302,102,1,[302]
CP-19.zip,1,CP,1784,3593,69,4,"[3590, 3591, 3592, 3593]"
CP-21.zip,1,CP,605,2967,157,1,[2967]
CP-5.zip,1,CP,12,3168,291,2,"[3168, 3169]"
Normal-9.zip,0,Normal,1909,364,102,1,[364]
NCP-22.zip,2,NCP,850,2369,52,1,[2369]
CP-24.zip,1,CP,687,3049,135,1,[3049]
NCP-1.zip,2,NCP,1033,2604,39,1,[2604]
Normal-2.zip,0,Normal,1750,1074,65,3,"[1074, 1077, 1078]"
CP-9.zip,1,CP,1365,3779,200,3,"[3779, 3780, 3781]"
NCP-18.zip,2,NCP,502,2161,59,2,"[2160, 2161]"
Normal-3.zip,0,Normal,1767,1162,76,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-1.zip,0,Normal,1672,799,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
Normal-3.zip,0,Normal,747,182,100,1,[182]
NCP-12.zip,2,NCP,319,1787,66,2,"[1785, 1787]"
NCP-15.zip,2,NCP,405,1959,143,2,"[1959, 1960]"
CP-18.zip,1,CP,1781,3574,64,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-21.zip,1,CP,600,2962,202,1,[2962]
CP-12.zip,1,CP,1479,4039,60,3,"[4039, 4040, 4041]"
NCP-27.zip,2,NCP,827,2340,173,1,[2340]
NCP-24.zip,2,NCP,983,2528,67,1,[2528]
CP-11.zip,1,CP,1424,3915,60,2,"[3914, 3915]"
CP-2.zip,1,CP,1105,3323,220,1,[3323]
CP-10.zip,1,CP,1412,3888,66,2,"[3887, 3888]"
NCP-18.zip,2,NCP,495,2147,65,2,"[2146, 2147]"
NCP-3.zip,2,NCP,134,1412,128,2,"[1412, 1413]"
Normal-10.zip,0,Normal,1940,395,74,1,[395]
Normal-17.zip,0,Normal,2163,618,89,1,[618]
CP-9.zip,1,CP,1358,3761,249,3,"[3761, 3762, 3763]"
CP-23.zip,1,CP,658,3020,273,1,[3020]
NCP-12.zip,2,NCP,341,1830,129,3,"[1830, 1832, 1834]"
CP-14.zip,1,CP,1520,4145,57,3,"[4143, 4144, 4145]"
CP-19.zip,1,CP,1783,3588,62,2,"[3588, 3589]"
Normal-3.zip,0,Normal,1767,1158,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-7.zip,1,CP,1301,3598,55,4,"[3598, 3599, 3600, 3601]"
Normal-6.zip,0,Normal,1810,265,85,1,[265]
NCP-12.zip,2,NCP,321,1791,51,2,"[1790, 1791]"
NCP-12.zip,2,NCP,341,1834,54,3,"[1830, 1832, 1834]"
CP-11.zip,1,CP,1435,3938,46,2,"[3938, 3939]"
Normal-26.zip,0,Normal,3876,5388,30,1,[5388]
Normal-16.zip,0,Normal,2123,578,90,1,[578]
Normal-6.zip,0,Normal,1816,271,76,1,[271]
NCP-26.zip,2,NCP,3992,5516,48,1,[5516]
CP-18.zip,1,CP,1777,3544,66,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-5.zip,2,NCP,173,1491,58,2,"[1490, 1491]"
NCP-11.zip,2,NCP,312,1771,148,2,"[1771, 1772]"
NCP-19.zip,2,NCP,525,2207,61,2,"[2206, 2207]"
Normal-3.zip,0,Normal,752,187,103,1,[187]
NCP-7.zip,2,NCP,23,1178,63,2,"[1177, 1178]"
CP-27.zip,1,CP,3762,5706,26,1,[5706]
CP-18.zip,1,CP,1659,4347,26,1,[4347]
CP-20.zip,1,CP,2667,3248,46,3,"[3246, 3247, 3248]"
Normal-24.zip,0,Normal,2653,163,39,1,[163]
Normal-4.zip,0,Normal,801,236,107,1,[236]
Normal-20.zip,0,Normal,2272,727,79,1,[727]
NCP-30.zip,2,NCP,988,2539,56,2,"[2538, 2539]"
CP-18.zip,1,CP,1774,3527,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-17.zip,0,Normal,2165,620,95,1,[620]
CP-12.zip,1,CP,1479,4041,60,3,"[4039, 4040, 4041]"
Normal-21.zip,0,Normal,2299,754,90,1,[754]
CP-22.zip,1,CP,637,2999,118,1,[2999]
NCP-6.zip,2,NCP,217,1578,139,2,"[1578, 1579]"
CP-30.zip,1,CP,3919,5544,73,4,"[5543, 5544, 5545, 5546]"
CP-13.zip,1,CP,1511,4123,57,2,"[4123, 4124]"
Normal-13.zip,0,Normal,2035,490,82,1,[490]
CP-10.zip,1,CP,1417,3899,59,1,[3899]
NCP-8.zip,2,NCP,261,1668,155,2,"[1668, 1669]"
CP-20.zip,1,CP,2667,3247,92,3,"[3246, 3247, 3248]"
CP-26.zip,1,CP,3636,5595,290,1,[5595]
Normal-2.zip,0,Normal,1763,1136,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-9.zip,0,Normal,1913,368,88,1,[368]
CP-9.zip,1,CP,1375,3805,58,2,"[3804, 3805]"
CP-16.zip,1,CP,1606,4294,26,1,[4294]
CP-18.zip,1,CP,1777,3543,68,5,"[3540, 3541, 3542, 3543, 3544]"
Normal-21.zip,0,Normal,2287,742,77,1,[742]
CP-11.zip,1,CP,1422,3908,140,3,"[3908, 3909, 3910]"
NCP-22.zip,2,NCP,859,2381,268,2,"[2380, 2381]"
Normal-24.zip,0,Normal,2645,155,38,1,[155]
CP-7.zip,1,CP,1302,3605,201,4,"[3602, 3603, 3604, 3605]"
CP-23.zip,1,CP,646,3008,128,1,[3008]
CP-11.zip,1,CP,1425,3918,49,3,"[3916, 3917, 3918]"
CP-18.zip,1,CP,1781,3569,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-16.zip,2,NCP,436,2025,146,2,"[2025, 2026]"
NCP-18.zip,2,NCP,503,2163,61,2,"[2162, 2163]"
NCP-4.zip,2,NCP,167,1478,143,2,"[1478, 1479]"
Normal-26.zip,0,Normal,3880,5392,32,1,[5392]
NCP-25.zip,2,NCP,3709,5536,65,1,[5536]
Normal-2.zip,0,Normal,1734,1028,66,2,"[1028, 1029]"
Normal-17.zip,0,Normal,2169,624,92,1,[624]
NCP-20.zip,2,NCP,546,2249,134,2,"[2249, 2250]"
NCP-4.zip,2,NCP,146,1437,52,2,"[1436, 1437]"
NCP-26.zip,2,NCP,3995,5493,47,1,[5493]
CP-20.zip,1,CP,2763,3294,119,1,[3294]
NCP-13.zip,2,NCP,349,1850,57,2,"[1849, 1850]"
CP-26.zip,1,CP,3644,5604,284,1,[5604]
CP-8.zip,1,CP,1327,3690,253,3,"[3690, 3691, 3692]"
CP-20.zip,1,CP,2770,3301,38,1,[3301]
CP-12.zip,1,CP,1471,4023,55,2,"[4022, 4023]"
Normal-27.zip,0,Normal,3912,5453,68,1,[5453]
NCP-23.zip,2,NCP,93,1322,157,2,"[1322, 1323]"
CP-18.zip,1,CP,1781,3576,64,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-13.zip,2,NCP,347,1845,126,2,"[1845, 1846]"
CP-20.zip,1,CP,2454,2935,120,2,"[2935, 2936]"
Normal-1.zip,0,Normal,1670,788,58,6,"[787, 788, 789, 790, 791, 792]"
Normal-8.zip,0,Normal,1880,335,83,1,[335]
Normal-10.zip,0,Normal,1937,392,90,1,[392]
CP-20.zip,1,CP,2768,3299,38,1,[3299]
Normal-18.zip,0,Normal,2212,667,89,1,[667]
Normal-1.zip,0,Normal,1677,826,65,4,"[823, 824, 825, 826]"
CP-26.zip,1,CP,3721,5654,43,2,"[5654, 5655]"
NCP-16.zip,2,NCP,439,2033,66,2,"[2032, 2033]"
Normal-13.zip,0,Normal,2031,486,81,1,[486]
CP-19.zip,1,CP,1783,3589,62,2,"[3588, 3589]"
CP-2.zip,1,CP,1121,3339,156,1,[3339]
CP-22.zip,1,CP,612,2974,84,1,[2974]
Normal-26.zip,0,Normal,3867,5379,29,1,[5379]
NCP-1.zip,2,NCP,102,1342,56,2,"[1341, 1342]"
NCP-18.zip,2,NCP,493,2142,133,2,"[2142, 2143]"
NCP-12.zip,2,NCP,339,1826,120,2,"[1826, 1827]"
Normal-14.zip,0,Normal,2085,540,95,1,[540]
NCP-27.zip,2,NCP,238,1622,57,2,"[1621, 1622]"
Normal-2.zip,0,Normal,1737,1039,80,4,"[1037, 1038, 1039, 1040]"
CP-30.zip,1,CP,3919,5546,70,4,"[5543, 5544, 5545, 5546]"
NCP-1.zip,2,NCP,1012,2576,249,1,[2576]
NCP-17.zip,2,NCP,463,2080,144,2,"[2080, 2081]"
NCP-2.zip,2,NCP,127,1400,58,2,"[1399, 1400]"
Normal-21.zip,0,Normal,2291,746,96,1,[746]
NCP-8.zip,2,NCP,25,1183,45,2,"[1181, 1183]"
CP-9.zip,1,CP,1382,3820,60,3,"[3818, 3819, 3820]"
NCP-30.zip,2,NCP,967,2510,168,1,[2510]
Normal-27.zip,0,Normal,3910,5445,66,2,"[5445, 5446]"
NCP-4.zip,2,NCP,156,1456,138,2,"[1456, 1457]"
CP-12.zip,1,CP,1464,4007,63,2,"[4007, 4008]"
NCP-4.zip,2,NCP,162,1469,62,2,"[1468, 1469]"
CP-13.zip,1,CP,1493,4081,53,3,"[4080, 4081, 4082]"
CP-16.zip,1,CP,1602,4290,17,1,[4290]
NCP-6.zip,2,NCP,216,1576,139,2,"[1576, 1577]"
CP-25.zip,1,CP,723,3085,104,1,[3085]
NCP-15.zip,2,NCP,411,1971,149,2,"[1971, 1972]"
NCP-15.zip,2,NCP,425,2003,139,2,"[2003, 2004]"
CP-24.zip,1,CP,688,3050,127,1,[3050]
Normal-13.zip,0,Normal,2033,488,77,1,[488]
NCP-23.zip,2,NCP,96,1329,61,2,"[1328, 1329]"
Normal-5.zip,0,Normal,803,238,343,1,[238]
CP-16.zip,1,CP,1595,4283,23,1,[4283]
NCP-27.zip,2,NCP,238,1621,134,2,"[1621, 1622]"
NCP-19.zip,2,NCP,529,2214,141,3,"[2214, 2215, 2217]"
CP-25.zip,1,CP,710,3072,78,1,[3072]
Normal-19.zip,0,Normal,2243,698,86,1,[698]
CP-11.zip,1,CP,1440,3949,51,3,"[3948, 3949, 3950]"
CP-7.zip,1,CP,1260,3478,235,1,[3478]
Normal-1.zip,0,Normal,1672,797,76,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-26.zip,1,CP,3719,5649,52,3,"[5649, 5650, 5651]"
NCP-23.zip,2,NCP,969,2512,68,1,[2512]
NCP-5.zip,2,NCP,186,1516,113,2,"[1516, 1517]"
CP-13.zip,1,CP,1507,4114,62,2,"[4113, 4114]"
CP-19.zip,1,CP,2443,2916,310,3,"[2915, 2916, 2917]"
CP-13.zip,1,CP,1503,4105,64,3,"[4104, 4105, 4106]"
Normal-10.zip,0,Normal,1934,389,85,1,[389]
CP-20.zip,1,CP,2760,3291,281,1,[3291]
Normal-19.zip,0,Normal,2242,697,86,1,[697]
NCP-22.zip,2,NCP,864,2388,214,2,"[2388, 2389]"
NCP-14.zip,2,NCP,377,1906,147,2,"[1906, 1907]"
CP-29.zip,1,CP,3818,5762,29,1,[5762]
CP-23.zip,1,CP,676,3038,291,1,[3038]
NCP-14.zip,2,NCP,389,1928,150,2,"[1928, 1929]"
CP-27.zip,1,CP,3761,5705,16,1,[5705]
NCP-27.zip,2,NCP,1016,2581,179,3,"[2580, 2581, 2582]"
Normal-22.zip,0,Normal,2321,776,90,1,[776]
CP-7.zip,1,CP,1304,3639,212,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-16.zip,2,NCP,438,2030,62,2,"[2029, 2030]"
NCP-2.zip,2,NCP,107,1352,61,2,"[1351, 1352]"
NCP-11.zip,2,NCP,295,1736,97,2,"[1735, 1736]"
CP-2.zip,1,CP,1122,3340,229,1,[3340]
Normal-25.zip,0,Normal,3849,5361,205,1,[5361]
CP-4.zip,1,CP,1189,3407,284,1,[3407]
NCP-4.zip,2,NCP,152,1449,61,2,"[1448, 1449]"
Normal-13.zip,0,Normal,2044,499,103,1,[499]
Normal-2.zip,0,Normal,1756,1103,65,4,"[1101, 1102, 1103, 1104]"
CP-9.zip,1,CP,1379,3812,52,2,"[3812, 3813]"
CP-20.zip,1,CP,2454,2936,116,2,"[2935, 2936]"
NCP-3.zip,2,NCP,1294,2735,62,1,[2735]
CP-6.zip,1,CP,1230,3448,37,1,[3448]
Normal-5.zip,0,Normal,815,250,120,1,[250]
CP-13.zip,1,CP,1488,4066,66,3,"[4064, 4065, 4066]"
NCP-7.zip,2,NCP,241,1627,131,2,"[1627, 1628]"
NCP-6.zip,2,NCP,220,1584,160,2,"[1584, 1585]"
NCP-30.zip,2,NCP,982,2527,242,1,[2527]
Normal-2.zip,0,Normal,1735,1030,76,2,"[1030, 1031]"
CP-18.zip,1,CP,1781,3573,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-26.zip,1,CP,3642,5601,29,1,[5601]
NCP-5.zip,2,NCP,186,1517,48,2,"[1516, 1517]"
Normal-7.zip,0,Normal,1846,301,105,1,[301]
CP-6.zip,1,CP,1252,3470,180,1,[3470]
NCP-8.zip,2,NCP,254,1655,58,2,"[1654, 1655]"
NCP-17.zip,2,NCP,460,2075,45,2,"[2074, 2075]"
NCP-3.zip,2,NCP,138,1421,52,2,"[1420, 1421]"
CP-29.zip,1,CP,3798,5742,21,1,[5742]
NCP-14.zip,2,NCP,389,1929,63,2,"[1928, 1929]"
NCP-22.zip,2,NCP,858,2379,52,1,[2379]
NCP-10.zip,2,NCP,2721,2677,37,1,[2677]
NCP-29.zip,2,NCP,882,2418,257,2,"[2417, 2418]"
NCP-18.zip,2,NCP,495,2146,156,2,"[2146, 2147]"
Normal-18.zip,0,Normal,2210,665,88,1,[665]
CP-7.zip,1,CP,1304,3632,18,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-18.zip,2,NCP,512,2180,149,2,"[2180, 2181]"
Normal-1.zip,0,Normal,1672,803,75,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-21.zip,1,CP,2774,3305,31,1,[3305]
CP-9.zip,1,CP,1372,3797,193,3,"[3797, 3798, 3799]"
CP-22.zip,1,CP,615,2977,104,1,[2977]
CP-12.zip,1,CP,1469,4019,47,2,"[4018, 4019]"
CP-18.zip,1,CP,1774,3522,65,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-17.zip,2,NCP,472,2100,63,2,"[2099, 2100]"
Normal-14.zip,0,Normal,2069,524,81,1,[524]
CP-18.zip,1,CP,1774,3529,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-27.zip,2,NCP,1031,2601,216,2,"[2601, 2602]"
NCP-22.zip,2,NCP,857,2378,53,1,[2378]
Normal-3.zip,0,Normal,1767,1156,139,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-24.zip,0,Normal,2641,151,41,1,[151]
NCP-9.zip,2,NCP,2696,2662,44,1,[2662]
CP-17.zip,1,CP,1620,4308,24,1,[4308]
NCP-4.zip,2,NCP,149,1443,66,2,"[1442, 1443]"
CP-13.zip,1,CP,1488,4064,158,3,"[4064, 4065, 4066]"
Normal-22.zip,0,Normal,2315,770,82,1,[770]
NCP-12.zip,2,NCP,316,1780,58,2,"[1779, 1780]"
CP-9.zip,1,CP,1360,3767,67,3,"[3767, 3768, 3769]"
NCP-18.zip,2,NCP,512,2181,62,2,"[2180, 2181]"
NCP-20.zip,2,NCP,547,2252,66,2,"[2251, 2252]"
Normal-10.zip,0,Normal,1942,397,81,1,[397]
NCP-5.zip,2,NCP,198,1541,60,2,"[1540, 1541]"
NCP-6.zip,2,NCP,199,1542,138,2,"[1542, 1543]"
CP-17.zip,1,CP,1631,4319,23,1,[4319]
NCP-13.zip,2,NCP,353,1858,69,2,"[1857, 1858]"
NCP-17.zip,2,NCP,463,2081,60,2,"[2080, 2081]"
NCP-1.zip,2,NCP,1019,2585,363,1,[2585]
NCP-22.zip,2,NCP,845,2362,48,4,"[2360, 2361, 2362, 2363]"
NCP-15.zip,2,NCP,425,2004,58,2,"[2003, 2004]"
NCP-28.zip,2,NCP,873,2405,52,2,"[2405, 2406]"
NCP-4.zip,2,NCP,152,1448,145,2,"[1448, 1449]"
NCP-19.zip,2,NCP,543,2244,54,2,"[2243, 2244]"
Normal-14.zip,0,Normal,2062,517,84,1,[517]
NCP-17.zip,2,NCP,465,2086,61,3,"[2084, 2085, 2086]"
Normal-25.zip,0,Normal,3717,5347,25,1,[5347]
CP-4.zip,1,CP,1178,3396,133,1,[3396]
CP-22.zip,1,CP,620,2982,64,1,[2982]
Normal-1.zip,0,Normal,1677,825,65,4,"[823, 824, 825, 826]"
Normal-9.zip,0,Normal,1908,363,81,1,[363]
CP-30.zip,1,CP,3940,5646,33,1,[5646]
NCP-30.zip,2,NCP,942,2485,45,1,[2485]
CP-18.zip,1,CP,1781,3578,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-9.zip,1,CP,1358,3762,126,3,"[3761, 3762, 3763]"
CP-27.zip,1,CP,3764,5708,23,1,[5708]
NCP-8.zip,2,NCP,2673,2692,48,1,[2692]
NCP-19.zip,2,NCP,534,2226,49,2,"[2225, 2226]"
CP-11.zip,1,CP,1440,3950,51,3,"[3948, 3949, 3950]"
NCP-17.zip,2,NCP,465,2084,145,3,"[2084, 2085, 2086]"
NCP-19.zip,2,NCP,522,2200,137,2,"[2200, 2201]"
CP-12.zip,1,CP,1468,4015,54,3,"[4015, 4016, 4017]"
CP-13.zip,1,CP,1498,4095,60,2,"[4095, 4096]"
CP-18.zip,1,CP,1778,3548,65,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
Normal-1.zip,0,Normal,1670,791,66,6,"[787, 788, 789, 790, 791, 792]"
CP-16.zip,1,CP,1611,4299,19,1,[4299]
Normal-14.zip,0,Normal,2080,535,100,1,[535]
NCP-25.zip,2,NCP,3968,5477,44,1,[5477]
Normal-3.zip,0,Normal,755,190,107,1,[190]
Normal-16.zip,0,Normal,2151,606,93,1,[606]
NCP-4.zip,2,NCP,168,1481,58,2,"[1480, 1481]"
Normal-21.zip,0,Normal,2289,744,77,1,[744]
NCP-6.zip,2,NCP,224,1593,57,2,"[1592, 1593]"
CP-13.zip,1,CP,1502,4103,73,2,"[4102, 4103]"
NCP-22.zip,2,NCP,865,2390,34,2,"[2390, 2391]"
CP-28.zip,1,CP,3787,5731,27,1,[5731]
NCP-5.zip,2,NCP,170,1484,141,2,"[1484, 1485]"
Normal-20.zip,0,Normal,2271,726,81,1,[726]
NCP-7.zip,2,NCP,2485,2644,46,1,[2644]
NCP-17.zip,2,NCP,475,2106,63,2,"[2105, 2106]"
NCP-21.zip,2,NCP,74,1281,127,2,"[1281, 1282]"
CP-13.zip,1,CP,1507,4113,62,2,"[4113, 4114]"
CP-18.zip,1,CP,1781,3570,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-12.zip,1,CP,1462,4002,193,3,"[4002, 4003, 4004]"
Normal-1.zip,0,Normal,1672,796,76,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-12.zip,2,NCP,337,1822,139,2,"[1822, 1823]"
CP-9.zip,1,CP,1353,3749,60,3,"[3748, 3749, 3750]"
Normal-15.zip,0,Normal,2087,542,83,1,[542]
NCP-12.zip,2,NCP,331,1811,66,2,"[1810, 1811]"
CP-22.zip,1,CP,617,2979,110,1,[2979]
CP-18.zip,1,CP,1771,3518,51,4,"[3518, 3519, 3520, 3521]"
CP-26.zip,1,CP,3730,5668,212,2,"[5668, 5669]"
Normal-24.zip,0,Normal,2660,170,38,1,[170]
Normal-11.zip,0,Normal,1967,422,97,1,[422]
NCP-4.zip,2,NCP,149,1442,159,2,"[1442, 1443]"
CP-30.zip,1,CP,3834,5778,26,1,[5778]
NCP-19.zip,2,NCP,540,2237,127,2,"[2237, 2238]"
Normal-26.zip,0,Normal,3862,5374,188,1,[5374]
Normal-7.zip,0,Normal,1842,297,77,1,[297]
Normal-26.zip,0,Normal,3868,5380,30,1,[5380]
Normal-12.zip,0,Normal,2003,458,85,1,[458]
NCP-5.zip,2,NCP,17,1167,58,2,"[1166, 1167]"
NCP-2.zip,2,NCP,117,1375,130,2,"[1375, 1376]"
CP-13.zip,1,CP,1511,4124,57,2,"[4123, 4124]"
CP-18.zip,1,CP,1778,3546,66,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-19.zip,2,NCP,529,2217,58,3,"[2214, 2215, 2217]"
CP-14.zip,1,CP,1520,4143,57,3,"[4143, 4144, 4145]"
Normal-16.zip,0,Normal,2131,586,95,1,[586]
NCP-28.zip,2,NCP,873,2406,228,2,"[2405, 2406]"
NCP-3.zip,2,NCP,137,1418,126,2,"[1418, 1419]"
NCP-10.zip,2,NCP,279,1705,58,2,"[1704, 1705]"
CP-28.zip,1,CP,3796,5740,28,1,[5740]
NCP-19.zip,2,NCP,54,1241,147,2,"[1241, 1242]"
CP-28.zip,1,CP,3768,5712,19,1,[5712]
NCP-2.zip,2,NCP,120,1382,58,2,"[1381, 1382]"
CP-16.zip,1,CP,1603,4291,22,1,[4291]
CP-2.zip,1,CP,1118,3336,173,1,[3336]
NCP-30.zip,2,NCP,939,2482,49,1,[2482]
Normal-8.zip,0,Normal,1874,329,90,1,[329]
Normal-3.zip,0,Normal,746,181,110,1,[181]
CP-21.zip,1,CP,608,2970,86,1,[2970]
Normal-22.zip,0,Normal,2601,111,37,1,[111]
NCP-4.zip,2,NCP,16,1165,48,2,"[1164, 1165]"
NCP-1.zip,2,NCP,1036,2607,441,1,[2607]
NCP-19.zip,2,NCP,528,2213,59,2,"[2212, 2213]"
NCP-6.zip,2,NCP,217,1579,58,2,"[1578, 1579]"
CP-10.zip,1,CP,1416,3897,58,2,"[3897, 3898]"
CP-30.zip,1,CP,4043,5592,41,1,[5592]
CP-30.zip,1,CP,3933,5636,69,2,"[5636, 5637]"
CP-20.zip,1,CP,2667,3246,24,3,"[3246, 3247, 3248]"
Normal-1.zip,0,Normal,1677,824,64,4,"[823, 824, 825, 826]"
CP-18.zip,1,CP,1779,3552,59,2,"[3551, 3552]"
Normal-25.zip,0,Normal,3855,5367,209,1,[5367]
CP-24.zip,1,CP,691,3053,72,1,[3053]
CP-6.zip,1,CP,1239,3457,134,1,[3457]
CP-21.zip,1,CP,602,2964,84,1,[2964]
NCP-1.zip,2,NCP,105,1348,61,2,"[1347, 1348]"
CP-3.zip,1,CP,1151,3369,158,1,[3369]
NCP-15.zip,2,NCP,413,1975,110,4,"[1975, 1976, 1977, 1979]"
CP-8.zip,1,CP,1327,3691,64,3,"[3690, 3691, 3692]"
CP-6.zip,1,CP,1237,3455,178,1,[3455]
Normal-11.zip,0,Normal,1959,414,97,1,[414]
Normal-25.zip,0,Normal,3713,5343,27,1,[5343]
CP-21.zip,1,CP,597,2959,305,1,[2959]
CP-9.zip,1,CP,1356,3757,60,2,"[3756, 3757]"
NCP-7.zip,2,NCP,2483,2686,40,1,[2686]
NCP-27.zip,2,NCP,1048,2620,58,2,"[2620, 2621]"
Normal-3.zip,0,Normal,1767,1159,28,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-5.zip,1,CP,1219,3437,179,1,[3437]
NCP-4.zip,2,NCP,145,1434,139,2,"[1434, 1435]"
CP-15.zip,1,CP,1575,4263,20,1,[4263]
NCP-18.zip,2,NCP,516,2188,135,2,"[2188, 2189]"
CP-9.zip,1,CP,1360,3768,67,3,"[3767, 3768, 3769]"
CP-13.zip,1,CP,1488,4065,66,3,"[4064, 4065, 4066]"
CP-1.zip,1,CP,1077,3122,74,2,"[3121, 3122]"
Normal-14.zip,0,Normal,2084,539,92,1,[539]
Normal-3.zip,0,Normal,1767,1163,76,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-2.zip,0,Normal,1746,1063,68,2,"[1063, 1064]"
NCP-12.zip,2,NCP,332,1812,167,2,"[1812, 1813]"
Normal-12.zip,0,Normal,1990,445,97,1,[445]
CP-7.zip,1,CP,1301,3599,294,4,"[3598, 3599, 3600, 3601]"
CP-1.zip,1,CP,1070,3112,104,1,[3112]
CP-13.zip,1,CP,1493,4082,53,3,"[4080, 4081, 4082]"
NCP-19.zip,2,NCP,520,2196,129,2,"[2196, 2197]"
NCP-3.zip,2,NCP,137,1419,53,2,"[1418, 1419]"
NCP-30.zip,2,NCP,937,2479,22,1,[2479]
NCP-22.zip,2,NCP,865,2391,260,2,"[2390, 2391]"
NCP-7.zip,2,NCP,230,1605,58,2,"[1604, 1605]"
CP-7.zip,1,CP,1302,3603,207,4,"[3602, 3603, 3604, 3605]"
CP-16.zip,1,CP,1588,4276,20,1,[4276]
Normal-18.zip,0,Normal,2195,650,79,1,[650]
Normal-17.zip,0,Normal,2173,628,96,1,[628]
NCP-22.zip,2,NCP,878,2413,117,2,"[2412, 2413]"
Normal-18.zip,0,Normal,2188,643,88,1,[643]
CP-18.zip,1,CP,1774,3526,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-6.zip,0,Normal,1815,270,91,1,[270]
CP-5.zip,1,CP,1208,3426,321,1,[3426]
NCP-6.zip,2,NCP,22,1176,68,2,"[1175, 1176]"
NCP-15.zip,2,NCP,413,1979,54,4,"[1975, 1976, 1977, 1979]"
CP-18.zip,1,CP,1771,3521,51,4,"[3518, 3519, 3520, 3521]"
CP-4.zip,1,CP,1172,3390,195,1,[3390]
CP-26.zip,1,CP,3721,5655,206,2,"[5654, 5655]"
CP-27.zip,1,CP,3754,5698,21,1,[5698]
CP-19.zip,1,CP,1784,3592,69,4,"[3590, 3591, 3592, 3593]"
CP-9.zip,1,CP,1372,3799,49,3,"[3797, 3798, 3799]"
NCP-2.zip,2,NCP,113,1367,137,2,"[1367, 1368]"
Normal-22.zip,0,Normal,2318,773,105,1,[773]
CP-18.zip,1,CP,1770,3517,57,1,[3517]
Normal-21.zip,0,Normal,2293,748,88,1,[748]
Normal-22.zip,0,Normal,2595,105,43,1,[105]
NCP-18.zip,2,NCP,50,1234,59,2,"[1233, 1234]"
Normal-2.zip,0,Normal,1757,1106,71,4,"[1105, 1106, 1107, 1108]"
CP-8.zip,1,CP,1327,3692,64,3,"[3690, 3691, 3692]"
CP-18.zip,1,CP,1781,3577,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-30.zip,1,CP,3932,5635,67,2,"[5634, 5635]"
NCP-20.zip,2,NCP,566,2291,67,2,"[2290, 2291]"
NCP-10.zip,2,NCP,2715,2708,51,1,[2708]
CP-23.zip,1,CP,660,3022,82,1,[3022]
Normal-9.zip,0,Normal,1916,371,106,1,[371]
CP-20.zip,1,CP,2757,3288,211,1,[3288]
Normal-7.zip,0,Normal,1845,300,99,1,[300]
Normal-13.zip,0,Normal,2050,505,74,1,[505]
CP-1.zip,1,CP,1092,3310,216,1,[3310]
Normal-2.zip,0,Normal,1763,1135,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-9.zip,0,Normal,1898,353,72,1,[353]
NCP-21.zip,2,NCP,576,2310,124,1,[2310]
Normal-1.zip,0,Normal,1701,956,70,2,"[955, 956]"
Normal-17.zip,0,Normal,2178,633,85,1,[633]
CP-8.zip,1,CP,1322,3681,56,2,"[3680, 3681]"
Normal-6.zip,0,Normal,1802,257,107,1,[257]
NCP-20.zip,2,NCP,547,2251,159,2,"[2251, 2252]"
NCP-3.zip,2,NCP,1285,2726,66,1,[2726]
Normal-7.zip,0,Normal,1828,283,96,1,[283]
NCP-20.zip,2,NCP,546,2250,57,2,"[2249, 2250]"
Normal-2.zip,0,Normal,1750,1078,69,3,"[1074, 1077, 1078]"
Normal-9.zip,0,Normal,1892,347,77,1,[347]
NCP-19.zip,2,NCP,534,2225,115,2,"[2225, 2226]"
CP-29.zip,1,CP,3806,5750,20,1,[5750]
NCP-13.zip,2,NCP,355,1861,125,2,"[1861, 1862]"
Normal-6.zip,0,Normal,1813,268,80,1,[268]
Normal-2.zip,0,Normal,1756,1101,66,4,"[1101, 1102, 1103, 1104]"
CP-20.zip,1,CP,2759,3290,36,1,[3290]
Normal-17.zip,0,Normal,2183,638,110,1,[638]
NCP-6.zip,2,NCP,228,1601,67,2,"[1600, 1601]"
NCP-5.zip,2,NCP,197,1539,53,2,"[1538, 1539]"
CP-28.zip,1,CP,3766,5710,24,1,[5710]
CP-10.zip,1,CP,1399,3858,45,2,"[3858, 3859]"
Normal-14.zip,0,Normal,2074,529,82,1,[529]
Normal-2.zip,0,Normal,1733,1026,71,2,"[1026, 1027]"
NCP-11.zip,2,NCP,300,1747,58,2,"[1746, 1747]"
CP-17.zip,1,CP,1650,4338,31,1,[4338]
CP-20.zip,1,CP,2455,2937,116,1,[2937]
Normal-20.zip,0,Normal,2279,734,78,1,[734]
CP-8.zip,1,CP,1329,3697,45,3,"[3695, 3696, 3697]"
NCP-16.zip,2,NCP,444,2042,146,2,"[2042, 2043]"
Normal-12.zip,0,Normal,1999,454,78,1,[454]
CP-17.zip,1,CP,1624,4312,20,1,[4312]
NCP-10.zip,2,NCP,2720,2676,45,1,[2676]
CP-2.zip,1,CP,1107,3325,183,1,[3325]
CP-18.zip,1,CP,1777,3542,62,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-15.zip,2,NCP,403,1955,110,2,"[1955, 1956]"
NCP-3.zip,2,NCP,134,1413,54,2,"[1412, 1413]"
CP-13.zip,1,CP,1500,4099,97,1,[4099]
CP-25.zip,1,CP,712,3074,118,1,[3074]
CP-23.zip,1,CP,648,3010,104,1,[3010]
CP-19.zip,1,CP,2443,2917,98,3,"[2915, 2916, 2917]"
NCP-16.zip,2,NCP,441,2036,115,2,"[2036, 2037]"
Normal-23.zip,0,Normal,2628,138,34,1,[138]
CP-19.zip,1,CP,1795,3596,41,2,"[3596, 3597]"
NCP-27.zip,2,NCP,1016,2580,20,3,"[2580, 2581, 2582]"
Normal-24.zip,0,Normal,2659,169,39,1,[169]
CP-17.zip,1,CP,1619,4307,29,1,[4307]
Normal-26.zip,0,Normal,3861,5373,211,1,[5373]
NCP-19.zip,2,NCP,519,2195,53,2,"[2194, 2195]"
NCP-6.zip,2,NCP,213,1571,66,2,"[1570, 1571]"
Normal-25.zip,0,Normal,3860,5372,212,1,[5372]
NCP-5.zip,2,NCP,192,1529,57,2,"[1528, 1529]"
CP-3.zip,1,CP,1153,3371,179,1,[3371]
CP-3.zip,1,CP,1159,3377,287,1,[3377]
NCP-30.zip,2,NCP,931,2473,21,1,[2473]
CP-6.zip,1,CP,1255,3473,107,1,[3473]
NCP-4.zip,2,NCP,169,1482,133,2,"[1482, 1483]"
NCP-12.zip,2,NCP,340,1828,128,2,"[1828, 1829]"
CP-26.zip,1,CP,3729,5665,36,3,"[5665, 5666, 5667]"
Normal-11.zip,0,Normal,1976,431,74,1,[431]
CP-9.zip,1,CP,1372,3798,49,3,"[3797, 3798, 3799]"
NCP-4.zip,2,NCP,161,1467,57,2,"[1466, 1467]"
CP-22.zip,1,CP,613,2975,78,1,[2975]
NCP-17.zip,2,NCP,460,2074,106,2,"[2074, 2075]"
NCP-21.zip,2,NCP,577,2311,145,2,"[2311, 2312]"
CP-25.zip,1,CP,741,3103,523,1,[3103]
CP-14.zip,1,CP,1532,4172,50,2,"[4171, 4172]"
NCP-11.zip,2,NCP,299,1744,139,2,"[1744, 1745]"
NCP-14.zip,2,NCP,372,1896,45,2,"[1895, 1896]"
CP-9.zip,1,CP,1356,3756,60,2,"[3756, 3757]"
Normal-11.zip,0,Normal,1968,423,96,1,[423]
CP-14.zip,1,CP,1525,4156,60,2,"[4155, 4156]"
CP-22.zip,1,CP,618,2980,166,1,[2980]
CP-17.zip,1,CP,1639,4327,26,1,[4327]
Normal-19.zip,0,Normal,2245,700,83,1,[700]
CP-13.zip,1,CP,1518,4139,67,3,"[4138, 4139, 4140]"
NCP-11.zip,2,NCP,29,1190,132,2,"[1190, 1191]"
CP-16.zip,1,CP,1615,4303,29,1,[4303]
CP-29.zip,1,CP,3823,5767,26,1,[5767]
NCP-20.zip,2,NCP,574,2306,139,2,"[2306, 2307]"
NCP-12.zip,2,NCP,340,1829,54,2,"[1828, 1829]"
Normal-21.zip,0,Normal,2285,740,68,1,[740]
NCP-16.zip,2,NCP,455,2065,56,2,"[2064, 2065]"
NCP-16.zip,2,NCP,436,2026,61,2,"[2025, 2026]"
NCP-14.zip,2,NCP,383,1918,139,2,"[1918, 1919]"
NCP-30.zip,2,NCP,988,2538,287,2,"[2538, 2539]"
NCP-7.zip,2,NCP,247,1641,66,2,"[1640, 1641]"
CP-15.zip,1,CP,1571,4259,16,1,[4259]
Normal-16.zip,0,Normal,2137,592,94,1,[592]
CP-7.zip,1,CP,1304,3637,218,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
CP-6.zip,1,CP,1235,3453,155,1,[3453]
Normal-4.zip,0,Normal,776,211,353,1,[211]
Normal-18.zip,0,Normal,2189,644,82,1,[644]
Normal-6.zip,0,Normal,1799,254,97,1,[254]
Normal-15.zip,0,Normal,2113,568,93,1,[568]
CP-3.zip,1,CP,1131,3349,157,1,[3349]
Normal-6.zip,0,Normal,1819,274,91,1,[274]
CP-18.zip,1,CP,1781,3571,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-16.zip,2,NCP,455,2064,132,2,"[2064, 2065]"
Normal-8.zip,0,Normal,1888,343,99,1,[343]
NCP-20.zip,2,NCP,571,2301,68,2,"[2300, 2301]"
NCP-7.zip,2,NCP,247,1640,159,2,"[1640, 1641]"
CP-3.zip,1,CP,1137,3355,147,1,[3355]
CP-11.zip,1,CP,1423,3913,53,3,"[3911, 3912, 3913]"
NCP-1.zip,2,NCP,105,1347,145,2,"[1347, 1348]"
NCP-14.zip,2,NCP,377,1907,62,2,"[1906, 1907]"
CP-14.zip,1,CP,1535,4178,53,2,"[4178, 4179]"
Normal-9.zip,0,Normal,1900,355,93,1,[355]
CP-2.zip,1,CP,1125,3343,115,1,[3343]
CP-6.zip,1,CP,1243,3461,176,1,[3461]
NCP-6.zip,2,NCP,203,1550,140,2,"[1550, 1551]"
Normal-1.zip,0,Normal,1670,789,63,6,"[787, 788, 789, 790, 791, 792]"
NCP-29.zip,2,NCP,909,2451,401,1,[2451]
NCP-25.zip,2,NCP,3949,5463,35,1,[5463]
Normal-26.zip,0,Normal,3879,5391,28,1,[5391]
NCP-11.zip,2,NCP,29,1191,56,2,"[1190, 1191]"
CP-3.zip,1,CP,1129,3347,158,1,[3347]
NCP-15.zip,2,NCP,406,1961,146,2,"[1961, 1962]"
NCP-4.zip,2,NCP,151,1446,129,2,"[1446, 1447]"
CP-19.zip,1,CP,1789,3206,64,4,"[3204, 3205, 3206, 3207]"
NCP-6.zip,2,NCP,227,1599,61,2,"[1598, 1599]"
CP-12.zip,1,CP,1462,4003,51,3,"[4002, 4003, 4004]"
CP-3.zip,1,CP,1147,3365,164,1,[3365]
Normal-23.zip,0,Normal,2629,139,36,1,[139]
Normal-1.zip,0,Normal,1700,953,64,2,"[953, 954]"
CP-15.zip,1,CP,1561,4241,49,2,"[4241, 4242]"
NCP-16.zip,2,NCP,437,2028,60,2,"[2027, 2028]"
CP-18.zip,1,CP,1654,4342,23,1,[4342]
Normal-20.zip,0,Normal,2273,728,75,1,[728]
Normal-14.zip,0,Normal,2067,522,94,1,[522]
NCP-29.zip,2,NCP,911,2453,48,1,[2453]
Normal-2.zip,0,Normal,1756,1104,65,4,"[1101, 1102, 1103, 1104]"
Normal-11.zip,0,Normal,1989,444,105,1,[444]
NCP-15.zip,2,NCP,403,1956,47,2,"[1955, 1956]"
NCP-13.zip,2,NCP,348,1848,48,2,"[1847, 1848]"
NCP-28.zip,2,NCP,844,2359,594,1,[2359]
NCP-18.zip,2,NCP,51,1235,141,2,"[1235, 1236]"
CP-28.zip,1,CP,3789,5733,26,1,[5733]
Normal-2.zip,0,Normal,1763,1138,65,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-10.zip,2,NCP,278,1702,137,2,"[1702, 1703]"
CP-28.zip,1,CP,3770,5714,23,1,[5714]
NCP-23.zip,2,NCP,93,1323,66,2,"[1322, 1323]"
NCP-14.zip,2,NCP,390,1930,126,2,"[1930, 1931]"
NCP-8.zip,2,NCP,26,1184,82,2,"[1184, 1185]"
Normal-2.zip,0,Normal,1763,1142,71,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-6.zip,2,NCP,201,1547,62,2,"[1546, 1547]"
Normal-23.zip,0,Normal,2626,136,33,1,[136]
NCP-25.zip,2,NCP,3707,5534,50,1,[5534]
Normal-21.zip,0,Normal,2305,760,104,1,[760]
Normal-6.zip,0,Normal,1818,273,87,1,[273]
CP-22.zip,1,CP,641,3003,136,1,[3003]
Normal-7.zip,0,Normal,1836,291,104,1,[291]
Normal-27.zip,0,Normal,3894,5417,287,1,[5417]
NCP-30.zip,2,NCP,981,2526,23,2,"[2525, 2526]"
NCP-1.zip,2,NCP,102,1341,132,2,"[1341, 1342]"
NCP-14.zip,2,NCP,387,1924,128,2,"[1924, 1925]"
NCP-2.zip,2,NCP,117,1376,55,2,"[1375, 1376]"
NCP-5.zip,2,NCP,190,1524,152,2,"[1524, 1525]"
CP-26.zip,1,CP,3639,5598,241,1,[5598]
Normal-1.zip,0,Normal,1670,787,58,6,"[787, 788, 789, 790, 791, 792]"
Normal-2.zip,0,Normal,1757,1108,68,4,"[1105, 1106, 1107, 1108]"
Normal-13.zip,0,Normal,2043,498,84,1,[498]
CP-2.zip,1,CP,1099,3317,198,1,[3317]
CP-7.zip,1,CP,1318,3673,56,1,[3673]
Normal-9.zip,0,Normal,1899,354,88,1,[354]
CP-12.zip,1,CP,1467,4014,60,2,"[4013, 4014]"
NCP-5.zip,2,NCP,197,1538,124,2,"[1538, 1539]"
CP-26.zip,1,CP,3730,5669,202,2,"[5668, 5669]"
NCP-22.zip,2,NCP,845,2363,428,4,"[2360, 2361, 2362, 2363]"
NCP-2.zip,2,NCP,127,1399,139,2,"[1399, 1400]"
Normal-26.zip,0,Normal,3893,5416,63,1,[5416]
NCP-8.zip,2,NCP,2669,2689,37,1,[2689]
CP-18.zip,1,CP,1778,3549,64,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
CP-25.zip,1,CP,722,3084,70,1,[3084]
NCP-4.zip,2,NCP,157,1458,114,2,"[1458, 1459]"
NCP-23.zip,2,NCP,92,1320,87,2,"[1320, 1321]"
CP-11.zip,1,CP,1424,3914,60,2,"[3914, 3915]"
NCP-19.zip,2,NCP,529,2215,33,3,"[2214, 2215, 2217]"
CP-24.zip,1,CP,704,3066,417,1,[3066]
NCP-6.zip,2,NCP,201,1546,149,2,"[1546, 1547]"
Normal-17.zip,0,Normal,2177,632,88,1,[632]
NCP-14.zip,2,NCP,383,1919,58,2,"[1918, 1919]"
Normal-2.zip,0,Normal,1737,1040,80,4,"[1037, 1038, 1039, 1040]"
Normal-26.zip,0,Normal,3881,5393,22,1,[5393]
Normal-3.zip,0,Normal,1767,1161,71,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-14.zip,1,CP,1525,4155,60,2,"[4155, 4156]"
NCP-12.zip,2,NCP,341,1832,55,3,"[1830, 1832, 1834]"
Normal-13.zip,0,Normal,2034,489,91,1,[489]
NCP-26.zip,2,NCP,3978,5485,49,1,[5485]
NCP-22.zip,2,NCP,864,2389,221,2,"[2388, 2389]"
NCP-9.zip,2,NCP,2682,2652,47,1,[2652]
NCP-7.zip,2,NCP,2461,2642,42,1,[2642]
Normal-21.zip,0,Normal,2303,758,110,1,[758]
NCP-8.zip,2,NCP,2670,2690,41,1,[2690]
CP-7.zip,1,CP,1315,3666,59,2,"[3665, 3666]"
CP-19.zip,1,CP,2449,2927,118,1,[2927]
CP-19.zip,1,CP,1789,3204,59,4,"[3204, 3205, 3206, 3207]"
Normal-6.zip,0,Normal,1803,258,100,1,[258]
Normal-1.zip,0,Normal,1675,812,73,1,[812]
NCP-25.zip,2,NCP,3705,5532,63,1,[5532]
Normal-1.zip,0,Normal,1727,1010,63,4,"[1009, 1010, 1011, 1012]"
NCP-3.zip,2,NCP,1283,2724,70,1,[2724]
CP-18.zip,1,CP,1774,3524,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
CP-18.zip,1,CP,1774,3525,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
CP-30.zip,1,CP,3919,5543,66,4,"[5543, 5544, 5545, 5546]"
NCP-22.zip,2,NCP,85,1304,58,2,"[1303, 1304]"
Normal-18.zip,0,Normal,2192,647,79,1,[647]
CP-30.zip,1,CP,3935,5641,70,1,[5641]
NCP-6.zip,2,NCP,227,1598,146,2,"[1598, 1599]"
Normal-20.zip,0,Normal,2250,705,76,1,[705]
CP-12.zip,1,CP,1464,4008,63,2,"[4007, 4008]"
CP-29.zip,1,CP,3807,5751,20,1,[5751]
Normal-12.zip,0,Normal,1993,448,97,1,[448]
NCP-19.zip,2,NCP,528,2212,140,2,"[2212, 2213]"
NCP-26.zip,2,NCP,3987,5511,60,1,[5511]
NCP-25.zip,2,NCP,3969,5478,50,1,[5478]
CP-17.zip,1,CP,1638,4326,25,1,[4326]
CP-17.zip,1,CP,1643,4331,24,1,[4331]
CP-17.zip,1,CP,1629,4317,23,1,[4317]
CP-11.zip,1,CP,1423,3912,53,3,"[3911, 3912, 3913]"
Normal-2.zip,0,Normal,1743,1056,73,2,"[1056, 1057]"
Normal-9.zip,0,Normal,1915,370,91,1,[370]
Normal-22.zip,0,Normal,2590,100,41,1,[100]
NCP-11.zip,2,NCP,297,1741,60,2,"[1739, 1741]"
CP-30.zip,1,CP,3919,5545,70,4,"[5543, 5544, 5545, 5546]"
NCP-25.zip,2,NCP,3971,5480,50,1,[5480]
CP-11.zip,1,CP,1454,3983,53,3,"[3982, 3983, 3984]"
Normal-21.zip,0,Normal,2282,737,69,1,[737]
NCP-12.zip,2,NCP,318,1783,150,2,"[1783, 1784]"
NCP-10.zip,2,NCP,279,1704,139,2,"[1704, 1705]"
CP-2.zip,1,CP,1108,3326,135,1,[3326]
Normal-2.zip,0,Normal,1733,1027,71,2,"[1026, 1027]"
CP-32.zip,1,CP,1781,3567,67,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,2725,2681,51,1,[2681]
CP-11.zip,1,CP,1425,3917,49,3,"[3916, 3917, 3918]"
Normal-1.zip,0,Normal,1701,955,70,2,"[955, 956]"
CP-19.zip,1,CP,1787,3195,59,1,[3195]
================================================
FILE: Finetune/CC-CCII/csv/CC_CCII_fold1_valid.csv
================================================
zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids
Normal-2.zip,0,Normal,1740,1050,21,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-10.zip,1,CP,1387,3830,51,2,"[3829, 3830]"
NCP-10.zip,2,NCP,2719,2675,44,1,[2675]
CP-1.zip,1,CP,1065,3104,58,1,[3104]
CP-10.zip,1,CP,1392,3843,62,2,"[3843, 3844]"
CP-13.zip,1,CP,1508,4117,57,3,"[4115, 4116, 4117]"
NCP-22.zip,2,NCP,863,2387,282,2,"[2386, 2387]"
Normal-3.zip,0,Normal,763,198,102,1,[198]
Normal-23.zip,0,Normal,2635,145,27,1,[145]
NCP-20.zip,2,NCP,572,2303,58,2,"[2302, 2303]"
Normal-1.zip,0,Normal,1683,862,65,6,"[861, 862, 864, 865, 868, 869]"
CP-10.zip,1,CP,1398,3856,44,2,"[3856, 3857]"
CP-15.zip,1,CP,1566,4252,54,2,"[4252, 4253]"
NCP-10.zip,2,NCP,280,1707,51,2,"[1706, 1707]"
CP-19.zip,1,CP,1785,3187,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-15.zip,1,CP,1570,4258,22,1,[4258]
CP-10.zip,1,CP,1413,3890,66,2,"[3889, 3890]"
CP-7.zip,1,CP,1303,3618,42,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-19.zip,1,CP,2435,2903,295,3,"[2901, 2902, 2903]"
NCP-22.zip,2,NCP,860,2382,212,2,"[2382, 2383]"
NCP-22.zip,2,NCP,883,2419,52,2,"[2419, 2420]"
Normal-2.zip,0,Normal,1751,1079,61,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
Normal-25.zip,0,Normal,3852,5364,195,1,[5364]
NCP-20.zip,2,NCP,559,2275,127,2,"[2275, 2276]"
NCP-18.zip,2,NCP,498,2153,58,2,"[2152, 2153]"
Normal-27.zip,0,Normal,3911,5448,64,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
Normal-17.zip,0,Normal,2158,613,100,1,[613]
NCP-7.zip,2,NCP,246,1639,58,2,"[1638, 1639]"
NCP-17.zip,2,NCP,473,2102,61,2,"[2101, 2102]"
Normal-2.zip,0,Normal,1732,1025,73,1,[1025]
NCP-10.zip,2,NCP,271,1688,146,2,"[1688, 1689]"
CP-7.zip,1,CP,1303,3627,252,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-11.zip,2,NCP,286,1717,121,2,"[1717, 1718]"
Normal-19.zip,0,Normal,2223,678,95,1,[678]
NCP-22.zip,2,NCP,822,2333,31,2,"[2332, 2333]"
NCP-28.zip,2,NCP,870,2400,47,2,"[2399, 2400]"
NCP-21.zip,2,NCP,75,1284,54,2,"[1283, 1284]"
NCP-17.zip,2,NCP,469,2094,66,2,"[2093, 2094]"
NCP-8.zip,2,NCP,255,1656,139,2,"[1656, 1657]"
NCP-6.zip,2,NCP,211,1566,137,2,"[1566, 1567]"
NCP-25.zip,2,NCP,3966,5476,43,1,[5476]
NCP-21.zip,2,NCP,575,2309,61,2,"[2308, 2309]"
Normal-2.zip,0,Normal,1740,1045,102,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
Normal-1.zip,0,Normal,1681,845,69,1,[845]
NCP-11.zip,2,NCP,310,1768,70,2,"[1767, 1768]"
NCP-22.zip,2,NCP,87,1307,145,2,"[1307, 1308]"
Normal-4.zip,0,Normal,786,221,124,1,[221]
Normal-20.zip,0,Normal,2270,725,86,1,[725]
NCP-18.zip,2,NCP,515,2187,58,2,"[2186, 2187]"
NCP-5.zip,2,NCP,172,1488,139,2,"[1488, 1489]"
NCP-20.zip,2,NCP,551,2260,65,2,"[2259, 2260]"
NCP-21.zip,2,NCP,61,1256,60,2,"[1255, 1256]"
CP-13.zip,1,CP,1508,4116,57,3,"[4115, 4116, 4117]"
NCP-22.zip,2,NCP,863,2386,228,2,"[2386, 2387]"
CP-10.zip,1,CP,1413,3889,67,2,"[3889, 3890]"
Normal-2.zip,0,Normal,1740,1047,60,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-19.zip,1,CP,1785,3188,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-7.zip,1,CP,1303,3624,224,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-1.zip,0,Normal,1683,868,64,6,"[861, 862, 864, 865, 868, 869]"
CP-7.zip,1,CP,1303,3611,257,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3610,51,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-21.zip,2,NCP,61,1255,142,2,"[1255, 1256]"
Normal-1.zip,0,Normal,1683,865,72,6,"[861, 862, 864, 865, 868, 869]"
CP-7.zip,1,CP,1303,3630,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-10.zip,1,CP,1392,3844,62,2,"[3843, 3844]"
CP-15.zip,1,CP,1566,4253,54,2,"[4252, 4253]"
Normal-27.zip,0,Normal,3911,5447,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
Normal-27.zip,0,Normal,3911,5449,64,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
NCP-22.zip,2,NCP,87,1308,61,2,"[1307, 1308]"
Normal-1.zip,0,Normal,1683,861,65,6,"[861, 862, 864, 865, 868, 869]"
CP-7.zip,1,CP,1303,3613,232,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-20.zip,2,NCP,551,2259,154,2,"[2259, 2260]"
CP-19.zip,1,CP,1785,3191,79,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-18.zip,2,NCP,515,2186,139,2,"[2186, 2187]"
Normal-2.zip,0,Normal,1740,1048,60,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-7.zip,1,CP,1303,3626,51,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3606,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-19.zip,1,CP,2435,2902,100,3,"[2901, 2902, 2903]"
CP-7.zip,1,CP,1303,3612,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3619,213,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-5.zip,2,NCP,172,1489,59,2,"[1488, 1489]"
CP-7.zip,1,CP,1303,3617,27,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-10.zip,1,CP,1398,3857,44,2,"[3856, 3857]"
CP-7.zip,1,CP,1303,3608,55,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-28.zip,2,NCP,870,2399,247,2,"[2399, 2400]"
NCP-22.zip,2,NCP,883,2420,200,2,"[2419, 2420]"
CP-7.zip,1,CP,1303,3609,271,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-10.zip,1,CP,1387,3829,51,2,"[3829, 3830]"
NCP-8.zip,2,NCP,255,1657,58,2,"[1656, 1657]"
Normal-2.zip,0,Normal,1740,1051,59,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
Normal-2.zip,0,Normal,1751,1081,62,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-17.zip,2,NCP,469,2093,159,2,"[2093, 2094]"
CP-7.zip,1,CP,1303,3621,230,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3607,247,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-27.zip,0,Normal,3911,5452,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
NCP-17.zip,2,NCP,473,2101,145,2,"[2101, 2102]"
CP-19.zip,1,CP,2435,2901,104,3,"[2901, 2902, 2903]"
Normal-2.zip,0,Normal,1740,1049,21,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-19.zip,1,CP,1785,3189,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-10.zip,2,NCP,271,1689,61,2,"[1688, 1689]"
CP-7.zip,1,CP,1303,3629,244,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3631,242,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-27.zip,0,Normal,3911,5451,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
NCP-22.zip,2,NCP,822,2332,36,2,"[2332, 2333]"
CP-7.zip,1,CP,1303,3622,28,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-19.zip,1,CP,1785,3190,79,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-11.zip,2,NCP,310,1767,169,2,"[1767, 1768]"
Normal-2.zip,0,Normal,1751,1080,61,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-21.zip,2,NCP,575,2308,144,2,"[2308, 2309]"
CP-19.zip,1,CP,1785,3186,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-21.zip,2,NCP,75,1283,128,2,"[1283, 1284]"
NCP-11.zip,2,NCP,286,1718,51,2,"[1717, 1718]"
CP-7.zip,1,CP,1303,3628,50,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-2.zip,0,Normal,1740,1046,300,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-7.zip,1,CP,1303,3620,45,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3614,27,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-27.zip,0,Normal,3911,5450,68,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
Normal-1.zip,0,Normal,1683,864,72,6,"[861, 862, 864, 865, 868, 869]"
NCP-20.zip,2,NCP,572,2302,138,2,"[2302, 2303]"
Normal-2.zip,0,Normal,1751,1084,67,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
CP-7.zip,1,CP,1303,3625,32,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-2.zip,0,Normal,1740,1052,59,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
NCP-10.zip,2,NCP,280,1706,121,2,"[1706, 1707]"
NCP-18.zip,2,NCP,498,2152,139,2,"[2152, 2153]"
CP-7.zip,1,CP,1303,3623,45,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3615,44,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3616,209,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-6.zip,2,NCP,211,1567,58,2,"[1566, 1567]"
Normal-2.zip,0,Normal,1751,1083,67,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-22.zip,2,NCP,860,2383,183,2,"[2382, 2383]"
NCP-20.zip,2,NCP,559,2276,54,2,"[2275, 2276]"
Normal-2.zip,0,Normal,1751,1082,62,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
CP-13.zip,1,CP,1508,4115,57,3,"[4115, 4116, 4117]"
Normal-1.zip,0,Normal,1683,869,64,6,"[861, 862, 864, 865, 868, 869]"
NCP-7.zip,2,NCP,246,1638,139,2,"[1638, 1639]"
NCP-5.zip,2,NCP,18,1169,57,2,"[1168, 1169]"
Normal-15.zip,0,Normal,2096,551,93,1,[551]
CP-21.zip,1,CP,2776,3307,31,1,[3307]
NCP-16.zip,2,NCP,449,2053,61,2,"[2052, 2053]"
NCP-15.zip,2,NCP,404,1958,46,2,"[1957, 1958]"
NCP-6.zip,2,NCP,210,1565,55,2,"[1564, 1565]"
CP-3.zip,1,CP,1144,3362,159,1,[3362]
Normal-8.zip,0,Normal,1879,334,88,1,[334]
Normal-1.zip,0,Normal,1721,1000,75,4,"[1000, 997, 998, 999]"
NCP-21.zip,2,NCP,583,2323,147,2,"[2323, 2324]"
NCP-1.zip,2,NCP,1039,2610,45,1,[2610]
Normal-8.zip,0,Normal,1882,337,86,1,[337]
Normal-21.zip,0,Normal,2307,762,80,1,[762]
CP-14.zip,1,CP,1528,4163,61,2,"[4163, 4164]"
CP-11.zip,1,CP,1443,3958,58,3,"[3957, 3958, 3959]"
NCP-18.zip,2,NCP,496,2149,70,2,"[2148, 2149]"
CP-7.zip,1,CP,1270,3489,204,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-7.zip,0,Normal,1834,289,82,1,[289]
NCP-13.zip,2,NCP,351,1853,145,2,"[1853, 1854]"
CP-18.zip,1,CP,1782,3584,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-1.zip,0,Normal,1676,816,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-11.zip,1,CP,1428,3923,221,3,"[3923, 3924, 3925]"
CP-8.zip,1,CP,1330,3699,58,3,"[3698, 3699, 3700]"
Normal-19.zip,0,Normal,2233,688,76,1,[688]
NCP-18.zip,2,NCP,514,2184,160,2,"[2184, 2185]"
Normal-6.zip,0,Normal,1804,259,102,1,[259]
Normal-22.zip,0,Normal,2598,108,38,1,[108]
CP-14.zip,1,CP,1534,4176,58,2,"[4176, 4177]"
CP-5.zip,1,CP,1217,3435,320,1,[3435]
NCP-14.zip,2,NCP,378,1908,168,2,"[1908, 1909]"
CP-18.zip,1,CP,1782,3582,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-25.zip,2,NCP,3963,5474,56,1,[5474]
NCP-22.zip,2,NCP,82,1298,55,2,"[1297, 1298]"
NCP-2.zip,2,NCP,1274,2715,55,1,[2715]
CP-22.zip,1,CP,619,2981,102,1,[2981]
Normal-24.zip,0,Normal,2661,171,31,1,[171]
CP-14.zip,1,CP,1540,4192,58,3,"[4191, 4192, 4193]"
NCP-10.zip,2,NCP,2724,2680,43,1,[2680]
Normal-2.zip,0,Normal,1742,1055,60,1,[1055]
CP-12.zip,1,CP,1486,4060,63,2,"[4059, 4060]"
NCP-19.zip,2,NCP,527,2211,48,2,"[2210, 2211]"
CP-10.zip,1,CP,1393,3846,60,2,"[3845, 3846]"
Normal-1.zip,0,Normal,1721,997,68,4,"[1000, 997, 998, 999]"
Normal-25.zip,0,Normal,3839,5351,220,1,[5351]
Normal-12.zip,0,Normal,1991,446,306,1,[446]
CP-19.zip,1,CP,1794,3595,38,2,"[3594, 3595]"
Normal-1.zip,0,Normal,1669,785,54,5,"[782, 783, 784, 785, 786]"
CP-18.zip,1,CP,1782,3580,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-11.zip,0,Normal,1963,418,95,1,[418]
CP-11.zip,1,CP,1428,3924,56,3,"[3923, 3924, 3925]"
Normal-9.zip,0,Normal,1918,373,85,1,[373]
Normal-16.zip,0,Normal,2118,573,89,1,[573]
NCP-4.zip,2,NCP,140,1424,128,2,"[1424, 1425]"
Normal-16.zip,0,Normal,2142,597,84,1,[597]
NCP-15.zip,2,NCP,410,1969,143,2,"[1969, 1970]"
Normal-3.zip,0,Normal,749,184,89,1,[184]
Normal-1.zip,0,Normal,1718,991,66,2,"[991, 992]"
NCP-5.zip,2,NCP,176,1497,53,2,"[1496, 1497]"
NCP-8.zip,2,NCP,265,1677,50,2,"[1676, 1677]"
CP-7.zip,1,CP,1270,3495,148,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-26.zip,2,NCP,3982,5489,34,1,[5489]
NCP-8.zip,2,NCP,2677,2695,51,1,[2695]
NCP-13.zip,2,NCP,357,1866,63,2,"[1865, 1866]"
NCP-13.zip,2,NCP,346,1843,139,2,"[1843, 1844]"
Normal-1.zip,0,Normal,1676,820,72,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-14.zip,2,NCP,379,1911,62,2,"[1910, 1911]"
NCP-1.zip,2,NCP,104,1345,139,2,"[1345, 1346]"
NCP-2.zip,2,NCP,116,1373,127,2,"[1373, 1374]"
NCP-17.zip,2,NCP,466,2087,145,2,"[2087, 2088]"
CP-11.zip,1,CP,1443,3957,139,3,"[3957, 3958, 3959]"
NCP-5.zip,2,NCP,181,1507,58,2,"[1506, 1507]"
NCP-18.zip,2,NCP,496,2148,168,2,"[2148, 2149]"
NCP-8.zip,2,NCP,265,1676,119,2,"[1676, 1677]"
Normal-1.zip,0,Normal,1669,782,62,5,"[782, 783, 784, 785, 786]"
CP-7.zip,1,CP,1270,3501,420,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-1.zip,0,Normal,1676,822,69,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-21.zip,2,NCP,583,2324,62,2,"[2323, 2324]"
NCP-19.zip,2,NCP,527,2210,114,2,"[2210, 2211]"
NCP-15.zip,2,NCP,404,1957,108,2,"[1957, 1958]"
NCP-17.zip,2,NCP,466,2088,61,2,"[2087, 2088]"
NCP-4.zip,2,NCP,140,1425,54,2,"[1424, 1425]"
NCP-13.zip,2,NCP,346,1844,58,2,"[1843, 1844]"
CP-7.zip,1,CP,1270,3494,129,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3497,133,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-18.zip,1,CP,1782,3579,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-1.zip,0,Normal,1676,818,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-11.zip,1,CP,1428,3925,56,3,"[3923, 3924, 3925]"
CP-7.zip,1,CP,1270,3488,287,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3500,160,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-1.zip,0,Normal,1669,784,196,5,"[782, 783, 784, 785, 786]"
Normal-1.zip,0,Normal,1669,783,62,5,"[782, 783, 784, 785, 786]"
CP-18.zip,1,CP,1782,3586,69,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-1.zip,0,Normal,1721,998,68,4,"[1000, 997, 998, 999]"
Normal-1.zip,0,Normal,1676,817,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-14.zip,1,CP,1540,4193,58,3,"[4191, 4192, 4193]"
Normal-1.zip,0,Normal,1676,821,72,7,"[816, 817, 818, 819, 820, 821, 822]"
Normal-1.zip,0,Normal,1676,819,65,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-22.zip,2,NCP,82,1297,129,2,"[1297, 1298]"
Normal-1.zip,0,Normal,1718,992,66,2,"[991, 992]"
CP-7.zip,1,CP,1270,3496,154,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-5.zip,2,NCP,181,1506,139,2,"[1506, 1507]"
CP-7.zip,1,CP,1270,3492,137,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-13.zip,2,NCP,357,1865,150,2,"[1865, 1866]"
CP-11.zip,1,CP,1443,3959,58,3,"[3957, 3958, 3959]"
Normal-1.zip,0,Normal,1669,786,54,5,"[782, 783, 784, 785, 786]"
NCP-6.zip,2,NCP,210,1564,131,2,"[1564, 1565]"
CP-19.zip,1,CP,1794,3594,38,2,"[3594, 3595]"
NCP-15.zip,2,NCP,410,1970,60,2,"[1969, 1970]"
NCP-14.zip,2,NCP,379,1910,147,2,"[1910, 1911]"
CP-7.zip,1,CP,1270,3491,142,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-1.zip,0,Normal,1721,999,75,4,"[1000, 997, 998, 999]"
CP-14.zip,1,CP,1540,4191,221,3,"[4191, 4192, 4193]"
CP-12.zip,1,CP,1486,4059,63,2,"[4059, 4060]"
CP-14.zip,1,CP,1528,4164,61,2,"[4163, 4164]"
NCP-16.zip,2,NCP,449,2052,145,2,"[2052, 2053]"
NCP-13.zip,2,NCP,351,1854,61,2,"[1853, 1854]"
CP-7.zip,1,CP,1270,3498,247,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-8.zip,1,CP,1330,3698,58,3,"[3698, 3699, 3700]"
NCP-2.zip,2,NCP,116,1374,54,2,"[1373, 1374]"
NCP-18.zip,2,NCP,514,2185,67,2,"[2184, 2185]"
CP-18.zip,1,CP,1782,3587,69,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-8.zip,1,CP,1330,3700,58,3,"[3698, 3699, 3700]"
NCP-14.zip,2,NCP,378,1909,69,2,"[1908, 1909]"
NCP-1.zip,2,NCP,104,1346,58,2,"[1345, 1346]"
CP-14.zip,1,CP,1534,4177,58,2,"[4176, 4177]"
CP-7.zip,1,CP,1270,3490,237,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3493,193,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-18.zip,1,CP,1782,3583,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-7.zip,1,CP,1270,3502,21,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-7.zip,1,CP,1270,3499,363,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-5.zip,2,NCP,18,1168,135,2,"[1168, 1169]"
CP-18.zip,1,CP,1782,3585,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-5.zip,2,NCP,176,1496,126,2,"[1496, 1497]"
CP-18.zip,1,CP,1782,3581,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-10.zip,1,CP,1393,3845,60,2,"[3845, 3846]"
Normal-12.zip,0,Normal,2015,470,94,1,[470]
NCP-6.zip,2,NCP,206,1557,58,2,"[1556, 1557]"
CP-1.zip,1,CP,1096,3314,196,1,[3314]
NCP-16.zip,2,NCP,43,1220,65,2,"[1219, 1220]"
NCP-18.zip,2,NCP,499,2155,58,2,"[2154, 2155]"
CP-10.zip,1,CP,1409,3881,66,2,"[3881, 3882]"
Normal-4.zip,0,Normal,777,212,83,1,[212]
NCP-9.zip,2,NCP,2708,2701,59,1,[2701]
CP-11.zip,1,CP,1432,3933,60,2,"[3932, 3933]"
NCP-4.zip,2,NCP,141,1426,129,2,"[1426, 1427]"
CP-23.zip,1,CP,673,3035,76,1,[3035]
NCP-29.zip,2,NCP,879,2414,173,1,[2414]
NCP-19.zip,2,NCP,536,2229,145,2,"[2229, 2230]"
NCP-18.zip,2,NCP,504,2165,65,2,"[2164, 2165]"
Normal-1.zip,0,Normal,1678,829,34,6,"[827, 828, 829, 830, 831, 832]"
NCP-8.zip,2,NCP,264,1674,179,2,"[1674, 1675]"
NCP-4.zip,2,NCP,155,1454,139,2,"[1454, 1455]"
CP-11.zip,1,CP,1418,3900,180,3,"[3900, 3901, 3902]"
NCP-5.zip,2,NCP,194,1532,133,2,"[1532, 1533]"
NCP-13.zip,2,NCP,361,1873,143,2,"[1873, 1874]"
Normal-1.zip,0,Normal,1710,976,78,2,"[975, 976]"
Normal-15.zip,0,Normal,2091,546,106,1,[546]
NCP-19.zip,2,NCP,518,2192,135,2,"[2192, 2193]"
Normal-18.zip,0,Normal,2190,645,90,1,[645]
Normal-12.zip,0,Normal,2013,468,87,1,[468]
NCP-11.zip,2,NCP,302,1751,62,2,"[1750, 1751]"
Normal-15.zip,0,Normal,2109,564,103,1,[564]
NCP-8.zip,2,NCP,264,1675,75,2,"[1674, 1675]"
CP-23.zip,1,CP,653,3015,285,1,[3015]
NCP-7.zip,2,NCP,235,1615,139,2,"[1615, 1616]"
CP-19.zip,1,CP,1786,3194,77,3,"[3192, 3193, 3194]"
CP-1.zip,1,CP,0,3137,37,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-15.zip,2,NCP,423,1999,133,2,"[1999, 2000]"
CP-6.zip,1,CP,1232,3450,91,1,[3450]
CP-14.zip,1,CP,1526,4158,51,3,"[4157, 4158, 4159]"
CP-4.zip,1,CP,1184,3402,193,1,[3402]
NCP-17.zip,2,NCP,483,2122,56,2,"[2121, 2122]"
CP-12.zip,1,CP,1459,3996,69,3,"[3995, 3996, 3997]"
CP-17.zip,1,CP,1637,4325,20,1,[4325]
CP-10.zip,1,CP,1411,3885,66,2,"[3885, 3886]"
NCP-9.zip,2,NCP,2707,2673,44,1,[2673]
NCP-29.zip,2,NCP,892,2431,20,1,[2431]
CP-26.zip,1,CP,3720,5653,243,2,"[5652, 5653]"
Normal-13.zip,0,Normal,2023,478,96,1,[478]
CP-11.zip,1,CP,1439,3947,62,2,"[3946, 3947]"
Normal-6.zip,0,Normal,1801,256,89,1,[256]
NCP-16.zip,2,NCP,442,2038,131,2,"[2038, 2039]"
Normal-9.zip,0,Normal,1920,375,100,1,[375]
CP-13.zip,1,CP,1489,4067,457,4,"[4067, 4068, 4069, 4070]"
CP-9.zip,1,CP,1378,3811,50,2,"[3810, 3811]"
NCP-12.zip,2,NCP,336,1821,50,2,"[1820, 1821]"
NCP-3.zip,2,NCP,1295,2736,61,1,[2736]
Normal-20.zip,0,Normal,2268,723,85,1,[723]
Normal-20.zip,0,Normal,2281,736,84,1,[736]
CP-1.zip,1,CP,1083,3128,71,2,"[3128, 3129]"
CP-14.zip,1,CP,1545,4207,65,2,"[4206, 4207]"
Normal-21.zip,0,Normal,2306,761,103,1,[761]
NCP-13.zip,2,NCP,350,1852,47,2,"[1851, 1852]"
CP-8.zip,1,CP,1326,3688,53,2,"[3688, 3689]"
NCP-7.zip,2,NCP,236,1617,283,2,"[1617, 1618]"
Normal-1.zip,0,Normal,1722,1001,73,2,"[1001, 1002]"
NCP-5.zip,2,NCP,177,1498,139,2,"[1498, 1499]"
CP-20.zip,1,CP,2668,3259,52,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-1.zip,0,Normal,1708,971,74,2,"[971, 972]"
NCP-8.zip,2,NCP,2680,2651,46,1,[2651]
CP-11.zip,1,CP,1436,3940,45,2,"[3940, 3941]"
NCP-20.zip,2,NCP,570,2298,139,2,"[2298, 2299]"
Normal-1.zip,0,Normal,1723,1004,77,2,"[1003, 1004]"
NCP-10.zip,2,NCP,2723,2679,40,1,[2679]
NCP-13.zip,2,NCP,364,1880,56,2,"[1879, 1880]"
Normal-21.zip,0,Normal,2302,757,96,1,[757]
Normal-18.zip,0,Normal,2199,654,85,1,[654]
CP-9.zip,1,CP,1369,3790,67,2,"[3790, 3791]"
Normal-25.zip,0,Normal,3858,5370,234,1,[5370]
Normal-21.zip,0,Normal,2286,741,84,1,[741]
NCP-21.zip,2,NCP,65,1263,128,2,"[1263, 1264]"
CP-23.zip,1,CP,661,3023,116,1,[3023]
CP-30.zip,1,CP,3937,5643,66,2,"[5643, 5644]"
CP-25.zip,1,CP,8,3514,36,2,"[3513, 3514]"
Normal-1.zip,0,Normal,1720,995,74,2,"[995, 996]"
NCP-15.zip,2,NCP,421,1996,67,2,"[1995, 1996]"
CP-25.zip,1,CP,738,3100,110,1,[3100]
NCP-11.zip,2,NCP,304,1755,67,2,"[1754, 1755]"
NCP-22.zip,2,NCP,834,2348,226,2,"[2347, 2348]"
Normal-3.zip,0,Normal,769,204,138,1,[204]
Normal-1.zip,0,Normal,1680,840,66,6,"[839, 840, 841, 842, 843, 844]"
CP-13.zip,1,CP,1519,4141,68,2,"[4141, 4142]"
NCP-12.zip,2,NCP,315,1777,107,2,"[1777, 1778]"
Normal-2.zip,0,Normal,1753,1088,66,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
CP-8.zip,1,CP,1341,3722,57,1,[3722]
CP-13.zip,1,CP,1491,4075,48,3,"[4074, 4075, 4076]"
NCP-9.zip,2,NCP,2687,2654,51,1,[2654]
CP-28.zip,1,CP,3785,5729,28,1,[5729]
NCP-6.zip,2,NCP,212,1568,165,2,"[1568, 1569]"
CP-12.zip,1,CP,1477,4035,54,2,"[4035, 4036]"
CP-16.zip,1,CP,1605,4293,23,1,[4293]
NCP-29.zip,2,NCP,926,2468,24,1,[2468]
CP-10.zip,1,CP,1394,3847,62,2,"[3847, 3848]"
NCP-21.zip,2,NCP,580,2318,58,2,"[2317, 2318]"
NCP-19.zip,2,NCP,526,2208,137,2,"[2208, 2209]"
CP-13.zip,1,CP,1494,4085,65,3,"[4083, 4084, 4085]"
Normal-27.zip,0,Normal,3895,5421,71,4,"[5418, 5419, 5420, 5421]"
NCP-8.zip,2,NCP,267,1680,129,2,"[1680, 1681]"
Normal-16.zip,0,Normal,2124,579,101,1,[579]
NCP-18.zip,2,NCP,49,1232,61,2,"[1231, 1232]"
CP-21.zip,1,CP,589,2951,300,1,[2951]
CP-25.zip,1,CP,8,3513,42,2,"[3513, 3514]"
NCP-6.zip,2,NCP,206,1556,139,2,"[1556, 1557]"
CP-27.zip,1,CP,3765,5709,20,1,[5709]
NCP-4.zip,2,NCP,147,1438,173,2,"[1438, 1439]"
Normal-20.zip,0,Normal,2256,711,86,1,[711]
Normal-27.zip,0,Normal,3904,5436,82,1,[5436]
NCP-14.zip,2,NCP,384,1921,54,2,"[1920, 1921]"
CP-18.zip,1,CP,1780,3560,69,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-14.zip,1,CP,1522,4148,61,2,"[4148, 4149]"
NCP-8.zip,2,NCP,256,1658,139,2,"[1658, 1659]"
CP-10.zip,1,CP,1406,3874,60,2,"[3874, 3875]"
CP-4.zip,1,CP,1177,3395,210,1,[3395]
Normal-1.zip,0,Normal,1673,804,291,6,"[804, 805, 806, 807, 808, 809]"
NCP-2.zip,2,NCP,122,1385,149,2,"[1385, 1386]"
CP-9.zip,1,CP,1354,3752,46,3,"[3751, 3752, 3753]"
NCP-23.zip,2,NCP,922,2464,240,1,[2464]
CP-20.zip,1,CP,2668,3251,58,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-6.zip,0,Normal,1796,251,96,1,[251]
CP-9.zip,1,CP,1374,3803,50,2,"[3802, 3803]"
NCP-7.zip,2,NCP,237,1620,61,2,"[1619, 1620]"
NCP-13.zip,2,NCP,363,1878,58,2,"[1877, 1878]"
CP-1.zip,1,CP,1084,3130,67,1,[3130]
Normal-14.zip,0,Normal,2082,537,78,1,[537]
CP-18.zip,1,CP,1656,4344,26,1,[4344]
NCP-18.zip,2,NCP,491,2138,149,2,"[2138, 2139]"
CP-22.zip,1,CP,609,2971,76,1,[2971]
Normal-18.zip,0,Normal,2198,653,88,1,[653]
NCP-6.zip,2,NCP,212,1569,69,2,"[1568, 1569]"
CP-21.zip,1,CP,607,2969,178,1,[2969]
NCP-9.zip,2,NCP,269,1685,64,2,"[1684, 1685]"
CP-9.zip,1,CP,1364,3777,56,3,"[3776, 3777, 3778]"
CP-17.zip,1,CP,1622,4310,27,1,[4310]
CP-16.zip,1,CP,1601,4289,19,1,[4289]
CP-10.zip,1,CP,1388,3832,51,2,"[3831, 3832]"
Normal-27.zip,0,Normal,3908,5442,56,1,[5442]
CP-25.zip,1,CP,732,3094,159,1,[3094]
NCP-14.zip,2,NCP,40,1212,149,2,"[1212, 1213]"
NCP-21.zip,2,NCP,65,1264,54,2,"[1263, 1264]"
CP-12.zip,1,CP,1477,4036,54,2,"[4035, 4036]"
Normal-10.zip,0,Normal,1953,408,94,1,[408]
CP-15.zip,1,CP,1577,4265,22,1,[4265]
Normal-14.zip,0,Normal,2055,510,91,1,[510]
Normal-17.zip,0,Normal,2154,609,94,1,[609]
Normal-27.zip,0,Normal,3895,5418,61,4,"[5418, 5419, 5420, 5421]"
Normal-19.zip,0,Normal,2227,682,73,1,[682]
Normal-11.zip,0,Normal,1975,430,101,1,[430]
CP-15.zip,1,CP,1584,4272,20,1,[4272]
Normal-20.zip,0,Normal,2262,717,84,1,[717]
CP-14.zip,1,CP,1543,4200,190,3,"[4200, 4201, 4202]"
Normal-3.zip,0,Normal,753,188,300,1,[188]
CP-12.zip,1,CP,1475,4032,50,2,"[4031, 4032]"
NCP-16.zip,2,NCP,458,2071,55,2,"[2070, 2071]"
NCP-5.zip,2,NCP,180,1504,136,2,"[1504, 1505]"
CP-30.zip,1,CP,3938,5645,94,1,[5645]
CP-9.zip,1,CP,1364,3778,56,3,"[3776, 3777, 3778]"
Normal-23.zip,0,Normal,2632,142,39,1,[142]
Normal-5.zip,0,Normal,810,245,324,1,[245]
NCP-5.zip,2,NCP,174,1493,56,2,"[1492, 1493]"
CP-17.zip,1,CP,1632,4320,23,1,[4320]
NCP-2.zip,2,NCP,112,1366,56,2,"[1365, 1366]"
CP-10.zip,1,CP,1411,3886,66,2,"[3885, 3886]"
CP-18.zip,1,CP,1780,3554,67,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-20.zip,1,CP,2668,3252,51,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-6.zip,0,Normal,1820,275,83,1,[275]
Normal-1.zip,0,Normal,1673,809,57,6,"[804, 805, 806, 807, 808, 809]"
Normal-18.zip,0,Normal,2204,659,94,1,[659]
CP-14.zip,1,CP,1531,4169,59,2,"[4169, 4170]"
CP-12.zip,1,CP,1474,4030,62,2,"[4029, 4030]"
Normal-18.zip,0,Normal,2215,670,80,1,[670]
NCP-21.zip,2,NCP,579,2315,150,2,"[2315, 2316]"
NCP-28.zip,2,NCP,854,2374,265,1,[2374]
Normal-25.zip,0,Normal,3838,5350,201,1,[5350]
CP-9.zip,1,CP,1352,3747,61,1,[3747]
Normal-1.zip,0,Normal,1719,994,76,2,"[993, 994]"
CP-11.zip,1,CP,1418,3901,54,3,"[3900, 3901, 3902]"
NCP-28.zip,2,NCP,852,2372,47,2,"[2371, 2372]"
Normal-19.zip,0,Normal,2225,680,94,1,[680]
Normal-16.zip,0,Normal,2148,603,86,1,[603]
NCP-19.zip,2,NCP,544,2245,147,2,"[2245, 2246]"
CP-29.zip,1,CP,3826,5770,26,1,[5770]
NCP-7.zip,2,NCP,229,1602,156,2,"[1602, 1603]"
Normal-1.zip,0,Normal,1673,807,283,6,"[804, 805, 806, 807, 808, 809]"
Normal-6.zip,0,Normal,1823,278,85,1,[278]
NCP-27.zip,2,NCP,824,2335,259,1,[2335]
CP-18.zip,1,CP,1776,3535,64,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-18.zip,2,NCP,513,2183,68,2,"[2182, 2183]"
CP-30.zip,1,CP,3934,5639,77,3,"[5638, 5639, 5640]"
CP-4.zip,1,CP,1168,3386,203,1,[3386]
NCP-12.zip,2,NCP,323,1794,116,2,"[1794, 1795]"
CP-8.zip,1,CP,1340,3720,64,2,"[3720, 3721]"
CP-5.zip,1,CP,1223,3441,232,1,[3441]
NCP-4.zip,2,NCP,166,1477,58,2,"[1476, 1477]"
NCP-6.zip,2,NCP,219,1583,65,2,"[1582, 1583]"
NCP-4.zip,2,NCP,155,1455,58,2,"[1454, 1455]"
NCP-1.zip,2,NCP,101,1340,57,2,"[1339, 1340]"
NCP-11.zip,2,NCP,298,1742,145,2,"[1742, 1743]"
Normal-1.zip,0,Normal,1684,874,71,5,"[870, 871, 873, 874, 875]"
CP-14.zip,1,CP,1554,4227,41,2,"[4226, 4227]"
NCP-18.zip,2,NCP,489,2134,139,2,"[2134, 2135]"
Normal-23.zip,0,Normal,2615,125,36,1,[125]
NCP-8.zip,2,NCP,2674,2693,45,1,[2693]
NCP-6.zip,2,NCP,226,1596,142,2,"[1596, 1597]"
NCP-10.zip,2,NCP,274,1695,67,2,"[1694, 1695]"
Normal-10.zip,0,Normal,1944,399,97,1,[399]
CP-6.zip,1,CP,1236,3454,159,1,[3454]
CP-20.zip,1,CP,2668,3257,53,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-23.zip,1,CP,670,3032,78,1,[3032]
NCP-20.zip,2,NCP,548,2253,144,2,"[2253, 2254]"
CP-18.zip,1,CP,1769,3516,23,1,[3516]
Normal-3.zip,0,Normal,754,189,308,1,[189]
NCP-7.zip,2,NCP,239,1623,146,2,"[1623, 1624]"
NCP-14.zip,2,NCP,392,1935,58,2,"[1934, 1935]"
Normal-6.zip,0,Normal,1824,279,86,1,[279]
Normal-2.zip,0,Normal,1753,1087,77,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-30.zip,2,NCP,997,2554,49,2,"[2553, 2554]"
CP-26.zip,1,CP,3727,5663,42,1,[5663]
CP-11.zip,1,CP,1433,3934,62,2,"[3934, 3935]"
Normal-18.zip,0,Normal,2187,642,92,1,[642]
NCP-2.zip,2,NCP,112,1365,133,2,"[1365, 1366]"
NCP-6.zip,2,NCP,219,1582,156,2,"[1582, 1583]"
Normal-10.zip,0,Normal,1939,394,93,1,[394]
CP-18.zip,1,CP,1775,3532,57,4,"[3530, 3531, 3532, 3533]"
CP-2.zip,1,CP,11,3165,268,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
Normal-12.zip,0,Normal,2012,467,102,1,[467]
CP-21.zip,1,CP,587,2949,151,1,[2949]
Normal-15.zip,0,Normal,2116,571,92,1,[571]
CP-1.zip,1,CP,10,3156,289,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
Normal-27.zip,0,Normal,3895,5419,61,4,"[5418, 5419, 5420, 5421]"
Normal-25.zip,0,Normal,3854,5366,197,1,[5366]
Normal-4.zip,0,Normal,771,206,306,1,[206]
NCP-3.zip,2,NCP,129,1403,132,2,"[1403, 1404]"
Normal-13.zip,0,Normal,2042,497,90,1,[497]
Normal-2.zip,0,Normal,1753,1090,296,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-17.zip,2,NCP,478,2111,145,2,"[2111, 2112]"
Normal-17.zip,0,Normal,2171,626,92,1,[626]
CP-10.zip,1,CP,1410,3884,51,2,"[3883, 3884]"
CP-3.zip,1,CP,1140,3358,370,1,[3358]
NCP-22.zip,2,NCP,885,2422,52,2,"[2422, 2423]"
NCP-27.zip,2,NCP,1050,2624,428,2,"[2623, 2624]"
NCP-17.zip,2,NCP,478,2112,61,2,"[2111, 2112]"
CP-20.zip,1,CP,2668,3254,47,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-16.zip,2,NCP,433,2019,120,2,"[2019, 2020]"
NCP-19.zip,2,NCP,517,2191,58,2,"[2190, 2191]"
Normal-24.zip,0,Normal,2657,167,27,1,[167]
CP-8.zip,1,CP,1339,3718,59,2,"[3718, 3719]"
NCP-17.zip,2,NCP,482,2119,139,2,"[2119, 2120]"
CP-17.zip,1,CP,1635,4323,27,1,[4323]
Normal-10.zip,0,Normal,1930,385,98,1,[385]
Normal-1.zip,0,Normal,1679,837,70,6,"[833, 834, 835, 836, 837, 838]"
NCP-19.zip,2,NCP,536,2230,61,2,"[2229, 2230]"
NCP-25.zip,2,NCP,3942,5539,37,1,[5539]
Normal-17.zip,0,Normal,2180,635,95,1,[635]
Normal-1.zip,0,Normal,1680,839,66,6,"[839, 840, 841, 842, 843, 844]"
Normal-1.zip,0,Normal,1705,965,69,2,"[965, 966]"
NCP-5.zip,2,NCP,174,1492,134,2,"[1492, 1493]"
NCP-14.zip,2,NCP,386,1923,62,1,[1923]
CP-22.zip,1,CP,625,2987,100,1,[2987]
CP-20.zip,1,CP,2450,2929,90,2,"[2928, 2929]"
Normal-10.zip,0,Normal,1949,404,92,1,[404]
CP-14.zip,1,CP,1546,4208,58,2,"[4208, 4209]"
NCP-21.zip,2,NCP,63,1260,58,2,"[1259, 1260]"
Normal-23.zip,0,Normal,2624,134,38,1,[134]
NCP-10.zip,2,NCP,272,1690,153,2,"[1690, 1691]"
CP-5.zip,1,CP,1209,3427,313,1,[3427]
NCP-11.zip,2,NCP,293,1731,122,2,"[1731, 1732]"
CP-9.zip,1,CP,1383,3822,71,2,"[3821, 3822]"
Normal-4.zip,0,Normal,793,228,94,1,[228]
NCP-2.zip,2,NCP,1057,2633,570,1,[2633]
Normal-1.zip,0,Normal,1679,835,67,6,"[833, 834, 835, 836, 837, 838]"
CP-4.zip,1,CP,1185,3403,131,1,[3403]
CP-11.zip,1,CP,1446,3965,63,2,"[3965, 3966]"
CP-15.zip,1,CP,1576,4264,23,1,[4264]
CP-12.zip,1,CP,1487,4062,68,3,"[4061, 4062, 4063]"
CP-9.zip,1,CP,1381,3817,66,3,"[3815, 3816, 3817]"
CP-28.zip,1,CP,3767,5711,17,1,[5711]
Normal-23.zip,0,Normal,2610,120,41,1,[120]
CP-10.zip,1,CP,1394,3848,62,2,"[3847, 3848]"
NCP-4.zip,2,NCP,160,1465,61,2,"[1464, 1465]"
CP-14.zip,1,CP,1543,4201,57,3,"[4200, 4201, 4202]"
CP-23.zip,1,CP,652,3014,277,1,[3014]
CP-16.zip,1,CP,1607,4295,17,1,[4295]
Normal-18.zip,0,Normal,2213,668,84,1,[668]
Normal-16.zip,0,Normal,2121,576,87,1,[576]
Normal-23.zip,0,Normal,2627,137,41,1,[137]
NCP-21.zip,2,NCP,582,2322,54,2,"[2321, 2322]"
CP-19.zip,1,CP,2431,2893,361,1,[2893]
Normal-1.zip,0,Normal,1717,989,67,2,"[989, 990]"
CP-10.zip,1,CP,1385,3825,64,2,"[3825, 3826]"
CP-5.zip,1,CP,1198,3416,162,1,[3416]
NCP-21.zip,2,NCP,578,2314,55,2,"[2313, 2314]"
NCP-20.zip,2,NCP,56,1246,68,2,"[1245, 1246]"
NCP-19.zip,2,NCP,532,2222,139,2,"[2222, 2223]"
Normal-21.zip,0,Normal,2283,738,87,1,[738]
Normal-19.zip,0,Normal,2222,677,78,1,[677]
CP-9.zip,1,CP,1361,3770,50,2,"[3770, 3771]"
NCP-15.zip,2,NCP,420,1993,177,2,"[1993, 1994]"
CP-18.zip,1,CP,1776,3538,76,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
Normal-1.zip,0,Normal,1706,968,64,2,"[967, 968]"
CP-20.zip,1,CP,2668,3253,51,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-5.zip,2,NCP,171,1486,143,2,"[1486, 1487]"
Normal-3.zip,0,Normal,750,185,281,1,[185]
CP-18.zip,1,CP,1780,3565,80,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-13.zip,2,NCP,362,1876,63,2,"[1875, 1876]"
CP-6.zip,1,CP,1234,3452,191,1,[3452]
Normal-1.zip,0,Normal,1684,873,133,5,"[870, 871, 873, 874, 875]"
Normal-6.zip,0,Normal,1812,267,99,1,[267]
NCP-17.zip,2,NCP,474,2103,114,2,"[2103, 2104]"
Normal-7.zip,0,Normal,1857,312,80,1,[312]
Normal-12.zip,0,Normal,1992,447,104,1,[447]
CP-18.zip,1,CP,1664,4352,20,1,[4352]
Normal-27.zip,0,Normal,3895,5420,71,4,"[5418, 5419, 5420, 5421]"
NCP-19.zip,2,NCP,517,2190,139,2,"[2190, 2191]"
Normal-23.zip,0,Normal,2625,135,39,1,[135]
Normal-5.zip,0,Normal,811,246,124,1,[246]
CP-4.zip,1,CP,1162,3380,212,1,[3380]
CP-22.zip,1,CP,611,2973,76,1,[2973]
CP-9.zip,1,CP,1381,3815,261,3,"[3815, 3816, 3817]"
CP-9.zip,1,CP,1371,3794,200,3,"[3794, 3795, 3796]"
NCP-16.zip,2,NCP,432,2017,128,2,"[2017, 2018]"
Normal-20.zip,0,Normal,2278,733,90,1,[733]
Normal-19.zip,0,Normal,2240,695,78,1,[695]
CP-28.zip,1,CP,3786,5730,29,1,[5730]
Normal-15.zip,0,Normal,2097,552,89,1,[552]
NCP-18.zip,2,NCP,500,2156,162,2,"[2156, 2157]"
CP-9.zip,1,CP,1374,3802,50,2,"[3802, 3803]"
Normal-23.zip,0,Normal,2606,116,33,1,[116]
CP-26.zip,1,CP,3651,5550,395,1,[5550]
Normal-9.zip,0,Normal,1912,367,92,1,[367]
NCP-25.zip,2,NCP,3953,5466,44,1,[5466]
CP-25.zip,1,CP,724,3086,100,1,[3086]
Normal-21.zip,0,Normal,2292,747,82,1,[747]
CP-7.zip,1,CP,1262,3480,384,1,[3480]
CP-13.zip,1,CP,1489,4068,229,4,"[4067, 4068, 4069, 4070]"
Normal-10.zip,0,Normal,1931,386,80,1,[386]
NCP-20.zip,2,NCP,563,2284,141,2,"[2284, 2285]"
CP-2.zip,1,CP,1123,3341,213,1,[3341]
NCP-17.zip,2,NCP,486,2127,153,2,"[2127, 2128]"
CP-26.zip,1,CP,3733,5673,32,3,"[5673, 5674, 5675]"
CP-3.zip,1,CP,1152,3370,69,1,[3370]
NCP-28.zip,2,NCP,838,2353,89,1,[2353]
Normal-1.zip,0,Normal,1717,990,67,2,"[989, 990]"
NCP-30.zip,2,NCP,997,2553,54,2,"[2553, 2554]"
NCP-17.zip,2,NCP,48,1230,61,2,"[1229, 1230]"
NCP-17.zip,2,NCP,467,2089,138,2,"[2089, 2090]"
NCP-20.zip,2,NCP,564,2286,143,2,"[2286, 2287]"
Normal-1.zip,0,Normal,1722,1002,73,2,"[1001, 1002]"
Normal-7.zip,0,Normal,1854,309,82,1,[309]
Normal-2.zip,0,Normal,1747,1065,60,1,[1065]
NCP-19.zip,2,NCP,535,2228,47,2,"[2227, 2228]"
NCP-26.zip,2,NCP,3974,5508,52,1,[5508]
CP-14.zip,1,CP,1526,4157,124,3,"[4157, 4158, 4159]"
Normal-7.zip,0,Normal,1829,284,92,1,[284]
Normal-1.zip,0,Normal,1673,808,57,6,"[804, 805, 806, 807, 808, 809]"
NCP-2.zip,2,NCP,1271,2712,56,1,[2712]
CP-30.zip,1,CP,3934,5638,59,3,"[5638, 5639, 5640]"
NCP-26.zip,2,NCP,3979,5486,52,1,[5486]
NCP-20.zip,2,NCP,554,2265,128,2,"[2265, 2266]"
NCP-6.zip,2,NCP,221,1587,53,2,"[1586, 1587]"
NCP-20.zip,2,NCP,558,2273,119,2,"[2273, 2274]"
CP-8.zip,1,CP,1321,3678,58,2,"[3678, 3679]"
NCP-6.zip,2,NCP,226,1597,60,2,"[1596, 1597]"
NCP-21.zip,2,NCP,76,1286,51,2,"[1285, 1286]"
NCP-1.zip,2,NCP,1042,2613,143,2,"[2613, 2614]"
NCP-13.zip,2,NCP,366,1884,67,2,"[1883, 1884]"
NCP-18.zip,2,NCP,490,2136,147,2,"[2136, 2137]"
NCP-28.zip,2,NCP,856,2376,227,2,"[2376, 2377]"
CP-19.zip,1,CP,2445,2920,283,2,"[2920, 2921]"
Normal-1.zip,0,Normal,1673,806,59,6,"[804, 805, 806, 807, 808, 809]"
CP-25.zip,1,CP,9,3151,72,4,"[3148, 3149, 3150, 3151]"
Normal-25.zip,0,Normal,3847,5359,219,1,[5359]
Normal-12.zip,0,Normal,2005,460,77,1,[460]
CP-30.zip,1,CP,3936,5642,59,1,[5642]
NCP-12.zip,2,NCP,326,1800,117,2,"[1800, 1801]"
Normal-13.zip,0,Normal,2045,500,85,1,[500]
CP-15.zip,1,CP,1583,4271,18,1,[4271]
Normal-20.zip,0,Normal,2261,716,83,1,[716]
Normal-20.zip,0,Normal,2276,731,91,1,[731]
CP-18.zip,1,CP,1776,3536,75,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-27.zip,2,NCP,1034,2605,19,1,[2605]
NCP-16.zip,2,NCP,445,2044,139,2,"[2044, 2045]"
CP-12.zip,1,CP,1461,4001,53,2,"[4000, 4001]"
CP-12.zip,1,CP,1485,4056,114,3,"[4056, 4057, 4058]"
NCP-7.zip,2,NCP,231,1606,139,2,"[1606, 1607]"
NCP-13.zip,2,NCP,343,1838,55,2,"[1837, 1838]"
NCP-6.zip,2,NCP,202,1548,161,2,"[1548, 1549]"
Normal-17.zip,0,Normal,2160,615,96,1,[615]
CP-28.zip,1,CP,3780,5724,27,1,[5724]
CP-9.zip,1,CP,1354,3753,46,3,"[3751, 3752, 3753]"
CP-16.zip,1,CP,1598,4286,23,1,[4286]
CP-19.zip,1,CP,2445,2921,119,2,"[2920, 2921]"
CP-9.zip,1,CP,1361,3771,50,2,"[3770, 3771]"
NCP-15.zip,2,NCP,412,1974,54,2,"[1973, 1974]"
Normal-8.zip,0,Normal,1861,316,76,1,[316]
Normal-3.zip,0,Normal,1766,1150,57,3,"[1149, 1150, 1151]"
Normal-17.zip,0,Normal,2182,637,96,1,[637]
Normal-7.zip,0,Normal,1833,288,102,1,[288]
Normal-9.zip,0,Normal,1894,349,99,1,[349]
Normal-22.zip,0,Normal,2319,774,101,1,[774]
Normal-1.zip,0,Normal,1680,844,64,6,"[839, 840, 841, 842, 843, 844]"
CP-24.zip,1,CP,679,3041,94,1,[3041]
CP-13.zip,1,CP,1489,4069,58,4,"[4067, 4068, 4069, 4070]"
CP-30.zip,1,CP,3832,5776,23,1,[5776]
CP-25.zip,1,CP,720,3082,84,1,[3082]
Normal-19.zip,0,Normal,2235,690,89,1,[690]
CP-11.zip,1,CP,1429,3927,52,2,"[3926, 3927]"
Normal-7.zip,0,Normal,1835,290,83,1,[290]
NCP-7.zip,2,NCP,239,1624,61,2,"[1623, 1624]"
Normal-27.zip,0,Normal,3899,5430,76,2,"[5429, 5430]"
CP-4.zip,1,CP,1165,3383,151,1,[3383]
NCP-3.zip,2,NCP,1297,2738,56,1,[2738]
CP-1.zip,1,CP,0,3134,37,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-22.zip,2,NCP,832,2345,25,1,[2345]
NCP-25.zip,2,NCP,3952,5505,46,1,[5505]
NCP-26.zip,2,NCP,3977,5509,56,1,[5509]
CP-16.zip,1,CP,1609,4297,20,1,[4297]
Normal-21.zip,0,Normal,2294,749,103,1,[749]
NCP-25.zip,2,NCP,3967,5507,46,1,[5507]
CP-13.zip,1,CP,1495,4089,48,4,"[4086, 4087, 4088, 4089]"
CP-7.zip,1,CP,1317,3672,58,3,"[3670, 3671, 3672]"
Normal-26.zip,0,Normal,3877,5389,25,1,[5389]
CP-20.zip,1,CP,2766,3297,41,1,[3297]
CP-18.zip,1,CP,1661,4349,32,1,[4349]
NCP-19.zip,2,NCP,535,2227,112,2,"[2227, 2228]"
CP-2.zip,1,CP,1120,3338,159,1,[3338]
NCP-2.zip,2,NCP,118,1377,142,2,"[1377, 1378]"
Normal-7.zip,0,Normal,1843,298,96,1,[298]
NCP-15.zip,2,NCP,400,1950,155,1,[1950]
NCP-25.zip,2,NCP,3704,5531,60,1,[5531]
Normal-15.zip,0,Normal,2095,550,99,1,[550]
Normal-1.zip,0,Normal,1684,870,68,5,"[870, 871, 873, 874, 875]"
NCP-16.zip,2,NCP,44,1222,52,2,"[1221, 1222]"
NCP-11.zip,2,NCP,31,1194,137,2,"[1194, 1195]"
NCP-15.zip,2,NCP,409,1968,64,2,"[1967, 1968]"
NCP-16.zip,2,NCP,451,2057,48,3,"[2056, 2057, 2058]"
Normal-2.zip,0,Normal,1753,1086,77,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-8.zip,2,NCP,262,1670,139,2,"[1670, 1671]"
Normal-10.zip,0,Normal,1955,410,93,1,[410]
Normal-6.zip,0,Normal,1826,281,104,1,[281]
NCP-28.zip,2,NCP,852,2371,47,2,"[2371, 2372]"
NCP-27.zip,2,NCP,1000,2558,39,1,[2558]
CP-1.zip,1,CP,1072,3115,52,1,[3115]
NCP-5.zip,2,NCP,177,1499,58,2,"[1498, 1499]"
Normal-13.zip,0,Normal,2052,507,71,1,[507]
CP-7.zip,1,CP,1314,3663,30,2,"[3663, 3664]"
NCP-13.zip,2,NCP,350,1851,109,2,"[1851, 1852]"
NCP-21.zip,2,NCP,67,1267,70,2,"[1266, 1267]"
NCP-3.zip,2,NCP,132,1409,117,1,[1409]
Normal-18.zip,0,Normal,2205,660,91,1,[660]
Normal-1.zip,0,Normal,1678,827,58,6,"[827, 828, 829, 830, 831, 832]"
Normal-14.zip,0,Normal,2054,509,88,1,[509]
Normal-5.zip,0,Normal,809,244,114,1,[244]
CP-1.zip,1,CP,1083,3129,71,2,"[3128, 3129]"
NCP-27.zip,2,NCP,1029,2599,39,1,[2599]
NCP-26.zip,2,NCP,3972,5481,58,1,[5481]
Normal-13.zip,0,Normal,2026,481,85,1,[481]
NCP-17.zip,2,NCP,47,1227,139,2,"[1227, 1228]"
CP-27.zip,1,CP,3763,5707,20,1,[5707]
Normal-6.zip,0,Normal,1798,253,93,1,[253]
NCP-9.zip,2,NCP,2703,2669,41,1,[2669]
CP-1.zip,1,CP,1071,3113,57,2,"[3113, 3114]"
NCP-16.zip,2,NCP,430,2014,64,2,"[2013, 2014]"
NCP-4.zip,2,NCP,144,1432,139,2,"[1432, 1433]"
Normal-4.zip,0,Normal,780,215,116,1,[215]
Normal-12.zip,0,Normal,2020,475,88,1,[475]
NCP-13.zip,2,NCP,366,1883,161,2,"[1883, 1884]"
Normal-2.zip,0,Normal,1761,1127,18,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-29.zip,2,NCP,899,2441,42,2,"[2440, 2441]"
CP-16.zip,1,CP,1612,4300,26,1,[4300]
NCP-15.zip,2,NCP,412,1973,129,2,"[1973, 1974]"
NCP-10.zip,2,NCP,2717,2710,42,1,[2710]
CP-19.zip,1,CP,1792,3214,71,2,"[3214, 3215]"
Normal-20.zip,0,Normal,2269,724,113,1,[724]
CP-11.zip,1,CP,1451,3976,51,2,"[3975, 3976]"
Normal-11.zip,0,Normal,1978,433,94,1,[433]
NCP-3.zip,2,NCP,1282,2723,70,1,[2723]
CP-23.zip,1,CP,654,3016,74,1,[3016]
NCP-13.zip,2,NCP,345,1842,62,2,"[1841, 1842]"
CP-22.zip,1,CP,610,2972,70,1,[2972]
CP-29.zip,1,CP,3799,5743,23,1,[5743]
CP-1.zip,1,CP,0,3140,269,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-18.zip,2,NCP,506,2168,124,2,"[2168, 2169]"
Normal-19.zip,0,Normal,2218,673,84,1,[673]
NCP-7.zip,2,NCP,243,1632,31,3,"[1631, 1632, 1633]"
NCP-25.zip,2,NCP,3948,5504,50,1,[5504]
CP-7.zip,1,CP,1312,3658,65,2,"[3658, 3659]"
NCP-16.zip,2,NCP,451,2058,23,3,"[2056, 2057, 2058]"
CP-12.zip,1,CP,1461,4000,53,2,"[4000, 4001]"
CP-1.zip,1,CP,10,3154,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-10.zip,1,CP,1388,3831,51,2,"[3831, 3832]"
Normal-1.zip,0,Normal,1702,957,69,2,"[957, 958]"
Normal-17.zip,0,Normal,2181,636,100,1,[636]
NCP-19.zip,2,NCP,521,2198,139,2,"[2198, 2199]"
Normal-9.zip,0,Normal,1922,377,87,1,[377]
Normal-8.zip,0,Normal,1872,327,86,1,[327]
CP-9.zip,1,CP,1369,3791,67,2,"[3790, 3791]"
CP-29.zip,1,CP,3815,5759,23,1,[5759]
NCP-2.zip,2,NCP,118,1378,60,2,"[1377, 1378]"
CP-19.zip,1,CP,1793,3216,69,1,[3216]
NCP-5.zip,2,NCP,178,1501,52,2,"[1500, 1501]"
CP-13.zip,1,CP,1495,4087,50,4,"[4086, 4087, 4088, 4089]"
CP-18.zip,1,CP,1780,3566,41,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-9.zip,1,CP,1378,3810,50,2,"[3810, 3811]"
CP-8.zip,1,CP,1323,3682,62,2,"[3682, 3683]"
CP-20.zip,1,CP,2754,3285,30,1,[3285]
Normal-26.zip,0,Normal,3865,5377,24,1,[5377]
Normal-23.zip,0,Normal,2614,124,37,1,[124]
CP-12.zip,1,CP,1465,4009,67,2,"[4009, 4010]"
CP-14.zip,1,CP,1537,4183,53,3,"[4182, 4183, 4184]"
Normal-1.zip,0,Normal,1719,993,76,2,"[993, 994]"
NCP-3.zip,2,NCP,128,1401,122,2,"[1401, 1402]"
CP-28.zip,1,CP,3778,5722,25,1,[5722]
NCP-1.zip,2,NCP,1018,2584,252,1,[2584]
NCP-9.zip,2,NCP,27,1187,33,2,"[1186, 1187]"
CP-13.zip,1,CP,1494,4084,65,3,"[4083, 4084, 4085]"
NCP-13.zip,2,NCP,344,1839,152,2,"[1839, 1840]"
CP-21.zip,1,CP,604,2966,134,1,[2966]
NCP-1.zip,2,NCP,1037,2608,32,1,[2608]
CP-12.zip,1,CP,1485,4057,49,3,"[4056, 4057, 4058]"
NCP-16.zip,2,NCP,45,1223,152,2,"[1223, 1224]"
Normal-14.zip,0,Normal,2058,513,95,1,[513]
NCP-12.zip,2,NCP,323,1795,49,2,"[1794, 1795]"
NCP-26.zip,2,NCP,3999,5496,52,1,[5496]
Normal-15.zip,0,Normal,2107,562,92,1,[562]
CP-12.zip,1,CP,1478,4038,53,2,"[4037, 4038]"
Normal-15.zip,0,Normal,2099,554,85,1,[554]
NCP-21.zip,2,NCP,64,1261,132,2,"[1261, 1262]"
CP-9.zip,1,CP,1384,3824,66,2,"[3823, 3824]"
NCP-18.zip,2,NCP,511,2178,132,2,"[2178, 2179]"
CP-6.zip,1,CP,1227,3445,307,1,[3445]
Normal-23.zip,0,Normal,2633,143,40,1,[143]
NCP-10.zip,2,NCP,2722,2678,53,1,[2678]
NCP-15.zip,2,NCP,427,2008,56,2,"[2007, 2008]"
NCP-23.zip,2,NCP,94,1324,153,2,"[1324, 1325]"
CP-19.zip,1,CP,2446,2922,690,1,[2922]
CP-26.zip,1,CP,3728,5664,229,1,[5664]
CP-20.zip,1,CP,2668,3249,45,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-27.zip,0,Normal,3899,5429,75,2,"[5429, 5430]"
Normal-9.zip,0,Normal,1902,357,93,1,[357]
NCP-9.zip,2,NCP,27,1186,75,2,"[1186, 1187]"
NCP-18.zip,2,NCP,508,2172,145,2,"[2172, 2173]"
Normal-8.zip,0,Normal,1862,317,91,1,[317]
NCP-3.zip,2,NCP,128,1402,52,2,"[1401, 1402]"
NCP-8.zip,2,NCP,257,1660,152,2,"[1660, 1661]"
NCP-30.zip,2,NCP,973,2516,57,1,[2516]
CP-9.zip,1,CP,1357,3759,61,3,"[3758, 3759, 3760]"
Normal-26.zip,0,Normal,3864,5376,178,1,[5376]
CP-25.zip,1,CP,727,3089,104,1,[3089]
NCP-8.zip,2,NCP,259,1664,155,2,"[1664, 1665]"
CP-10.zip,1,CP,1390,3838,56,3,"[3836, 3837, 3838]"
Normal-21.zip,0,Normal,2295,750,79,1,[750]
NCP-18.zip,2,NCP,49,1231,146,2,"[1231, 1232]"
CP-10.zip,1,CP,1391,3840,59,4,"[3839, 3840, 3841, 3842]"
NCP-17.zip,2,NCP,48,1229,145,2,"[1229, 1230]"
NCP-21.zip,2,NCP,73,1278,130,3,"[1278, 1279, 1280]"
NCP-11.zip,2,NCP,296,1738,58,2,"[1737, 1738]"
NCP-3.zip,2,NCP,129,1404,56,2,"[1403, 1404]"
NCP-12.zip,2,NCP,330,1808,153,2,"[1808, 1809]"
CP-14.zip,1,CP,1529,4165,100,3,"[4165, 4166, 4167]"
CP-4.zip,1,CP,1187,3405,325,1,[3405]
NCP-11.zip,2,NCP,307,1761,136,2,"[1761, 1762]"
CP-26.zip,1,CP,3725,5661,258,2,"[5660, 5661]"
Normal-10.zip,0,Normal,1950,405,102,1,[405]
CP-15.zip,1,CP,1563,4247,61,3,"[4245, 4246, 4247]"
NCP-4.zip,2,NCP,144,1433,58,2,"[1432, 1433]"
NCP-28.zip,2,NCP,855,2375,39,1,[2375]
Normal-1.zip,0,Normal,1726,1008,69,2,"[1007, 1008]"
CP-22.zip,1,CP,629,2991,304,1,[2991]
NCP-4.zip,2,NCP,142,1428,141,2,"[1428, 1429]"
CP-21.zip,1,CP,592,2954,104,1,[2954]
CP-1.zip,1,CP,10,3159,293,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-8.zip,1,CP,1326,3689,53,2,"[3688, 3689]"
CP-9.zip,1,CP,1357,3760,61,3,"[3758, 3759, 3760]"
Normal-24.zip,0,Normal,2648,158,32,1,[158]
NCP-9.zip,2,NCP,269,1684,153,2,"[1684, 1685]"
Normal-15.zip,0,Normal,2108,563,101,1,[563]
CP-25.zip,1,CP,9,3148,290,4,"[3148, 3149, 3150, 3151]"
NCP-13.zip,2,NCP,364,1879,132,2,"[1879, 1880]"
Normal-23.zip,0,Normal,2605,115,35,1,[115]
NCP-10.zip,2,NCP,282,1711,51,2,"[1710, 1711]"
CP-14.zip,1,CP,1546,4209,58,2,"[4208, 4209]"
NCP-29.zip,2,NCP,925,2467,22,1,[2467]
Normal-21.zip,0,Normal,2296,751,102,1,[751]
CP-2.zip,1,CP,1114,3332,361,1,[3332]
NCP-5.zip,2,NCP,19,1171,61,2,"[1170, 1171]"
NCP-13.zip,2,NCP,363,1877,139,2,"[1877, 1878]"
CP-12.zip,1,CP,1475,4031,50,2,"[4031, 4032]"
NCP-14.zip,2,NCP,399,1949,62,2,"[1948, 1949]"
CP-17.zip,1,CP,1626,4314,26,1,[4314]
CP-18.zip,1,CP,1780,3556,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
Normal-19.zip,0,Normal,2236,691,83,1,[691]
CP-15.zip,1,CP,1572,4260,19,1,[4260]
CP-6.zip,1,CP,1240,3458,137,1,[3458]
NCP-21.zip,2,NCP,76,1285,121,2,"[1285, 1286]"
CP-22.zip,1,CP,623,2985,463,1,[2985]
CP-27.zip,1,CP,3760,5704,23,1,[5704]
CP-23.zip,1,CP,672,3034,86,1,[3034]
NCP-1.zip,2,NCP,1026,2596,21,1,[2596]
CP-22.zip,1,CP,635,2997,106,1,[2997]
NCP-14.zip,2,NCP,375,1901,115,3,"[1901, 1902, 1903]"
NCP-11.zip,2,NCP,304,1754,161,2,"[1754, 1755]"
NCP-15.zip,2,NCP,408,1965,131,2,"[1965, 1966]"
NCP-9.zip,2,NCP,2702,2668,41,1,[2668]
CP-11.zip,1,CP,1452,3978,56,2,"[3977, 3978]"
NCP-29.zip,2,NCP,891,2430,22,1,[2430]
NCP-16.zip,2,NCP,458,2070,131,2,"[2070, 2071]"
Normal-2.zip,0,Normal,1753,1092,60,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
Normal-1.zip,0,Normal,1702,958,69,2,"[957, 958]"
Normal-2.zip,0,Normal,1761,1126,45,5,"[1125, 1126, 1127, 1128, 1129]"
CP-12.zip,1,CP,1487,4063,68,3,"[4061, 4062, 4063]"
NCP-25.zip,2,NCP,3958,5471,38,1,[5471]
CP-15.zip,1,CP,1556,4231,40,2,"[4230, 4231]"
NCP-16.zip,2,NCP,431,2015,160,2,"[2015, 2016]"
Normal-2.zip,0,Normal,1745,1060,298,3,"[1060, 1061, 1062]"
NCP-23.zip,2,NCP,906,2448,55,1,[2448]
CP-2.zip,1,CP,11,3163,265,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-17.zip,2,NCP,487,2130,70,2,"[2129, 2130]"
CP-16.zip,1,CP,1600,4288,19,1,[4288]
NCP-21.zip,2,NCP,580,2317,139,2,"[2317, 2318]"
Normal-1.zip,0,Normal,1673,805,59,6,"[804, 805, 806, 807, 808, 809]"
CP-29.zip,1,CP,3801,5745,26,1,[5745]
Normal-1.zip,0,Normal,1726,1007,69,2,"[1007, 1008]"
NCP-29.zip,2,NCP,893,2432,25,2,"[2432, 2433]"
CP-3.zip,1,CP,1143,3361,177,1,[3361]
CP-8.zip,1,CP,1343,3726,56,2,"[3726, 3727]"
NCP-2.zip,2,NCP,115,1371,118,2,"[1371, 1372]"
NCP-11.zip,2,NCP,31,1195,57,2,"[1194, 1195]"
CP-1.zip,1,CP,1071,3114,57,2,"[3113, 3114]"
NCP-23.zip,2,NCP,951,2494,38,1,[2494]
Normal-1.zip,0,Normal,1706,967,64,2,"[967, 968]"
CP-1.zip,1,CP,0,3133,290,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-8.zip,2,NCP,262,1671,58,2,"[1670, 1671]"
Normal-10.zip,0,Normal,1943,398,94,1,[398]
NCP-8.zip,2,NCP,257,1661,64,2,"[1660, 1661]"
Normal-24.zip,0,Normal,2644,154,39,1,[154]
NCP-15.zip,2,NCP,407,1964,52,2,"[1963, 1964]"
Normal-26.zip,0,Normal,3883,5395,61,1,[5395]
NCP-9.zip,2,NCP,2685,2698,52,1,[2698]
NCP-30.zip,2,NCP,992,2545,213,1,[2545]
CP-21.zip,1,CP,596,2958,255,1,[2958]
CP-7.zip,1,CP,1314,3664,30,2,"[3663, 3664]"
NCP-16.zip,2,NCP,432,2018,54,2,"[2017, 2018]"
NCP-14.zip,2,NCP,371,1894,59,2,"[1893, 1894]"
NCP-7.zip,2,NCP,2482,2685,45,1,[2685]
Normal-1.zip,0,Normal,1679,834,66,6,"[833, 834, 835, 836, 837, 838]"
CP-29.zip,1,CP,3824,5768,23,1,[5768]
Normal-2.zip,0,Normal,1753,1089,66,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
Normal-7.zip,0,Normal,1859,314,85,1,[314]
NCP-21.zip,2,NCP,578,2313,130,2,"[2313, 2314]"
CP-10.zip,1,CP,1402,3866,55,3,"[3865, 3866, 3867]"
Normal-4.zip,0,Normal,791,226,138,1,[226]
Normal-13.zip,0,Normal,2039,494,101,1,[494]
Normal-15.zip,0,Normal,2115,570,94,1,[570]
CP-12.zip,1,CP,1470,4021,54,2,"[4020, 4021]"
CP-24.zip,1,CP,695,3057,201,1,[3057]
Normal-12.zip,0,Normal,1994,449,95,1,[449]
Normal-5.zip,0,Normal,804,239,325,1,[239]
CP-17.zip,1,CP,1623,4311,23,1,[4311]
Normal-18.zip,0,Normal,2208,663,95,1,[663]
NCP-19.zip,2,NCP,526,2209,58,2,"[2208, 2209]"
NCP-16.zip,2,NCP,45,1224,64,2,"[1223, 1224]"
Normal-1.zip,0,Normal,1679,838,70,6,"[833, 834, 835, 836, 837, 838]"
CP-2.zip,1,CP,11,3161,244,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
Normal-19.zip,0,Normal,2239,694,89,1,[694]
NCP-7.zip,2,NCP,243,1631,145,3,"[1631, 1632, 1633]"
NCP-7.zip,2,NCP,243,1633,61,3,"[1631, 1632, 1633]"
CP-18.zip,1,CP,1780,3561,63,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-11.zip,1,CP,1429,3926,52,2,"[3926, 3927]"
NCP-7.zip,2,NCP,237,1619,146,2,"[1619, 1620]"
CP-7.zip,1,CP,1319,3674,61,2,"[3674, 3675]"
NCP-28.zip,2,NCP,829,2342,36,1,[2342]
Normal-18.zip,0,Normal,2186,641,84,1,[641]
NCP-4.zip,2,NCP,141,1427,54,2,"[1426, 1427]"
Normal-16.zip,0,Normal,2127,582,84,1,[582]
Normal-1.zip,0,Normal,1723,1003,77,2,"[1003, 1004]"
CP-5.zip,1,CP,1197,3415,191,1,[3415]
CP-10.zip,1,CP,1414,3893,63,3,"[3891, 3892, 3893]"
NCP-14.zip,2,NCP,384,1920,127,2,"[1920, 1921]"
CP-7.zip,1,CP,1317,3671,116,3,"[3670, 3671, 3672]"
NCP-22.zip,2,NCP,81,1295,125,2,"[1295, 1296]"
CP-3.zip,1,CP,1156,3374,173,1,[3374]
Normal-2.zip,0,Normal,1761,1129,60,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-8.zip,2,NCP,252,1651,58,2,"[1650, 1651]"
NCP-25.zip,2,NCP,3959,5472,44,1,[5472]
Normal-11.zip,0,Normal,1988,443,90,1,[443]
CP-30.zip,1,CP,3833,5777,23,1,[5777]
NCP-26.zip,2,NCP,3985,5491,50,1,[5491]
CP-20.zip,1,CP,2668,3255,28,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-14.zip,0,Normal,2077,532,92,1,[532]
Normal-14.zip,0,Normal,2059,514,95,1,[514]
CP-29.zip,1,CP,3829,5773,26,1,[5773]
NCP-15.zip,2,NCP,402,1954,62,2,"[1953, 1954]"
CP-29.zip,1,CP,3800,5744,29,1,[5744]
CP-9.zip,1,CP,1383,3821,71,2,"[3821, 3822]"
NCP-6.zip,2,NCP,225,1594,135,2,"[1594, 1595]"
CP-27.zip,1,CP,3759,5703,23,1,[5703]
NCP-15.zip,2,NCP,423,2000,56,2,"[1999, 2000]"
CP-4.zip,1,CP,1190,3408,173,1,[3408]
NCP-11.zip,2,NCP,302,1750,152,2,"[1750, 1751]"
NCP-29.zip,2,NCP,889,2427,38,2,"[2427, 2428]"
NCP-20.zip,2,NCP,570,2299,58,2,"[2298, 2299]"
NCP-14.zip,2,NCP,375,1902,40,3,"[1901, 1902, 1903]"
Normal-19.zip,0,Normal,2238,693,91,1,[693]
NCP-2.zip,2,NCP,1273,2714,56,1,[2714]
NCP-18.zip,2,NCP,497,2151,53,2,"[2150, 2151]"
CP-25.zip,1,CP,715,3077,609,1,[3077]
CP-7.zip,1,CP,1264,3482,126,1,[3482]
CP-1.zip,1,CP,10,3157,46,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
Normal-20.zip,0,Normal,2266,721,94,1,[721]
CP-11.zip,1,CP,1433,3935,62,2,"[3934, 3935]"
NCP-18.zip,2,NCP,511,2179,56,2,"[2178, 2179]"
CP-3.zip,1,CP,1138,3356,158,1,[3356]
Normal-20.zip,0,Normal,2249,704,66,1,[704]
Normal-6.zip,0,Normal,1809,264,94,1,[264]
CP-14.zip,1,CP,1547,4210,142,3,"[4210, 4211, 4212]"
CP-21.zip,1,CP,586,2948,174,1,[2948]
CP-23.zip,1,CP,650,3012,102,1,[3012]
CP-12.zip,1,CP,1459,3995,164,3,"[3995, 3996, 3997]"
CP-14.zip,1,CP,1522,4149,61,2,"[4148, 4149]"
NCP-8.zip,2,NCP,250,1646,144,2,"[1646, 1647]"
Normal-26.zip,0,Normal,3884,5397,298,2,"[5396, 5397]"
CP-28.zip,1,CP,3773,5717,20,1,[5717]
Normal-21.zip,0,Normal,2309,764,88,1,[764]
NCP-12.zip,2,NCP,326,1801,50,2,"[1800, 1801]"
Normal-1.zip,0,Normal,1729,1017,74,2,"[1017, 1018]"
Normal-1.zip,0,Normal,1684,871,68,5,"[870, 871, 873, 874, 875]"
CP-15.zip,1,CP,1567,4254,118,2,"[4254, 4255]"
NCP-4.zip,2,NCP,163,1470,154,2,"[1470, 1471]"
Normal-1.zip,0,Normal,1705,966,69,2,"[965, 966]"
CP-11.zip,1,CP,1446,3966,63,2,"[3965, 3966]"
NCP-6.zip,2,NCP,225,1595,57,2,"[1594, 1595]"
NCP-11.zip,2,NCP,293,1732,52,2,"[1731, 1732]"
NCP-28.zip,2,NCP,839,2354,209,1,[2354]
NCP-18.zip,2,NCP,513,2182,163,2,"[2182, 2183]"
Normal-8.zip,0,Normal,1889,344,87,1,[344]
CP-2.zip,1,CP,1112,3330,154,1,[3330]
Normal-26.zip,0,Normal,3874,5386,28,1,[5386]
CP-29.zip,1,CP,3813,5757,21,1,[5757]
CP-7.zip,1,CP,1317,3670,229,3,"[3670, 3671, 3672]"
NCP-20.zip,2,NCP,553,2264,58,2,"[2263, 2264]"
CP-29.zip,1,CP,3820,5764,31,1,[5764]
NCP-17.zip,2,NCP,482,2120,58,2,"[2119, 2120]"
NCP-7.zip,2,NCP,233,1610,86,2,"[1610, 1612]"
NCP-18.zip,2,NCP,500,2157,68,2,"[2156, 2157]"
Normal-4.zip,0,Normal,799,234,118,1,[234]
NCP-16.zip,2,NCP,442,2039,53,2,"[2038, 2039]"
NCP-23.zip,2,NCP,94,1325,64,2,"[1324, 1325]"
CP-18.zip,1,CP,1780,3563,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-23.zip,2,NCP,902,2444,45,1,[2444]
CP-2.zip,1,CP,11,3162,260,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-3.zip,2,NCP,135,1415,58,2,"[1414, 1415]"
CP-8.zip,1,CP,1350,3745,55,1,[3745]
Normal-14.zip,0,Normal,2065,520,81,1,[520]
NCP-5.zip,2,NCP,188,1521,57,2,"[1520, 1521]"
Normal-2.zip,0,Normal,1745,1061,60,3,"[1060, 1061, 1062]"
NCP-15.zip,2,NCP,424,2002,64,2,"[2001, 2002]"
Normal-4.zip,0,Normal,790,225,126,1,[225]
NCP-4.zip,2,NCP,142,1429,59,2,"[1428, 1429]"
CP-7.zip,1,CP,1310,3653,51,2,"[3653, 3654]"
CP-14.zip,1,CP,1537,4182,53,3,"[4182, 4183, 4184]"
CP-17.zip,1,CP,1625,4313,26,1,[4313]
Normal-1.zip,0,Normal,1680,843,64,6,"[839, 840, 841, 842, 843, 844]"
NCP-11.zip,2,NCP,311,1769,134,2,"[1769, 1770]"
CP-1.zip,1,CP,0,3136,290,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-1.zip,1,CP,1075,3118,553,2,"[3118, 3119]"
Normal-4.zip,0,Normal,770,205,116,1,[205]
CP-7.zip,1,CP,1311,3655,160,3,"[3655, 3656, 3657]"
Normal-1.zip,0,Normal,1724,1005,55,1,[1005]
NCP-20.zip,2,NCP,563,2285,59,2,"[2284, 2285]"
NCP-4.zip,2,NCP,163,1471,65,2,"[1470, 1471]"
Normal-15.zip,0,Normal,2114,569,101,1,[569]
Normal-12.zip,0,Normal,2016,471,89,1,[471]
CP-23.zip,1,CP,657,3019,343,1,[3019]
Normal-1.zip,0,Normal,1729,1018,74,2,"[1017, 1018]"
CP-18.zip,1,CP,1780,3558,73,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-5.zip,2,NCP,183,1511,52,2,"[1510, 1511]"
CP-1.zip,1,CP,1074,3117,61,1,[3117]
Normal-8.zip,0,Normal,1870,325,88,1,[325]
CP-6.zip,1,CP,1254,3472,125,1,[3472]
CP-21.zip,1,CP,2775,3306,43,1,[3306]
CP-16.zip,1,CP,1587,4275,20,1,[4275]
NCP-26.zip,2,NCP,3984,5490,54,1,[5490]
CP-27.zip,1,CP,3747,5691,20,1,[5691]
CP-13.zip,1,CP,1495,4088,48,4,"[4086, 4087, 4088, 4089]"
CP-9.zip,1,CP,1384,3823,66,2,"[3823, 3824]"
NCP-1.zip,2,NCP,100,1338,58,2,"[1337, 1338]"
NCP-27.zip,2,NCP,1025,2595,252,1,[2595]
NCP-18.zip,2,NCP,510,2177,43,2,"[2176, 2177]"
NCP-11.zip,2,NCP,298,1743,61,2,"[1742, 1743]"
Normal-17.zip,0,Normal,2174,629,88,1,[629]
CP-23.zip,1,CP,677,3039,309,1,[3039]
Normal-21.zip,0,Normal,2284,739,80,1,[739]
Normal-18.zip,0,Normal,2193,648,85,1,[648]
CP-1.zip,1,CP,0,3135,269,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-27.zip,2,NCP,1015,2579,39,1,[2579]
NCP-6.zip,2,NCP,214,1572,144,2,"[1572, 1573]"
CP-6.zip,1,CP,1248,3466,141,1,[3466]
Normal-27.zip,0,Normal,3901,5433,66,1,[5433]
CP-13.zip,1,CP,1519,4142,68,2,"[4141, 4142]"
NCP-14.zip,2,NCP,385,1922,64,1,[1922]
CP-7.zip,1,CP,1311,3657,67,3,"[3655, 3656, 3657]"
CP-14.zip,1,CP,1547,4212,58,3,"[4210, 4211, 4212]"
CP-4.zip,1,CP,1186,3404,204,1,[3404]
CP-14.zip,1,CP,1526,4159,51,3,"[4157, 4158, 4159]"
NCP-4.zip,2,NCP,165,1474,131,2,"[1474, 1475]"
CP-1.zip,1,CP,10,3160,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-3.zip,1,CP,1157,3375,204,1,[3375]
NCP-11.zip,2,NCP,307,1762,57,2,"[1761, 1762]"
CP-11.zip,1,CP,1441,3952,53,3,"[3951, 3952, 3953]"
NCP-21.zip,2,NCP,63,1259,139,2,"[1259, 1260]"
Normal-6.zip,0,Normal,1806,261,100,1,[261]
CP-1.zip,1,CP,0,3131,285,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-17.zip,1,CP,1627,4315,26,1,[4315]
Normal-14.zip,0,Normal,2064,519,91,1,[519]
NCP-5.zip,2,NCP,180,1505,57,2,"[1504, 1505]"
Normal-16.zip,0,Normal,2134,589,72,1,[589]
Normal-14.zip,0,Normal,2063,518,99,1,[518]
CP-11.zip,1,CP,1451,3975,51,2,"[3975, 3976]"
Normal-24.zip,0,Normal,2647,157,34,1,[157]
NCP-21.zip,2,NCP,66,1265,58,1,[1265]
Normal-25.zip,0,Normal,3843,5355,180,1,[5355]
NCP-12.zip,2,NCP,336,1820,117,2,"[1820, 1821]"
CP-25.zip,1,CP,729,3091,106,1,[3091]
CP-20.zip,1,CP,2668,3256,53,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-6.zip,2,NCP,200,1544,123,2,"[1544, 1545]"
Normal-1.zip,0,Normal,1685,879,65,4,"[877, 878, 879, 880]"
NCP-24.zip,2,NCP,972,2515,120,1,[2515]
CP-14.zip,1,CP,1547,4211,58,3,"[4210, 4211, 4212]"
CP-18.zip,1,CP,1775,3530,58,4,"[3530, 3531, 3532, 3533]"
CP-11.zip,1,CP,1427,3921,43,2,"[3921, 3922]"
CP-18.zip,1,CP,1776,3534,64,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-13.zip,2,NCP,368,1888,54,2,"[1887, 1888]"
CP-23.zip,1,CP,644,3006,134,1,[3006]
CP-7.zip,1,CP,1312,3659,65,2,"[3658, 3659]"
NCP-4.zip,2,NCP,139,1422,132,2,"[1422, 1423]"
NCP-15.zip,2,NCP,422,1998,63,2,"[1997, 1998]"
CP-10.zip,1,CP,1391,3842,59,4,"[3839, 3840, 3841, 3842]"
CP-11.zip,1,CP,1441,3953,53,3,"[3951, 3952, 3953]"
NCP-4.zip,2,NCP,154,1452,110,2,"[1452, 1453]"
NCP-6.zip,2,NCP,202,1549,67,2,"[1548, 1549]"
CP-11.zip,1,CP,1436,3941,45,2,"[3940, 3941]"
NCP-16.zip,2,NCP,431,2016,67,2,"[2015, 2016]"
Normal-26.zip,0,Normal,3870,5382,30,1,[5382]
Normal-17.zip,0,Normal,2159,614,89,1,[614]
CP-11.zip,1,CP,1427,3922,43,2,"[3921, 3922]"
NCP-7.zip,2,NCP,235,1616,58,2,"[1615, 1616]"
CP-11.zip,1,CP,1418,3902,54,3,"[3900, 3901, 3902]"
CP-6.zip,1,CP,1228,3446,307,1,[3446]
NCP-15.zip,2,NCP,422,1997,156,2,"[1997, 1998]"
Normal-1.zip,0,Normal,1679,836,67,6,"[833, 834, 835, 836, 837, 838]"
CP-16.zip,1,CP,1604,4292,22,1,[4292]
CP-4.zip,1,CP,1179,3397,153,1,[3397]
NCP-6.zip,2,NCP,221,1586,125,2,"[1586, 1587]"
CP-18.zip,1,CP,1780,3564,41,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-4.zip,2,NCP,139,1423,56,2,"[1422, 1423]"
Normal-1.zip,0,Normal,1685,880,65,4,"[877, 878, 879, 880]"
CP-18.zip,1,CP,1780,3557,73,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-7.zip,1,CP,1269,3487,172,1,[3487]
Normal-1.zip,0,Normal,1680,841,69,6,"[839, 840, 841, 842, 843, 844]"
CP-13.zip,1,CP,1491,4074,113,3,"[4074, 4075, 4076]"
NCP-13.zip,2,NCP,344,1840,63,2,"[1839, 1840]"
NCP-17.zip,2,NCP,476,2108,53,2,"[2107, 2108]"
Normal-12.zip,0,Normal,1997,452,104,1,[452]
Normal-2.zip,0,Normal,1745,1062,60,3,"[1060, 1061, 1062]"
Normal-19.zip,0,Normal,2224,679,82,1,[679]
CP-2.zip,1,CP,1101,3319,187,1,[3319]
Normal-26.zip,0,Normal,3873,5385,25,1,[5385]
CP-15.zip,1,CP,1578,4266,22,1,[4266]
Normal-22.zip,0,Normal,2591,101,37,1,[101]
Normal-11.zip,0,Normal,1966,421,90,1,[421]
NCP-17.zip,2,NCP,480,2115,139,2,"[2115, 2116]"
CP-19.zip,1,CP,2,3503,34,1,[3503]
NCP-7.zip,2,NCP,236,1618,119,2,"[1617, 1618]"
CP-16.zip,1,CP,1616,4304,29,1,[4304]
CP-11.zip,1,CP,1439,3946,62,2,"[3946, 3947]"
CP-10.zip,1,CP,1410,3883,51,2,"[3883, 3884]"
CP-24.zip,1,CP,701,3063,66,1,[3063]
NCP-6.zip,2,NCP,200,1545,52,2,"[1544, 1545]"
CP-1.zip,1,CP,10,3155,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-4.zip,2,NCP,160,1464,146,2,"[1464, 1465]"
Normal-8.zip,0,Normal,1890,345,99,1,[345]
NCP-9.zip,2,NCP,2694,2660,39,1,[2660]
CP-30.zip,1,CP,3930,5628,62,2,"[5628, 5629]"
CP-25.zip,1,CP,9,3149,290,4,"[3148, 3149, 3150, 3151]"
Normal-13.zip,0,Normal,2022,477,92,1,[477]
Normal-1.zip,0,Normal,1680,842,69,6,"[839, 840, 841, 842, 843, 844]"
NCP-7.zip,2,NCP,229,1603,65,2,"[1602, 1603]"
Normal-1.zip,0,Normal,1712,979,70,1,[979]
Normal-12.zip,0,Normal,2002,457,96,1,[457]
CP-6.zip,1,CP,1233,3451,150,1,[3451]
NCP-18.zip,2,NCP,489,2135,58,2,"[2134, 2135]"
CP-7.zip,1,CP,1310,3654,51,2,"[3653, 3654]"
CP-22.zip,1,CP,636,2998,102,1,[2998]
NCP-21.zip,2,NCP,70,1273,51,2,"[1272, 1273]"
Normal-23.zip,0,Normal,2603,113,41,1,[113]
CP-8.zip,1,CP,1323,3683,62,2,"[3682, 3683]"
Normal-20.zip,0,Normal,2274,729,85,1,[729]
NCP-29.zip,2,NCP,889,2428,121,2,"[2427, 2428]"
NCP-1.zip,2,NCP,1040,2611,113,1,[2611]
CP-1.zip,1,CP,0,3139,39,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
Normal-21.zip,0,Normal,2298,753,80,1,[753]
CP-19.zip,1,CP,1792,3215,71,2,"[3214, 3215]"
Normal-27.zip,0,Normal,3916,5459,77,1,[5459]
Normal-21.zip,0,Normal,2311,766,91,1,[766]
CP-1.zip,1,CP,0,3132,42,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
Normal-1.zip,0,Normal,1708,972,74,2,"[971, 972]"
NCP-13.zip,2,NCP,343,1837,130,2,"[1837, 1838]"
NCP-26.zip,2,NCP,3989,5513,45,1,[5513]
CP-12.zip,1,CP,1459,3997,69,3,"[3995, 3996, 3997]"
CP-13.zip,1,CP,1495,4086,112,4,"[4086, 4087, 4088, 4089]"
Normal-5.zip,0,Normal,812,247,126,1,[247]
Normal-15.zip,0,Normal,2098,553,84,1,[553]
Normal-16.zip,0,Normal,2119,574,93,1,[574]
CP-25.zip,1,CP,731,3093,82,1,[3093]
CP-16.zip,1,CP,1597,4285,23,1,[4285]
CP-26.zip,1,CP,3726,5662,232,1,[5662]
CP-4.zip,1,CP,1183,3401,294,1,[3401]
CP-10.zip,1,CP,1391,3839,59,4,"[3839, 3840, 3841, 3842]"
NCP-23.zip,2,NCP,901,2443,320,1,[2443]
Normal-11.zip,0,Normal,1957,412,78,1,[412]
NCP-18.zip,2,NCP,504,2164,155,2,"[2164, 2165]"
NCP-17.zip,2,NCP,474,2104,48,2,"[2103, 2104]"
NCP-9.zip,2,NCP,2698,2664,57,1,[2664]
NCP-7.zip,2,NCP,233,1612,45,2,"[1610, 1612]"
NCP-9.zip,2,NCP,2686,2699,48,1,[2699]
CP-18.zip,1,CP,1776,3537,75,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
CP-3.zip,1,CP,1158,3376,193,1,[3376]
CP-27.zip,1,CP,3755,5699,23,1,[5699]
CP-13.zip,1,CP,1509,4120,59,3,"[4118, 4119, 4120]"
NCP-29.zip,2,NCP,910,2452,76,1,[2452]
CP-2.zip,1,CP,11,3166,274,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-16.zip,2,NCP,433,2020,51,2,"[2019, 2020]"
Normal-26.zip,0,Normal,3863,5375,231,1,[5375]
Normal-7.zip,0,Normal,1851,306,102,1,[306]
NCP-23.zip,2,NCP,917,2459,272,1,[2459]
NCP-26.zip,2,NCP,3986,5492,42,1,[5492]
CP-12.zip,1,CP,1478,4037,53,2,"[4037, 4038]"
NCP-2.zip,2,NCP,115,1372,50,2,"[1371, 1372]"
NCP-13.zip,2,NCP,362,1875,151,2,"[1875, 1876]"
Normal-22.zip,0,Normal,2592,102,39,1,[102]
CP-9.zip,1,CP,1357,3758,61,3,"[3758, 3759, 3760]"
Normal-6.zip,0,Normal,1825,280,81,1,[280]
Normal-4.zip,0,Normal,775,210,134,1,[210]
NCP-13.zip,2,NCP,365,1881,117,2,"[1881, 1882]"
CP-24.zip,1,CP,709,3071,302,1,[3071]
CP-17.zip,1,CP,1630,4318,23,1,[4318]
CP-15.zip,1,CP,1557,4232,43,2,"[4232, 4233]"
NCP-23.zip,2,NCP,956,2499,156,1,[2499]
CP-2.zip,1,CP,1106,3324,164,1,[3324]
Normal-9.zip,0,Normal,1895,350,92,1,[350]
CP-21.zip,1,CP,599,2961,68,1,[2961]
Normal-1.zip,0,Normal,1720,996,74,2,"[995, 996]"
NCP-16.zip,2,NCP,448,2051,58,2,"[2050, 2051]"
CP-5.zip,1,CP,1206,3424,176,1,[3424]
CP-26.zip,1,CP,3648,5540,170,1,[5540]
CP-1.zip,1,CP,1091,3309,354,1,[3309]
NCP-10.zip,2,NCP,2713,2706,39,1,[2706]
NCP-30.zip,2,NCP,949,2492,42,1,[2492]
NCP-17.zip,2,NCP,480,2116,58,2,"[2115, 2116]"
CP-7.zip,1,CP,1306,3643,48,3,"[3642, 3643, 3644]"
Normal-7.zip,0,Normal,1840,295,108,1,[295]
CP-18.zip,1,CP,1780,3562,63,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-1.zip,2,NCP,1011,2575,111,2,"[2574, 2575]"
Normal-16.zip,0,Normal,2132,587,97,1,[587]
CP-29.zip,1,CP,3814,5758,29,1,[5758]
CP-18.zip,1,CP,1768,3175,175,1,[3175]
Normal-13.zip,0,Normal,2028,483,89,1,[483]
NCP-16.zip,2,NCP,454,2062,139,2,"[2062, 2063]"
CP-8.zip,1,CP,1333,3706,52,2,"[3705, 3706]"
CP-25.zip,1,CP,737,3099,84,1,[3099]
NCP-9.zip,2,NCP,2683,2653,46,1,[2653]
Normal-11.zip,0,Normal,1958,413,90,1,[413]
Normal-7.zip,0,Normal,1855,310,86,1,[310]
NCP-10.zip,2,NCP,282,1710,120,2,"[1710, 1711]"
NCP-8.zip,2,NCP,252,1650,139,2,"[1650, 1651]"
NCP-3.zip,2,NCP,133,1411,41,2,"[1410, 1411]"
CP-21.zip,1,CP,588,2950,116,1,[2950]
Normal-15.zip,0,Normal,2094,549,78,1,[549]
NCP-20.zip,2,NCP,562,2282,113,2,"[2282, 2283]"
Normal-5.zip,0,Normal,806,241,104,1,[241]
CP-3.zip,1,CP,1145,3363,169,1,[3363]
NCP-28.zip,2,NCP,847,2365,53,1,[2365]
NCP-4.zip,2,NCP,143,1431,54,2,"[1430, 1431]"
CP-19.zip,1,CP,1786,3192,81,3,"[3192, 3193, 3194]"
NCP-15.zip,2,NCP,407,1963,124,2,"[1963, 1964]"
Normal-6.zip,0,Normal,1817,272,85,1,[272]
CP-32.zip,1,CP,1089,3224,90,1,[3224]
NCP-22.zip,2,NCP,834,2347,194,2,"[2347, 2348]"
CP-9.zip,1,CP,1381,3816,66,3,"[3815, 3816, 3817]"
Normal-8.zip,0,Normal,1866,321,75,1,[321]
NCP-22.zip,2,NCP,86,1306,50,2,"[1305, 1306]"
CP-26.zip,1,CP,3725,5660,251,2,"[5660, 5661]"
NCP-18.zip,2,NCP,497,2150,126,2,"[2150, 2151]"
NCP-27.zip,2,NCP,1043,2615,45,1,[2615]
CP-4.zip,1,CP,1167,3385,149,1,[3385]
Normal-4.zip,0,Normal,782,217,340,1,[217]
NCP-15.zip,2,NCP,421,1995,161,2,"[1995, 1996]"
Normal-9.zip,0,Normal,1897,352,88,1,[352]
NCP-13.zip,2,NCP,365,1882,50,2,"[1881, 1882]"
CP-1.zip,1,CP,1067,3106,62,1,[3106]
CP-22.zip,1,CP,642,3004,128,1,[3004]
CP-20.zip,1,CP,2668,3258,52,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-10.zip,1,CP,1406,3875,60,2,"[3874, 3875]"
CP-1.zip,1,CP,10,3158,285,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-21.zip,2,NCP,60,1254,59,2,"[1253, 1254]"
Normal-26.zip,0,Normal,3884,5396,62,2,"[5396, 5397]"
NCP-25.zip,2,NCP,3710,5537,66,1,[5537]
CP-9.zip,1,CP,1371,3795,60,3,"[3794, 3795, 3796]"
CP-20.zip,1,CP,2450,2928,92,2,"[2928, 2929]"
NCP-4.zip,2,NCP,166,1476,139,2,"[1476, 1477]"
NCP-20.zip,2,NCP,554,2266,54,2,"[2265, 2266]"
NCP-18.zip,2,NCP,491,2139,62,2,"[2138, 2139]"
CP-2.zip,1,CP,1098,3316,171,1,[3316]
CP-12.zip,1,CP,1465,4010,67,2,"[4009, 4010]"
NCP-20.zip,2,NCP,548,2254,61,2,"[2253, 2254]"
Normal-16.zip,0,Normal,2150,605,88,1,[605]
Normal-1.zip,0,Normal,1678,830,34,6,"[827, 828, 829, 830, 831, 832]"
NCP-16.zip,2,NCP,451,2056,51,3,"[2056, 2057, 2058]"
Normal-11.zip,0,Normal,1965,420,88,1,[420]
NCP-1.zip,2,NCP,101,1339,136,2,"[1339, 1340]"
Normal-12.zip,0,Normal,2008,463,92,1,[463]
CP-10.zip,1,CP,1402,3867,55,3,"[3865, 3866, 3867]"
NCP-2.zip,2,NCP,122,1386,62,2,"[1385, 1386]"
CP-20.zip,1,CP,2457,2941,108,1,[2941]
NCP-14.zip,2,NCP,38,1208,137,2,"[1208, 1209]"
Normal-10.zip,0,Normal,1933,388,103,1,[388]
CP-1.zip,1,CP,10,3152,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-20.zip,2,NCP,562,2283,48,2,"[2282, 2283]"
NCP-12.zip,2,NCP,335,1819,55,2,"[1818, 1819]"
NCP-21.zip,2,NCP,579,2316,63,2,"[2315, 2316]"
Normal-7.zip,0,Normal,1856,311,80,1,[311]
NCP-18.zip,2,NCP,506,2169,51,2,"[2168, 2169]"
CP-8.zip,1,CP,1339,3719,59,2,"[3718, 3719]"
CP-18.zip,1,CP,1652,4340,25,1,[4340]
NCP-11.zip,2,NCP,296,1737,139,2,"[1737, 1738]"
Normal-8.zip,0,Normal,1886,341,84,1,[341]
NCP-8.zip,2,NCP,250,1647,60,2,"[1646, 1647]"
CP-26.zip,1,CP,3720,5652,48,2,"[5652, 5653]"
CP-14.zip,1,CP,1537,4184,53,3,"[4182, 4183, 4184]"
NCP-17.zip,2,NCP,486,2128,64,2,"[2127, 2128]"
CP-8.zip,1,CP,1335,3711,62,3,"[3709, 3710, 3711]"
CP-27.zip,1,CP,3739,5683,19,1,[5683]
NCP-25.zip,2,NCP,3950,5464,41,1,[5464]
CP-12.zip,1,CP,1474,4029,62,2,"[4029, 4030]"
Normal-10.zip,0,Normal,1946,401,93,1,[401]
CP-19.zip,1,CP,1786,3193,81,3,"[3192, 3193, 3194]"
NCP-30.zip,2,NCP,947,2490,41,1,[2490]
NCP-14.zip,2,NCP,371,1893,141,2,"[1893, 1894]"
NCP-8.zip,2,NCP,2676,2694,54,1,[2694]
NCP-1.zip,2,NCP,1011,2574,117,2,"[2574, 2575]"
Normal-9.zip,0,Normal,1906,361,93,1,[361]
NCP-4.zip,2,NCP,147,1439,72,2,"[1438, 1439]"
CP-12.zip,1,CP,1485,4058,49,3,"[4056, 4057, 4058]"
Normal-7.zip,0,Normal,1838,293,86,1,[293]
CP-25.zip,1,CP,9,3150,72,4,"[3148, 3149, 3150, 3151]"
NCP-12.zip,2,NCP,330,1809,64,2,"[1808, 1809]"
NCP-8.zip,2,NCP,267,1681,54,2,"[1680, 1681]"
NCP-20.zip,2,NCP,553,2263,137,2,"[2263, 2264]"
NCP-29.zip,2,NCP,893,2433,24,2,"[2432, 2433]"
NCP-21.zip,2,NCP,582,2321,128,2,"[2321, 2322]"
Normal-24.zip,0,Normal,2642,152,38,1,[152]
CP-25.zip,1,CP,726,3088,183,1,[3088]
NCP-5.zip,2,NCP,171,1487,60,2,"[1486, 1487]"
CP-22.zip,1,CP,632,2994,132,1,[2994]
Normal-7.zip,0,Normal,1850,305,99,1,[305]
NCP-30.zip,2,NCP,945,2488,45,1,[2488]
Normal-19.zip,0,Normal,2244,699,98,1,[699]
CP-1.zip,1,CP,1073,3116,52,1,[3116]
Normal-21.zip,0,Normal,2310,765,91,1,[765]
CP-1.zip,1,CP,10,3153,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-1.zip,1,CP,1075,3119,70,2,"[3118, 3119]"
CP-12.zip,1,CP,1470,4020,54,2,"[4020, 4021]"
NCP-26.zip,2,NCP,3997,5519,56,1,[5519]
NCP-10.zip,2,NCP,274,1694,160,2,"[1694, 1695]"
Normal-15.zip,0,Normal,2089,544,98,1,[544]
CP-24.zip,1,CP,681,3043,102,1,[3043]
NCP-20.zip,2,NCP,573,2305,63,2,"[2304, 2305]"
CP-15.zip,1,CP,1557,4233,43,2,"[4232, 4233]"
NCP-30.zip,2,NCP,990,2543,59,1,[2543]
CP-7.zip,1,CP,1305,3640,20,2,"[3640, 3641]"
NCP-5.zip,2,NCP,183,1510,123,2,"[1510, 1511]"
CP-15.zip,1,CP,1582,4270,20,1,[4270]
CP-29.zip,1,CP,3817,5761,25,1,[5761]
NCP-20.zip,2,NCP,56,1245,164,2,"[1245, 1246]"
NCP-21.zip,2,NCP,58,1250,55,2,"[1249, 1250]"
CP-8.zip,1,CP,1335,3710,62,3,"[3709, 3710, 3711]"
Normal-3.zip,0,Normal,1766,1149,60,3,"[1149, 1150, 1151]"
NCP-10.zip,2,NCP,2716,2709,49,1,[2709]
CP-10.zip,1,CP,1402,3865,131,3,"[3865, 3866, 3867]"
CP-10.zip,1,CP,1391,3841,59,4,"[3839, 3840, 3841, 3842]"
Normal-22.zip,0,Normal,2594,104,42,1,[104]
CP-26.zip,1,CP,3733,5675,174,3,"[5673, 5674, 5675]"
Normal-25.zip,0,Normal,3715,5345,30,1,[5345]
Normal-3.zip,0,Normal,762,197,363,1,[197]
NCP-15.zip,2,NCP,420,1994,71,2,"[1993, 1994]"
CP-13.zip,1,CP,1489,4070,58,4,"[4067, 4068, 4069, 4070]"
Normal-12.zip,0,Normal,1996,451,90,1,[451]
NCP-13.zip,2,NCP,361,1874,60,2,"[1873, 1874]"
NCP-22.zip,2,NCP,885,2423,195,2,"[2422, 2423]"
NCP-29.zip,2,NCP,921,2463,36,1,[2463]
Normal-25.zip,0,Normal,3848,5360,192,1,[5360]
CP-28.zip,1,CP,3776,5720,30,1,[5720]
NCP-15.zip,2,NCP,402,1953,148,2,"[1953, 1954]"
Normal-19.zip,0,Normal,2232,687,99,1,[687]
CP-11.zip,1,CP,1447,3968,63,2,"[3967, 3968]"
Normal-17.zip,0,Normal,2176,631,91,1,[631]
NCP-12.zip,2,NCP,315,1778,46,2,"[1777, 1778]"
CP-2.zip,1,CP,1102,3320,182,1,[3320]
NCP-14.zip,2,NCP,373,1897,122,2,"[1897, 1898]"
NCP-17.zip,2,NCP,483,2121,137,2,"[2121, 2122]"
CP-4.zip,1,CP,1175,3393,189,1,[3393]
NCP-14.zip,2,NCP,392,1934,143,2,"[1934, 1935]"
CP-8.zip,1,CP,1321,3679,58,2,"[3678, 3679]"
NCP-16.zip,2,NCP,430,2013,152,2,"[2013, 2014]"
NCP-26.zip,2,NCP,3988,5512,53,1,[5512]
Normal-22.zip,0,Normal,2316,771,92,1,[771]
CP-14.zip,1,CP,1531,4170,59,2,"[4169, 4170]"
Normal-3.zip,0,Normal,748,183,261,1,[183]
NCP-23.zip,2,NCP,943,2486,334,1,[2486]
Normal-18.zip,0,Normal,2202,657,82,1,[657]
CP-27.zip,1,CP,3735,5679,26,1,[5679]
NCP-15.zip,2,NCP,409,1967,153,2,"[1967, 1968]"
CP-4.zip,1,CP,1171,3389,180,1,[3389]
CP-11.zip,1,CP,1452,3977,56,2,"[3977, 3978]"
Normal-1.zip,0,Normal,1684,875,71,5,"[870, 871, 873, 874, 875]"
CP-8.zip,1,CP,1333,3705,52,2,"[3705, 3706]"
NCP-3.zip,2,NCP,135,1414,138,2,"[1414, 1415]"
NCP-25.zip,2,NCP,3965,5506,53,1,[5506]
NCP-8.zip,2,NCP,258,1662,135,2,"[1662, 1663]"
Normal-10.zip,0,Normal,1926,381,87,1,[381]
CP-16.zip,1,CP,1596,4284,22,1,[4284]
CP-14.zip,1,CP,1554,4226,41,2,"[4226, 4227]"
CP-26.zip,1,CP,3645,5605,38,1,[5605]
CP-2.zip,1,CP,1110,3328,143,1,[3328]
NCP-22.zip,2,NCP,81,1296,53,2,"[1295, 1296]"
Normal-1.zip,0,Normal,1685,877,65,4,"[877, 878, 879, 880]"
NCP-29.zip,2,NCP,923,2465,19,1,[2465]
NCP-14.zip,2,NCP,399,1948,149,2,"[1948, 1949]"
NCP-18.zip,2,NCP,510,2176,102,2,"[2176, 2177]"
NCP-20.zip,2,NCP,558,2274,51,2,"[2273, 2274]"
Normal-1.zip,0,Normal,1678,832,62,6,"[827, 828, 829, 830, 831, 832]"
Normal-2.zip,0,Normal,1762,1131,70,2,"[1130, 1131]"
CP-19.zip,1,CP,2434,2898,102,3,"[2898, 2899, 2900]"
Normal-19.zip,0,Normal,2219,674,106,1,[674]
Normal-8.zip,0,Normal,1869,324,94,1,[324]
NCP-21.zip,2,NCP,70,1272,120,2,"[1272, 1273]"
NCP-10.zip,2,NCP,2710,2703,48,1,[2703]
Normal-9.zip,0,Normal,1904,359,94,1,[359]
NCP-20.zip,2,NCP,564,2287,60,2,"[2286, 2287]"
NCP-15.zip,2,NCP,424,2001,161,2,"[2001, 2002]"
CP-14.zip,1,CP,1529,4166,42,3,"[4165, 4166, 4167]"
Normal-16.zip,0,Normal,2138,593,72,1,[593]
CP-16.zip,1,CP,1613,4301,27,1,[4301]
CP-24.zip,1,CP,697,3059,114,1,[3059]
CP-10.zip,1,CP,1390,3836,215,3,"[3836, 3837, 3838]"
Normal-6.zip,0,Normal,1805,260,79,1,[260]
CP-10.zip,1,CP,1390,3837,56,3,"[3836, 3837, 3838]"
CP-3.zip,1,CP,1150,3368,214,1,[3368]
CP-2.zip,1,CP,1116,3334,183,1,[3334]
Normal-14.zip,0,Normal,2057,512,78,1,[512]
NCP-19.zip,2,NCP,532,2223,58,2,"[2222, 2223]"
CP-29.zip,1,CP,3810,5754,24,1,[5754]
CP-14.zip,1,CP,1539,4188,131,3,"[4188, 4189, 4190]"
CP-10.zip,1,CP,1385,3826,64,2,"[3825, 3826]"
NCP-29.zip,2,NCP,929,2471,21,1,[2471]
NCP-28.zip,2,NCP,856,2377,229,2,"[2376, 2377]"
NCP-15.zip,2,NCP,408,1966,55,2,"[1965, 1966]"
NCP-16.zip,2,NCP,43,1219,156,2,"[1219, 1220]"
CP-7.zip,1,CP,1319,3675,61,2,"[3674, 3675]"
NCP-1.zip,2,NCP,1022,2591,48,1,[2591]
Normal-20.zip,0,Normal,2254,709,75,1,[709]
NCP-22.zip,2,NCP,862,2385,33,1,[2385]
CP-29.zip,1,CP,3812,5756,27,1,[5756]
CP-11.zip,1,CP,1447,3967,63,2,"[3967, 3968]"
CP-15.zip,1,CP,1556,4230,40,2,"[4230, 4231]"
CP-1.zip,1,CP,1080,3125,64,1,[3125]
Normal-4.zip,0,Normal,778,213,114,1,[213]
CP-14.zip,1,CP,1529,4167,42,3,"[4165, 4166, 4167]"
CP-2.zip,1,CP,11,3167,283,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-20.zip,2,NCP,549,2256,36,2,"[2255, 2256]"
NCP-3.zip,2,NCP,1292,2733,66,1,[2733]
Normal-13.zip,0,Normal,2047,502,93,1,[502]
NCP-20.zip,2,NCP,549,2255,83,2,"[2255, 2256]"
CP-15.zip,1,CP,1563,4246,122,3,"[4245, 4246, 4247]"
NCP-25.zip,2,NCP,3956,5469,49,1,[5469]
NCP-22.zip,2,NCP,833,2346,484,1,[2346]
NCP-18.zip,2,NCP,499,2154,139,2,"[2154, 2155]"
CP-12.zip,1,CP,1487,4061,163,3,"[4061, 4062, 4063]"
CP-7.zip,1,CP,1306,3642,52,3,"[3642, 3643, 3644]"
NCP-17.zip,2,NCP,47,1228,58,2,"[1227, 1228]"
CP-8.zip,1,CP,1338,3716,67,2,"[3716, 3717]"
Normal-25.zip,0,Normal,3711,5341,27,1,[5341]
NCP-16.zip,2,NCP,452,2059,63,1,[2059]
Normal-23.zip,0,Normal,2604,114,36,1,[114]
NCP-28.zip,2,NCP,849,2368,224,1,[2368]
NCP-29.zip,2,NCP,886,2424,52,1,[2424]
NCP-28.zip,2,NCP,875,2408,218,1,[2408]
NCP-20.zip,2,NCP,573,2304,151,2,"[2304, 2305]"
NCP-22.zip,2,NCP,83,1300,70,2,"[1299, 1300]"
Normal-14.zip,0,Normal,2056,511,84,1,[511]
Normal-7.zip,0,Normal,1844,299,93,1,[299]
CP-13.zip,1,CP,1494,4083,154,3,"[4083, 4084, 4085]"
CP-5.zip,1,CP,1201,3419,171,1,[3419]
NCP-23.zip,2,NCP,897,2438,40,1,[2438]
Normal-27.zip,0,Normal,3914,5456,55,2,"[5456, 5457]"
CP-9.zip,1,CP,1354,3751,181,3,"[3751, 3752, 3753]"
NCP-29.zip,2,NCP,899,2440,34,2,"[2440, 2441]"
CP-10.zip,1,CP,1414,3891,151,3,"[3891, 3892, 3893]"
CP-14.zip,1,CP,1543,4202,57,3,"[4200, 4201, 4202]"
Normal-25.zip,0,Normal,3837,5349,208,1,[5349]
NCP-10.zip,2,NCP,272,1691,64,2,"[1690, 1691]"
Normal-9.zip,0,Normal,1905,360,93,1,[360]
CP-8.zip,1,CP,1340,3721,64,2,"[3720, 3721]"
NCP-5.zip,2,NCP,19,1170,146,2,"[1170, 1171]"
Normal-2.zip,0,Normal,1738,1041,75,1,[1041]
NCP-2.zip,2,NCP,108,1354,58,2,"[1353, 1354]"
Normal-25.zip,0,Normal,3844,5356,201,1,[5356]
CP-20.zip,1,CP,2459,2945,108,1,[2945]
CP-10.zip,1,CP,1414,3892,63,3,"[3891, 3892, 3893]"
Normal-18.zip,0,Normal,2201,656,66,1,[656]
NCP-21.zip,2,NCP,78,1289,166,2,"[1289, 1290]"
CP-18.zip,1,CP,1776,3539,76,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-1.zip,2,NCP,1010,2572,126,2,"[2572, 2573]"
CP-10.zip,1,CP,1409,3882,66,2,"[3881, 3882]"
CP-11.zip,1,CP,1441,3951,203,3,"[3951, 3952, 3953]"
CP-13.zip,1,CP,1512,4125,50,2,"[4125, 4126]"
CP-30.zip,1,CP,3934,5640,53,3,"[5638, 5639, 5640]"
NCP-4.zip,2,NCP,143,1430,128,2,"[1430, 1431]"
Normal-17.zip,0,Normal,2166,621,93,1,[621]
NCP-22.zip,2,NCP,83,1299,167,2,"[1299, 1300]"
CP-29.zip,1,CP,3804,5748,29,1,[5748]
CP-22.zip,1,CP,624,2986,90,1,[2986]
NCP-7.zip,2,NCP,231,1607,58,2,"[1606, 1607]"
NCP-8.zip,2,NCP,258,1663,57,2,"[1662, 1663]"
Normal-10.zip,0,Normal,1956,411,89,1,[411]
NCP-4.zip,2,NCP,165,1475,55,2,"[1474, 1475]"
Normal-2.zip,0,Normal,1753,1091,60,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
CP-6.zip,1,CP,1247,3465,218,1,[3465]
CP-17.zip,1,CP,1644,4332,23,1,[4332]
NCP-5.zip,2,NCP,188,1520,134,2,"[1520, 1521]"
CP-13.zip,1,CP,1509,4118,233,3,"[4118, 4119, 4120]"
CP-19.zip,1,CP,2434,2899,102,3,"[2898, 2899, 2900]"
Normal-27.zip,0,Normal,3914,5457,55,2,"[5456, 5457]"
NCP-3.zip,2,NCP,133,1410,100,2,"[1410, 1411]"
CP-24.zip,1,CP,690,3052,134,1,[3052]
NCP-6.zip,2,NCP,208,1560,134,2,"[1560, 1561]"
Normal-26.zip,0,Normal,3872,5384,29,1,[5384]
CP-7.zip,1,CP,1258,3476,202,1,[3476]
NCP-4.zip,2,NCP,154,1453,47,2,"[1452, 1453]"
CP-8.zip,1,CP,1335,3709,207,3,"[3709, 3710, 3711]"
CP-7.zip,1,CP,1305,3641,50,2,"[3640, 3641]"
CP-25.zip,1,CP,716,3078,640,1,[3078]
Normal-2.zip,0,Normal,1761,1125,45,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-14.zip,2,NCP,38,1209,57,2,"[1208, 1209]"
Normal-1.zip,0,Normal,1685,878,65,4,"[877, 878, 879, 880]"
NCP-17.zip,2,NCP,467,2090,58,2,"[2089, 2090]"
CP-14.zip,1,CP,1539,4189,54,3,"[4188, 4189, 4190]"
NCP-16.zip,2,NCP,454,2063,58,2,"[2062, 2063]"
CP-13.zip,1,CP,1491,4076,48,3,"[4074, 4075, 4076]"
Normal-4.zip,0,Normal,794,229,341,1,[229]
NCP-19.zip,2,NCP,521,2199,58,2,"[2198, 2199]"
CP-7.zip,1,CP,1311,3656,67,3,"[3655, 3656, 3657]"
Normal-22.zip,0,Normal,2584,94,44,1,[94]
CP-23.zip,1,CP,678,3040,46,1,[3040]
CP-14.zip,1,CP,1539,4190,54,3,"[4188, 4189, 4190]"
CP-30.zip,1,CP,3937,5644,55,2,"[5643, 5644]"
NCP-15.zip,2,NCP,427,2007,132,2,"[2007, 2008]"
NCP-28.zip,2,NCP,843,2358,279,1,[2358]
NCP-14.zip,2,NCP,375,1903,49,3,"[1901, 1902, 1903]"
NCP-11.zip,2,NCP,306,1759,153,2,"[1759, 1760]"
NCP-16.zip,2,NCP,44,1221,124,2,"[1221, 1222]"
NCP-8.zip,2,NCP,256,1659,58,2,"[1658, 1659]"
CP-8.zip,1,CP,1338,3717,67,2,"[3716, 3717]"
CP-18.zip,1,CP,1780,3553,67,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-7.zip,1,CP,1267,3485,151,1,[3485]
CP-13.zip,1,CP,1509,4119,118,3,"[4118, 4119, 4120]"
Normal-3.zip,0,Normal,1766,1151,62,3,"[1149, 1150, 1151]"
CP-10.zip,1,CP,1405,3873,60,2,"[3872, 3873]"
CP-1.zip,1,CP,1079,3124,63,1,[3124]
CP-18.zip,1,CP,1780,3559,69,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
Normal-7.zip,0,Normal,1852,307,94,1,[307]
NCP-5.zip,2,NCP,194,1533,56,2,"[1532, 1533]"
CP-5.zip,1,CP,1195,3413,247,1,[3413]
NCP-20.zip,2,NCP,556,2270,53,2,"[2269, 2270]"
NCP-2.zip,2,NCP,108,1353,139,2,"[1353, 1354]"
NCP-16.zip,2,NCP,445,2045,58,2,"[2044, 2045]"
CP-13.zip,1,CP,1512,4126,50,2,"[4125, 4126]"
NCP-21.zip,2,NCP,64,1262,55,2,"[1261, 1262]"
CP-5.zip,1,CP,1211,3429,143,1,[3429]
NCP-1.zip,2,NCP,1042,2614,143,2,"[2613, 2614]"
NCP-21.zip,2,NCP,73,1280,55,3,"[1278, 1279, 1280]"
CP-9.zip,1,CP,1364,3776,133,3,"[3776, 3777, 3778]"
NCP-21.zip,2,NCP,58,1249,131,2,"[1249, 1250]"
CP-20.zip,1,CP,2668,3250,44,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-19.zip,2,NCP,518,2193,57,2,"[2192, 2193]"
NCP-21.zip,2,NCP,73,1279,57,3,"[1278, 1279, 1280]"
CP-26.zip,1,CP,3733,5674,159,3,"[5673, 5674, 5675]"
Normal-19.zip,0,Normal,2247,702,86,1,[702]
NCP-28.zip,2,NCP,867,2394,161,1,[2394]
CP-22.zip,1,CP,633,2995,114,1,[2995]
CP-9.zip,1,CP,1371,3796,60,3,"[3794, 3795, 3796]"
NCP-22.zip,2,NCP,86,1305,117,2,"[1305, 1306]"
NCP-14.zip,2,NCP,40,1213,63,2,"[1212, 1213]"
Normal-26.zip,0,Normal,3892,5415,72,1,[5415]
CP-7.zip,1,CP,1306,3644,237,3,"[3642, 3643, 3644]"
CP-24.zip,1,CP,702,3064,78,1,[3064]
NCP-26.zip,2,NCP,3975,5483,44,1,[5483]
CP-4.zip,1,CP,1164,3382,193,1,[3382]
Normal-11.zip,0,Normal,1960,415,98,1,[415]
CP-5.zip,1,CP,1203,3421,231,1,[3421]
CP-19.zip,1,CP,2434,2900,104,3,"[2898, 2899, 2900]"
NCP-29.zip,2,NCP,890,2429,203,1,[2429]
NCP-16.zip,2,NCP,448,2050,139,2,"[2050, 2051]"
CP-18.zip,1,CP,1780,3555,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-12.zip,1,CP,1457,3991,69,1,[3991]
Normal-3.zip,0,Normal,756,191,106,1,[191]
NCP-29.zip,2,NCP,900,2442,506,1,[2442]
CP-11.zip,1,CP,1432,3932,60,2,"[3932, 3933]"
NCP-17.zip,2,NCP,476,2107,127,2,"[2107, 2108]"
CP-28.zip,1,CP,3794,5738,26,1,[5738]
CP-23.zip,1,CP,669,3031,70,1,[3031]
Normal-9.zip,0,Normal,1911,366,96,1,[366]
Normal-9.zip,0,Normal,1919,374,99,1,[374]
NCP-12.zip,2,NCP,335,1818,129,2,"[1818, 1819]"
CP-18.zip,1,CP,1651,4339,31,1,[4339]
Normal-4.zip,0,Normal,798,233,122,1,[233]
NCP-18.zip,2,NCP,508,2173,61,2,"[2172, 2173]"
NCP-21.zip,2,NCP,67,1266,168,2,"[1266, 1267]"
NCP-6.zip,2,NCP,214,1573,60,2,"[1572, 1573]"
CP-10.zip,1,CP,1405,3872,60,2,"[3872, 3873]"
NCP-6.zip,2,NCP,208,1561,56,2,"[1560, 1561]"
NCP-14.zip,2,NCP,373,1898,52,2,"[1897, 1898]"
NCP-3.zip,2,NCP,1281,2722,65,1,[2722]
CP-24.zip,1,CP,707,3069,72,1,[3069]
NCP-28.zip,2,NCP,831,2344,278,1,[2344]
Normal-17.zip,0,Normal,2179,634,101,1,[634]
NCP-21.zip,2,NCP,60,1253,141,2,"[1253, 1254]"
NCP-8.zip,2,NCP,259,1665,65,2,"[1664, 1665]"
NCP-11.zip,2,NCP,311,1770,55,2,"[1769, 1770]"
Normal-1.zip,0,Normal,1678,828,58,6,"[827, 828, 829, 830, 831, 832]"
NCP-27.zip,2,NCP,1050,2623,46,2,"[2623, 2624]"
NCP-18.zip,2,NCP,490,2137,62,2,"[2136, 2137]"
Normal-27.zip,0,Normal,3900,5431,64,2,"[5431, 5432]"
Normal-15.zip,0,Normal,2110,565,83,1,[565]
NCP-13.zip,2,NCP,368,1887,129,2,"[1887, 1888]"
NCP-27.zip,2,NCP,817,2326,120,1,[2326]
Normal-1.zip,0,Normal,1678,831,62,6,"[827, 828, 829, 830, 831, 832]"
CP-15.zip,1,CP,1567,4255,59,2,"[4254, 4255]"
NCP-5.zip,2,NCP,178,1500,124,2,"[1500, 1501]"
NCP-13.zip,2,NCP,345,1841,147,2,"[1841, 1842]"
Normal-2.zip,0,Normal,1761,1128,60,5,"[1125, 1126, 1127, 1128, 1129]"
CP-8.zip,1,CP,1343,3727,56,2,"[3726, 3727]"
NCP-30.zip,2,NCP,936,2478,21,1,[2478]
NCP-11.zip,2,NCP,306,1760,64,2,"[1759, 1760]"
NCP-17.zip,2,NCP,487,2129,167,2,"[2129, 2130]"
CP-1.zip,1,CP,0,3138,245,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-30.zip,1,CP,3930,5629,62,2,"[5628, 5629]"
NCP-9.zip,2,NCP,2692,2700,48,1,[2700]
NCP-20.zip,2,NCP,556,2269,125,2,"[2269, 2270]"
CP-18.zip,1,CP,1775,3531,58,4,"[3530, 3531, 3532, 3533]"
NCP-23.zip,2,NCP,896,2437,39,1,[2437]
CP-21.zip,1,CP,5,3509,275,1,[3509]
Normal-19.zip,0,Normal,2217,672,71,1,[672]
NCP-1.zip,2,NCP,1010,2573,126,2,"[2572, 2573]"
Normal-1.zip,0,Normal,1710,975,78,2,"[975, 976]"
CP-14.zip,1,CP,1545,4206,65,2,"[4206, 4207]"
NCP-1.zip,2,NCP,100,1337,139,2,"[1337, 1338]"
NCP-26.zip,2,NCP,3998,5495,41,1,[5495]
CP-25.zip,1,CP,711,3073,112,1,[3073]
CP-24.zip,1,CP,699,3061,64,1,[3061]
CP-4.zip,1,CP,1173,3391,201,1,[3391]
CP-27.zip,1,CP,3740,5684,23,1,[5684]
CP-16.zip,1,CP,1590,4278,20,1,[4278]
Normal-2.zip,0,Normal,1762,1130,70,2,"[1130, 1131]"
Normal-1.zip,0,Normal,1679,833,66,6,"[833, 834, 835, 836, 837, 838]"
NCP-29.zip,2,NCP,928,2470,25,1,[2470]
CP-18.zip,1,CP,1775,3533,57,4,"[3530, 3531, 3532, 3533]"
Normal-3.zip,0,Normal,766,201,94,1,[201]
Normal-11.zip,0,Normal,1964,419,100,1,[419]
NCP-9.zip,2,NCP,2690,2657,48,1,[2657]
NCP-21.zip,2,NCP,78,1290,69,2,"[1289, 1290]"
Normal-16.zip,0,Normal,2147,602,95,1,[602]
NCP-19.zip,2,NCP,544,2246,62,2,"[2245, 2246]"
Normal-27.zip,0,Normal,3900,5432,64,2,"[5431, 5432]"
Normal-8.zip,0,Normal,1860,315,92,1,[315]
CP-21.zip,1,CP,601,2963,104,1,[2963]
CP-2.zip,1,CP,11,3164,287,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
CP-15.zip,1,CP,1563,4245,241,3,"[4245, 4246, 4247]"
================================================
FILE: Finetune/CC-CCII/csv/CC_CCII_fold2_train.csv
================================================
zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids
CP-6.zip,1,CP,1229,3447,144,1,[3447]
CP-26.zip,1,CP,3718,5647,51,2,"[5647, 5648]"
CP-3.zip,1,CP,1148,3366,158,1,[3366]
CP-5.zip,1,CP,1200,3418,309,1,[3418]
CP-1.zip,1,CP,1088,3221,54,4,"[3220, 3221, 3222, 3223]"
CP-21.zip,1,CP,585,2947,94,1,[2947]
CP-18.zip,1,CP,1772,3178,72,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-1.zip,1,CP,1078,3123,68,1,[3123]
CP-12.zip,1,CP,1473,4028,51,3,"[4026, 4027, 4028]"
CP-15.zip,1,CP,1559,4237,53,2,"[4237, 4238]"
CP-7.zip,1,CP,1259,3477,162,1,[3477]
CP-14.zip,1,CP,1541,4194,142,3,"[4194, 4195, 4196]"
CP-18.zip,1,CP,1658,4346,29,1,[4346]
CP-9.zip,1,CP,1373,3801,55,2,"[3800, 3801]"
CP-12.zip,1,CP,1456,3990,52,3,"[3988, 3989, 3990]"
CP-9.zip,1,CP,1367,3787,58,3,"[3785, 3786, 3787]"
CP-1.zip,1,CP,1097,3315,119,1,[3315]
CP-11.zip,1,CP,1438,3944,46,2,"[3944, 3945]"
Normal-2.zip,0,Normal,1759,1115,64,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-14.zip,1,CP,1523,4150,65,2,"[4150, 4151]"
CP-19.zip,1,CP,2447,2923,83,2,"[2923, 2924]"
CP-19.zip,1,CP,1788,3203,57,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-5.zip,1,CP,1220,3438,200,1,[3438]
CP-16.zip,1,CP,1593,4281,22,1,[4281]
Normal-18.zip,0,Normal,2200,655,94,1,[655]
CP-28.zip,1,CP,3784,5728,29,1,[5728]
CP-2.zip,1,CP,1109,3327,210,1,[3327]
CP-19.zip,1,CP,2444,2918,124,2,"[2918, 2919]"
Normal-2.zip,0,Normal,1760,1122,137,4,"[1121, 1122, 1123, 1124]"
CP-12.zip,1,CP,1476,4033,106,2,"[4033, 4034]"
CP-14.zip,1,CP,1538,4186,66,3,"[4185, 4186, 4187]"
Normal-13.zip,0,Normal,2046,501,79,1,[501]
CP-15.zip,1,CP,1565,4250,66,2,"[4250, 4251]"
CP-10.zip,1,CP,1407,3876,58,2,"[3876, 3877]"
Normal-27.zip,0,Normal,3905,5437,288,2,"[5437, 5438]"
NCP-13.zip,2,NCP,36,1204,141,2,"[1204, 1205]"
NCP-30.zip,2,NCP,941,2484,169,1,[2484]
Normal-2.zip,0,Normal,1758,1109,291,2,"[1109, 1110]"
CP-8.zip,1,CP,1342,3723,139,3,"[3723, 3724, 3725]"
CP-3.zip,1,CP,1132,3350,180,1,[3350]
CP-18.zip,1,CP,1773,3184,67,4,"[3182, 3183, 3184, 3185]"
NCP-17.zip,2,NCP,464,2083,60,2,"[2082, 2083]"
NCP-16.zip,2,NCP,447,2048,139,2,"[2048, 2049]"
NCP-3.zip,2,NCP,136,1416,126,2,"[1416, 1417]"
NCP-18.zip,2,NCP,501,2158,146,2,"[2158, 2159]"
CP-19.zip,1,CP,2439,2909,409,1,[2909]
NCP-19.zip,2,NCP,538,2233,142,2,"[2233, 2234]"
Normal-27.zip,0,Normal,3907,5440,63,2,"[5440, 5441]"
CP-18.zip,1,CP,1773,3182,61,4,"[3182, 3183, 3184, 3185]"
CP-8.zip,1,CP,1320,3677,62,2,"[3676, 3677]"
CP-9.zip,1,CP,1366,3782,138,3,"[3782, 3783, 3784]"
CP-7.zip,1,CP,1309,3651,49,2,"[3651, 3652]"
NCP-18.zip,2,NCP,492,2140,139,2,"[2140, 2141]"
NCP-21.zip,2,NCP,69,1271,48,2,"[1270, 1271]"
CP-13.zip,1,CP,1515,4131,137,3,"[4131, 4132, 4133]"
Normal-11.zip,0,Normal,1980,435,83,1,[435]
Normal-14.zip,0,Normal,2073,528,87,1,[528]
CP-3.zip,1,CP,1149,3367,157,1,[3367]
NCP-14.zip,2,NCP,376,1905,60,2,"[1904, 1905]"
NCP-8.zip,2,NCP,253,1653,58,2,"[1652, 1653]"
NCP-27.zip,2,NCP,1061,2638,75,1,[2638]
Normal-9.zip,0,Normal,1921,376,80,1,[376]
NCP-16.zip,2,NCP,453,2061,51,2,"[2060, 2061]"
NCP-10.zip,2,NCP,275,1697,64,2,"[1696, 1697]"
CP-24.zip,1,CP,708,3070,80,1,[3070]
NCP-20.zip,2,NCP,560,2277,124,2,"[2277, 2279]"
NCP-6.zip,2,NCP,207,1558,109,2,"[1558, 1559]"
NCP-2.zip,2,NCP,114,1370,53,2,"[1369, 1370]"
CP-10.zip,1,CP,1407,3877,58,2,"[3876, 3877]"
Normal-1.zip,0,Normal,1682,858,70,6,"[847, 848, 852, 853, 857, 858]"
CP-14.zip,1,CP,1548,4214,51,2,"[4213, 4214]"
Normal-2.zip,0,Normal,1760,1124,74,4,"[1121, 1122, 1123, 1124]"
NCP-14.zip,2,NCP,374,1900,58,2,"[1899, 1900]"
NCP-7.zip,2,NCP,2486,2645,50,1,[2645]
NCP-19.zip,2,NCP,542,2242,55,2,"[2241, 2242]"
Normal-25.zip,0,Normal,3836,5348,202,1,[5348]
Normal-11.zip,0,Normal,1961,416,91,1,[416]
NCP-27.zip,2,NCP,819,2329,33,1,[2329]
NCP-5.zip,2,NCP,184,1512,112,2,"[1512, 1513]"
NCP-15.zip,2,NCP,416,1984,139,2,"[1984, 1986]"
CP-14.zip,1,CP,1538,4187,65,3,"[4185, 4186, 4187]"
CP-8.zip,1,CP,1351,3746,56,1,[3746]
NCP-10.zip,2,NCP,281,1709,51,2,"[1708, 1709]"
CP-10.zip,1,CP,1415,3895,65,3,"[3894, 3895, 3896]"
Normal-1.zip,0,Normal,1682,848,67,6,"[847, 848, 852, 853, 857, 858]"
NCP-17.zip,2,NCP,485,2126,64,2,"[2125, 2126]"
NCP-18.zip,2,NCP,501,2159,61,2,"[2158, 2159]"
Normal-8.zip,0,Normal,1863,318,82,1,[318]
CP-18.zip,1,CP,1772,3176,81,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-26.zip,1,CP,3652,5551,53,2,"[5551, 5552]"
Normal-5.zip,0,Normal,808,243,134,1,[243]
CP-28.zip,1,CP,3771,5715,23,1,[5715]
CP-26.zip,1,CP,3637,5596,35,1,[5596]
CP-12.zip,1,CP,1455,3987,58,3,"[3985, 3986, 3987]"
CP-8.zip,1,CP,1336,3712,60,2,"[3712, 3713]"
CP-30.zip,1,CP,4015,5564,226,1,[5564]
Normal-8.zip,0,Normal,1883,338,91,1,[338]
Normal-3.zip,0,Normal,1764,1145,62,4,"[1143, 1144, 1145, 1146]"
NCP-15.zip,2,NCP,42,1218,61,2,"[1216, 1218]"
NCP-7.zip,2,NCP,245,1636,149,2,"[1636, 1637]"
Normal-14.zip,0,Normal,2066,521,74,1,[521]
Normal-20.zip,0,Normal,2275,730,85,1,[730]
NCP-8.zip,2,NCP,268,1682,126,2,"[1682, 1683]"
CP-7.zip,1,CP,1307,3647,49,4,"[3645, 3646, 3647, 3648]"
Normal-15.zip,0,Normal,2106,561,93,1,[561]
CP-20.zip,1,CP,2772,3303,261,1,[3303]
NCP-25.zip,2,NCP,3970,5479,48,1,[5479]
CP-28.zip,1,CP,3772,5716,23,1,[5716]
NCP-5.zip,2,NCP,175,1494,131,2,"[1494, 1495]"
NCP-18.zip,2,NCP,507,2171,58,2,"[2170, 2171]"
NCP-19.zip,2,NCP,537,2231,143,2,"[2231, 2232]"
Normal-1.zip,0,Normal,1728,1014,66,4,"[1013, 1014, 1015, 1016]"
Normal-23.zip,0,Normal,2608,118,25,1,[118]
NCP-23.zip,2,NCP,90,1317,43,2,"[1316, 1317]"
NCP-2.zip,2,NCP,123,1388,62,2,"[1387, 1388]"
NCP-18.zip,2,NCP,507,2170,138,2,"[2170, 2171]"
NCP-14.zip,2,NCP,395,1940,171,2,"[1940, 1941]"
NCP-23.zip,2,NCP,946,2489,26,1,[2489]
CP-7.zip,1,CP,1308,3649,43,2,"[3649, 3650]"
NCP-17.zip,2,NCP,462,2078,161,2,"[2078, 2079]"
Normal-16.zip,0,Normal,2145,600,86,1,[600]
NCP-20.zip,2,NCP,560,2279,51,2,"[2277, 2279]"
CP-30.zip,1,CP,3931,5630,82,4,"[5630, 5631, 5632, 5633]"
CP-13.zip,1,CP,1501,4101,55,2,"[4100, 4101]"
CP-1.zip,1,CP,1,3144,248,5,"[3143, 3144, 3145, 3146, 3147]"
CP-25.zip,1,CP,713,3075,120,1,[3075]
CP-15.zip,1,CP,1562,4244,55,2,"[4243, 4244]"
CP-26.zip,1,CP,3643,5602,298,2,"[5602, 5603]"
CP-27.zip,1,CP,3748,5692,17,1,[5692]
CP-14.zip,1,CP,1524,4152,229,3,"[4152, 4153, 4154]"
Normal-6.zip,0,Normal,1800,255,92,1,[255]
Normal-1.zip,0,Normal,1711,978,63,2,"[977, 978]"
Normal-17.zip,0,Normal,2157,612,78,1,[612]
CP-8.zip,1,CP,1334,3707,133,2,"[3707, 3708]"
NCP-19.zip,2,NCP,545,2247,135,2,"[2247, 2248]"
CP-28.zip,1,CP,3790,5734,23,1,[5734]
NCP-30.zip,2,NCP,993,2546,203,1,[2546]
NCP-9.zip,2,NCP,2689,2656,47,1,[2656]
Normal-27.zip,0,Normal,3907,5441,66,2,"[5440, 5441]"
CP-26.zip,1,CP,3652,5552,52,2,"[5551, 5552]"
NCP-11.zip,2,NCP,287,1719,142,2,"[1719, 1720]"
NCP-2.zip,2,NCP,114,1369,125,2,"[1369, 1370]"
NCP-21.zip,2,NCP,581,2320,58,2,"[2319, 2320]"
Normal-26.zip,0,Normal,3887,5404,78,3,"[5400, 5401, 5404]"
NCP-12.zip,2,NCP,325,1799,50,2,"[1798, 1799]"
NCP-27.zip,2,NCP,1060,2637,81,1,[2637]
CP-13.zip,1,CP,1516,4135,62,2,"[4134, 4135]"
CP-15.zip,1,CP,1580,4268,21,1,[4268]
NCP-15.zip,2,NCP,428,2009,125,2,"[2009, 2010]"
NCP-19.zip,2,NCP,52,1237,135,2,"[1237, 1238]"
NCP-9.zip,2,NCP,2691,2658,44,1,[2658]
NCP-12.zip,2,NCP,34,1200,156,2,"[1200, 1201]"
NCP-19.zip,2,NCP,539,2235,131,2,"[2235, 2236]"
Normal-1.zip,0,Normal,1728,1015,72,4,"[1013, 1014, 1015, 1016]"
NCP-6.zip,2,NCP,222,1588,122,2,"[1588, 1589]"
NCP-10.zip,2,NCP,273,1693,54,2,"[1692, 1693]"
CP-29.zip,1,CP,3822,5766,20,1,[5766]
CP-10.zip,1,CP,1401,3864,51,3,"[3862, 3863, 3864]"
Normal-13.zip,0,Normal,2030,485,66,1,[485]
NCP-4.zip,2,NCP,164,1473,63,2,"[1472, 1473]"
CP-21.zip,1,CP,3,3504,35,1,[3504]
CP-9.zip,1,CP,1368,3788,69,2,"[3788, 3789]"
Normal-1.zip,0,Normal,1704,963,69,4,"[961, 962, 963, 964]"
CP-12.zip,1,CP,1466,4012,52,2,"[4011, 4012]"
Normal-11.zip,0,Normal,1971,426,100,1,[426]
NCP-16.zip,2,NCP,450,2055,34,2,"[2054, 2055]"
NCP-30.zip,2,NCP,962,2505,38,1,[2505]
NCP-8.zip,2,NCP,2675,2648,44,1,[2648]
NCP-25.zip,2,NCP,3955,5468,46,1,[5468]
NCP-18.zip,2,NCP,488,2131,139,2,"[2131, 2133]"
CP-12.zip,1,CP,1484,4053,181,3,"[4053, 4054, 4055]"
CP-9.zip,1,CP,1368,3789,69,2,"[3788, 3789]"
NCP-10.zip,2,NCP,28,1188,145,2,"[1188, 1189]"
CP-30.zip,1,CP,3931,5631,82,4,"[5630, 5631, 5632, 5633]"
NCP-10.zip,2,NCP,277,1701,64,2,"[1700, 1701]"
NCP-4.zip,2,NCP,148,1441,63,2,"[1440, 1441]"
CP-12.zip,1,CP,1481,4044,139,3,"[4044, 4045, 4046]"
Normal-21.zip,0,Normal,2288,743,96,1,[743]
CP-30.zip,1,CP,4017,5566,41,1,[5566]
CP-13.zip,1,CP,1499,4098,53,2,"[4097, 4098]"
CP-13.zip,1,CP,1516,4134,62,2,"[4134, 4135]"
Normal-13.zip,0,Normal,2049,504,88,1,[504]
CP-18.zip,1,CP,1772,3179,72,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
NCP-20.zip,2,NCP,57,1248,56,2,"[1247, 1248]"
Normal-1.zip,0,Normal,1704,961,71,4,"[961, 962, 963, 964]"
CP-9.zip,1,CP,1366,3783,57,3,"[3782, 3783, 3784]"
CP-32.zip,1,CP,2464,3228,66,1,[3228]
CP-15.zip,1,CP,1555,4228,62,2,"[4228, 4229]"
Normal-3.zip,0,Normal,758,193,122,1,[193]
NCP-12.zip,2,NCP,329,1806,157,2,"[1806, 1807]"
CP-7.zip,1,CP,1307,3646,259,4,"[3645, 3646, 3647, 3648]"
CP-26.zip,1,CP,3722,5657,205,2,"[5656, 5657]"
NCP-14.zip,2,NCP,382,1916,139,2,"[1916, 1917]"
CP-27.zip,1,CP,3752,5696,20,1,[5696]
Normal-16.zip,0,Normal,2129,584,75,1,[584]
NCP-13.zip,2,NCP,367,1885,158,2,"[1885, 1886]"
NCP-6.zip,2,NCP,204,1553,58,2,"[1552, 1553]"
CP-30.zip,1,CP,3918,5542,71,1,[5542]
Normal-11.zip,0,Normal,1979,434,87,1,[434]
Normal-2.zip,0,Normal,1741,1053,61,2,"[1053, 1054]"
Normal-10.zip,0,Normal,1945,400,87,1,[400]
Normal-26.zip,0,Normal,3882,5394,27,1,[5394]
CP-20.zip,1,CP,2456,2940,126,1,[2940]
NCP-5.zip,2,NCP,184,1513,48,2,"[1512, 1513]"
NCP-9.zip,2,NCP,2693,2659,49,1,[2659]
CP-8.zip,1,CP,1348,3739,197,3,"[3739, 3740, 3741]"
Normal-18.zip,0,Normal,2214,669,102,1,[669]
CP-10.zip,1,CP,1415,3896,65,3,"[3894, 3895, 3896]"
NCP-3.zip,2,NCP,1290,2731,66,1,[2731]
Normal-2.zip,0,Normal,1759,1111,62,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-15.zip,2,NCP,401,1951,139,2,"[1951, 1952]"
CP-7.zip,1,CP,1309,3652,49,2,"[3651, 3652]"
Normal-4.zip,0,Normal,787,222,320,1,[222]
NCP-20.zip,2,NCP,550,2258,60,2,"[2257, 2258]"
NCP-5.zip,2,NCP,195,1534,143,2,"[1534, 1535]"
NCP-13.zip,2,NCP,367,1886,66,2,"[1885, 1886]"
NCP-19.zip,2,NCP,530,2218,132,1,[2218]
Normal-6.zip,0,Normal,1811,266,95,1,[266]
NCP-30.zip,2,NCP,963,2506,21,1,[2506]
Normal-2.zip,0,Normal,1759,1112,62,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-13.zip,2,NCP,369,1890,58,2,"[1889, 1890]"
NCP-16.zip,2,NCP,457,2068,134,2,"[2068, 2069]"
NCP-26.zip,2,NCP,3981,5488,45,1,[5488]
NCP-22.zip,2,NCP,816,2325,50,1,[2325]
Normal-1.zip,0,Normal,1730,1019,63,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-15.zip,2,NCP,419,1991,130,2,"[1991, 1992]"
CP-30.zip,1,CP,4016,5565,37,1,[5565]
CP-24.zip,1,CP,694,3056,135,1,[3056]
NCP-17.zip,2,NCP,470,2095,154,2,"[2095, 2096]"
Normal-4.zip,0,Normal,781,216,118,1,[216]
CP-13.zip,1,CP,1497,4093,68,3,"[4092, 4093, 4094]"
NCP-26.zip,2,NCP,3991,5515,43,1,[5515]
CP-8.zip,1,CP,1331,3701,62,2,"[3701, 3702]"
Normal-9.zip,0,Normal,1910,365,91,1,[365]
NCP-27.zip,2,NCP,820,2330,34,1,[2330]
CP-7.zip,1,CP,13,3171,65,4,"[3170, 3171, 3172, 3173]"
CP-20.zip,1,CP,2764,3295,39,1,[3295]
Normal-1.zip,0,Normal,1714,984,71,3,"[982, 983, 984]"
CP-13.zip,1,CP,1501,4100,55,2,"[4100, 4101]"
Normal-15.zip,0,Normal,2117,572,87,1,[572]
CP-30.zip,1,CP,3929,5627,70,2,"[5626, 5627]"
NCP-4.zip,2,NCP,158,1461,52,2,"[1460, 1461]"
CP-10.zip,1,CP,1389,3834,52,3,"[3833, 3834, 3835]"
CP-13.zip,1,CP,1497,4094,68,3,"[4092, 4093, 4094]"
CP-10.zip,1,CP,1415,3894,155,3,"[3894, 3895, 3896]"
CP-30.zip,1,CP,4014,5563,35,1,[5563]
NCP-17.zip,2,NCP,462,2079,67,2,"[2078, 2079]"
CP-29.zip,1,CP,3803,5747,23,1,[5747]
CP-1.zip,1,CP,1,3143,300,5,"[3143, 3144, 3145, 3146, 3147]"
NCP-11.zip,2,NCP,305,1756,157,2,"[1756, 1758]"
Normal-1.zip,0,Normal,1668,781,63,4,"[778, 779, 780, 781]"
CP-4.zip,1,CP,1174,3392,175,1,[3392]
Normal-14.zip,0,Normal,2060,515,77,1,[515]
Normal-22.zip,0,Normal,2602,112,32,1,[112]
CP-14.zip,1,CP,1541,4196,58,3,"[4194, 4195, 4196]"
Normal-12.zip,0,Normal,2019,474,87,1,[474]
CP-25.zip,1,CP,733,3095,84,1,[3095]
CP-13.zip,1,CP,1499,4097,53,2,"[4097, 4098]"
CP-19.zip,1,CP,1788,3201,55,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
Normal-1.zip,0,Normal,1704,964,69,4,"[961, 962, 963, 964]"
NCP-3.zip,2,NCP,1289,2730,62,1,[2730]
NCP-20.zip,2,NCP,567,2292,148,2,"[2292, 2293]"
Normal-13.zip,0,Normal,2027,482,89,1,[482]
Normal-2.zip,0,Normal,1759,1114,59,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
Normal-27.zip,0,Normal,3897,5424,75,4,"[5423, 5424, 5426, 5427]"
Normal-22.zip,0,Normal,2317,772,77,1,[772]
Normal-2.zip,0,Normal,1758,1110,59,2,"[1109, 1110]"
NCP-2.zip,2,NCP,121,1384,43,2,"[1383, 1384]"
NCP-13.zip,2,NCP,356,1864,53,2,"[1863, 1864]"
Normal-2.zip,0,Normal,1760,1121,85,4,"[1121, 1122, 1123, 1124]"
CP-29.zip,1,CP,3825,5769,25,1,[5769]
NCP-17.zip,2,NCP,46,1226,52,2,"[1225, 1226]"
NCP-19.zip,2,NCP,53,1240,60,2,"[1239, 1240]"
NCP-12.zip,2,NCP,314,1776,58,2,"[1775, 1776]"
Normal-21.zip,0,Normal,2290,745,88,1,[745]
Normal-2.zip,0,Normal,1760,1123,74,4,"[1121, 1122, 1123, 1124]"
Normal-24.zip,0,Normal,2666,176,35,1,[176]
CP-8.zip,1,CP,1346,3735,53,3,"[3733, 3734, 3735]"
Normal-17.zip,0,Normal,2164,619,84,1,[619]
NCP-8.zip,2,NCP,2672,2647,47,1,[2647]
NCP-2.zip,2,NCP,1277,2718,57,1,[2718]
CP-9.zip,1,CP,1370,3793,62,2,"[3792, 3793]"
Normal-10.zip,0,Normal,1928,383,87,1,[383]
CP-21.zip,1,CP,598,2960,646,1,[2960]
CP-20.zip,1,CP,2755,3286,34,1,[3286]
Normal-16.zip,0,Normal,2141,596,100,1,[596]
CP-14.zip,1,CP,1544,4205,50,3,"[4203, 4204, 4205]"
NCP-9.zip,2,NCP,270,1687,62,2,"[1686, 1687]"
CP-5.zip,1,CP,1222,3440,157,1,[3440]
CP-19.zip,1,CP,1791,3210,100,4,"[3210, 3211, 3212, 3213]"
NCP-16.zip,2,NCP,450,2054,78,2,"[2054, 2055]"
NCP-20.zip,2,NCP,557,2272,56,2,"[2271, 2272]"
NCP-3.zip,2,NCP,1284,2725,50,1,[2725]
CP-5.zip,1,CP,1205,3423,146,1,[3423]
NCP-1.zip,2,NCP,1017,2583,452,1,[2583]
Normal-2.zip,0,Normal,1736,1033,25,5,"[1032, 1033, 1034, 1035, 1036]"
NCP-1.zip,2,NCP,1021,2589,183,4,"[2587, 2588, 2589, 2590]"
NCP-7.zip,2,NCP,232,1608,146,2,"[1608, 1609]"
NCP-23.zip,2,NCP,950,2493,34,1,[2493]
CP-6.zip,1,CP,1246,3464,175,1,[3464]
Normal-1.zip,0,Normal,1671,794,67,3,"[793, 794, 795]"
CP-1.zip,1,CP,1095,3313,161,1,[3313]
CP-25.zip,1,CP,714,3076,98,1,[3076]
NCP-17.zip,2,NCP,479,2114,58,2,"[2113, 2114]"
Normal-24.zip,0,Normal,2652,162,35,1,[162]
Normal-2.zip,0,Normal,1752,1085,66,1,[1085]
Normal-24.zip,0,Normal,2638,148,38,1,[148]
NCP-8.zip,2,NCP,260,1666,163,2,"[1666, 1667]"
NCP-18.zip,2,NCP,509,2175,58,2,"[2174, 2175]"
Normal-14.zip,0,Normal,2079,534,92,1,[534]
Normal-3.zip,0,Normal,751,186,119,1,[186]
NCP-8.zip,2,NCP,263,1673,74,2,"[1672, 1673]"
CP-22.zip,1,CP,626,2988,174,1,[2988]
Normal-23.zip,0,Normal,2619,129,43,1,[129]
CP-1.zip,1,CP,1069,3109,77,4,"[3108, 3109, 3110, 3111]"
NCP-13.zip,2,NCP,360,1872,51,2,"[1871, 1872]"
NCP-23.zip,2,NCP,915,2457,31,1,[2457]
NCP-3.zip,2,NCP,131,1407,117,2,"[1407, 1408]"
NCP-21.zip,2,NCP,79,1292,55,2,"[1291, 1292]"
Normal-4.zip,0,Normal,779,214,290,1,[214]
CP-27.zip,1,CP,3734,5676,32,3,"[5676, 5677, 5678]"
Normal-15.zip,0,Normal,2104,559,101,1,[559]
CP-5.zip,1,CP,1218,3436,213,1,[3436]
NCP-3.zip,2,NCP,1291,2732,55,1,[2732]
NCP-19.zip,2,NCP,537,2232,60,2,"[2231, 2232]"
NCP-21.zip,2,NCP,71,1274,126,2,"[1274, 1275]"
NCP-5.zip,2,NCP,195,1535,60,2,"[1534, 1535]"
CP-9.zip,1,CP,1359,3766,46,3,"[3764, 3765, 3766]"
NCP-2.zip,2,NCP,119,1380,62,2,"[1379, 1380]"
Normal-19.zip,0,Normal,2241,696,86,1,[696]
Normal-15.zip,0,Normal,2112,567,84,1,[567]
NCP-20.zip,2,NCP,569,2296,142,2,"[2296, 2297]"
NCP-9.zip,2,NCP,2700,2666,43,1,[2666]
NCP-9.zip,2,NCP,2697,2663,46,1,[2663]
CP-29.zip,1,CP,3809,5753,19,1,[5753]
NCP-10.zip,2,NCP,2718,2674,42,1,[2674]
Normal-1.zip,0,Normal,1668,778,60,4,"[778, 779, 780, 781]"
NCP-18.zip,2,NCP,509,2174,138,2,"[2174, 2175]"
NCP-16.zip,2,NCP,456,2066,135,2,"[2066, 2067]"
NCP-5.zip,2,NCP,187,1519,57,2,"[1518, 1519]"
NCP-20.zip,2,NCP,57,1247,132,2,"[1247, 1248]"
Normal-1.zip,0,Normal,1715,986,71,2,"[985, 986]"
Normal-2.zip,0,Normal,1749,1069,61,4,"[1069, 1070, 1071, 1072]"
NCP-24.zip,2,NCP,984,2530,241,2,"[2529, 2530]"
Normal-1.zip,0,Normal,1682,847,67,6,"[847, 848, 852, 853, 857, 858]"
CP-1.zip,1,CP,1069,3110,77,4,"[3108, 3109, 3110, 3111]"
Normal-12.zip,0,Normal,2017,472,99,1,[472]
CP-10.zip,1,CP,1400,3861,54,2,"[3860, 3861]"
NCP-22.zip,2,NCP,881,2416,225,1,[2416]
CP-11.zip,1,CP,1420,3906,59,2,"[3905, 3906]"
NCP-6.zip,2,NCP,20,1172,127,2,"[1172, 1173]"
NCP-28.zip,2,NCP,846,2364,269,1,[2364]
Normal-14.zip,0,Normal,2075,530,93,1,[530]
CP-6.zip,1,CP,1238,3456,191,1,[3456]
CP-7.zip,1,CP,1263,3481,120,1,[3481]
CP-1.zip,1,CP,1088,3220,54,4,"[3220, 3221, 3222, 3223]"
CP-8.zip,1,CP,1320,3676,62,2,"[3676, 3677]"
NCP-15.zip,2,NCP,426,2005,139,2,"[2005, 2006]"
NCP-28.zip,2,NCP,869,2397,58,1,[2397]
NCP-11.zip,2,NCP,288,1721,114,2,"[1721, 1722]"
NCP-21.zip,2,NCP,581,2319,139,2,"[2319, 2320]"
Normal-26.zip,0,Normal,3878,5390,24,1,[5390]
Normal-13.zip,0,Normal,2041,496,95,1,[496]
Normal-25.zip,0,Normal,3845,5357,182,1,[5357]
Normal-22.zip,0,Normal,2599,109,39,1,[109]
Normal-4.zip,0,Normal,789,224,120,1,[224]
Normal-1.zip,0,Normal,1714,982,40,3,"[982, 983, 984]"
NCP-16.zip,2,NCP,434,2022,51,2,"[2021, 2022]"
NCP-28.zip,2,NCP,830,2343,120,1,[2343]
Normal-1.zip,0,Normal,1704,962,71,4,"[961, 962, 963, 964]"
NCP-5.zip,2,NCP,196,1537,55,2,"[1536, 1537]"
CP-8.zip,1,CP,1336,3713,60,2,"[3712, 3713]"
NCP-29.zip,2,NCP,895,2436,140,2,"[2435, 2436]"
NCP-29.zip,2,NCP,930,2472,23,1,[2472]
CP-12.zip,1,CP,1482,4047,181,3,"[4047, 4048, 4049]"
CP-10.zip,1,CP,1401,3862,201,3,"[3862, 3863, 3864]"
NCP-5.zip,2,NCP,182,1509,55,2,"[1508, 1509]"
CP-12.zip,1,CP,1483,4050,148,3,"[4050, 4051, 4052]"
Normal-2.zip,0,Normal,1741,1054,61,2,"[1053, 1054]"
CP-8.zip,1,CP,1324,3684,58,2,"[3684, 3685]"
NCP-9.zip,2,NCP,2681,2696,58,1,[2696]
CP-9.zip,1,CP,1367,3786,58,3,"[3785, 3786, 3787]"
CP-19.zip,1,CP,1790,3209,69,2,"[3208, 3209]"
CP-11.zip,1,CP,1430,3928,77,2,"[3928, 3929]"
Normal-18.zip,0,Normal,2207,662,99,1,[662]
Normal-11.zip,0,Normal,1972,427,97,1,[427]
CP-5.zip,1,CP,1221,3439,295,1,[3439]
NCP-15.zip,2,NCP,42,1216,146,2,"[1216, 1218]"
CP-22.zip,1,CP,640,3002,136,1,[3002]
NCP-7.zip,2,NCP,245,1637,62,2,"[1636, 1637]"
NCP-6.zip,2,NCP,215,1574,155,2,"[1574, 1575]"
NCP-29.zip,2,NCP,903,2445,87,1,[2445]
NCP-7.zip,2,NCP,232,1609,61,2,"[1608, 1609]"
NCP-2.zip,2,NCP,119,1379,147,2,"[1379, 1380]"
Normal-2.zip,0,Normal,1739,1042,278,3,"[1042, 1043, 1044]"
CP-28.zip,1,CP,3791,5735,26,1,[5735]
NCP-27.zip,2,NCP,828,2341,45,1,[2341]
NCP-12.zip,2,NCP,314,1775,139,2,"[1775, 1776]"
NCP-6.zip,2,NCP,20,1173,54,2,"[1172, 1173]"
CP-13.zip,1,CP,1490,4073,69,3,"[4071, 4072, 4073]"
NCP-20.zip,2,NCP,569,2297,60,2,"[2296, 2297]"
Normal-2.zip,0,Normal,1759,1113,59,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-30.zip,2,NCP,987,2536,71,2,"[2536, 2537]"
CP-15.zip,1,CP,1579,4267,20,1,[4267]
CP-19.zip,1,CP,1790,3208,69,2,"[3208, 3209]"
NCP-20.zip,2,NCP,568,2295,61,2,"[2294, 2295]"
Normal-13.zip,0,Normal,2036,491,102,1,[491]
NCP-26.zip,2,NCP,3973,5482,48,1,[5482]
CP-27.zip,1,CP,3743,5687,22,1,[5687]
Normal-11.zip,0,Normal,1981,436,91,1,[436]
NCP-2.zip,2,NCP,125,1391,127,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-9.zip,2,NCP,270,1686,147,2,"[1686, 1687]"
Normal-23.zip,0,Normal,2636,146,42,1,[146]
NCP-3.zip,2,NCP,1286,2727,64,1,[2727]
CP-10.zip,1,CP,1386,3828,66,2,"[3827, 3828]"
Normal-19.zip,0,Normal,2230,685,91,1,[685]
Normal-5.zip,0,Normal,805,240,327,1,[240]
Normal-26.zip,0,Normal,3891,5412,62,2,"[5411, 5412]"
NCP-27.zip,2,NCP,2671,2691,51,1,[2691]
NCP-27.zip,2,NCP,1059,2636,52,1,[2636]
CP-8.zip,1,CP,1344,3730,58,3,"[3728, 3729, 3730]"
Normal-24.zip,0,Normal,2662,172,41,1,[172]
Normal-3.zip,0,Normal,744,179,278,1,[179]
CP-5.zip,1,CP,1202,3420,207,1,[3420]
NCP-27.zip,2,NCP,1006,2567,19,2,"[2566, 2567]"
Normal-19.zip,0,Normal,2248,703,87,1,[703]
Normal-2.zip,0,Normal,1736,1034,25,5,"[1032, 1033, 1034, 1035, 1036]"
Normal-8.zip,0,Normal,1867,322,87,1,[322]
Normal-23.zip,0,Normal,2609,119,40,1,[119]
CP-11.zip,1,CP,1453,3980,56,3,"[3979, 3980, 3981]"
Normal-26.zip,0,Normal,3875,5387,24,1,[5387]
CP-26.zip,1,CP,3647,5607,32,1,[5607]
Normal-12.zip,0,Normal,2006,461,77,1,[461]
Normal-6.zip,0,Normal,1827,282,99,1,[282]
NCP-19.zip,2,NCP,533,2224,156,1,[2224]
NCP-11.zip,2,NCP,287,1720,60,2,"[1719, 1720]"
NCP-7.zip,2,NCP,2487,2687,38,1,[2687]
CP-3.zip,1,CP,1160,3378,318,1,[3378]
Normal-7.zip,0,Normal,1858,313,95,1,[313]
CP-13.zip,1,CP,1514,4129,61,2,"[4129, 4130]"
NCP-20.zip,2,NCP,561,2280,139,2,"[2280, 2281]"
CP-14.zip,1,CP,1527,4161,58,3,"[4160, 4161, 4162]"
CP-25.zip,1,CP,721,3083,86,1,[3083]
CP-13.zip,1,CP,1496,4091,55,2,"[4090, 4091]"
Normal-1.zip,0,Normal,1728,1013,66,4,"[1013, 1014, 1015, 1016]"
NCP-12.zip,2,NCP,317,1781,117,2,"[1781, 1782]"
CP-19.zip,1,CP,2437,2906,132,3,"[2905, 2906, 2907]"
NCP-5.zip,2,NCP,196,1536,131,2,"[1536, 1537]"
CP-11.zip,1,CP,1437,3942,57,2,"[3942, 3943]"
NCP-5.zip,2,NCP,182,1508,130,2,"[1508, 1509]"
CP-9.zip,1,CP,1363,3774,64,2,"[3774, 3775]"
CP-10.zip,1,CP,1401,3863,51,3,"[3862, 3863, 3864]"
NCP-10.zip,2,NCP,275,1696,153,2,"[1696, 1697]"
CP-30.zip,1,CP,3931,5633,68,4,"[5630, 5631, 5632, 5633]"
NCP-7.zip,2,NCP,234,1614,58,2,"[1613, 1614]"
NCP-3.zip,2,NCP,1296,2737,66,1,[2737]
NCP-11.zip,2,NCP,283,1712,62,1,[1712]
CP-9.zip,1,CP,1363,3775,64,2,"[3774, 3775]"
NCP-17.zip,2,NCP,464,2082,144,2,"[2082, 2083]"
CP-12.zip,1,CP,1473,4027,51,3,"[4026, 4027, 4028]"
CP-28.zip,1,CP,3781,5725,20,1,[5725]
NCP-14.zip,2,NCP,391,1933,55,2,"[1932, 1933]"
Normal-13.zip,0,Normal,2032,487,85,1,[487]
NCP-28.zip,2,NCP,872,2403,183,2,"[2403, 2404]"
NCP-17.zip,2,NCP,479,2113,139,2,"[2113, 2114]"
NCP-11.zip,2,NCP,305,1758,65,2,"[1756, 1758]"
NCP-1.zip,2,NCP,1021,2587,201,4,"[2587, 2588, 2589, 2590]"
NCP-30.zip,2,NCP,957,2500,50,1,[2500]
Normal-17.zip,0,Normal,2172,627,91,1,[627]
CP-7.zip,1,CP,1316,3667,147,3,"[3667, 3668, 3669]"
NCP-24.zip,2,NCP,971,2514,74,1,[2514]
NCP-18.zip,2,NCP,494,2145,65,2,"[2144, 2145]"
NCP-30.zip,2,NCP,987,2537,368,2,"[2536, 2537]"
Normal-13.zip,0,Normal,2048,503,94,1,[503]
CP-8.zip,1,CP,1347,3736,265,3,"[3736, 3737, 3738]"
NCP-15.zip,2,NCP,41,1215,63,2,"[1214, 1215]"
CP-12.zip,1,CP,1456,3989,52,3,"[3988, 3989, 3990]"
NCP-21.zip,2,NCP,80,1294,54,2,"[1293, 1294]"
CP-29.zip,1,CP,3808,5752,23,1,[5752]
CP-26.zip,1,CP,3732,5671,53,2,"[5671, 5672]"
NCP-8.zip,2,NCP,251,1648,131,2,"[1648, 1649]"
Normal-2.zip,0,Normal,1755,1099,71,4,"[1097, 1098, 1099, 1100]"
Normal-2.zip,0,Normal,1759,1120,66,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-28.zip,2,NCP,874,2407,341,1,[2407]
Normal-1.zip,0,Normal,1730,1023,59,5,"[1019, 1020, 1021, 1022, 1023]"
Normal-8.zip,0,Normal,1891,346,96,1,[346]
CP-4.zip,1,CP,1170,3388,180,1,[3388]
CP-10.zip,1,CP,1395,3849,63,2,"[3849, 3850]"
NCP-2.zip,2,NCP,123,1387,148,2,"[1387, 1388]"
NCP-16.zip,2,NCP,446,2047,61,2,"[2046, 2047]"
NCP-2.zip,2,NCP,111,1364,56,2,"[1363, 1364]"
CP-15.zip,1,CP,1568,4256,22,1,[4256]
NCP-21.zip,2,NCP,79,1291,131,2,"[1291, 1292]"
CP-10.zip,1,CP,1395,3850,63,2,"[3849, 3850]"
CP-14.zip,1,CP,1542,4199,54,3,"[4197, 4198, 4199]"
CP-15.zip,1,CP,1555,4229,62,2,"[4228, 4229]"
CP-21.zip,1,CP,606,2968,255,1,[2968]
CP-12.zip,1,CP,1480,4042,54,2,"[4042, 4043]"
NCP-27.zip,2,NCP,1063,2640,82,1,[2640]
Normal-7.zip,0,Normal,1831,286,99,1,[286]
CP-14.zip,1,CP,1552,4221,62,2,"[4221, 4222]"
NCP-19.zip,2,NCP,541,2240,51,2,"[2239, 2240]"
NCP-23.zip,2,NCP,91,1318,100,2,"[1318, 1319]"
Normal-2.zip,0,Normal,1739,1043,56,3,"[1042, 1043, 1044]"
CP-19.zip,1,CP,2437,2907,183,3,"[2905, 2906, 2907]"
CP-10.zip,1,CP,1396,3851,139,3,"[3851, 3852, 3853]"
CP-13.zip,1,CP,1490,4072,69,3,"[4071, 4072, 4073]"
CP-6.zip,1,CP,1242,3460,229,1,[3460]
NCP-17.zip,2,NCP,471,2098,59,2,"[2097, 2098]"
NCP-16.zip,2,NCP,434,2021,119,2,"[2021, 2022]"
NCP-16.zip,2,NCP,446,2046,146,2,"[2046, 2047]"
NCP-21.zip,2,NCP,69,1270,113,2,"[1270, 1271]"
Normal-9.zip,0,Normal,1896,351,98,1,[351]
NCP-9.zip,2,NCP,2709,2702,44,1,[2702]
NCP-29.zip,2,NCP,907,2449,287,1,[2449]
NCP-2.zip,2,NCP,106,1349,150,2,"[1349, 1350]"
NCP-17.zip,2,NCP,477,2109,139,2,"[2109, 2110]"
CP-27.zip,1,CP,3734,5677,163,3,"[5676, 5677, 5678]"
Normal-8.zip,0,Normal,1877,332,88,1,[332]
Normal-7.zip,0,Normal,1853,308,94,1,[308]
NCP-2.zip,2,NCP,1272,2713,62,1,[2713]
CP-13.zip,1,CP,1515,4132,57,3,"[4131, 4132, 4133]"
NCP-21.zip,2,NCP,68,1269,49,2,"[1268, 1269]"
CP-25.zip,1,CP,719,3081,128,1,[3081]
NCP-10.zip,2,NCP,276,1698,139,2,"[1698, 1699]"
NCP-11.zip,2,NCP,294,1734,57,2,"[1733, 1734]"
CP-8.zip,1,CP,1342,3724,58,3,"[3723, 3724, 3725]"
Normal-4.zip,0,Normal,783,218,118,1,[218]
Normal-11.zip,0,Normal,1977,432,96,1,[432]
CP-12.zip,1,CP,1460,3998,60,2,"[3998, 3999]"
NCP-12.zip,2,NCP,32,1197,61,2,"[1196, 1197]"
Normal-2.zip,0,Normal,1736,1035,55,5,"[1032, 1033, 1034, 1035, 1036]"
CP-7.zip,1,CP,1308,3650,219,2,"[3649, 3650]"
NCP-17.zip,2,NCP,485,2125,153,2,"[2125, 2126]"
Normal-24.zip,0,Normal,2649,159,26,1,[159]
CP-1.zip,1,CP,1082,3127,74,1,[3127]
CP-28.zip,1,CP,3788,5732,26,1,[5732]
Normal-3.zip,0,Normal,1764,1143,66,4,"[1143, 1144, 1145, 1146]"
NCP-2.zip,2,NCP,125,1392,132,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-1.zip,2,NCP,1013,2577,524,1,[2577]
CP-22.zip,1,CP,630,2992,118,1,[2992]
Normal-27.zip,0,Normal,3897,5423,70,4,"[5423, 5424, 5426, 5427]"
CP-1.zip,1,CP,1088,3222,50,4,"[3220, 3221, 3222, 3223]"
NCP-11.zip,2,NCP,294,1733,136,2,"[1733, 1734]"
CP-3.zip,1,CP,1135,3353,202,1,[3353]
CP-10.zip,1,CP,1408,3879,59,3,"[3878, 3879, 3880]"
CP-19.zip,1,CP,1791,3213,71,4,"[3210, 3211, 3212, 3213]"
Normal-1.zip,0,Normal,1709,974,61,2,"[973, 974]"
CP-11.zip,1,CP,1438,3945,46,2,"[3944, 3945]"
CP-8.zip,1,CP,1325,3687,64,2,"[3686, 3687]"
CP-20.zip,1,CP,2761,3292,38,1,[3292]
NCP-17.zip,2,NCP,470,2096,64,2,"[2095, 2096]"
NCP-4.zip,2,NCP,164,1472,150,2,"[1472, 1473]"
NCP-14.zip,2,NCP,380,1912,148,2,"[1912, 1913]"
CP-7.zip,1,CP,1266,3484,134,1,[3484]
CP-10.zip,1,CP,1400,3860,54,2,"[3860, 3861]"
NCP-10.zip,2,NCP,281,1708,121,2,"[1708, 1709]"
NCP-14.zip,2,NCP,397,1944,158,2,"[1944, 1945]"
CP-27.zip,1,CP,3734,5678,32,3,"[5676, 5677, 5678]"
CP-15.zip,1,CP,1559,4238,53,2,"[4237, 4238]"
Normal-26.zip,0,Normal,3888,5406,63,1,[5406]
NCP-11.zip,2,NCP,308,1764,49,2,"[1763, 1764]"
NCP-16.zip,2,NCP,435,2024,62,2,"[2023, 2024]"
NCP-11.zip,2,NCP,285,1715,149,2,"[1715, 1716]"
NCP-20.zip,2,NCP,568,2294,144,2,"[2294, 2295]"
NCP-20.zip,2,NCP,550,2257,143,2,"[2257, 2258]"
NCP-6.zip,2,NCP,218,1581,58,2,"[1580, 1581]"
Normal-15.zip,0,Normal,2092,547,87,1,[547]
CP-10.zip,1,CP,1396,3853,58,3,"[3851, 3852, 3853]"
Normal-12.zip,0,Normal,2010,465,91,1,[465]
Normal-18.zip,0,Normal,2194,649,89,1,[649]
NCP-10.zip,2,NCP,276,1699,58,2,"[1698, 1699]"
CP-27.zip,1,CP,3746,5690,17,1,[5690]
Normal-24.zip,0,Normal,2656,166,34,1,[166]
CP-29.zip,1,CP,3802,5746,26,1,[5746]
CP-17.zip,1,CP,1641,4329,26,1,[4329]
Normal-2.zip,0,Normal,1749,1072,66,4,"[1069, 1070, 1071, 1072]"
CP-9.zip,1,CP,1373,3800,55,2,"[3800, 3801]"
Normal-22.zip,0,Normal,2596,106,44,1,[106]
Normal-14.zip,0,Normal,2072,527,77,1,[527]
Normal-20.zip,0,Normal,2251,706,89,1,[706]
CP-12.zip,1,CP,1482,4049,75,3,"[4047, 4048, 4049]"
CP-6.zip,1,CP,1231,3449,375,1,[3449]
CP-28.zip,1,CP,3797,5741,28,1,[5741]
CP-7.zip,1,CP,1307,3648,242,4,"[3645, 3646, 3647, 3648]"
NCP-1.zip,2,NCP,1030,2600,279,1,[2600]
CP-11.zip,1,CP,1448,3970,62,2,"[3969, 3970]"
Normal-20.zip,0,Normal,2255,710,95,1,[710]
CP-2.zip,1,CP,1124,3342,215,1,[3342]
NCP-28.zip,2,NCP,872,2404,46,2,"[2403, 2404]"
Normal-3.zip,0,Normal,1765,1147,60,2,"[1147, 1148]"
NCP-11.zip,2,NCP,289,1724,47,2,"[1723, 1724]"
CP-11.zip,1,CP,1442,3956,58,3,"[3954, 3955, 3956]"
CP-1.zip,1,CP,1081,3126,68,1,[3126]
Normal-20.zip,0,Normal,2263,718,108,1,[718]
NCP-19.zip,2,NCP,524,2204,191,1,[2204]
Normal-4.zip,0,Normal,784,219,105,1,[219]
CP-8.zip,1,CP,1337,3715,60,2,"[3714, 3715]"
NCP-28.zip,2,NCP,841,2356,282,1,[2356]
NCP-26.zip,2,NCP,3983,5510,40,1,[5510]
CP-20.zip,1,CP,2767,3298,35,1,[3298]
Normal-19.zip,0,Normal,2229,684,87,1,[684]
NCP-15.zip,2,NCP,429,2012,55,2,"[2011, 2012]"
CP-19.zip,1,CP,1788,3197,52,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-15.zip,1,CP,1558,4235,62,3,"[4234, 4235, 4236]"
NCP-3.zip,2,NCP,1280,2721,50,1,[2721]
NCP-4.zip,2,NCP,148,1440,150,2,"[1440, 1441]"
Normal-22.zip,0,Normal,2582,92,39,1,[92]
Normal-23.zip,0,Normal,2623,133,35,1,[133]
CP-13.zip,1,CP,1496,4090,55,2,"[4090, 4091]"
CP-30.zip,1,CP,3835,5779,23,1,[5779]
CP-11.zip,1,CP,1442,3954,139,3,"[3954, 3955, 3956]"
NCP-15.zip,2,NCP,429,2011,131,2,"[2011, 2012]"
CP-17.zip,1,CP,1621,4309,29,1,[4309]
CP-6.zip,1,CP,1244,3462,87,1,[3462]
NCP-1.zip,2,NCP,1021,2590,181,4,"[2587, 2588, 2589, 2590]"
NCP-9.zip,2,NCP,2706,2672,51,1,[2672]
NCP-14.zip,2,NCP,391,1932,131,2,"[1932, 1933]"
CP-3.zip,1,CP,1134,3352,330,1,[3352]
CP-8.zip,1,CP,1346,3734,53,3,"[3733, 3734, 3735]"
NCP-12.zip,2,NCP,320,1789,58,2,"[1788, 1789]"
NCP-21.zip,2,NCP,77,1287,126,2,"[1287, 1288]"
CP-17.zip,1,CP,1647,4335,23,1,[4335]
CP-11.zip,1,CP,1453,3979,221,3,"[3979, 3980, 3981]"
Normal-2.zip,0,Normal,1759,1117,65,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-17.zip,2,NCP,481,2118,68,2,"[2117, 2118]"
NCP-3.zip,2,NCP,1279,2720,66,1,[2720]
CP-8.zip,1,CP,1346,3733,53,3,"[3733, 3734, 3735]"
Normal-10.zip,0,Normal,1954,409,88,1,[409]
CP-17.zip,1,CP,1648,4336,29,1,[4336]
CP-14.zip,1,CP,1524,4154,58,3,"[4152, 4153, 4154]"
Normal-18.zip,0,Normal,2216,671,97,1,[671]
NCP-27.zip,2,NCP,179,1503,43,2,"[1503, 1502]"
CP-19.zip,1,CP,1788,3202,55,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-12.zip,1,CP,1482,4048,75,3,"[4047, 4048, 4049]"
NCP-29.zip,2,NCP,913,2455,268,1,[2455]
CP-19.zip,1,CP,2444,2919,112,2,"[2918, 2919]"
CP-22.zip,1,CP,639,3001,136,1,[3001]
NCP-2.zip,2,NCP,121,1383,100,2,"[1383, 1384]"
CP-8.zip,1,CP,1324,3685,58,2,"[3684, 3685]"
CP-11.zip,1,CP,1430,3929,77,2,"[3928, 3929]"
NCP-15.zip,2,NCP,401,1952,58,2,"[1951, 1952]"
Normal-4.zip,0,Normal,788,223,336,1,[223]
Normal-27.zip,0,Normal,3898,5428,74,1,[5428]
Normal-21.zip,0,Normal,2312,767,88,1,[767]
Normal-17.zip,0,Normal,2170,625,62,1,[625]
NCP-3.zip,2,NCP,130,1406,59,2,"[1405, 1406]"
CP-3.zip,1,CP,1154,3372,169,1,[3372]
Normal-3.zip,0,Normal,1765,1148,60,2,"[1147, 1148]"
Normal-11.zip,0,Normal,1962,417,78,1,[417]
CP-18.zip,1,CP,1667,4355,26,1,[4355]
CP-1.zip,1,CP,1066,3105,59,1,[3105]
NCP-1.zip,2,NCP,1047,2619,473,1,[2619]
NCP-10.zip,2,NCP,2711,2704,44,1,[2704]
Normal-19.zip,0,Normal,2237,692,85,1,[692]
NCP-11.zip,2,NCP,289,1723,110,2,"[1723, 1724]"
NCP-7.zip,2,NCP,240,1626,66,2,"[1625, 1626]"
Normal-11.zip,0,Normal,1974,429,96,1,[429]
Normal-26.zip,0,Normal,3887,5401,67,3,"[5400, 5401, 5404]"
Normal-26.zip,0,Normal,3891,5411,67,2,"[5411, 5412]"
Normal-18.zip,0,Normal,2191,646,106,1,[646]
NCP-28.zip,2,NCP,840,2355,55,1,[2355]
Normal-6.zip,0,Normal,1814,269,88,1,[269]
NCP-12.zip,2,NCP,329,1807,66,2,"[1806, 1807]"
CP-24.zip,1,CP,686,3048,133,1,[3048]
CP-19.zip,1,CP,2432,2894,124,1,[2894]
Normal-10.zip,0,Normal,1952,407,107,1,[407]
CP-13.zip,1,CP,1515,4133,57,3,"[4131, 4132, 4133]"
CP-8.zip,1,CP,1347,3737,34,3,"[3736, 3737, 3738]"
Normal-2.zip,0,Normal,1754,1095,69,4,"[1093, 1094, 1095, 1096]"
CP-22.zip,1,CP,622,2984,459,1,[2984]
CP-3.zip,1,CP,1141,3359,350,1,[3359]
CP-14.zip,1,CP,1533,4173,100,3,"[4173, 4174, 4175]"
Normal-10.zip,0,Normal,1935,390,91,1,[390]
Normal-22.zip,0,Normal,2320,775,91,1,[775]
Normal-25.zip,0,Normal,3859,5371,216,1,[5371]
Normal-12.zip,0,Normal,2018,473,93,1,[473]
CP-9.zip,1,CP,1359,3764,181,3,"[3764, 3765, 3766]"
CP-20.zip,1,CP,2452,2931,298,1,[2931]
NCP-23.zip,2,NCP,90,1316,100,2,"[1316, 1317]"
Normal-2.zip,0,Normal,1744,1058,71,2,"[1058, 1059]"
NCP-18.zip,2,NCP,492,2141,58,2,"[2140, 2141]"
Normal-13.zip,0,Normal,2053,508,81,1,[508]
Normal-17.zip,0,Normal,2156,611,82,1,[611]
NCP-19.zip,2,NCP,541,2239,121,2,"[2239, 2240]"
NCP-19.zip,2,NCP,531,2221,58,2,"[2220, 2221]"
CP-19.zip,1,CP,2448,2925,104,2,"[2925, 2926]"
CP-31.zip,1,CP,4044,5593,276,1,[5593]
CP-8.zip,1,CP,1345,3732,55,2,"[3731, 3732]"
Normal-3.zip,0,Normal,743,178,340,1,[178]
Normal-23.zip,0,Normal,2613,123,40,1,[123]
Normal-1.zip,0,Normal,1714,983,71,3,"[982, 983, 984]"
NCP-8.zip,2,NCP,268,1683,53,2,"[1682, 1683]"
CP-8.zip,1,CP,1347,3738,34,3,"[3736, 3737, 3738]"
CP-25.zip,1,CP,718,3080,466,1,[3080]
Normal-13.zip,0,Normal,2024,479,86,1,[479]
Normal-1.zip,0,Normal,1668,780,63,4,"[778, 779, 780, 781]"
CP-17.zip,1,CP,1636,4324,26,1,[4324]
NCP-20.zip,2,NCP,55,1244,63,2,"[1243, 1244]"
CP-32.zip,1,CP,2463,3227,77,1,[3227]
NCP-16.zip,2,NCP,435,2023,153,2,"[2023, 2024]"
NCP-2.zip,2,NCP,106,1350,63,2,"[1349, 1350]"
CP-27.zip,1,CP,3753,5697,20,1,[5697]
NCP-15.zip,2,NCP,415,1983,63,2,"[1982, 1983]"
NCP-5.zip,2,NCP,191,1527,54,2,"[1526, 1527]"
CP-3.zip,1,CP,1142,3360,138,1,[3360]
NCP-19.zip,2,NCP,531,2220,139,2,"[2220, 2221]"
CP-14.zip,1,CP,1524,4153,58,3,"[4152, 4153, 4154]"
CP-19.zip,1,CP,1788,3196,49,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-14.zip,1,CP,1533,4175,42,3,"[4173, 4174, 4175]"
NCP-7.zip,2,NCP,244,1635,69,2,"[1634, 1635]"
Normal-7.zip,0,Normal,1848,303,95,1,[303]
CP-24.zip,1,CP,703,3065,120,1,[3065]
Normal-1.zip,0,Normal,1731,1024,36,1,[1024]
CP-11.zip,1,CP,1448,3969,62,2,"[3969, 3970]"
Normal-25.zip,0,Normal,3850,5362,180,1,[5362]
CP-7.zip,1,CP,13,3172,255,4,"[3170, 3171, 3172, 3173]"
CP-14.zip,1,CP,1533,4174,42,3,"[4173, 4174, 4175]"
NCP-3.zip,2,NCP,130,1405,140,2,"[1405, 1406]"
CP-23.zip,1,CP,647,3009,384,1,[3009]
Normal-24.zip,0,Normal,2637,147,36,1,[147]
NCP-28.zip,2,NCP,848,2367,283,2,"[2366, 2367]"
Normal-9.zip,0,Normal,1903,358,86,1,[358]
Normal-26.zip,0,Normal,3889,5408,65,2,"[5407, 5408]"
NCP-20.zip,2,NCP,567,2293,60,2,"[2292, 2293]"
CP-22.zip,1,CP,621,2983,174,1,[2983]
CP-10.zip,1,CP,1389,3835,51,3,"[3833, 3834, 3835]"
CP-9.zip,1,CP,1362,3773,61,2,"[3772, 3773]"
Normal-27.zip,0,Normal,3897,5426,72,4,"[5423, 5424, 5426, 5427]"
NCP-15.zip,2,NCP,428,2010,53,2,"[2009, 2010]"
Normal-3.zip,0,Normal,759,194,297,1,[194]
CP-13.zip,1,CP,1497,4092,68,3,"[4092, 4093, 4094]"
Normal-19.zip,0,Normal,2246,701,87,1,[701]
CP-3.zip,1,CP,1130,3348,166,1,[3348]
CP-14.zip,1,CP,1552,4222,62,2,"[4221, 4222]"
NCP-26.zip,2,NCP,3994,5518,52,1,[5518]
NCP-27.zip,2,NCP,328,1805,43,2,"[1804, 1805]"
NCP-13.zip,2,NCP,369,1889,138,2,"[1889, 1890]"
CP-20.zip,1,CP,2756,3287,56,1,[3287]
CP-22.zip,1,CP,638,3000,116,1,[3000]
CP-6.zip,1,CP,1250,3468,451,1,[3468]
CP-19.zip,1,CP,2437,2905,316,3,"[2905, 2906, 2907]"
Normal-16.zip,0,Normal,2130,585,88,1,[585]
NCP-14.zip,2,NCP,376,1904,142,2,"[1904, 1905]"
Normal-10.zip,0,Normal,1932,387,91,1,[387]
NCP-16.zip,2,NCP,453,2060,121,2,"[2060, 2061]"
NCP-5.zip,2,NCP,191,1526,128,2,"[1526, 1527]"
CP-12.zip,1,CP,1476,4034,53,2,"[4033, 4034]"
NCP-5.zip,2,NCP,175,1495,55,2,"[1494, 1495]"
NCP-21.zip,2,NCP,71,1275,53,2,"[1274, 1275]"
Normal-10.zip,0,Normal,1925,380,90,1,[380]
NCP-30.zip,2,NCP,994,2548,226,2,"[2547, 2548]"
CP-4.zip,1,CP,1192,3410,184,1,[3410]
Normal-23.zip,0,Normal,2631,141,38,1,[141]
NCP-9.zip,2,NCP,2684,2697,50,1,[2697]
CP-27.zip,1,CP,3757,5701,22,1,[5701]
NCP-3.zip,2,NCP,1288,2729,61,1,[2729]
NCP-18.zip,2,NCP,505,2166,157,2,"[2166, 2167]"
CP-8.zip,1,CP,1348,3741,59,3,"[3739, 3740, 3741]"
Normal-24.zip,0,Normal,2651,161,34,1,[161]
Normal-23.zip,0,Normal,2618,128,35,1,[128]
CP-8.zip,1,CP,1331,3702,62,2,"[3701, 3702]"
NCP-14.zip,2,NCP,398,1947,70,2,"[1946, 1947]"
NCP-4.zip,2,NCP,158,1460,122,2,"[1460, 1461]"
NCP-23.zip,2,NCP,89,1312,157,4,"[1311, 1312, 1313, 1315]"
Normal-2.zip,0,Normal,1759,1116,64,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-17.zip,1,CP,1645,4333,26,1,[4333]
CP-10.zip,1,CP,1408,3880,59,3,"[3878, 3879, 3880]"
CP-30.zip,1,CP,3917,5541,62,1,[5541]
NCP-30.zip,2,NCP,933,2475,23,1,[2475]
CP-8.zip,1,CP,1344,3728,142,3,"[3728, 3729, 3730]"
NCP-17.zip,2,NCP,459,2072,133,2,"[2072, 2073]"
NCP-4.zip,2,NCP,150,1445,75,2,"[1444, 1445]"
CP-12.zip,1,CP,1455,3986,58,3,"[3985, 3986, 3987]"
Normal-27.zip,0,Normal,3897,5427,72,4,"[5423, 5424, 5426, 5427]"
CP-18.zip,1,CP,1772,3177,81,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
CP-27.zip,1,CP,3745,5689,23,1,[5689]
NCP-29.zip,2,NCP,920,2462,183,1,[2462]
NCP-9.zip,2,NCP,2688,2655,56,1,[2655]
Normal-8.zip,0,Normal,1887,342,94,1,[342]
CP-1.zip,1,CP,1076,3120,70,1,[3120]
Normal-15.zip,0,Normal,2100,555,94,1,[555]
NCP-11.zip,2,NCP,285,1716,62,2,"[1715, 1716]"
CP-8.zip,1,CP,1344,3729,59,3,"[3728, 3729, 3730]"
Normal-12.zip,0,Normal,2021,476,85,1,[476]
Normal-15.zip,0,Normal,2105,560,87,1,[560]
CP-9.zip,1,CP,1366,3784,57,3,"[3782, 3783, 3784]"
CP-18.zip,1,CP,1772,3181,75,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
NCP-15.zip,2,NCP,426,2006,58,2,"[2005, 2006]"
NCP-1.zip,2,NCP,1020,2586,45,1,[2586]
NCP-13.zip,2,NCP,356,1863,124,2,"[1863, 1864]"
Normal-8.zip,0,Normal,1865,320,99,1,[320]
NCP-30.zip,2,NCP,994,2547,226,2,"[2547, 2548]"
Normal-12.zip,0,Normal,2011,466,93,1,[466]
CP-18.zip,1,CP,1773,3185,67,4,"[3182, 3183, 3184, 3185]"
NCP-18.zip,2,NCP,505,2167,66,2,"[2166, 2167]"
CP-8.zip,1,CP,1328,3694,69,2,"[3693, 3694]"
NCP-2.zip,2,NCP,1278,2719,61,1,[2719]
CP-25.zip,1,CP,736,3098,494,1,[3098]
CP-24.zip,1,CP,7,3512,299,2,"[3511, 3512]"
Normal-27.zip,0,Normal,3913,5455,71,2,"[5454, 5455]"
NCP-6.zip,2,NCP,218,1580,139,2,"[1580, 1581]"
Normal-4.zip,0,Normal,795,230,120,1,[230]
NCP-6.zip,2,NCP,207,1559,46,2,"[1558, 1559]"
NCP-5.zip,2,NCP,189,1523,58,2,"[1522, 1523]"
Normal-22.zip,0,Normal,2314,769,84,1,[769]
CP-14.zip,1,CP,1541,4195,58,3,"[4194, 4195, 4196]"
Normal-26.zip,0,Normal,3866,5378,27,1,[5378]
NCP-30.zip,2,NCP,938,2481,78,2,"[2480, 2481]"
NCP-1.zip,2,NCP,1041,2612,126,1,[2612]
Normal-24.zip,0,Normal,2664,174,28,1,[174]
CP-14.zip,1,CP,1542,4198,54,3,"[4197, 4198, 4199]"
CP-8.zip,1,CP,1332,3704,41,2,"[3703, 3704]"
CP-14.zip,1,CP,1527,4160,142,3,"[4160, 4161, 4162]"
Normal-2.zip,0,Normal,1749,1071,66,4,"[1069, 1070, 1071, 1072]"
CP-7.zip,1,CP,13,3170,271,4,"[3170, 3171, 3172, 3173]"
CP-20.zip,1,CP,2769,3300,36,1,[3300]
Normal-11.zip,0,Normal,1973,428,90,1,[428]
CP-28.zip,1,CP,3783,5727,26,1,[5727]
NCP-12.zip,2,NCP,320,1788,139,2,"[1788, 1789]"
Normal-10.zip,0,Normal,1929,384,91,1,[384]
Normal-7.zip,0,Normal,1841,296,79,1,[296]
Normal-8.zip,0,Normal,1881,336,91,1,[336]
NCP-25.zip,2,NCP,3964,5475,41,1,[5475]
CP-12.zip,1,CP,1480,4043,54,2,"[4042, 4043]"
NCP-23.zip,2,NCP,91,1319,43,2,"[1318, 1319]"
NCP-11.zip,2,NCP,30,1193,56,1,[1193]
NCP-29.zip,2,NCP,924,2466,18,1,[2466]
CP-16.zip,1,CP,1614,4302,23,1,[4302]
Normal-14.zip,0,Normal,2061,516,88,1,[516]
NCP-27.zip,2,NCP,826,2339,54,1,[2339]
Normal-13.zip,0,Normal,2038,493,80,1,[493]
Normal-1.zip,0,Normal,1715,985,71,2,"[985, 986]"
CP-28.zip,1,CP,3782,5726,25,1,[5726]
CP-21.zip,1,CP,2777,3308,22,1,[3308]
CP-8.zip,1,CP,1328,3693,69,2,"[3693, 3694]"
NCP-17.zip,2,NCP,468,2091,154,2,"[2091, 2092]"
NCP-13.zip,2,NCP,36,1205,59,2,"[1204, 1205]"
Normal-12.zip,0,Normal,2000,455,93,1,[455]
CP-19.zip,1,CP,2448,2926,102,2,"[2925, 2926]"
NCP-10.zip,2,NCP,2728,2711,54,1,[2711]
NCP-8.zip,2,NCP,263,1672,177,2,"[1672, 1673]"
CP-30.zip,1,CP,3831,5775,25,1,[5775]
Normal-1.zip,0,Normal,1709,973,61,2,"[973, 974]"
CP-27.zip,1,CP,3751,5695,22,1,[5695]
CP-11.zip,1,CP,1453,3981,56,3,"[3979, 3980, 3981]"
CP-16.zip,1,CP,1617,4305,23,1,[4305]
CP-19.zip,1,CP,1788,3198,53,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-19.zip,1,CP,2447,2924,86,2,"[2923, 2924]"
NCP-20.zip,2,NCP,55,1243,150,2,"[1243, 1244]"
CP-1.zip,1,CP,1069,3108,77,4,"[3108, 3109, 3110, 3111]"
CP-29.zip,1,CP,3827,5771,26,1,[5771]
CP-16.zip,1,CP,1599,4287,17,1,[4287]
NCP-12.zip,2,NCP,34,1201,64,2,"[1200, 1201]"
NCP-19.zip,2,NCP,523,2202,148,2,"[2202, 2203]"
CP-19.zip,1,CP,2429,2890,100,1,[2890]
NCP-9.zip,2,NCP,2695,2661,45,1,[2661]
Normal-1.zip,0,Normal,1730,1022,59,5,"[1019, 1020, 1021, 1022, 1023]"
CP-24.zip,1,CP,7,3511,298,2,"[3511, 3512]"
NCP-27.zip,2,NCP,1045,2617,30,1,[2617]
Normal-15.zip,0,Normal,2088,543,75,1,[543]
Normal-25.zip,0,Normal,3853,5365,205,1,[5365]
Normal-14.zip,0,Normal,2076,531,77,1,[531]
NCP-22.zip,2,NCP,84,1301,127,2,"[1301, 1302]"
CP-18.zip,1,CP,1660,4348,23,1,[4348]
NCP-26.zip,2,NCP,3980,5487,38,1,[5487]
CP-20.zip,1,CP,2758,3289,35,1,[3289]
Normal-6.zip,0,Normal,1808,263,95,1,[263]
Normal-2.zip,0,Normal,1739,1044,56,3,"[1042, 1043, 1044]"
CP-1.zip,1,CP,1068,3107,62,1,[3107]
Normal-14.zip,0,Normal,2083,538,87,1,[538]
CP-12.zip,1,CP,1484,4054,46,3,"[4053, 4054, 4055]"
CP-29.zip,1,CP,3811,5755,23,1,[5755]
CP-14.zip,1,CP,1548,4213,51,2,"[4213, 4214]"
NCP-20.zip,2,NCP,561,2281,58,2,"[2280, 2281]"
CP-14.zip,1,CP,1544,4204,51,3,"[4203, 4204, 4205]"
NCP-27.zip,2,NCP,1062,2639,176,1,[2639]
CP-25.zip,1,CP,735,3097,110,1,[3097]
CP-2.zip,1,CP,1115,3333,180,1,[3333]
CP-27.zip,1,CP,3756,5700,20,1,[5700]
Normal-5.zip,0,Normal,813,248,136,1,[248]
Normal-19.zip,0,Normal,2221,676,103,1,[676]
Normal-27.zip,0,Normal,3902,5434,73,1,[5434]
CP-11.zip,1,CP,1437,3943,57,2,"[3942, 3943]"
NCP-2.zip,2,NCP,126,1398,64,2,"[1396, 1398]"
Normal-20.zip,0,Normal,2265,720,87,1,[720]
CP-16.zip,1,CP,1589,4277,23,1,[4277]
Normal-16.zip,0,Normal,2149,604,85,1,[604]
NCP-19.zip,2,NCP,523,2203,62,2,"[2202, 2203]"
CP-12.zip,1,CP,1455,3985,138,3,"[3985, 3986, 3987]"
CP-30.zip,1,CP,4040,5589,38,1,[5589]
NCP-1.zip,2,NCP,1049,2622,205,1,[2622]
Normal-1.zip,0,Normal,1674,811,74,2,"[810, 811]"
NCP-19.zip,2,NCP,539,2236,55,2,"[2235, 2236]"
Normal-1.zip,0,Normal,1668,779,60,4,"[778, 779, 780, 781]"
NCP-19.zip,2,NCP,542,2241,130,2,"[2241, 2242]"
CP-25.zip,1,CP,739,3101,112,1,[3101]
CP-9.zip,1,CP,1367,3785,140,3,"[3785, 3786, 3787]"
CP-14.zip,1,CP,1549,4215,61,2,"[4215, 4216]"
NCP-19.zip,2,NCP,53,1239,144,2,"[1239, 1240]"
Normal-1.zip,0,Normal,1730,1021,294,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-29.zip,2,NCP,918,2460,213,1,[2460]
NCP-23.zip,2,NCP,89,1311,138,4,"[1311, 1312, 1313, 1315]"
Normal-2.zip,0,Normal,1759,1119,66,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
CP-9.zip,1,CP,1359,3765,46,3,"[3764, 3765, 3766]"
NCP-25.zip,2,NCP,3706,5533,54,1,[5533]
CP-8.zip,1,CP,1325,3686,65,2,"[3686, 3687]"
NCP-19.zip,2,NCP,545,2248,57,2,"[2247, 2248]"
NCP-15.zip,2,NCP,418,1989,143,2,"[1989, 1990]"
CP-7.zip,1,CP,1261,3479,198,1,[3479]
NCP-29.zip,2,NCP,895,2435,143,2,"[2435, 2436]"
CP-12.zip,1,CP,1483,4051,62,3,"[4050, 4051, 4052]"
CP-12.zip,1,CP,1460,3999,60,2,"[3998, 3999]"
CP-12.zip,1,CP,1456,3988,122,3,"[3988, 3989, 3990]"
Normal-12.zip,0,Normal,2014,469,98,1,[469]
CP-14.zip,1,CP,1542,4197,180,3,"[4197, 4198, 4199]"
Normal-2.zip,0,Normal,1755,1098,73,4,"[1097, 1098, 1099, 1100]"
NCP-14.zip,2,NCP,382,1917,58,2,"[1916, 1917]"
NCP-4.zip,2,NCP,153,1451,58,2,"[1450, 1451]"
Normal-27.zip,0,Normal,3913,5454,68,2,"[5454, 5455]"
Normal-1.zip,0,Normal,1674,810,74,2,"[810, 811]"
Normal-2.zip,0,Normal,1736,1036,55,5,"[1032, 1033, 1034, 1035, 1036]"
Normal-1.zip,0,Normal,1682,852,81,6,"[847, 848, 852, 853, 857, 858]"
Normal-4.zip,0,Normal,796,231,287,1,[231]
NCP-11.zip,2,NCP,292,1729,138,2,"[1729, 1730]"
NCP-12.zip,2,NCP,327,1803,55,2,"[1802, 1803]"
Normal-25.zip,0,Normal,3712,5342,28,1,[5342]
CP-4.zip,1,CP,1182,3400,130,1,[3400]
CP-2.zip,1,CP,1113,3331,197,1,[3331]
NCP-22.zip,2,NCP,888,2426,55,1,[2426]
Normal-25.zip,0,Normal,3846,5358,209,1,[5358]
CP-9.zip,1,CP,1362,3772,61,2,"[3772, 3773]"
CP-4.zip,1,CP,1193,3411,190,1,[3411]
Normal-5.zip,0,Normal,802,237,298,1,[237]
CP-23.zip,1,CP,655,3017,511,1,[3017]
NCP-13.zip,2,NCP,360,1871,121,2,"[1871, 1872]"
NCP-30.zip,2,NCP,977,2521,257,1,[2521]
NCP-26.zip,2,NCP,3990,5514,51,1,[5514]
Normal-3.zip,0,Normal,768,203,130,1,[203]
Normal-1.zip,0,Normal,1713,980,71,2,"[980, 981]"
CP-26.zip,1,CP,3732,5672,53,2,"[5671, 5672]"
CP-20.zip,1,CP,2762,3293,33,1,[3293]
Normal-20.zip,0,Normal,2267,722,100,1,[722]
NCP-5.zip,2,NCP,189,1522,139,2,"[1522, 1523]"
NCP-28.zip,2,NCP,848,2366,57,2,"[2366, 2367]"
NCP-6.zip,2,NCP,215,1575,65,2,"[1574, 1575]"
Normal-27.zip,0,Normal,3905,5438,58,2,"[5437, 5438]"
CP-4.zip,1,CP,1163,3381,239,1,[3381]
CP-18.zip,1,CP,1665,4353,25,1,[4353]
Normal-25.zip,0,Normal,3842,5354,189,1,[5354]
Normal-22.zip,0,Normal,2583,93,46,1,[93]
NCP-11.zip,2,NCP,308,1763,116,2,"[1763, 1764]"
CP-4.zip,1,CP,1180,3398,150,1,[3398]
CP-7.zip,1,CP,1316,3668,63,3,"[3667, 3668, 3669]"
CP-5.zip,1,CP,1213,3431,159,1,[3431]
Normal-10.zip,0,Normal,1947,402,89,1,[402]
CP-24.zip,1,CP,698,3060,124,1,[3060]
CP-15.zip,1,CP,1562,4243,55,2,"[4243, 4244]"
NCP-25.zip,2,NCP,3962,5473,58,1,[5473]
CP-18.zip,1,CP,1772,3180,75,6,"[3176, 3177, 3178, 3179, 3180, 3181]"
Normal-13.zip,0,Normal,2029,484,94,1,[484]
NCP-16.zip,2,NCP,443,2041,50,2,"[2040, 2041]"
NCP-24.zip,2,NCP,984,2529,259,2,"[2529, 2530]"
CP-18.zip,1,CP,1773,3183,61,4,"[3182, 3183, 3184, 3185]"
CP-5.zip,1,CP,1194,3412,158,1,[3412]
NCP-14.zip,2,NCP,39,1211,58,2,"[1210, 1211]"
CP-13.zip,1,CP,15,3174,98,1,[3174]
CP-28.zip,1,CP,3775,5719,29,1,[5719]
NCP-17.zip,2,NCP,477,2110,58,2,"[2109, 2110]"
Normal-16.zip,0,Normal,2133,588,73,1,[588]
NCP-4.zip,2,NCP,150,1444,181,2,"[1444, 1445]"
CP-4.zip,1,CP,1188,3406,308,1,[3406]
NCP-8.zip,2,NCP,251,1649,55,2,"[1648, 1649]"
CP-1.zip,1,CP,1094,3312,329,1,[3312]
NCP-12.zip,2,NCP,327,1802,130,2,"[1802, 1803]"
Normal-7.zip,0,Normal,1830,285,84,1,[285]
CP-12.zip,1,CP,1481,4045,58,3,"[4044, 4045, 4046]"
NCP-19.zip,2,NCP,52,1238,57,2,"[1237, 1238]"
NCP-20.zip,2,NCP,557,2271,132,2,"[2271, 2272]"
NCP-14.zip,2,NCP,398,1946,167,2,"[1946, 1947]"
NCP-8.zip,2,NCP,260,1667,68,2,"[1666, 1667]"
Normal-2.zip,0,Normal,1754,1094,73,4,"[1093, 1094, 1095, 1096]"
Normal-2.zip,0,Normal,1736,1032,124,5,"[1032, 1033, 1034, 1035, 1036]"
NCP-11.zip,2,NCP,292,1730,58,2,"[1729, 1730]"
CP-25.zip,1,CP,725,3087,80,1,[3087]
CP-15.zip,1,CP,1558,4234,62,3,"[4234, 4235, 4236]"
Normal-17.zip,0,Normal,2161,616,99,1,[616]
NCP-23.zip,2,NCP,970,2513,62,1,[2513]
NCP-10.zip,2,NCP,277,1700,152,2,"[1700, 1701]"
NCP-14.zip,2,NCP,395,1941,71,2,"[1940, 1941]"
Normal-2.zip,0,Normal,1755,1100,71,4,"[1097, 1098, 1099, 1100]"
CP-26.zip,1,CP,3718,5648,254,2,"[5647, 5648]"
Normal-25.zip,0,Normal,3841,5353,188,1,[5353]
Normal-23.zip,0,Normal,2621,131,41,1,[131]
NCP-20.zip,2,NCP,555,2267,133,2,"[2267, 2268]"
NCP-7.zip,2,NCP,244,1634,165,2,"[1634, 1635]"
Normal-6.zip,0,Normal,1821,276,102,1,[276]
NCP-17.zip,2,NCP,459,2073,56,2,"[2072, 2073]"
NCP-2.zip,2,NCP,124,1390,58,2,"[1389, 1390]"
Normal-18.zip,0,Normal,2185,640,100,1,[640]
NCP-5.zip,2,NCP,193,1530,124,2,"[1530, 1531]"
NCP-8.zip,2,NCP,253,1652,139,2,"[1652, 1653]"
NCP-23.zip,2,NCP,89,1313,58,4,"[1311, 1312, 1313, 1315]"
CP-5.zip,1,CP,1216,3434,307,1,[3434]
NCP-30.zip,2,NCP,979,2523,345,1,[2523]
NCP-23.zip,2,NCP,97,1331,41,2,"[1330, 1331]"
NCP-20.zip,2,NCP,555,2268,56,2,"[2267, 2268]"
Normal-16.zip,0,Normal,2126,581,84,1,[581]
NCP-18.zip,2,NCP,488,2133,58,2,"[2131, 2133]"
NCP-10.zip,2,NCP,28,1189,61,2,"[1188, 1189]"
NCP-15.zip,2,NCP,41,1214,151,2,"[1214, 1215]"
NCP-12.zip,2,NCP,32,1196,145,2,"[1196, 1197]"
CP-26.zip,1,CP,3722,5656,50,2,"[5656, 5657]"
CP-15.zip,1,CP,1573,4261,22,1,[4261]
NCP-27.zip,2,NCP,1028,2598,147,1,[2598]
Normal-18.zip,0,Normal,2197,652,105,1,[652]
Normal-16.zip,0,Normal,2152,607,66,1,[607]
NCP-14.zip,2,NCP,380,1913,62,2,"[1912, 1913]"
Normal-15.zip,0,Normal,2093,548,72,1,[548]
NCP-3.zip,2,NCP,1299,2740,63,1,[2740]
CP-8.zip,1,CP,1348,3740,59,3,"[3739, 3740, 3741]"
Normal-6.zip,0,Normal,1822,277,101,1,[277]
Normal-4.zip,0,Normal,800,235,116,1,[235]
CP-10.zip,1,CP,1386,3827,66,2,"[3827, 3828]"
Normal-12.zip,0,Normal,2004,459,106,1,[459]
NCP-25.zip,2,NCP,3957,5470,47,1,[5470]
CP-5.zip,1,CP,1204,3422,294,1,[3422]
CP-11.zip,1,CP,1420,3905,59,2,"[3905, 3906]"
CP-17.zip,1,CP,1649,4337,23,1,[4337]
CP-28.zip,1,CP,3769,5713,18,1,[5713]
Normal-8.zip,0,Normal,1868,323,91,1,[323]
CP-1.zip,1,CP,1087,3219,400,1,[3219]
CP-26.zip,1,CP,3640,5599,295,1,[5599]
NCP-7.zip,2,NCP,248,1642,139,2,"[1642, 1643]"
NCP-2.zip,2,NCP,125,1395,55,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-25.zip,2,NCP,3941,5538,38,1,[5538]
CP-15.zip,1,CP,1558,4236,62,3,"[4234, 4235, 4236]"
NCP-16.zip,2,NCP,443,2040,117,2,"[2040, 2041]"
Normal-15.zip,0,Normal,2102,557,100,1,[557]
Normal-2.zip,0,Normal,1755,1097,73,4,"[1097, 1098, 1099, 1100]"
Normal-9.zip,0,Normal,1924,379,98,1,[379]
CP-13.zip,1,CP,1517,4136,64,2,"[4136, 4137]"
CP-1.zip,1,CP,1,3146,70,5,"[3143, 3144, 3145, 3146, 3147]"
Normal-19.zip,0,Normal,2226,681,99,1,[681]
CP-13.zip,1,CP,1517,4137,64,2,"[4136, 4137]"
NCP-23.zip,2,NCP,95,1326,165,2,"[1326, 1327]"
NCP-19.zip,2,NCP,538,2234,60,2,"[2233, 2234]"
CP-6.zip,1,CP,1253,3471,130,1,[3471]
NCP-7.zip,2,NCP,242,1629,133,2,"[1629, 1630]"
CP-8.zip,1,CP,1337,3714,60,2,"[3714, 3715]"
NCP-23.zip,2,NCP,912,2454,373,1,[2454]
Normal-23.zip,0,Normal,2622,132,38,1,[132]
Normal-8.zip,0,Normal,1871,326,73,1,[326]
NCP-5.zip,2,NCP,193,1531,52,2,"[1530, 1531]"
Normal-24.zip,0,Normal,2646,156,41,1,[156]
CP-14.zip,1,CP,1538,4185,159,3,"[4185, 4186, 4187]"
CP-23.zip,1,CP,667,3029,226,1,[3029]
CP-1.zip,1,CP,1,3147,70,5,"[3143, 3144, 3145, 3146, 3147]"
NCP-27.zip,2,NCP,1006,2566,42,2,"[2566, 2567]"
Normal-1.zip,0,Normal,1711,977,63,2,"[977, 978]"
NCP-14.zip,2,NCP,374,1899,139,2,"[1899, 1900]"
NCP-16.zip,2,NCP,457,2069,57,2,"[2068, 2069]"
CP-22.zip,1,CP,634,2996,680,1,[2996]
NCP-23.zip,2,NCP,905,2447,26,1,[2447]
Normal-2.zip,0,Normal,1759,1118,65,10,"[1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120]"
NCP-11.zip,2,NCP,290,1725,103,2,"[1725, 1726]"
NCP-21.zip,2,NCP,77,1288,53,2,"[1287, 1288]"
CP-30.zip,1,CP,4018,5567,33,1,[5567]
CP-12.zip,1,CP,1483,4052,62,3,"[4050, 4051, 4052]"
CP-24.zip,1,CP,692,3054,74,1,[3054]
NCP-6.zip,2,NCP,204,1552,139,2,"[1552, 1553]"
NCP-7.zip,2,NCP,24,1179,146,2,"[1179, 1180]"
CP-6.zip,1,CP,1251,3469,133,1,[3469]
Normal-1.zip,0,Normal,1682,857,70,6,"[847, 848, 852, 853, 857, 858]"
NCP-10.zip,2,NCP,2712,2705,42,1,[2705]
CP-2.zip,1,CP,1100,3318,201,1,[3318]
Normal-1.zip,0,Normal,1671,795,67,3,"[793, 794, 795]"
NCP-17.zip,2,NCP,461,2077,67,2,"[2076, 2077]"
CP-15.zip,1,CP,1564,4249,51,2,"[4248, 4249]"
NCP-4.zip,2,NCP,153,1450,137,2,"[1450, 1451]"
CP-4.zip,1,CP,1166,3384,202,1,[3384]
NCP-28.zip,2,NCP,851,2370,145,1,[2370]
NCP-23.zip,2,NCP,95,1327,69,2,"[1326, 1327]"
Normal-18.zip,0,Normal,2196,651,95,1,[651]
CP-27.zip,1,CP,3749,5693,20,1,[5693]
Normal-6.zip,0,Normal,1797,252,85,1,[252]
CP-14.zip,1,CP,1544,4203,122,3,"[4203, 4204, 4205]"
CP-8.zip,1,CP,1345,3731,55,2,"[3731, 3732]"
NCP-8.zip,2,NCP,2678,2649,55,1,[2649]
NCP-23.zip,2,NCP,89,1315,66,4,"[1311, 1312, 1313, 1315]"
Normal-17.zip,0,Normal,2167,622,76,1,[622]
CP-22.zip,1,CP,631,2993,130,1,[2993]
CP-16.zip,1,CP,1618,4306,26,1,[4306]
NCP-17.zip,2,NCP,471,2097,139,2,"[2097, 2098]"
NCP-15.zip,2,NCP,416,1986,58,2,"[1984, 1986]"
CP-10.zip,1,CP,1389,3833,121,3,"[3833, 3834, 3835]"
CP-24.zip,1,CP,696,3058,74,1,[3058]
NCP-26.zip,2,NCP,3996,5494,37,1,[5494]
CP-15.zip,1,CP,1565,4251,66,2,"[4250, 4251]"
NCP-7.zip,2,NCP,248,1643,58,2,"[1642, 1643]"
NCP-30.zip,2,NCP,932,2474,20,1,[2474]
CP-8.zip,1,CP,1332,3703,41,2,"[3703, 3704]"
Normal-2.zip,0,Normal,1754,1093,73,4,"[1093, 1094, 1095, 1096]"
NCP-3.zip,2,NCP,131,1408,50,2,"[1407, 1408]"
NCP-13.zip,2,NCP,37,1206,147,2,"[1206, 1207]"
NCP-7.zip,2,NCP,242,1630,56,2,"[1629, 1630]"
CP-26.zip,1,CP,3643,5603,257,2,"[5602, 5603]"
Normal-24.zip,0,Normal,2639,149,28,1,[149]
Normal-13.zip,0,Normal,2037,492,82,1,[492]
CP-16.zip,1,CP,1610,4298,22,1,[4298]
NCP-15.zip,2,NCP,415,1982,149,2,"[1982, 1983]"
NCP-2.zip,2,NCP,125,1394,55,5,"[1391, 1392, 1393, 1394, 1395]"
Normal-23.zip,0,Normal,2616,126,39,1,[126]
CP-26.zip,1,CP,3635,5594,291,1,[5594]
Normal-18.zip,0,Normal,2211,666,85,1,[666]
NCP-17.zip,2,NCP,481,2117,163,2,"[2117, 2118]"
NCP-13.zip,2,NCP,37,1207,62,2,"[1206, 1207]"
Normal-2.zip,0,Normal,1749,1070,61,4,"[1069, 1070, 1071, 1072]"
NCP-29.zip,2,NCP,927,2469,20,1,[2469]
CP-6.zip,1,CP,1226,3444,190,1,[3444]
NCP-14.zip,2,NCP,394,1938,147,2,"[1938, 1939]"
CP-19.zip,1,CP,1791,3212,71,4,"[3210, 3211, 3212, 3213]"
CP-8.zip,1,CP,1334,3708,56,2,"[3707, 3708]"
NCP-12.zip,2,NCP,324,1796,120,2,"[1796, 1797]"
CP-30.zip,1,CP,3929,5626,71,2,"[5626, 5627]"
Normal-7.zip,0,Normal,1832,287,91,1,[287]
Normal-1.zip,0,Normal,1713,981,71,2,"[980, 981]"
NCP-2.zip,2,NCP,111,1363,133,2,"[1363, 1364]"
Normal-3.zip,0,Normal,1764,1144,66,4,"[1143, 1144, 1145, 1146]"
CP-15.zip,1,CP,1560,4239,63,2,"[4239, 4240]"
NCP-22.zip,2,NCP,84,1302,54,2,"[1301, 1302]"
Normal-2.zip,0,Normal,1744,1059,71,2,"[1058, 1059]"
CP-21.zip,1,CP,590,2952,86,1,[2952]
Normal-9.zip,0,Normal,1901,356,83,1,[356]
NCP-17.zip,2,NCP,461,2076,160,2,"[2076, 2077]"
CP-24.zip,1,CP,683,3045,138,1,[3045]
Normal-11.zip,0,Normal,1983,438,105,1,[438]
NCP-14.zip,2,NCP,39,1210,139,2,"[1210, 1211]"
NCP-18.zip,2,NCP,494,2144,156,2,"[2144, 2145]"
NCP-14.zip,2,NCP,388,1927,68,2,"[1926, 1927]"
NCP-28.zip,2,NCP,853,2373,664,1,[2373]
Normal-22.zip,0,Normal,2588,98,33,1,[98]
NCP-17.zip,2,NCP,46,1225,124,2,"[1225, 1226]"
NCP-2.zip,2,NCP,126,1396,152,2,"[1396, 1398]"
NCP-15.zip,2,NCP,418,1990,58,2,"[1989, 1990]"
Normal-3.zip,0,Normal,765,200,136,1,[200]
CP-9.zip,1,CP,1370,3792,62,2,"[3792, 3793]"
CP-13.zip,1,CP,1490,4071,166,3,"[4071, 4072, 4073]"
CP-5.zip,1,CP,1212,3430,187,1,[3430]
NCP-29.zip,2,NCP,894,2434,16,1,[2434]
CP-19.zip,1,CP,1788,3199,58,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
CP-12.zip,1,CP,1466,4011,52,2,"[4011, 4012]"
CP-1.zip,1,CP,1088,3223,50,4,"[3220, 3221, 3222, 3223]"
NCP-25.zip,2,NCP,3947,5503,41,1,[5503]
CP-30.zip,1,CP,3931,5632,143,4,"[5630, 5631, 5632, 5633]"
NCP-2.zip,2,NCP,124,1389,139,2,"[1389, 1390]"
CP-7.zip,1,CP,1307,3645,53,4,"[3645, 3646, 3647, 3648]"
NCP-27.zip,2,NCP,823,2334,183,1,[2334]
Normal-1.zip,0,Normal,1728,1016,72,4,"[1013, 1014, 1015, 1016]"
Normal-2.zip,0,Normal,1754,1096,69,4,"[1093, 1094, 1095, 1096]"
CP-12.zip,1,CP,1473,4026,51,3,"[4026, 4027, 4028]"
Normal-3.zip,0,Normal,1764,1146,62,4,"[1143, 1144, 1145, 1146]"
CP-2.zip,1,CP,1103,3321,180,1,[3321]
CP-4.zip,1,CP,1181,3399,238,1,[3399]
CP-19.zip,1,CP,2436,2904,138,1,[2904]
CP-28.zip,1,CP,3795,5739,23,1,[5739]
CP-29.zip,1,CP,3805,5749,20,1,[5749]
NCP-3.zip,2,NCP,1300,2741,60,1,[2741]
NCP-23.zip,2,NCP,898,2439,48,1,[2439]
Normal-23.zip,0,Normal,2612,122,31,1,[122]
NCP-7.zip,2,NCP,24,1180,61,2,"[1179, 1180]"
Normal-6.zip,0,Normal,1807,262,95,1,[262]
NCP-30.zip,2,NCP,996,2551,189,2,"[2551, 2552]"
Normal-9.zip,0,Normal,1893,348,82,1,[348]
NCP-11.zip,2,NCP,290,1726,44,2,"[1725, 1726]"
NCP-21.zip,2,NCP,80,1293,129,2,"[1293, 1294]"
Normal-24.zip,0,Normal,2655,165,37,1,[165]
NCP-30.zip,2,NCP,996,2552,218,2,"[2551, 2552]"
CP-18.zip,1,CP,1653,4341,29,1,[4341]
NCP-5.zip,2,NCP,187,1518,136,2,"[1518, 1519]"
NCP-26.zip,2,NCP,3993,5517,39,1,[5517]
NCP-10.zip,2,NCP,273,1692,128,2,"[1692, 1693]"
NCP-5.zip,2,NCP,179,1502,122,2,"[1502, 1503]"
Normal-26.zip,0,Normal,3887,5400,67,3,"[5400, 5401, 5404]"
NCP-7.zip,2,NCP,234,1613,139,2,"[1613, 1614]"
Normal-1.zip,0,Normal,1725,1006,60,1,[1006]
NCP-15.zip,2,NCP,419,1992,55,2,"[1991, 1992]"
CP-14.zip,1,CP,1523,4151,65,2,"[4150, 4151]"
NCP-23.zip,2,NCP,938,2480,195,2,"[2480, 2481]"
NCP-13.zip,2,NCP,342,1835,149,2,"[1835, 1836]"
CP-24.zip,1,CP,680,3042,86,1,[3042]
NCP-14.zip,2,NCP,394,1939,62,2,"[1938, 1939]"
NCP-11.zip,2,NCP,288,1722,49,2,"[1721, 1722]"
CP-14.zip,1,CP,1527,4162,58,3,"[4160, 4161, 4162]"
CP-6.zip,1,CP,1241,3459,132,1,[3459]
CP-10.zip,1,CP,1408,3878,198,3,"[3878, 3879, 3880]"
NCP-14.zip,2,NCP,397,1945,66,2,"[1944, 1945]"
CP-1.zip,1,CP,1,3145,248,5,"[3143, 3144, 3145, 3146, 3147]"
Normal-15.zip,0,Normal,2111,566,95,1,[566]
Normal-12.zip,0,Normal,2007,462,85,1,[462]
NCP-6.zip,2,NCP,222,1589,52,2,"[1588, 1589]"
Normal-25.zip,0,Normal,3856,5368,220,1,[5368]
CP-6.zip,1,CP,1245,3463,306,1,[3463]
CP-9.zip,1,CP,1380,3814,56,1,[3814]
CP-11.zip,1,CP,1442,3955,58,3,"[3954, 3955, 3956]"
Normal-26.zip,0,Normal,3889,5407,68,2,"[5407, 5408]"
Normal-4.zip,0,Normal,773,208,321,1,[208]
CP-23.zip,1,CP,671,3033,448,1,[3033]
CP-23.zip,1,CP,674,3036,126,1,[3036]
CP-19.zip,1,CP,1788,3200,54,8,"[3196, 3197, 3198, 3199, 3200, 3201, 3202, 3203]"
NCP-12.zip,2,NCP,328,1804,112,2,"[1804, 1805]"
Normal-22.zip,0,Normal,2581,91,44,1,[91]
CP-7.zip,1,CP,1316,3669,62,3,"[3667, 3668, 3669]"
NCP-1.zip,2,NCP,1046,2618,70,1,[2618]
NCP-16.zip,2,NCP,456,2067,57,2,"[2066, 2067]"
Normal-1.zip,0,Normal,1730,1020,63,5,"[1019, 1020, 1021, 1022, 1023]"
NCP-17.zip,2,NCP,468,2092,65,2,"[2091, 2092]"
NCP-7.zip,2,NCP,2488,2688,40,1,[2688]
CP-10.zip,1,CP,1396,3852,58,3,"[3851, 3852, 3853]"
NCP-16.zip,2,NCP,447,2049,58,2,"[2048, 2049]"
Normal-8.zip,0,Normal,1864,319,88,1,[319]
CP-15.zip,1,CP,1560,4240,63,2,"[4239, 4240]"
CP-12.zip,1,CP,1484,4055,46,3,"[4053, 4054, 4055]"
Normal-1.zip,0,Normal,1682,853,81,6,"[847, 848, 852, 853, 857, 858]"
Normal-22.zip,0,Normal,2580,90,37,1,[90]
CP-2.zip,1,CP,1128,3346,196,1,[3346]
NCP-7.zip,2,NCP,240,1625,158,2,"[1625, 1626]"
Normal-15.zip,0,Normal,2086,541,91,1,[541]
Normal-7.zip,0,Normal,1837,292,94,1,[292]
CP-1.zip,1,CP,1069,3111,77,4,"[3108, 3109, 3110, 3111]"
CP-14.zip,1,CP,1549,4216,61,2,"[4215, 4216]"
Normal-11.zip,0,Normal,1970,425,88,1,[425]
NCP-13.zip,2,NCP,342,1836,61,2,"[1835, 1836]"
CP-25.zip,1,CP,728,3090,86,1,[3090]
NCP-21.zip,2,NCP,68,1268,115,2,"[1268, 1269]"
CP-8.zip,1,CP,1342,3725,58,3,"[3723, 3724, 3725]"
CP-12.zip,1,CP,1481,4046,58,3,"[4044, 4045, 4046]"
CP-5.zip,1,CP,1210,3428,156,1,[3428]
NCP-3.zip,2,NCP,136,1417,53,2,"[1416, 1417]"
NCP-2.zip,2,NCP,125,1393,54,5,"[1391, 1392, 1393, 1394, 1395]"
NCP-23.zip,2,NCP,97,1330,97,2,"[1330, 1331]"
NCP-1.zip,2,NCP,1021,2588,209,4,"[2587, 2588, 2589, 2590]"
NCP-12.zip,2,NCP,317,1782,50,2,"[1781, 1782]"
NCP-14.zip,2,NCP,388,1926,162,2,"[1926, 1927]"
CP-26.zip,1,CP,3641,5600,300,1,[5600]
Normal-3.zip,0,Normal,760,195,117,1,[195]
NCP-12.zip,2,NCP,325,1798,117,2,"[1798, 1799]"
Normal-1.zip,0,Normal,1671,793,72,3,"[793, 794, 795]"
Normal-5.zip,0,Normal,807,242,132,1,[242]
CP-19.zip,1,CP,1791,3211,55,4,"[3210, 3211, 3212, 3213]"
Normal-4.zip,0,Normal,792,227,108,1,[227]
CP-15.zip,1,CP,1564,4248,51,2,"[4248, 4249]"
NCP-12.zip,2,NCP,324,1797,51,2,"[1796, 1797]"
CP-13.zip,1,CP,1514,4130,61,2,"[4129, 4130]"
CP-30.zip,1,CP,4013,5562,29,1,[5562]
CP-7.zip,1,CP,13,3173,255,4,"[3170, 3171, 3172, 3173]"
CP-5.zip,1,CP,1214,3432,282,1,[3432]
Normal-8.zip,0,Normal,1878,333,88,1,[333]
Normal-21.zip,0,Normal,2297,752,83,1,[752]
CP-20.zip,1,CP,2668,3259,52,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-11.zip,1,CP,1436,3940,45,2,"[3940, 3941]"
NCP-13.zip,2,NCP,364,1880,56,2,"[1879, 1880]"
CP-9.zip,1,CP,1369,3790,67,2,"[3790, 3791]"
NCP-21.zip,2,NCP,65,1263,128,2,"[1263, 1264]"
CP-23.zip,1,CP,661,3023,116,1,[3023]
CP-30.zip,1,CP,3937,5643,66,2,"[5643, 5644]"
CP-25.zip,1,CP,8,3514,36,2,"[3513, 3514]"
NCP-15.zip,2,NCP,421,1996,67,2,"[1995, 1996]"
CP-25.zip,1,CP,738,3100,110,1,[3100]
NCP-11.zip,2,NCP,304,1755,67,2,"[1754, 1755]"
NCP-22.zip,2,NCP,834,2348,226,2,"[2347, 2348]"
Normal-1.zip,0,Normal,1680,840,66,6,"[839, 840, 841, 842, 843, 844]"
CP-13.zip,1,CP,1519,4141,68,2,"[4141, 4142]"
NCP-12.zip,2,NCP,315,1777,107,2,"[1777, 1778]"
Normal-2.zip,0,Normal,1753,1088,66,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
CP-8.zip,1,CP,1341,3722,57,1,[3722]
CP-13.zip,1,CP,1491,4075,48,3,"[4074, 4075, 4076]"
CP-28.zip,1,CP,3785,5729,28,1,[5729]
NCP-6.zip,2,NCP,212,1568,165,2,"[1568, 1569]"
CP-12.zip,1,CP,1477,4035,54,2,"[4035, 4036]"
CP-16.zip,1,CP,1605,4293,23,1,[4293]
NCP-29.zip,2,NCP,926,2468,24,1,[2468]
CP-10.zip,1,CP,1394,3847,62,2,"[3847, 3848]"
NCP-21.zip,2,NCP,580,2318,58,2,"[2317, 2318]"
NCP-19.zip,2,NCP,526,2208,137,2,"[2208, 2209]"
CP-13.zip,1,CP,1494,4085,65,3,"[4083, 4084, 4085]"
Normal-27.zip,0,Normal,3895,5421,71,4,"[5418, 5419, 5420, 5421]"
NCP-8.zip,2,NCP,267,1680,129,2,"[1680, 1681]"
NCP-18.zip,2,NCP,49,1232,61,2,"[1231, 1232]"
CP-21.zip,1,CP,589,2951,300,1,[2951]
CP-25.zip,1,CP,8,3513,42,2,"[3513, 3514]"
CP-27.zip,1,CP,3765,5709,20,1,[5709]
NCP-4.zip,2,NCP,147,1438,173,2,"[1438, 1439]"
Normal-27.zip,0,Normal,3904,5436,82,1,[5436]
NCP-14.zip,2,NCP,384,1921,54,2,"[1920, 1921]"
CP-18.zip,1,CP,1780,3560,69,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-14.zip,1,CP,1522,4148,61,2,"[4148, 4149]"
NCP-8.zip,2,NCP,256,1658,139,2,"[1658, 1659]"
CP-10.zip,1,CP,1406,3874,60,2,"[3874, 3875]"
CP-4.zip,1,CP,1177,3395,210,1,[3395]
Normal-1.zip,0,Normal,1673,804,291,6,"[804, 805, 806, 807, 808, 809]"
NCP-2.zip,2,NCP,122,1385,149,2,"[1385, 1386]"
CP-9.zip,1,CP,1354,3752,46,3,"[3751, 3752, 3753]"
NCP-23.zip,2,NCP,922,2464,240,1,[2464]
CP-20.zip,1,CP,2668,3251,58,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-6.zip,0,Normal,1796,251,96,1,[251]
CP-9.zip,1,CP,1374,3803,50,2,"[3802, 3803]"
NCP-7.zip,2,NCP,237,1620,61,2,"[1619, 1620]"
NCP-13.zip,2,NCP,363,1878,58,2,"[1877, 1878]"
CP-1.zip,1,CP,1084,3130,67,1,[3130]
Normal-14.zip,0,Normal,2082,537,78,1,[537]
CP-18.zip,1,CP,1656,4344,26,1,[4344]
NCP-18.zip,2,NCP,491,2138,149,2,"[2138, 2139]"
CP-22.zip,1,CP,609,2971,76,1,[2971]
Normal-18.zip,0,Normal,2198,653,88,1,[653]
NCP-6.zip,2,NCP,212,1569,69,2,"[1568, 1569]"
CP-21.zip,1,CP,607,2969,178,1,[2969]
NCP-9.zip,2,NCP,269,1685,64,2,"[1684, 1685]"
CP-9.zip,1,CP,1364,3777,56,3,"[3776, 3777, 3778]"
CP-17.zip,1,CP,1622,4310,27,1,[4310]
CP-16.zip,1,CP,1601,4289,19,1,[4289]
CP-10.zip,1,CP,1388,3832,51,2,"[3831, 3832]"
Normal-27.zip,0,Normal,3908,5442,56,1,[5442]
CP-25.zip,1,CP,732,3094,159,1,[3094]
NCP-14.zip,2,NCP,40,1212,149,2,"[1212, 1213]"
NCP-21.zip,2,NCP,65,1264,54,2,"[1263, 1264]"
CP-12.zip,1,CP,1477,4036,54,2,"[4035, 4036]"
Normal-10.zip,0,Normal,1953,408,94,1,[408]
CP-15.zip,1,CP,1577,4265,22,1,[4265]
Normal-14.zip,0,Normal,2055,510,91,1,[510]
Normal-17.zip,0,Normal,2154,609,94,1,[609]
Normal-27.zip,0,Normal,3895,5418,61,4,"[5418, 5419, 5420, 5421]"
Normal-19.zip,0,Normal,2227,682,73,1,[682]
Normal-11.zip,0,Normal,1975,430,101,1,[430]
CP-15.zip,1,CP,1584,4272,20,1,[4272]
Normal-20.zip,0,Normal,2262,717,84,1,[717]
CP-14.zip,1,CP,1543,4200,190,3,"[4200, 4201, 4202]"
Normal-3.zip,0,Normal,753,188,300,1,[188]
CP-12.zip,1,CP,1475,4032,50,2,"[4031, 4032]"
NCP-16.zip,2,NCP,458,2071,55,2,"[2070, 2071]"
NCP-5.zip,2,NCP,180,1504,136,2,"[1504, 1505]"
CP-30.zip,1,CP,3938,5645,94,1,[5645]
CP-9.zip,1,CP,1364,3778,56,3,"[3776, 3777, 3778]"
Normal-23.zip,0,Normal,2632,142,39,1,[142]
Normal-5.zip,0,Normal,810,245,324,1,[245]
NCP-5.zip,2,NCP,174,1493,56,2,"[1492, 1493]"
CP-17.zip,1,CP,1632,4320,23,1,[4320]
NCP-2.zip,2,NCP,112,1366,56,2,"[1365, 1366]"
CP-18.zip,1,CP,1780,3554,67,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-20.zip,1,CP,2668,3252,51,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-6.zip,0,Normal,1820,275,83,1,[275]
Normal-1.zip,0,Normal,1673,809,57,6,"[804, 805, 806, 807, 808, 809]"
Normal-18.zip,0,Normal,2204,659,94,1,[659]
CP-14.zip,1,CP,1531,4169,59,2,"[4169, 4170]"
CP-12.zip,1,CP,1474,4030,62,2,"[4029, 4030]"
Normal-18.zip,0,Normal,2215,670,80,1,[670]
NCP-21.zip,2,NCP,579,2315,150,2,"[2315, 2316]"
NCP-28.zip,2,NCP,854,2374,265,1,[2374]
Normal-25.zip,0,Normal,3838,5350,201,1,[5350]
CP-9.zip,1,CP,1352,3747,61,1,[3747]
Normal-1.zip,0,Normal,1719,994,76,2,"[993, 994]"
NCP-28.zip,2,NCP,852,2372,47,2,"[2371, 2372]"
Normal-19.zip,0,Normal,2225,680,94,1,[680]
Normal-16.zip,0,Normal,2148,603,86,1,[603]
NCP-19.zip,2,NCP,544,2245,147,2,"[2245, 2246]"
CP-29.zip,1,CP,3826,5770,26,1,[5770]
NCP-7.zip,2,NCP,229,1602,156,2,"[1602, 1603]"
Normal-1.zip,0,Normal,1673,807,283,6,"[804, 805, 806, 807, 808, 809]"
Normal-6.zip,0,Normal,1823,278,85,1,[278]
NCP-27.zip,2,NCP,824,2335,259,1,[2335]
CP-18.zip,1,CP,1776,3535,64,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-18.zip,2,NCP,513,2183,68,2,"[2182, 2183]"
CP-30.zip,1,CP,3934,5639,77,3,"[5638, 5639, 5640]"
CP-4.zip,1,CP,1168,3386,203,1,[3386]
NCP-12.zip,2,NCP,323,1794,116,2,"[1794, 1795]"
CP-8.zip,1,CP,1340,3720,64,2,"[3720, 3721]"
CP-5.zip,1,CP,1223,3441,232,1,[3441]
NCP-4.zip,2,NCP,166,1477,58,2,"[1476, 1477]"
NCP-6.zip,2,NCP,219,1583,65,2,"[1582, 1583]"
NCP-1.zip,2,NCP,101,1340,57,2,"[1339, 1340]"
NCP-11.zip,2,NCP,298,1742,145,2,"[1742, 1743]"
Normal-1.zip,0,Normal,1684,874,71,5,"[870, 871, 873, 874, 875]"
CP-14.zip,1,CP,1554,4227,41,2,"[4226, 4227]"
NCP-18.zip,2,NCP,489,2134,139,2,"[2134, 2135]"
Normal-23.zip,0,Normal,2615,125,36,1,[125]
NCP-8.zip,2,NCP,2674,2693,45,1,[2693]
NCP-6.zip,2,NCP,226,1596,142,2,"[1596, 1597]"
NCP-10.zip,2,NCP,274,1695,67,2,"[1694, 1695]"
Normal-10.zip,0,Normal,1944,399,97,1,[399]
CP-6.zip,1,CP,1236,3454,159,1,[3454]
CP-20.zip,1,CP,2668,3257,53,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-23.zip,1,CP,670,3032,78,1,[3032]
NCP-20.zip,2,NCP,548,2253,144,2,"[2253, 2254]"
CP-18.zip,1,CP,1769,3516,23,1,[3516]
Normal-3.zip,0,Normal,754,189,308,1,[189]
NCP-7.zip,2,NCP,239,1623,146,2,"[1623, 1624]"
NCP-14.zip,2,NCP,392,1935,58,2,"[1934, 1935]"
Normal-6.zip,0,Normal,1824,279,86,1,[279]
Normal-2.zip,0,Normal,1753,1087,77,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-30.zip,2,NCP,997,2554,49,2,"[2553, 2554]"
CP-26.zip,1,CP,3727,5663,42,1,[5663]
CP-11.zip,1,CP,1433,3934,62,2,"[3934, 3935]"
Normal-18.zip,0,Normal,2187,642,92,1,[642]
NCP-2.zip,2,NCP,112,1365,133,2,"[1365, 1366]"
NCP-6.zip,2,NCP,219,1582,156,2,"[1582, 1583]"
Normal-10.zip,0,Normal,1939,394,93,1,[394]
CP-18.zip,1,CP,1775,3532,57,4,"[3530, 3531, 3532, 3533]"
CP-2.zip,1,CP,11,3165,268,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
Normal-12.zip,0,Normal,2012,467,102,1,[467]
CP-21.zip,1,CP,587,2949,151,1,[2949]
Normal-15.zip,0,Normal,2116,571,92,1,[571]
CP-1.zip,1,CP,10,3156,289,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
Normal-27.zip,0,Normal,3895,5419,61,4,"[5418, 5419, 5420, 5421]"
Normal-25.zip,0,Normal,3854,5366,197,1,[5366]
Normal-4.zip,0,Normal,771,206,306,1,[206]
NCP-3.zip,2,NCP,129,1403,132,2,"[1403, 1404]"
Normal-13.zip,0,Normal,2042,497,90,1,[497]
Normal-2.zip,0,Normal,1753,1090,296,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-17.zip,2,NCP,478,2111,145,2,"[2111, 2112]"
Normal-17.zip,0,Normal,2171,626,92,1,[626]
CP-10.zip,1,CP,1410,3884,51,2,"[3883, 3884]"
CP-3.zip,1,CP,1140,3358,370,1,[3358]
NCP-22.zip,2,NCP,885,2422,52,2,"[2422, 2423]"
NCP-27.zip,2,NCP,1050,2624,428,2,"[2623, 2624]"
NCP-17.zip,2,NCP,478,2112,61,2,"[2111, 2112]"
CP-20.zip,1,CP,2668,3254,47,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-16.zip,2,NCP,433,2019,120,2,"[2019, 2020]"
NCP-19.zip,2,NCP,517,2191,58,2,"[2190, 2191]"
Normal-24.zip,0,Normal,2657,167,27,1,[167]
CP-8.zip,1,CP,1339,3718,59,2,"[3718, 3719]"
NCP-17.zip,2,NCP,482,2119,139,2,"[2119, 2120]"
CP-17.zip,1,CP,1635,4323,27,1,[4323]
Normal-10.zip,0,Normal,1930,385,98,1,[385]
Normal-1.zip,0,Normal,1679,837,70,6,"[833, 834, 835, 836, 837, 838]"
NCP-25.zip,2,NCP,3942,5539,37,1,[5539]
Normal-17.zip,0,Normal,2180,635,95,1,[635]
Normal-1.zip,0,Normal,1680,839,66,6,"[839, 840, 841, 842, 843, 844]"
Normal-1.zip,0,Normal,1705,965,69,2,"[965, 966]"
NCP-5.zip,2,NCP,174,1492,134,2,"[1492, 1493]"
NCP-14.zip,2,NCP,386,1923,62,1,[1923]
CP-22.zip,1,CP,625,2987,100,1,[2987]
CP-20.zip,1,CP,2450,2929,90,2,"[2928, 2929]"
Normal-10.zip,0,Normal,1949,404,92,1,[404]
CP-14.zip,1,CP,1546,4208,58,2,"[4208, 4209]"
NCP-21.zip,2,NCP,63,1260,58,2,"[1259, 1260]"
Normal-23.zip,0,Normal,2624,134,38,1,[134]
NCP-10.zip,2,NCP,272,1690,153,2,"[1690, 1691]"
CP-5.zip,1,CP,1209,3427,313,1,[3427]
NCP-11.zip,2,NCP,293,1731,122,2,"[1731, 1732]"
CP-9.zip,1,CP,1383,3822,71,2,"[3821, 3822]"
Normal-4.zip,0,Normal,793,228,94,1,[228]
NCP-2.zip,2,NCP,1057,2633,570,1,[2633]
Normal-1.zip,0,Normal,1679,835,67,6,"[833, 834, 835, 836, 837, 838]"
CP-4.zip,1,CP,1185,3403,131,1,[3403]
CP-11.zip,1,CP,1446,3965,63,2,"[3965, 3966]"
CP-15.zip,1,CP,1576,4264,23,1,[4264]
CP-12.zip,1,CP,1487,4062,68,3,"[4061, 4062, 4063]"
CP-9.zip,1,CP,1381,3817,66,3,"[3815, 3816, 3817]"
CP-28.zip,1,CP,3767,5711,17,1,[5711]
Normal-23.zip,0,Normal,2610,120,41,1,[120]
CP-10.zip,1,CP,1394,3848,62,2,"[3847, 3848]"
NCP-4.zip,2,NCP,160,1465,61,2,"[1464, 1465]"
CP-14.zip,1,CP,1543,4201,57,3,"[4200, 4201, 4202]"
CP-23.zip,1,CP,652,3014,277,1,[3014]
CP-16.zip,1,CP,1607,4295,17,1,[4295]
Normal-18.zip,0,Normal,2213,668,84,1,[668]
Normal-16.zip,0,Normal,2121,576,87,1,[576]
Normal-23.zip,0,Normal,2627,137,41,1,[137]
NCP-21.zip,2,NCP,582,2322,54,2,"[2321, 2322]"
CP-19.zip,1,CP,2431,2893,361,1,[2893]
Normal-1.zip,0,Normal,1717,989,67,2,"[989, 990]"
CP-10.zip,1,CP,1385,3825,64,2,"[3825, 3826]"
CP-5.zip,1,CP,1198,3416,162,1,[3416]
NCP-21.zip,2,NCP,578,2314,55,2,"[2313, 2314]"
NCP-20.zip,2,NCP,56,1246,68,2,"[1245, 1246]"
NCP-19.zip,2,NCP,532,2222,139,2,"[2222, 2223]"
Normal-21.zip,0,Normal,2283,738,87,1,[738]
Normal-19.zip,0,Normal,2222,677,78,1,[677]
CP-9.zip,1,CP,1361,3770,50,2,"[3770, 3771]"
NCP-15.zip,2,NCP,420,1993,177,2,"[1993, 1994]"
CP-18.zip,1,CP,1776,3538,76,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
Normal-1.zip,0,Normal,1706,968,64,2,"[967, 968]"
CP-20.zip,1,CP,2668,3253,51,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-5.zip,2,NCP,171,1486,143,2,"[1486, 1487]"
Normal-3.zip,0,Normal,750,185,281,1,[185]
CP-18.zip,1,CP,1780,3565,80,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-13.zip,2,NCP,362,1876,63,2,"[1875, 1876]"
CP-6.zip,1,CP,1234,3452,191,1,[3452]
Normal-1.zip,0,Normal,1684,873,133,5,"[870, 871, 873, 874, 875]"
Normal-6.zip,0,Normal,1812,267,99,1,[267]
NCP-17.zip,2,NCP,474,2103,114,2,"[2103, 2104]"
Normal-7.zip,0,Normal,1857,312,80,1,[312]
Normal-12.zip,0,Normal,1992,447,104,1,[447]
CP-18.zip,1,CP,1664,4352,20,1,[4352]
Normal-27.zip,0,Normal,3895,5420,71,4,"[5418, 5419, 5420, 5421]"
NCP-19.zip,2,NCP,517,2190,139,2,"[2190, 2191]"
Normal-23.zip,0,Normal,2625,135,39,1,[135]
Normal-5.zip,0,Normal,811,246,124,1,[246]
CP-4.zip,1,CP,1162,3380,212,1,[3380]
CP-22.zip,1,CP,611,2973,76,1,[2973]
CP-9.zip,1,CP,1381,3815,261,3,"[3815, 3816, 3817]"
CP-9.zip,1,CP,1371,3794,200,3,"[3794, 3795, 3796]"
NCP-16.zip,2,NCP,432,2017,128,2,"[2017, 2018]"
Normal-20.zip,0,Normal,2278,733,90,1,[733]
Normal-19.zip,0,Normal,2240,695,78,1,[695]
CP-28.zip,1,CP,3786,5730,29,1,[5730]
Normal-15.zip,0,Normal,2097,552,89,1,[552]
NCP-18.zip,2,NCP,500,2156,162,2,"[2156, 2157]"
CP-9.zip,1,CP,1374,3802,50,2,"[3802, 3803]"
Normal-23.zip,0,Normal,2606,116,33,1,[116]
CP-26.zip,1,CP,3651,5550,395,1,[5550]
Normal-9.zip,0,Normal,1912,367,92,1,[367]
NCP-25.zip,2,NCP,3953,5466,44,1,[5466]
CP-25.zip,1,CP,724,3086,100,1,[3086]
Normal-21.zip,0,Normal,2292,747,82,1,[747]
CP-7.zip,1,CP,1262,3480,384,1,[3480]
Normal-10.zip,0,Normal,1931,386,80,1,[386]
NCP-20.zip,2,NCP,563,2284,141,2,"[2284, 2285]"
CP-2.zip,1,CP,1123,3341,213,1,[3341]
NCP-17.zip,2,NCP,486,2127,153,2,"[2127, 2128]"
CP-26.zip,1,CP,3733,5673,32,3,"[5673, 5674, 5675]"
CP-3.zip,1,CP,1152,3370,69,1,[3370]
NCP-28.zip,2,NCP,838,2353,89,1,[2353]
Normal-1.zip,0,Normal,1717,990,67,2,"[989, 990]"
NCP-30.zip,2,NCP,997,2553,54,2,"[2553, 2554]"
NCP-17.zip,2,NCP,48,1230,61,2,"[1229, 1230]"
NCP-17.zip,2,NCP,467,2089,138,2,"[2089, 2090]"
NCP-20.zip,2,NCP,564,2286,143,2,"[2286, 2287]"
Normal-7.zip,0,Normal,1854,309,82,1,[309]
Normal-2.zip,0,Normal,1747,1065,60,1,[1065]
NCP-19.zip,2,NCP,535,2228,47,2,"[2227, 2228]"
NCP-26.zip,2,NCP,3974,5508,52,1,[5508]
Normal-7.zip,0,Normal,1829,284,92,1,[284]
Normal-1.zip,0,Normal,1673,808,57,6,"[804, 805, 806, 807, 808, 809]"
NCP-2.zip,2,NCP,1271,2712,56,1,[2712]
CP-30.zip,1,CP,3934,5638,59,3,"[5638, 5639, 5640]"
NCP-26.zip,2,NCP,3979,5486,52,1,[5486]
NCP-20.zip,2,NCP,554,2265,128,2,"[2265, 2266]"
NCP-6.zip,2,NCP,221,1587,53,2,"[1586, 1587]"
NCP-20.zip,2,NCP,558,2273,119,2,"[2273, 2274]"
CP-8.zip,1,CP,1321,3678,58,2,"[3678, 3679]"
NCP-6.zip,2,NCP,226,1597,60,2,"[1596, 1597]"
NCP-21.zip,2,NCP,76,1286,51,2,"[1285, 1286]"
NCP-1.zip,2,NCP,1042,2613,143,2,"[2613, 2614]"
NCP-13.zip,2,NCP,366,1884,67,2,"[1883, 1884]"
NCP-18.zip,2,NCP,490,2136,147,2,"[2136, 2137]"
NCP-28.zip,2,NCP,856,2376,227,2,"[2376, 2377]"
CP-19.zip,1,CP,2445,2920,283,2,"[2920, 2921]"
Normal-1.zip,0,Normal,1673,806,59,6,"[804, 805, 806, 807, 808, 809]"
CP-25.zip,1,CP,9,3151,72,4,"[3148, 3149, 3150, 3151]"
Normal-25.zip,0,Normal,3847,5359,219,1,[5359]
Normal-12.zip,0,Normal,2005,460,77,1,[460]
CP-30.zip,1,CP,3936,5642,59,1,[5642]
NCP-12.zip,2,NCP,326,1800,117,2,"[1800, 1801]"
Normal-13.zip,0,Normal,2045,500,85,1,[500]
CP-15.zip,1,CP,1583,4271,18,1,[4271]
Normal-20.zip,0,Normal,2261,716,83,1,[716]
Normal-20.zip,0,Normal,2276,731,91,1,[731]
CP-18.zip,1,CP,1776,3536,75,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-27.zip,2,NCP,1034,2605,19,1,[2605]
NCP-16.zip,2,NCP,445,2044,139,2,"[2044, 2045]"
CP-12.zip,1,CP,1461,4001,53,2,"[4000, 4001]"
CP-12.zip,1,CP,1485,4056,114,3,"[4056, 4057, 4058]"
NCP-7.zip,2,NCP,231,1606,139,2,"[1606, 1607]"
NCP-13.zip,2,NCP,343,1838,55,2,"[1837, 1838]"
NCP-6.zip,2,NCP,202,1548,161,2,"[1548, 1549]"
Normal-17.zip,0,Normal,2160,615,96,1,[615]
CP-28.zip,1,CP,3780,5724,27,1,[5724]
CP-9.zip,1,CP,1354,3753,46,3,"[3751, 3752, 3753]"
CP-16.zip,1,CP,1598,4286,23,1,[4286]
CP-19.zip,1,CP,2445,2921,119,2,"[2920, 2921]"
CP-9.zip,1,CP,1361,3771,50,2,"[3770, 3771]"
NCP-15.zip,2,NCP,412,1974,54,2,"[1973, 1974]"
Normal-8.zip,0,Normal,1861,316,76,1,[316]
Normal-3.zip,0,Normal,1766,1150,57,3,"[1149, 1150, 1151]"
Normal-17.zip,0,Normal,2182,637,96,1,[637]
Normal-7.zip,0,Normal,1833,288,102,1,[288]
Normal-9.zip,0,Normal,1894,349,99,1,[349]
Normal-22.zip,0,Normal,2319,774,101,1,[774]
Normal-1.zip,0,Normal,1680,844,64,6,"[839, 840, 841, 842, 843, 844]"
CP-24.zip,1,CP,679,3041,94,1,[3041]
CP-30.zip,1,CP,3832,5776,23,1,[5776]
CP-25.zip,1,CP,720,3082,84,1,[3082]
Normal-19.zip,0,Normal,2235,690,89,1,[690]
CP-11.zip,1,CP,1429,3927,52,2,"[3926, 3927]"
Normal-7.zip,0,Normal,1835,290,83,1,[290]
NCP-7.zip,2,NCP,239,1624,61,2,"[1623, 1624]"
Normal-27.zip,0,Normal,3899,5430,76,2,"[5429, 5430]"
CP-4.zip,1,CP,1165,3383,151,1,[3383]
NCP-3.zip,2,NCP,1297,2738,56,1,[2738]
NCP-22.zip,2,NCP,832,2345,25,1,[2345]
NCP-25.zip,2,NCP,3952,5505,46,1,[5505]
NCP-26.zip,2,NCP,3977,5509,56,1,[5509]
CP-16.zip,1,CP,1609,4297,20,1,[4297]
Normal-21.zip,0,Normal,2294,749,103,1,[749]
NCP-25.zip,2,NCP,3967,5507,46,1,[5507]
CP-13.zip,1,CP,1495,4089,48,4,"[4086, 4087, 4088, 4089]"
CP-7.zip,1,CP,1317,3672,58,3,"[3670, 3671, 3672]"
Normal-26.zip,0,Normal,3877,5389,25,1,[5389]
CP-20.zip,1,CP,2766,3297,41,1,[3297]
CP-18.zip,1,CP,1661,4349,32,1,[4349]
NCP-19.zip,2,NCP,535,2227,112,2,"[2227, 2228]"
CP-2.zip,1,CP,1120,3338,159,1,[3338]
NCP-2.zip,2,NCP,118,1377,142,2,"[1377, 1378]"
Normal-7.zip,0,Normal,1843,298,96,1,[298]
NCP-15.zip,2,NCP,400,1950,155,1,[1950]
NCP-25.zip,2,NCP,3704,5531,60,1,[5531]
Normal-15.zip,0,Normal,2095,550,99,1,[550]
Normal-1.zip,0,Normal,1684,870,68,5,"[870, 871, 873, 874, 875]"
NCP-16.zip,2,NCP,44,1222,52,2,"[1221, 1222]"
NCP-11.zip,2,NCP,31,1194,137,2,"[1194, 1195]"
NCP-15.zip,2,NCP,409,1968,64,2,"[1967, 1968]"
NCP-16.zip,2,NCP,451,2057,48,3,"[2056, 2057, 2058]"
Normal-2.zip,0,Normal,1753,1086,77,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
NCP-8.zip,2,NCP,262,1670,139,2,"[1670, 1671]"
Normal-10.zip,0,Normal,1955,410,93,1,[410]
Normal-6.zip,0,Normal,1826,281,104,1,[281]
NCP-28.zip,2,NCP,852,2371,47,2,"[2371, 2372]"
NCP-27.zip,2,NCP,1000,2558,39,1,[2558]
CP-1.zip,1,CP,1072,3115,52,1,[3115]
Normal-13.zip,0,Normal,2052,507,71,1,[507]
CP-7.zip,1,CP,1314,3663,30,2,"[3663, 3664]"
NCP-21.zip,2,NCP,67,1267,70,2,"[1266, 1267]"
NCP-3.zip,2,NCP,132,1409,117,1,[1409]
Normal-18.zip,0,Normal,2205,660,91,1,[660]
Normal-14.zip,0,Normal,2054,509,88,1,[509]
Normal-5.zip,0,Normal,809,244,114,1,[244]
NCP-27.zip,2,NCP,1029,2599,39,1,[2599]
NCP-26.zip,2,NCP,3972,5481,58,1,[5481]
Normal-13.zip,0,Normal,2026,481,85,1,[481]
NCP-17.zip,2,NCP,47,1227,139,2,"[1227, 1228]"
CP-27.zip,1,CP,3763,5707,20,1,[5707]
Normal-6.zip,0,Normal,1798,253,93,1,[253]
NCP-9.zip,2,NCP,2703,2669,41,1,[2669]
CP-1.zip,1,CP,1071,3113,57,2,"[3113, 3114]"
NCP-16.zip,2,NCP,430,2014,64,2,"[2013, 2014]"
NCP-4.zip,2,NCP,144,1432,139,2,"[1432, 1433]"
Normal-4.zip,0,Normal,780,215,116,1,[215]
Normal-12.zip,0,Normal,2020,475,88,1,[475]
NCP-13.zip,2,NCP,366,1883,161,2,"[1883, 1884]"
Normal-2.zip,0,Normal,1761,1127,18,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-29.zip,2,NCP,899,2441,42,2,"[2440, 2441]"
CP-16.zip,1,CP,1612,4300,26,1,[4300]
NCP-15.zip,2,NCP,412,1973,129,2,"[1973, 1974]"
NCP-10.zip,2,NCP,2717,2710,42,1,[2710]
CP-19.zip,1,CP,1792,3214,71,2,"[3214, 3215]"
Normal-20.zip,0,Normal,2269,724,113,1,[724]
CP-11.zip,1,CP,1451,3976,51,2,"[3975, 3976]"
Normal-11.zip,0,Normal,1978,433,94,1,[433]
NCP-3.zip,2,NCP,1282,2723,70,1,[2723]
CP-23.zip,1,CP,654,3016,74,1,[3016]
NCP-13.zip,2,NCP,345,1842,62,2,"[1841, 1842]"
CP-22.zip,1,CP,610,2972,70,1,[2972]
CP-29.zip,1,CP,3799,5743,23,1,[5743]
NCP-18.zip,2,NCP,506,2168,124,2,"[2168, 2169]"
Normal-19.zip,0,Normal,2218,673,84,1,[673]
NCP-7.zip,2,NCP,243,1632,31,3,"[1631, 1632, 1633]"
NCP-25.zip,2,NCP,3948,5504,50,1,[5504]
CP-7.zip,1,CP,1312,3658,65,2,"[3658, 3659]"
NCP-16.zip,2,NCP,451,2058,23,3,"[2056, 2057, 2058]"
CP-12.zip,1,CP,1461,4000,53,2,"[4000, 4001]"
CP-1.zip,1,CP,10,3154,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-10.zip,1,CP,1388,3831,51,2,"[3831, 3832]"
Normal-1.zip,0,Normal,1702,957,69,2,"[957, 958]"
Normal-17.zip,0,Normal,2181,636,100,1,[636]
NCP-19.zip,2,NCP,521,2198,139,2,"[2198, 2199]"
Normal-9.zip,0,Normal,1922,377,87,1,[377]
Normal-8.zip,0,Normal,1872,327,86,1,[327]
CP-9.zip,1,CP,1369,3791,67,2,"[3790, 3791]"
CP-29.zip,1,CP,3815,5759,23,1,[5759]
NCP-2.zip,2,NCP,118,1378,60,2,"[1377, 1378]"
CP-19.zip,1,CP,1793,3216,69,1,[3216]
NCP-5.zip,2,NCP,178,1501,52,2,"[1500, 1501]"
CP-13.zip,1,CP,1495,4087,50,4,"[4086, 4087, 4088, 4089]"
CP-18.zip,1,CP,1780,3566,41,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-8.zip,1,CP,1323,3682,62,2,"[3682, 3683]"
CP-20.zip,1,CP,2754,3285,30,1,[3285]
Normal-26.zip,0,Normal,3865,5377,24,1,[5377]
Normal-23.zip,0,Normal,2614,124,37,1,[124]
CP-12.zip,1,CP,1465,4009,67,2,"[4009, 4010]"
CP-14.zip,1,CP,1537,4183,53,3,"[4182, 4183, 4184]"
Normal-1.zip,0,Normal,1719,993,76,2,"[993, 994]"
NCP-3.zip,2,NCP,128,1401,122,2,"[1401, 1402]"
CP-28.zip,1,CP,3778,5722,25,1,[5722]
NCP-1.zip,2,NCP,1018,2584,252,1,[2584]
NCP-9.zip,2,NCP,27,1187,33,2,"[1186, 1187]"
CP-13.zip,1,CP,1494,4084,65,3,"[4083, 4084, 4085]"
NCP-13.zip,2,NCP,344,1839,152,2,"[1839, 1840]"
CP-21.zip,1,CP,604,2966,134,1,[2966]
NCP-1.zip,2,NCP,1037,2608,32,1,[2608]
CP-12.zip,1,CP,1485,4057,49,3,"[4056, 4057, 4058]"
NCP-16.zip,2,NCP,45,1223,152,2,"[1223, 1224]"
Normal-14.zip,0,Normal,2058,513,95,1,[513]
NCP-12.zip,2,NCP,323,1795,49,2,"[1794, 1795]"
NCP-26.zip,2,NCP,3999,5496,52,1,[5496]
Normal-15.zip,0,Normal,2107,562,92,1,[562]
CP-12.zip,1,CP,1478,4038,53,2,"[4037, 4038]"
Normal-15.zip,0,Normal,2099,554,85,1,[554]
NCP-21.zip,2,NCP,64,1261,132,2,"[1261, 1262]"
CP-9.zip,1,CP,1384,3824,66,2,"[3823, 3824]"
NCP-18.zip,2,NCP,511,2178,132,2,"[2178, 2179]"
CP-6.zip,1,CP,1227,3445,307,1,[3445]
Normal-23.zip,0,Normal,2633,143,40,1,[143]
NCP-10.zip,2,NCP,2722,2678,53,1,[2678]
NCP-15.zip,2,NCP,427,2008,56,2,"[2007, 2008]"
NCP-23.zip,2,NCP,94,1324,153,2,"[1324, 1325]"
CP-19.zip,1,CP,2446,2922,690,1,[2922]
CP-26.zip,1,CP,3728,5664,229,1,[5664]
CP-20.zip,1,CP,2668,3249,45,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-27.zip,0,Normal,3899,5429,75,2,"[5429, 5430]"
Normal-9.zip,0,Normal,1902,357,93,1,[357]
NCP-9.zip,2,NCP,27,1186,75,2,"[1186, 1187]"
NCP-18.zip,2,NCP,508,2172,145,2,"[2172, 2173]"
Normal-8.zip,0,Normal,1862,317,91,1,[317]
NCP-3.zip,2,NCP,128,1402,52,2,"[1401, 1402]"
NCP-8.zip,2,NCP,257,1660,152,2,"[1660, 1661]"
NCP-30.zip,2,NCP,973,2516,57,1,[2516]
CP-9.zip,1,CP,1357,3759,61,3,"[3758, 3759, 3760]"
Normal-26.zip,0,Normal,3864,5376,178,1,[5376]
CP-25.zip,1,CP,727,3089,104,1,[3089]
NCP-8.zip,2,NCP,259,1664,155,2,"[1664, 1665]"
CP-10.zip,1,CP,1390,3838,56,3,"[3836, 3837, 3838]"
Normal-21.zip,0,Normal,2295,750,79,1,[750]
NCP-18.zip,2,NCP,49,1231,146,2,"[1231, 1232]"
CP-10.zip,1,CP,1391,3840,59,4,"[3839, 3840, 3841, 3842]"
NCP-17.zip,2,NCP,48,1229,145,2,"[1229, 1230]"
NCP-21.zip,2,NCP,73,1278,130,3,"[1278, 1279, 1280]"
NCP-11.zip,2,NCP,296,1738,58,2,"[1737, 1738]"
NCP-3.zip,2,NCP,129,1404,56,2,"[1403, 1404]"
NCP-12.zip,2,NCP,330,1808,153,2,"[1808, 1809]"
CP-14.zip,1,CP,1529,4165,100,3,"[4165, 4166, 4167]"
CP-4.zip,1,CP,1187,3405,325,1,[3405]
NCP-11.zip,2,NCP,307,1761,136,2,"[1761, 1762]"
CP-26.zip,1,CP,3725,5661,258,2,"[5660, 5661]"
Normal-10.zip,0,Normal,1950,405,102,1,[405]
CP-15.zip,1,CP,1563,4247,61,3,"[4245, 4246, 4247]"
NCP-4.zip,2,NCP,144,1433,58,2,"[1432, 1433]"
NCP-28.zip,2,NCP,855,2375,39,1,[2375]
Normal-1.zip,0,Normal,1726,1008,69,2,"[1007, 1008]"
CP-22.zip,1,CP,629,2991,304,1,[2991]
NCP-4.zip,2,NCP,142,1428,141,2,"[1428, 1429]"
CP-21.zip,1,CP,592,2954,104,1,[2954]
CP-1.zip,1,CP,10,3159,293,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-9.zip,1,CP,1357,3760,61,3,"[3758, 3759, 3760]"
Normal-24.zip,0,Normal,2648,158,32,1,[158]
NCP-9.zip,2,NCP,269,1684,153,2,"[1684, 1685]"
Normal-15.zip,0,Normal,2108,563,101,1,[563]
CP-25.zip,1,CP,9,3148,290,4,"[3148, 3149, 3150, 3151]"
NCP-13.zip,2,NCP,364,1879,132,2,"[1879, 1880]"
Normal-23.zip,0,Normal,2605,115,35,1,[115]
NCP-10.zip,2,NCP,282,1711,51,2,"[1710, 1711]"
CP-14.zip,1,CP,1546,4209,58,2,"[4208, 4209]"
NCP-29.zip,2,NCP,925,2467,22,1,[2467]
Normal-21.zip,0,Normal,2296,751,102,1,[751]
CP-2.zip,1,CP,1114,3332,361,1,[3332]
NCP-5.zip,2,NCP,19,1171,61,2,"[1170, 1171]"
NCP-13.zip,2,NCP,363,1877,139,2,"[1877, 1878]"
CP-12.zip,1,CP,1475,4031,50,2,"[4031, 4032]"
NCP-14.zip,2,NCP,399,1949,62,2,"[1948, 1949]"
CP-17.zip,1,CP,1626,4314,26,1,[4314]
CP-18.zip,1,CP,1780,3556,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
Normal-19.zip,0,Normal,2236,691,83,1,[691]
CP-15.zip,1,CP,1572,4260,19,1,[4260]
CP-6.zip,1,CP,1240,3458,137,1,[3458]
NCP-21.zip,2,NCP,76,1285,121,2,"[1285, 1286]"
CP-22.zip,1,CP,623,2985,463,1,[2985]
CP-27.zip,1,CP,3760,5704,23,1,[5704]
CP-23.zip,1,CP,672,3034,86,1,[3034]
NCP-1.zip,2,NCP,1026,2596,21,1,[2596]
CP-22.zip,1,CP,635,2997,106,1,[2997]
NCP-14.zip,2,NCP,375,1901,115,3,"[1901, 1902, 1903]"
NCP-11.zip,2,NCP,304,1754,161,2,"[1754, 1755]"
NCP-15.zip,2,NCP,408,1965,131,2,"[1965, 1966]"
NCP-9.zip,2,NCP,2702,2668,41,1,[2668]
CP-11.zip,1,CP,1452,3978,56,2,"[3977, 3978]"
NCP-29.zip,2,NCP,891,2430,22,1,[2430]
NCP-16.zip,2,NCP,458,2070,131,2,"[2070, 2071]"
Normal-2.zip,0,Normal,1753,1092,60,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
Normal-1.zip,0,Normal,1702,958,69,2,"[957, 958]"
Normal-2.zip,0,Normal,1761,1126,45,5,"[1125, 1126, 1127, 1128, 1129]"
CP-12.zip,1,CP,1487,4063,68,3,"[4061, 4062, 4063]"
NCP-25.zip,2,NCP,3958,5471,38,1,[5471]
CP-15.zip,1,CP,1556,4231,40,2,"[4230, 4231]"
NCP-16.zip,2,NCP,431,2015,160,2,"[2015, 2016]"
Normal-2.zip,0,Normal,1745,1060,298,3,"[1060, 1061, 1062]"
NCP-23.zip,2,NCP,906,2448,55,1,[2448]
CP-2.zip,1,CP,11,3163,265,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-17.zip,2,NCP,487,2130,70,2,"[2129, 2130]"
CP-16.zip,1,CP,1600,4288,19,1,[4288]
NCP-21.zip,2,NCP,580,2317,139,2,"[2317, 2318]"
Normal-1.zip,0,Normal,1673,805,59,6,"[804, 805, 806, 807, 808, 809]"
CP-29.zip,1,CP,3801,5745,26,1,[5745]
Normal-1.zip,0,Normal,1726,1007,69,2,"[1007, 1008]"
NCP-29.zip,2,NCP,893,2432,25,2,"[2432, 2433]"
CP-3.zip,1,CP,1143,3361,177,1,[3361]
CP-8.zip,1,CP,1343,3726,56,2,"[3726, 3727]"
NCP-2.zip,2,NCP,115,1371,118,2,"[1371, 1372]"
NCP-11.zip,2,NCP,31,1195,57,2,"[1194, 1195]"
CP-1.zip,1,CP,1071,3114,57,2,"[3113, 3114]"
NCP-23.zip,2,NCP,951,2494,38,1,[2494]
Normal-1.zip,0,Normal,1706,967,64,2,"[967, 968]"
NCP-8.zip,2,NCP,262,1671,58,2,"[1670, 1671]"
Normal-10.zip,0,Normal,1943,398,94,1,[398]
NCP-8.zip,2,NCP,257,1661,64,2,"[1660, 1661]"
Normal-24.zip,0,Normal,2644,154,39,1,[154]
NCP-15.zip,2,NCP,407,1964,52,2,"[1963, 1964]"
Normal-26.zip,0,Normal,3883,5395,61,1,[5395]
NCP-9.zip,2,NCP,2685,2698,52,1,[2698]
NCP-30.zip,2,NCP,992,2545,213,1,[2545]
CP-21.zip,1,CP,596,2958,255,1,[2958]
CP-7.zip,1,CP,1314,3664,30,2,"[3663, 3664]"
NCP-16.zip,2,NCP,432,2018,54,2,"[2017, 2018]"
NCP-14.zip,2,NCP,371,1894,59,2,"[1893, 1894]"
NCP-7.zip,2,NCP,2482,2685,45,1,[2685]
Normal-1.zip,0,Normal,1679,834,66,6,"[833, 834, 835, 836, 837, 838]"
CP-29.zip,1,CP,3824,5768,23,1,[5768]
Normal-2.zip,0,Normal,1753,1089,66,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
Normal-7.zip,0,Normal,1859,314,85,1,[314]
NCP-21.zip,2,NCP,578,2313,130,2,"[2313, 2314]"
CP-10.zip,1,CP,1402,3866,55,3,"[3865, 3866, 3867]"
Normal-4.zip,0,Normal,791,226,138,1,[226]
Normal-13.zip,0,Normal,2039,494,101,1,[494]
Normal-15.zip,0,Normal,2115,570,94,1,[570]
CP-12.zip,1,CP,1470,4021,54,2,"[4020, 4021]"
CP-24.zip,1,CP,695,3057,201,1,[3057]
Normal-12.zip,0,Normal,1994,449,95,1,[449]
Normal-5.zip,0,Normal,804,239,325,1,[239]
CP-17.zip,1,CP,1623,4311,23,1,[4311]
Normal-18.zip,0,Normal,2208,663,95,1,[663]
NCP-19.zip,2,NCP,526,2209,58,2,"[2208, 2209]"
NCP-16.zip,2,NCP,45,1224,64,2,"[1223, 1224]"
Normal-1.zip,0,Normal,1679,838,70,6,"[833, 834, 835, 836, 837, 838]"
CP-2.zip,1,CP,11,3161,244,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
Normal-19.zip,0,Normal,2239,694,89,1,[694]
NCP-7.zip,2,NCP,243,1631,145,3,"[1631, 1632, 1633]"
NCP-7.zip,2,NCP,243,1633,61,3,"[1631, 1632, 1633]"
CP-18.zip,1,CP,1780,3561,63,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-11.zip,1,CP,1429,3926,52,2,"[3926, 3927]"
NCP-7.zip,2,NCP,237,1619,146,2,"[1619, 1620]"
CP-7.zip,1,CP,1319,3674,61,2,"[3674, 3675]"
NCP-28.zip,2,NCP,829,2342,36,1,[2342]
Normal-18.zip,0,Normal,2186,641,84,1,[641]
Normal-16.zip,0,Normal,2127,582,84,1,[582]
CP-5.zip,1,CP,1197,3415,191,1,[3415]
CP-10.zip,1,CP,1414,3893,63,3,"[3891, 3892, 3893]"
NCP-14.zip,2,NCP,384,1920,127,2,"[1920, 1921]"
CP-7.zip,1,CP,1317,3671,116,3,"[3670, 3671, 3672]"
NCP-22.zip,2,NCP,81,1295,125,2,"[1295, 1296]"
CP-3.zip,1,CP,1156,3374,173,1,[3374]
Normal-2.zip,0,Normal,1761,1129,60,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-8.zip,2,NCP,252,1651,58,2,"[1650, 1651]"
NCP-25.zip,2,NCP,3959,5472,44,1,[5472]
Normal-11.zip,0,Normal,1988,443,90,1,[443]
CP-30.zip,1,CP,3833,5777,23,1,[5777]
NCP-26.zip,2,NCP,3985,5491,50,1,[5491]
CP-20.zip,1,CP,2668,3255,28,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
Normal-14.zip,0,Normal,2077,532,92,1,[532]
Normal-14.zip,0,Normal,2059,514,95,1,[514]
CP-29.zip,1,CP,3829,5773,26,1,[5773]
NCP-15.zip,2,NCP,402,1954,62,2,"[1953, 1954]"
CP-29.zip,1,CP,3800,5744,29,1,[5744]
CP-9.zip,1,CP,1383,3821,71,2,"[3821, 3822]"
NCP-6.zip,2,NCP,225,1594,135,2,"[1594, 1595]"
CP-27.zip,1,CP,3759,5703,23,1,[5703]
CP-4.zip,1,CP,1190,3408,173,1,[3408]
NCP-29.zip,2,NCP,889,2427,38,2,"[2427, 2428]"
NCP-14.zip,2,NCP,375,1902,40,3,"[1901, 1902, 1903]"
Normal-19.zip,0,Normal,2238,693,91,1,[693]
NCP-2.zip,2,NCP,1273,2714,56,1,[2714]
NCP-18.zip,2,NCP,497,2151,53,2,"[2150, 2151]"
CP-25.zip,1,CP,715,3077,609,1,[3077]
CP-7.zip,1,CP,1264,3482,126,1,[3482]
CP-1.zip,1,CP,10,3157,46,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
Normal-20.zip,0,Normal,2266,721,94,1,[721]
CP-11.zip,1,CP,1433,3935,62,2,"[3934, 3935]"
NCP-18.zip,2,NCP,511,2179,56,2,"[2178, 2179]"
CP-3.zip,1,CP,1138,3356,158,1,[3356]
Normal-20.zip,0,Normal,2249,704,66,1,[704]
Normal-6.zip,0,Normal,1809,264,94,1,[264]
CP-14.zip,1,CP,1547,4210,142,3,"[4210, 4211, 4212]"
CP-21.zip,1,CP,586,2948,174,1,[2948]
CP-23.zip,1,CP,650,3012,102,1,[3012]
CP-14.zip,1,CP,1522,4149,61,2,"[4148, 4149]"
NCP-8.zip,2,NCP,250,1646,144,2,"[1646, 1647]"
Normal-26.zip,0,Normal,3884,5397,298,2,"[5396, 5397]"
CP-28.zip,1,CP,3773,5717,20,1,[5717]
Normal-21.zip,0,Normal,2309,764,88,1,[764]
NCP-12.zip,2,NCP,326,1801,50,2,"[1800, 1801]"
Normal-1.zip,0,Normal,1729,1017,74,2,"[1017, 1018]"
Normal-1.zip,0,Normal,1684,871,68,5,"[870, 871, 873, 874, 875]"
CP-15.zip,1,CP,1567,4254,118,2,"[4254, 4255]"
NCP-4.zip,2,NCP,163,1470,154,2,"[1470, 1471]"
Normal-1.zip,0,Normal,1705,966,69,2,"[965, 966]"
CP-11.zip,1,CP,1446,3966,63,2,"[3965, 3966]"
NCP-6.zip,2,NCP,225,1595,57,2,"[1594, 1595]"
NCP-11.zip,2,NCP,293,1732,52,2,"[1731, 1732]"
NCP-28.zip,2,NCP,839,2354,209,1,[2354]
NCP-18.zip,2,NCP,513,2182,163,2,"[2182, 2183]"
Normal-8.zip,0,Normal,1889,344,87,1,[344]
CP-2.zip,1,CP,1112,3330,154,1,[3330]
Normal-26.zip,0,Normal,3874,5386,28,1,[5386]
CP-29.zip,1,CP,3813,5757,21,1,[5757]
CP-7.zip,1,CP,1317,3670,229,3,"[3670, 3671, 3672]"
NCP-20.zip,2,NCP,553,2264,58,2,"[2263, 2264]"
CP-29.zip,1,CP,3820,5764,31,1,[5764]
NCP-17.zip,2,NCP,482,2120,58,2,"[2119, 2120]"
NCP-7.zip,2,NCP,233,1610,86,2,"[1610, 1612]"
NCP-18.zip,2,NCP,500,2157,68,2,"[2156, 2157]"
Normal-4.zip,0,Normal,799,234,118,1,[234]
NCP-23.zip,2,NCP,94,1325,64,2,"[1324, 1325]"
CP-18.zip,1,CP,1780,3563,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-23.zip,2,NCP,902,2444,45,1,[2444]
CP-2.zip,1,CP,11,3162,260,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-3.zip,2,NCP,135,1415,58,2,"[1414, 1415]"
CP-8.zip,1,CP,1350,3745,55,1,[3745]
Normal-14.zip,0,Normal,2065,520,81,1,[520]
NCP-5.zip,2,NCP,188,1521,57,2,"[1520, 1521]"
Normal-2.zip,0,Normal,1745,1061,60,3,"[1060, 1061, 1062]"
NCP-15.zip,2,NCP,424,2002,64,2,"[2001, 2002]"
Normal-4.zip,0,Normal,790,225,126,1,[225]
NCP-4.zip,2,NCP,142,1429,59,2,"[1428, 1429]"
CP-7.zip,1,CP,1310,3653,51,2,"[3653, 3654]"
CP-14.zip,1,CP,1537,4182,53,3,"[4182, 4183, 4184]"
CP-17.zip,1,CP,1625,4313,26,1,[4313]
Normal-1.zip,0,Normal,1680,843,64,6,"[839, 840, 841, 842, 843, 844]"
NCP-11.zip,2,NCP,311,1769,134,2,"[1769, 1770]"
CP-1.zip,1,CP,1075,3118,553,2,"[3118, 3119]"
Normal-4.zip,0,Normal,770,205,116,1,[205]
CP-7.zip,1,CP,1311,3655,160,3,"[3655, 3656, 3657]"
Normal-1.zip,0,Normal,1724,1005,55,1,[1005]
NCP-20.zip,2,NCP,563,2285,59,2,"[2284, 2285]"
NCP-4.zip,2,NCP,163,1471,65,2,"[1470, 1471]"
Normal-15.zip,0,Normal,2114,569,101,1,[569]
Normal-12.zip,0,Normal,2016,471,89,1,[471]
CP-23.zip,1,CP,657,3019,343,1,[3019]
Normal-1.zip,0,Normal,1729,1018,74,2,"[1017, 1018]"
CP-18.zip,1,CP,1780,3558,73,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-5.zip,2,NCP,183,1511,52,2,"[1510, 1511]"
CP-1.zip,1,CP,1074,3117,61,1,[3117]
Normal-8.zip,0,Normal,1870,325,88,1,[325]
CP-6.zip,1,CP,1254,3472,125,1,[3472]
CP-21.zip,1,CP,2775,3306,43,1,[3306]
CP-16.zip,1,CP,1587,4275,20,1,[4275]
NCP-26.zip,2,NCP,3984,5490,54,1,[5490]
CP-27.zip,1,CP,3747,5691,20,1,[5691]
CP-13.zip,1,CP,1495,4088,48,4,"[4086, 4087, 4088, 4089]"
CP-9.zip,1,CP,1384,3823,66,2,"[3823, 3824]"
NCP-1.zip,2,NCP,100,1338,58,2,"[1337, 1338]"
NCP-27.zip,2,NCP,1025,2595,252,1,[2595]
NCP-18.zip,2,NCP,510,2177,43,2,"[2176, 2177]"
NCP-11.zip,2,NCP,298,1743,61,2,"[1742, 1743]"
Normal-17.zip,0,Normal,2174,629,88,1,[629]
CP-23.zip,1,CP,677,3039,309,1,[3039]
Normal-21.zip,0,Normal,2284,739,80,1,[739]
Normal-18.zip,0,Normal,2193,648,85,1,[648]
NCP-27.zip,2,NCP,1015,2579,39,1,[2579]
NCP-6.zip,2,NCP,214,1572,144,2,"[1572, 1573]"
CP-6.zip,1,CP,1248,3466,141,1,[3466]
Normal-27.zip,0,Normal,3901,5433,66,1,[5433]
CP-13.zip,1,CP,1519,4142,68,2,"[4141, 4142]"
NCP-14.zip,2,NCP,385,1922,64,1,[1922]
CP-7.zip,1,CP,1311,3657,67,3,"[3655, 3656, 3657]"
CP-14.zip,1,CP,1547,4212,58,3,"[4210, 4211, 4212]"
CP-4.zip,1,CP,1186,3404,204,1,[3404]
NCP-4.zip,2,NCP,165,1474,131,2,"[1474, 1475]"
CP-1.zip,1,CP,10,3160,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-3.zip,1,CP,1157,3375,204,1,[3375]
NCP-11.zip,2,NCP,307,1762,57,2,"[1761, 1762]"
CP-11.zip,1,CP,1441,3952,53,3,"[3951, 3952, 3953]"
NCP-21.zip,2,NCP,63,1259,139,2,"[1259, 1260]"
Normal-6.zip,0,Normal,1806,261,100,1,[261]
CP-17.zip,1,CP,1627,4315,26,1,[4315]
Normal-14.zip,0,Normal,2064,519,91,1,[519]
NCP-5.zip,2,NCP,180,1505,57,2,"[1504, 1505]"
Normal-16.zip,0,Normal,2134,589,72,1,[589]
Normal-14.zip,0,Normal,2063,518,99,1,[518]
CP-11.zip,1,CP,1451,3975,51,2,"[3975, 3976]"
Normal-24.zip,0,Normal,2647,157,34,1,[157]
NCP-21.zip,2,NCP,66,1265,58,1,[1265]
Normal-25.zip,0,Normal,3843,5355,180,1,[5355]
CP-25.zip,1,CP,729,3091,106,1,[3091]
CP-20.zip,1,CP,2668,3256,53,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-6.zip,2,NCP,200,1544,123,2,"[1544, 1545]"
Normal-1.zip,0,Normal,1685,879,65,4,"[877, 878, 879, 880]"
NCP-24.zip,2,NCP,972,2515,120,1,[2515]
CP-14.zip,1,CP,1547,4211,58,3,"[4210, 4211, 4212]"
CP-18.zip,1,CP,1775,3530,58,4,"[3530, 3531, 3532, 3533]"
CP-11.zip,1,CP,1427,3921,43,2,"[3921, 3922]"
CP-18.zip,1,CP,1776,3534,64,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-13.zip,2,NCP,368,1888,54,2,"[1887, 1888]"
CP-23.zip,1,CP,644,3006,134,1,[3006]
CP-7.zip,1,CP,1312,3659,65,2,"[3658, 3659]"
NCP-4.zip,2,NCP,139,1422,132,2,"[1422, 1423]"
NCP-15.zip,2,NCP,422,1998,63,2,"[1997, 1998]"
CP-10.zip,1,CP,1391,3842,59,4,"[3839, 3840, 3841, 3842]"
CP-11.zip,1,CP,1441,3953,53,3,"[3951, 3952, 3953]"
NCP-4.zip,2,NCP,154,1452,110,2,"[1452, 1453]"
NCP-6.zip,2,NCP,202,1549,67,2,"[1548, 1549]"
CP-11.zip,1,CP,1436,3941,45,2,"[3940, 3941]"
NCP-16.zip,2,NCP,431,2016,67,2,"[2015, 2016]"
Normal-26.zip,0,Normal,3870,5382,30,1,[5382]
Normal-17.zip,0,Normal,2159,614,89,1,[614]
CP-11.zip,1,CP,1427,3922,43,2,"[3921, 3922]"
CP-6.zip,1,CP,1228,3446,307,1,[3446]
NCP-15.zip,2,NCP,422,1997,156,2,"[1997, 1998]"
Normal-1.zip,0,Normal,1679,836,67,6,"[833, 834, 835, 836, 837, 838]"
CP-16.zip,1,CP,1604,4292,22,1,[4292]
CP-4.zip,1,CP,1179,3397,153,1,[3397]
NCP-6.zip,2,NCP,221,1586,125,2,"[1586, 1587]"
CP-18.zip,1,CP,1780,3564,41,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-4.zip,2,NCP,139,1423,56,2,"[1422, 1423]"
Normal-1.zip,0,Normal,1685,880,65,4,"[877, 878, 879, 880]"
CP-18.zip,1,CP,1780,3557,73,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-7.zip,1,CP,1269,3487,172,1,[3487]
Normal-1.zip,0,Normal,1680,841,69,6,"[839, 840, 841, 842, 843, 844]"
CP-13.zip,1,CP,1491,4074,113,3,"[4074, 4075, 4076]"
NCP-13.zip,2,NCP,344,1840,63,2,"[1839, 1840]"
NCP-17.zip,2,NCP,476,2108,53,2,"[2107, 2108]"
Normal-12.zip,0,Normal,1997,452,104,1,[452]
Normal-2.zip,0,Normal,1745,1062,60,3,"[1060, 1061, 1062]"
Normal-19.zip,0,Normal,2224,679,82,1,[679]
CP-2.zip,1,CP,1101,3319,187,1,[3319]
Normal-26.zip,0,Normal,3873,5385,25,1,[5385]
CP-15.zip,1,CP,1578,4266,22,1,[4266]
Normal-22.zip,0,Normal,2591,101,37,1,[101]
Normal-11.zip,0,Normal,1966,421,90,1,[421]
NCP-17.zip,2,NCP,480,2115,139,2,"[2115, 2116]"
CP-19.zip,1,CP,2,3503,34,1,[3503]
CP-16.zip,1,CP,1616,4304,29,1,[4304]
CP-10.zip,1,CP,1410,3883,51,2,"[3883, 3884]"
CP-24.zip,1,CP,701,3063,66,1,[3063]
NCP-6.zip,2,NCP,200,1545,52,2,"[1544, 1545]"
CP-1.zip,1,CP,10,3155,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-4.zip,2,NCP,160,1464,146,2,"[1464, 1465]"
Normal-8.zip,0,Normal,1890,345,99,1,[345]
NCP-9.zip,2,NCP,2694,2660,39,1,[2660]
CP-30.zip,1,CP,3930,5628,62,2,"[5628, 5629]"
CP-25.zip,1,CP,9,3149,290,4,"[3148, 3149, 3150, 3151]"
Normal-13.zip,0,Normal,2022,477,92,1,[477]
Normal-1.zip,0,Normal,1680,842,69,6,"[839, 840, 841, 842, 843, 844]"
NCP-7.zip,2,NCP,229,1603,65,2,"[1602, 1603]"
Normal-1.zip,0,Normal,1712,979,70,1,[979]
Normal-12.zip,0,Normal,2002,457,96,1,[457]
CP-6.zip,1,CP,1233,3451,150,1,[3451]
NCP-18.zip,2,NCP,489,2135,58,2,"[2134, 2135]"
CP-7.zip,1,CP,1310,3654,51,2,"[3653, 3654]"
CP-22.zip,1,CP,636,2998,102,1,[2998]
NCP-21.zip,2,NCP,70,1273,51,2,"[1272, 1273]"
Normal-23.zip,0,Normal,2603,113,41,1,[113]
CP-8.zip,1,CP,1323,3683,62,2,"[3682, 3683]"
Normal-20.zip,0,Normal,2274,729,85,1,[729]
NCP-29.zip,2,NCP,889,2428,121,2,"[2427, 2428]"
NCP-1.zip,2,NCP,1040,2611,113,1,[2611]
Normal-21.zip,0,Normal,2298,753,80,1,[753]
CP-19.zip,1,CP,1792,3215,71,2,"[3214, 3215]"
Normal-27.zip,0,Normal,3916,5459,77,1,[5459]
Normal-21.zip,0,Normal,2311,766,91,1,[766]
NCP-13.zip,2,NCP,343,1837,130,2,"[1837, 1838]"
NCP-26.zip,2,NCP,3989,5513,45,1,[5513]
CP-13.zip,1,CP,1495,4086,112,4,"[4086, 4087, 4088, 4089]"
Normal-5.zip,0,Normal,812,247,126,1,[247]
Normal-15.zip,0,Normal,2098,553,84,1,[553]
Normal-16.zip,0,Normal,2119,574,93,1,[574]
CP-25.zip,1,CP,731,3093,82,1,[3093]
CP-16.zip,1,CP,1597,4285,23,1,[4285]
CP-26.zip,1,CP,3726,5662,232,1,[5662]
CP-4.zip,1,CP,1183,3401,294,1,[3401]
CP-10.zip,1,CP,1391,3839,59,4,"[3839, 3840, 3841, 3842]"
NCP-23.zip,2,NCP,901,2443,320,1,[2443]
Normal-11.zip,0,Normal,1957,412,78,1,[412]
NCP-17.zip,2,NCP,474,2104,48,2,"[2103, 2104]"
NCP-9.zip,2,NCP,2698,2664,57,1,[2664]
NCP-7.zip,2,NCP,233,1612,45,2,"[1610, 1612]"
NCP-9.zip,2,NCP,2686,2699,48,1,[2699]
CP-18.zip,1,CP,1776,3537,75,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
CP-3.zip,1,CP,1158,3376,193,1,[3376]
CP-27.zip,1,CP,3755,5699,23,1,[5699]
CP-13.zip,1,CP,1509,4120,59,3,"[4118, 4119, 4120]"
NCP-29.zip,2,NCP,910,2452,76,1,[2452]
CP-2.zip,1,CP,11,3166,274,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-16.zip,2,NCP,433,2020,51,2,"[2019, 2020]"
Normal-26.zip,0,Normal,3863,5375,231,1,[5375]
Normal-7.zip,0,Normal,1851,306,102,1,[306]
NCP-23.zip,2,NCP,917,2459,272,1,[2459]
NCP-26.zip,2,NCP,3986,5492,42,1,[5492]
CP-12.zip,1,CP,1478,4037,53,2,"[4037, 4038]"
NCP-2.zip,2,NCP,115,1372,50,2,"[1371, 1372]"
NCP-13.zip,2,NCP,362,1875,151,2,"[1875, 1876]"
Normal-22.zip,0,Normal,2592,102,39,1,[102]
CP-9.zip,1,CP,1357,3758,61,3,"[3758, 3759, 3760]"
Normal-6.zip,0,Normal,1825,280,81,1,[280]
Normal-4.zip,0,Normal,775,210,134,1,[210]
NCP-13.zip,2,NCP,365,1881,117,2,"[1881, 1882]"
CP-24.zip,1,CP,709,3071,302,1,[3071]
CP-17.zip,1,CP,1630,4318,23,1,[4318]
CP-15.zip,1,CP,1557,4232,43,2,"[4232, 4233]"
NCP-23.zip,2,NCP,956,2499,156,1,[2499]
CP-2.zip,1,CP,1106,3324,164,1,[3324]
Normal-9.zip,0,Normal,1895,350,92,1,[350]
CP-21.zip,1,CP,599,2961,68,1,[2961]
NCP-16.zip,2,NCP,448,2051,58,2,"[2050, 2051]"
CP-5.zip,1,CP,1206,3424,176,1,[3424]
CP-26.zip,1,CP,3648,5540,170,1,[5540]
CP-1.zip,1,CP,1091,3309,354,1,[3309]
NCP-10.zip,2,NCP,2713,2706,39,1,[2706]
NCP-30.zip,2,NCP,949,2492,42,1,[2492]
NCP-17.zip,2,NCP,480,2116,58,2,"[2115, 2116]"
CP-7.zip,1,CP,1306,3643,48,3,"[3642, 3643, 3644]"
Normal-7.zip,0,Normal,1840,295,108,1,[295]
CP-18.zip,1,CP,1780,3562,63,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
NCP-1.zip,2,NCP,1011,2575,111,2,"[2574, 2575]"
Normal-16.zip,0,Normal,2132,587,97,1,[587]
CP-29.zip,1,CP,3814,5758,29,1,[5758]
CP-18.zip,1,CP,1768,3175,175,1,[3175]
Normal-13.zip,0,Normal,2028,483,89,1,[483]
NCP-16.zip,2,NCP,454,2062,139,2,"[2062, 2063]"
CP-8.zip,1,CP,1333,3706,52,2,"[3705, 3706]"
CP-25.zip,1,CP,737,3099,84,1,[3099]
NCP-9.zip,2,NCP,2683,2653,46,1,[2653]
Normal-11.zip,0,Normal,1958,413,90,1,[413]
Normal-7.zip,0,Normal,1855,310,86,1,[310]
NCP-10.zip,2,NCP,282,1710,120,2,"[1710, 1711]"
NCP-8.zip,2,NCP,252,1650,139,2,"[1650, 1651]"
NCP-3.zip,2,NCP,133,1411,41,2,"[1410, 1411]"
CP-21.zip,1,CP,588,2950,116,1,[2950]
Normal-15.zip,0,Normal,2094,549,78,1,[549]
NCP-20.zip,2,NCP,562,2282,113,2,"[2282, 2283]"
Normal-5.zip,0,Normal,806,241,104,1,[241]
CP-3.zip,1,CP,1145,3363,169,1,[3363]
NCP-28.zip,2,NCP,847,2365,53,1,[2365]
NCP-4.zip,2,NCP,143,1431,54,2,"[1430, 1431]"
NCP-15.zip,2,NCP,407,1963,124,2,"[1963, 1964]"
Normal-6.zip,0,Normal,1817,272,85,1,[272]
CP-32.zip,1,CP,1089,3224,90,1,[3224]
NCP-22.zip,2,NCP,834,2347,194,2,"[2347, 2348]"
CP-9.zip,1,CP,1381,3816,66,3,"[3815, 3816, 3817]"
Normal-8.zip,0,Normal,1866,321,75,1,[321]
NCP-22.zip,2,NCP,86,1306,50,2,"[1305, 1306]"
CP-26.zip,1,CP,3725,5660,251,2,"[5660, 5661]"
NCP-18.zip,2,NCP,497,2150,126,2,"[2150, 2151]"
NCP-27.zip,2,NCP,1043,2615,45,1,[2615]
CP-4.zip,1,CP,1167,3385,149,1,[3385]
Normal-4.zip,0,Normal,782,217,340,1,[217]
NCP-15.zip,2,NCP,421,1995,161,2,"[1995, 1996]"
Normal-9.zip,0,Normal,1897,352,88,1,[352]
NCP-13.zip,2,NCP,365,1882,50,2,"[1881, 1882]"
CP-1.zip,1,CP,1067,3106,62,1,[3106]
CP-22.zip,1,CP,642,3004,128,1,[3004]
CP-20.zip,1,CP,2668,3258,52,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
CP-10.zip,1,CP,1406,3875,60,2,"[3874, 3875]"
CP-1.zip,1,CP,10,3158,285,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-21.zip,2,NCP,60,1254,59,2,"[1253, 1254]"
Normal-26.zip,0,Normal,3884,5396,62,2,"[5396, 5397]"
NCP-25.zip,2,NCP,3710,5537,66,1,[5537]
CP-9.zip,1,CP,1371,3795,60,3,"[3794, 3795, 3796]"
CP-20.zip,1,CP,2450,2928,92,2,"[2928, 2929]"
NCP-4.zip,2,NCP,166,1476,139,2,"[1476, 1477]"
NCP-20.zip,2,NCP,554,2266,54,2,"[2265, 2266]"
NCP-18.zip,2,NCP,491,2139,62,2,"[2138, 2139]"
CP-2.zip,1,CP,1098,3316,171,1,[3316]
CP-12.zip,1,CP,1465,4010,67,2,"[4009, 4010]"
NCP-20.zip,2,NCP,548,2254,61,2,"[2253, 2254]"
Normal-16.zip,0,Normal,2150,605,88,1,[605]
NCP-16.zip,2,NCP,451,2056,51,3,"[2056, 2057, 2058]"
Normal-11.zip,0,Normal,1965,420,88,1,[420]
NCP-1.zip,2,NCP,101,1339,136,2,"[1339, 1340]"
Normal-12.zip,0,Normal,2008,463,92,1,[463]
CP-10.zip,1,CP,1402,3867,55,3,"[3865, 3866, 3867]"
NCP-2.zip,2,NCP,122,1386,62,2,"[1385, 1386]"
CP-20.zip,1,CP,2457,2941,108,1,[2941]
NCP-14.zip,2,NCP,38,1208,137,2,"[1208, 1209]"
Normal-10.zip,0,Normal,1933,388,103,1,[388]
CP-1.zip,1,CP,10,3152,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
NCP-20.zip,2,NCP,562,2283,48,2,"[2282, 2283]"
NCP-12.zip,2,NCP,335,1819,55,2,"[1818, 1819]"
NCP-21.zip,2,NCP,579,2316,63,2,"[2315, 2316]"
Normal-7.zip,0,Normal,1856,311,80,1,[311]
NCP-18.zip,2,NCP,506,2169,51,2,"[2168, 2169]"
CP-8.zip,1,CP,1339,3719,59,2,"[3718, 3719]"
CP-18.zip,1,CP,1652,4340,25,1,[4340]
NCP-11.zip,2,NCP,296,1737,139,2,"[1737, 1738]"
Normal-8.zip,0,Normal,1886,341,84,1,[341]
NCP-8.zip,2,NCP,250,1647,60,2,"[1646, 1647]"
CP-14.zip,1,CP,1537,4184,53,3,"[4182, 4183, 4184]"
NCP-17.zip,2,NCP,486,2128,64,2,"[2127, 2128]"
CP-8.zip,1,CP,1335,3711,62,3,"[3709, 3710, 3711]"
CP-27.zip,1,CP,3739,5683,19,1,[5683]
NCP-25.zip,2,NCP,3950,5464,41,1,[5464]
CP-12.zip,1,CP,1474,4029,62,2,"[4029, 4030]"
Normal-10.zip,0,Normal,1946,401,93,1,[401]
NCP-30.zip,2,NCP,947,2490,41,1,[2490]
NCP-14.zip,2,NCP,371,1893,141,2,"[1893, 1894]"
NCP-8.zip,2,NCP,2676,2694,54,1,[2694]
NCP-1.zip,2,NCP,1011,2574,117,2,"[2574, 2575]"
Normal-9.zip,0,Normal,1906,361,93,1,[361]
NCP-4.zip,2,NCP,147,1439,72,2,"[1438, 1439]"
CP-12.zip,1,CP,1485,4058,49,3,"[4056, 4057, 4058]"
Normal-7.zip,0,Normal,1838,293,86,1,[293]
CP-25.zip,1,CP,9,3150,72,4,"[3148, 3149, 3150, 3151]"
NCP-12.zip,2,NCP,330,1809,64,2,"[1808, 1809]"
NCP-8.zip,2,NCP,267,1681,54,2,"[1680, 1681]"
NCP-20.zip,2,NCP,553,2263,137,2,"[2263, 2264]"
NCP-29.zip,2,NCP,893,2433,24,2,"[2432, 2433]"
NCP-21.zip,2,NCP,582,2321,128,2,"[2321, 2322]"
Normal-24.zip,0,Normal,2642,152,38,1,[152]
CP-25.zip,1,CP,726,3088,183,1,[3088]
NCP-5.zip,2,NCP,171,1487,60,2,"[1486, 1487]"
CP-22.zip,1,CP,632,2994,132,1,[2994]
Normal-7.zip,0,Normal,1850,305,99,1,[305]
NCP-30.zip,2,NCP,945,2488,45,1,[2488]
Normal-19.zip,0,Normal,2244,699,98,1,[699]
CP-1.zip,1,CP,1073,3116,52,1,[3116]
Normal-21.zip,0,Normal,2310,765,91,1,[765]
CP-1.zip,1,CP,10,3153,297,9,"[3152, 3153, 3154, 3155, 3156, 3157, 3158, 3159, 3160]"
CP-1.zip,1,CP,1075,3119,70,2,"[3118, 3119]"
CP-12.zip,1,CP,1470,4020,54,2,"[4020, 4021]"
NCP-26.zip,2,NCP,3997,5519,56,1,[5519]
NCP-10.zip,2,NCP,274,1694,160,2,"[1694, 1695]"
Normal-15.zip,0,Normal,2089,544,98,1,[544]
CP-24.zip,1,CP,681,3043,102,1,[3043]
NCP-20.zip,2,NCP,573,2305,63,2,"[2304, 2305]"
CP-15.zip,1,CP,1557,4233,43,2,"[4232, 4233]"
NCP-30.zip,2,NCP,990,2543,59,1,[2543]
CP-7.zip,1,CP,1305,3640,20,2,"[3640, 3641]"
NCP-5.zip,2,NCP,183,1510,123,2,"[1510, 1511]"
CP-15.zip,1,CP,1582,4270,20,1,[4270]
CP-29.zip,1,CP,3817,5761,25,1,[5761]
NCP-20.zip,2,NCP,56,1245,164,2,"[1245, 1246]"
NCP-21.zip,2,NCP,58,1250,55,2,"[1249, 1250]"
CP-8.zip,1,CP,1335,3710,62,3,"[3709, 3710, 3711]"
Normal-3.zip,0,Normal,1766,1149,60,3,"[1149, 1150, 1151]"
NCP-10.zip,2,NCP,2716,2709,49,1,[2709]
CP-10.zip,1,CP,1402,3865,131,3,"[3865, 3866, 3867]"
CP-10.zip,1,CP,1391,3841,59,4,"[3839, 3840, 3841, 3842]"
Normal-22.zip,0,Normal,2594,104,42,1,[104]
CP-26.zip,1,CP,3733,5675,174,3,"[5673, 5674, 5675]"
Normal-25.zip,0,Normal,3715,5345,30,1,[5345]
Normal-3.zip,0,Normal,762,197,363,1,[197]
NCP-15.zip,2,NCP,420,1994,71,2,"[1993, 1994]"
Normal-12.zip,0,Normal,1996,451,90,1,[451]
NCP-22.zip,2,NCP,885,2423,195,2,"[2422, 2423]"
NCP-29.zip,2,NCP,921,2463,36,1,[2463]
Normal-25.zip,0,Normal,3848,5360,192,1,[5360]
CP-28.zip,1,CP,3776,5720,30,1,[5720]
NCP-15.zip,2,NCP,402,1953,148,2,"[1953, 1954]"
Normal-19.zip,0,Normal,2232,687,99,1,[687]
CP-11.zip,1,CP,1447,3968,63,2,"[3967, 3968]"
Normal-17.zip,0,Normal,2176,631,91,1,[631]
NCP-12.zip,2,NCP,315,1778,46,2,"[1777, 1778]"
CP-2.zip,1,CP,1102,3320,182,1,[3320]
NCP-14.zip,2,NCP,373,1897,122,2,"[1897, 1898]"
CP-4.zip,1,CP,1175,3393,189,1,[3393]
NCP-14.zip,2,NCP,392,1934,143,2,"[1934, 1935]"
CP-8.zip,1,CP,1321,3679,58,2,"[3678, 3679]"
NCP-16.zip,2,NCP,430,2013,152,2,"[2013, 2014]"
NCP-26.zip,2,NCP,3988,5512,53,1,[5512]
Normal-22.zip,0,Normal,2316,771,92,1,[771]
CP-14.zip,1,CP,1531,4170,59,2,"[4169, 4170]"
Normal-3.zip,0,Normal,748,183,261,1,[183]
NCP-23.zip,2,NCP,943,2486,334,1,[2486]
Normal-18.zip,0,Normal,2202,657,82,1,[657]
CP-27.zip,1,CP,3735,5679,26,1,[5679]
NCP-15.zip,2,NCP,409,1967,153,2,"[1967, 1968]"
CP-4.zip,1,CP,1171,3389,180,1,[3389]
CP-11.zip,1,CP,1452,3977,56,2,"[3977, 3978]"
Normal-1.zip,0,Normal,1684,875,71,5,"[870, 871, 873, 874, 875]"
CP-8.zip,1,CP,1333,3705,52,2,"[3705, 3706]"
NCP-3.zip,2,NCP,135,1414,138,2,"[1414, 1415]"
NCP-25.zip,2,NCP,3965,5506,53,1,[5506]
NCP-8.zip,2,NCP,258,1662,135,2,"[1662, 1663]"
Normal-10.zip,0,Normal,1926,381,87,1,[381]
CP-16.zip,1,CP,1596,4284,22,1,[4284]
CP-14.zip,1,CP,1554,4226,41,2,"[4226, 4227]"
CP-26.zip,1,CP,3645,5605,38,1,[5605]
CP-2.zip,1,CP,1110,3328,143,1,[3328]
NCP-22.zip,2,NCP,81,1296,53,2,"[1295, 1296]"
Normal-1.zip,0,Normal,1685,877,65,4,"[877, 878, 879, 880]"
NCP-29.zip,2,NCP,923,2465,19,1,[2465]
NCP-14.zip,2,NCP,399,1948,149,2,"[1948, 1949]"
NCP-18.zip,2,NCP,510,2176,102,2,"[2176, 2177]"
NCP-20.zip,2,NCP,558,2274,51,2,"[2273, 2274]"
Normal-2.zip,0,Normal,1762,1131,70,2,"[1130, 1131]"
CP-19.zip,1,CP,2434,2898,102,3,"[2898, 2899, 2900]"
Normal-19.zip,0,Normal,2219,674,106,1,[674]
Normal-8.zip,0,Normal,1869,324,94,1,[324]
NCP-21.zip,2,NCP,70,1272,120,2,"[1272, 1273]"
NCP-10.zip,2,NCP,2710,2703,48,1,[2703]
Normal-9.zip,0,Normal,1904,359,94,1,[359]
NCP-20.zip,2,NCP,564,2287,60,2,"[2286, 2287]"
NCP-15.zip,2,NCP,424,2001,161,2,"[2001, 2002]"
CP-14.zip,1,CP,1529,4166,42,3,"[4165, 4166, 4167]"
Normal-16.zip,0,Normal,2138,593,72,1,[593]
CP-16.zip,1,CP,1613,4301,27,1,[4301]
CP-24.zip,1,CP,697,3059,114,1,[3059]
CP-10.zip,1,CP,1390,3836,215,3,"[3836, 3837, 3838]"
Normal-6.zip,0,Normal,1805,260,79,1,[260]
CP-10.zip,1,CP,1390,3837,56,3,"[3836, 3837, 3838]"
CP-3.zip,1,CP,1150,3368,214,1,[3368]
CP-2.zip,1,CP,1116,3334,183,1,[3334]
Normal-14.zip,0,Normal,2057,512,78,1,[512]
NCP-19.zip,2,NCP,532,2223,58,2,"[2222, 2223]"
CP-29.zip,1,CP,3810,5754,24,1,[5754]
CP-14.zip,1,CP,1539,4188,131,3,"[4188, 4189, 4190]"
CP-10.zip,1,CP,1385,3826,64,2,"[3825, 3826]"
NCP-29.zip,2,NCP,929,2471,21,1,[2471]
NCP-28.zip,2,NCP,856,2377,229,2,"[2376, 2377]"
NCP-15.zip,2,NCP,408,1966,55,2,"[1965, 1966]"
CP-7.zip,1,CP,1319,3675,61,2,"[3674, 3675]"
NCP-1.zip,2,NCP,1022,2591,48,1,[2591]
Normal-20.zip,0,Normal,2254,709,75,1,[709]
NCP-22.zip,2,NCP,862,2385,33,1,[2385]
CP-29.zip,1,CP,3812,5756,27,1,[5756]
CP-11.zip,1,CP,1447,3967,63,2,"[3967, 3968]"
CP-15.zip,1,CP,1556,4230,40,2,"[4230, 4231]"
CP-1.zip,1,CP,1080,3125,64,1,[3125]
Normal-4.zip,0,Normal,778,213,114,1,[213]
CP-14.zip,1,CP,1529,4167,42,3,"[4165, 4166, 4167]"
CP-2.zip,1,CP,11,3167,283,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
NCP-20.zip,2,NCP,549,2256,36,2,"[2255, 2256]"
NCP-3.zip,2,NCP,1292,2733,66,1,[2733]
Normal-13.zip,0,Normal,2047,502,93,1,[502]
NCP-20.zip,2,NCP,549,2255,83,2,"[2255, 2256]"
CP-15.zip,1,CP,1563,4246,122,3,"[4245, 4246, 4247]"
NCP-25.zip,2,NCP,3956,5469,49,1,[5469]
NCP-22.zip,2,NCP,833,2346,484,1,[2346]
CP-12.zip,1,CP,1487,4061,163,3,"[4061, 4062, 4063]"
CP-7.zip,1,CP,1306,3642,52,3,"[3642, 3643, 3644]"
NCP-17.zip,2,NCP,47,1228,58,2,"[1227, 1228]"
CP-8.zip,1,CP,1338,3716,67,2,"[3716, 3717]"
Normal-25.zip,0,Normal,3711,5341,27,1,[5341]
NCP-16.zip,2,NCP,452,2059,63,1,[2059]
Normal-23.zip,0,Normal,2604,114,36,1,[114]
NCP-28.zip,2,NCP,849,2368,224,1,[2368]
NCP-29.zip,2,NCP,886,2424,52,1,[2424]
NCP-28.zip,2,NCP,875,2408,218,1,[2408]
NCP-20.zip,2,NCP,573,2304,151,2,"[2304, 2305]"
NCP-22.zip,2,NCP,83,1300,70,2,"[1299, 1300]"
Normal-14.zip,0,Normal,2056,511,84,1,[511]
Normal-7.zip,0,Normal,1844,299,93,1,[299]
CP-13.zip,1,CP,1494,4083,154,3,"[4083, 4084, 4085]"
CP-5.zip,1,CP,1201,3419,171,1,[3419]
NCP-23.zip,2,NCP,897,2438,40,1,[2438]
Normal-27.zip,0,Normal,3914,5456,55,2,"[5456, 5457]"
CP-9.zip,1,CP,1354,3751,181,3,"[3751, 3752, 3753]"
NCP-29.zip,2,NCP,899,2440,34,2,"[2440, 2441]"
CP-10.zip,1,CP,1414,3891,151,3,"[3891, 3892, 3893]"
CP-14.zip,1,CP,1543,4202,57,3,"[4200, 4201, 4202]"
Normal-25.zip,0,Normal,3837,5349,208,1,[5349]
NCP-10.zip,2,NCP,272,1691,64,2,"[1690, 1691]"
Normal-9.zip,0,Normal,1905,360,93,1,[360]
CP-8.zip,1,CP,1340,3721,64,2,"[3720, 3721]"
NCP-5.zip,2,NCP,19,1170,146,2,"[1170, 1171]"
Normal-2.zip,0,Normal,1738,1041,75,1,[1041]
NCP-2.zip,2,NCP,108,1354,58,2,"[1353, 1354]"
Normal-25.zip,0,Normal,3844,5356,201,1,[5356]
CP-20.zip,1,CP,2459,2945,108,1,[2945]
CP-10.zip,1,CP,1414,3892,63,3,"[3891, 3892, 3893]"
Normal-18.zip,0,Normal,2201,656,66,1,[656]
NCP-21.zip,2,NCP,78,1289,166,2,"[1289, 1290]"
CP-18.zip,1,CP,1776,3539,76,6,"[3534, 3535, 3536, 3537, 3538, 3539]"
NCP-1.zip,2,NCP,1010,2572,126,2,"[2572, 2573]"
CP-11.zip,1,CP,1441,3951,203,3,"[3951, 3952, 3953]"
CP-13.zip,1,CP,1512,4125,50,2,"[4125, 4126]"
CP-30.zip,1,CP,3934,5640,53,3,"[5638, 5639, 5640]"
NCP-4.zip,2,NCP,143,1430,128,2,"[1430, 1431]"
Normal-17.zip,0,Normal,2166,621,93,1,[621]
NCP-22.zip,2,NCP,83,1299,167,2,"[1299, 1300]"
CP-29.zip,1,CP,3804,5748,29,1,[5748]
CP-22.zip,1,CP,624,2986,90,1,[2986]
NCP-7.zip,2,NCP,231,1607,58,2,"[1606, 1607]"
NCP-8.zip,2,NCP,258,1663,57,2,"[1662, 1663]"
Normal-10.zip,0,Normal,1956,411,89,1,[411]
NCP-4.zip,2,NCP,165,1475,55,2,"[1474, 1475]"
Normal-2.zip,0,Normal,1753,1091,60,7,"[1086, 1087, 1088, 1089, 1090, 1091, 1092]"
CP-6.zip,1,CP,1247,3465,218,1,[3465]
CP-17.zip,1,CP,1644,4332,23,1,[4332]
NCP-5.zip,2,NCP,188,1520,134,2,"[1520, 1521]"
CP-13.zip,1,CP,1509,4118,233,3,"[4118, 4119, 4120]"
CP-19.zip,1,CP,2434,2899,102,3,"[2898, 2899, 2900]"
Normal-27.zip,0,Normal,3914,5457,55,2,"[5456, 5457]"
NCP-3.zip,2,NCP,133,1410,100,2,"[1410, 1411]"
CP-24.zip,1,CP,690,3052,134,1,[3052]
NCP-6.zip,2,NCP,208,1560,134,2,"[1560, 1561]"
Normal-26.zip,0,Normal,3872,5384,29,1,[5384]
CP-7.zip,1,CP,1258,3476,202,1,[3476]
NCP-4.zip,2,NCP,154,1453,47,2,"[1452, 1453]"
CP-8.zip,1,CP,1335,3709,207,3,"[3709, 3710, 3711]"
CP-7.zip,1,CP,1305,3641,50,2,"[3640, 3641]"
CP-25.zip,1,CP,716,3078,640,1,[3078]
Normal-2.zip,0,Normal,1761,1125,45,5,"[1125, 1126, 1127, 1128, 1129]"
NCP-14.zip,2,NCP,38,1209,57,2,"[1208, 1209]"
Normal-1.zip,0,Normal,1685,878,65,4,"[877, 878, 879, 880]"
NCP-17.zip,2,NCP,467,2090,58,2,"[2089, 2090]"
CP-14.zip,1,CP,1539,4189,54,3,"[4188, 4189, 4190]"
NCP-16.zip,2,NCP,454,2063,58,2,"[2062, 2063]"
CP-13.zip,1,CP,1491,4076,48,3,"[4074, 4075, 4076]"
Normal-4.zip,0,Normal,794,229,341,1,[229]
NCP-19.zip,2,NCP,521,2199,58,2,"[2198, 2199]"
CP-7.zip,1,CP,1311,3656,67,3,"[3655, 3656, 3657]"
Normal-22.zip,0,Normal,2584,94,44,1,[94]
CP-23.zip,1,CP,678,3040,46,1,[3040]
CP-14.zip,1,CP,1539,4190,54,3,"[4188, 4189, 4190]"
CP-30.zip,1,CP,3937,5644,55,2,"[5643, 5644]"
NCP-15.zip,2,NCP,427,2007,132,2,"[2007, 2008]"
NCP-28.zip,2,NCP,843,2358,279,1,[2358]
NCP-14.zip,2,NCP,375,1903,49,3,"[1901, 1902, 1903]"
NCP-11.zip,2,NCP,306,1759,153,2,"[1759, 1760]"
NCP-16.zip,2,NCP,44,1221,124,2,"[1221, 1222]"
NCP-8.zip,2,NCP,256,1659,58,2,"[1658, 1659]"
CP-8.zip,1,CP,1338,3717,67,2,"[3716, 3717]"
CP-18.zip,1,CP,1780,3553,67,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-7.zip,1,CP,1267,3485,151,1,[3485]
CP-13.zip,1,CP,1509,4119,118,3,"[4118, 4119, 4120]"
Normal-3.zip,0,Normal,1766,1151,62,3,"[1149, 1150, 1151]"
CP-10.zip,1,CP,1405,3873,60,2,"[3872, 3873]"
CP-1.zip,1,CP,1079,3124,63,1,[3124]
CP-18.zip,1,CP,1780,3559,69,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
Normal-7.zip,0,Normal,1852,307,94,1,[307]
CP-5.zip,1,CP,1195,3413,247,1,[3413]
NCP-20.zip,2,NCP,556,2270,53,2,"[2269, 2270]"
NCP-2.zip,2,NCP,108,1353,139,2,"[1353, 1354]"
NCP-16.zip,2,NCP,445,2045,58,2,"[2044, 2045]"
CP-13.zip,1,CP,1512,4126,50,2,"[4125, 4126]"
NCP-21.zip,2,NCP,64,1262,55,2,"[1261, 1262]"
CP-5.zip,1,CP,1211,3429,143,1,[3429]
NCP-1.zip,2,NCP,1042,2614,143,2,"[2613, 2614]"
NCP-21.zip,2,NCP,73,1280,55,3,"[1278, 1279, 1280]"
CP-9.zip,1,CP,1364,3776,133,3,"[3776, 3777, 3778]"
NCP-21.zip,2,NCP,58,1249,131,2,"[1249, 1250]"
CP-20.zip,1,CP,2668,3250,44,11,"[3249, 3250, 3251, 3252, 3253, 3254, 3255, 3256, 3257, 3258, 3259]"
NCP-21.zip,2,NCP,73,1279,57,3,"[1278, 1279, 1280]"
CP-26.zip,1,CP,3733,5674,159,3,"[5673, 5674, 5675]"
Normal-19.zip,0,Normal,2247,702,86,1,[702]
NCP-28.zip,2,NCP,867,2394,161,1,[2394]
CP-22.zip,1,CP,633,2995,114,1,[2995]
CP-9.zip,1,CP,1371,3796,60,3,"[3794, 3795, 3796]"
NCP-22.zip,2,NCP,86,1305,117,2,"[1305, 1306]"
NCP-14.zip,2,NCP,40,1213,63,2,"[1212, 1213]"
Normal-26.zip,0,Normal,3892,5415,72,1,[5415]
CP-7.zip,1,CP,1306,3644,237,3,"[3642, 3643, 3644]"
CP-24.zip,1,CP,702,3064,78,1,[3064]
NCP-26.zip,2,NCP,3975,5483,44,1,[5483]
CP-4.zip,1,CP,1164,3382,193,1,[3382]
Normal-11.zip,0,Normal,1960,415,98,1,[415]
CP-5.zip,1,CP,1203,3421,231,1,[3421]
CP-19.zip,1,CP,2434,2900,104,3,"[2898, 2899, 2900]"
NCP-29.zip,2,NCP,890,2429,203,1,[2429]
NCP-16.zip,2,NCP,448,2050,139,2,"[2050, 2051]"
CP-18.zip,1,CP,1780,3555,60,14,"[3553, 3554, 3555, 3556, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3564, 3565, 3566]"
CP-12.zip,1,CP,1457,3991,69,1,[3991]
Normal-3.zip,0,Normal,756,191,106,1,[191]
NCP-29.zip,2,NCP,900,2442,506,1,[2442]
NCP-17.zip,2,NCP,476,2107,127,2,"[2107, 2108]"
CP-28.zip,1,CP,3794,5738,26,1,[5738]
CP-23.zip,1,CP,669,3031,70,1,[3031]
Normal-9.zip,0,Normal,1911,366,96,1,[366]
Normal-9.zip,0,Normal,1919,374,99,1,[374]
NCP-12.zip,2,NCP,335,1818,129,2,"[1818, 1819]"
CP-18.zip,1,CP,1651,4339,31,1,[4339]
Normal-4.zip,0,Normal,798,233,122,1,[233]
NCP-18.zip,2,NCP,508,2173,61,2,"[2172, 2173]"
NCP-21.zip,2,NCP,67,1266,168,2,"[1266, 1267]"
NCP-6.zip,2,NCP,214,1573,60,2,"[1572, 1573]"
CP-10.zip,1,CP,1405,3872,60,2,"[3872, 3873]"
NCP-6.zip,2,NCP,208,1561,56,2,"[1560, 1561]"
NCP-14.zip,2,NCP,373,1898,52,2,"[1897, 1898]"
NCP-3.zip,2,NCP,1281,2722,65,1,[2722]
CP-24.zip,1,CP,707,3069,72,1,[3069]
NCP-28.zip,2,NCP,831,2344,278,1,[2344]
Normal-17.zip,0,Normal,2179,634,101,1,[634]
NCP-21.zip,2,NCP,60,1253,141,2,"[1253, 1254]"
NCP-8.zip,2,NCP,259,1665,65,2,"[1664, 1665]"
NCP-11.zip,2,NCP,311,1770,55,2,"[1769, 1770]"
NCP-27.zip,2,NCP,1050,2623,46,2,"[2623, 2624]"
NCP-18.zip,2,NCP,490,2137,62,2,"[2136, 2137]"
Normal-27.zip,0,Normal,3900,5431,64,2,"[5431, 5432]"
Normal-15.zip,0,Normal,2110,565,83,1,[565]
NCP-13.zip,2,NCP,368,1887,129,2,"[1887, 1888]"
NCP-27.zip,2,NCP,817,2326,120,1,[2326]
CP-15.zip,1,CP,1567,4255,59,2,"[4254, 4255]"
NCP-5.zip,2,NCP,178,1500,124,2,"[1500, 1501]"
NCP-13.zip,2,NCP,345,1841,147,2,"[1841, 1842]"
Normal-2.zip,0,Normal,1761,1128,60,5,"[1125, 1126, 1127, 1128, 1129]"
CP-8.zip,1,CP,1343,3727,56,2,"[3726, 3727]"
NCP-30.zip,2,NCP,936,2478,21,1,[2478]
NCP-11.zip,2,NCP,306,1760,64,2,"[1759, 1760]"
NCP-17.zip,2,NCP,487,2129,167,2,"[2129, 2130]"
CP-30.zip,1,CP,3930,5629,62,2,"[5628, 5629]"
NCP-9.zip,2,NCP,2692,2700,48,1,[2700]
NCP-20.zip,2,NCP,556,2269,125,2,"[2269, 2270]"
CP-18.zip,1,CP,1775,3531,58,4,"[3530, 3531, 3532, 3533]"
NCP-23.zip,2,NCP,896,2437,39,1,[2437]
CP-21.zip,1,CP,5,3509,275,1,[3509]
Normal-19.zip,0,Normal,2217,672,71,1,[672]
NCP-1.zip,2,NCP,1010,2573,126,2,"[2572, 2573]"
NCP-1.zip,2,NCP,100,1337,139,2,"[1337, 1338]"
NCP-26.zip,2,NCP,3998,5495,41,1,[5495]
CP-25.zip,1,CP,711,3073,112,1,[3073]
CP-24.zip,1,CP,699,3061,64,1,[3061]
CP-4.zip,1,CP,1173,3391,201,1,[3391]
CP-27.zip,1,CP,3740,5684,23,1,[5684]
CP-16.zip,1,CP,1590,4278,20,1,[4278]
Normal-2.zip,0,Normal,1762,1130,70,2,"[1130, 1131]"
Normal-1.zip,0,Normal,1679,833,66,6,"[833, 834, 835, 836, 837, 838]"
NCP-29.zip,2,NCP,928,2470,25,1,[2470]
CP-18.zip,1,CP,1775,3533,57,4,"[3530, 3531, 3532, 3533]"
Normal-3.zip,0,Normal,766,201,94,1,[201]
Normal-11.zip,0,Normal,1964,419,100,1,[419]
NCP-9.zip,2,NCP,2690,2657,48,1,[2657]
NCP-21.zip,2,NCP,78,1290,69,2,"[1289, 1290]"
Normal-16.zip,0,Normal,2147,602,95,1,[602]
NCP-19.zip,2,NCP,544,2246,62,2,"[2245, 2246]"
Normal-27.zip,0,Normal,3900,5432,64,2,"[5431, 5432]"
Normal-8.zip,0,Normal,1860,315,92,1,[315]
CP-21.zip,1,CP,601,2963,104,1,[2963]
CP-2.zip,1,CP,11,3164,287,7,"[3161, 3162, 3163, 3164, 3165, 3166, 3167]"
CP-15.zip,1,CP,1563,4245,241,3,"[4245, 4246, 4247]"
================================================
FILE: Finetune/CC-CCII/csv/CC_CCII_fold2_valid.csv
================================================
zip_file,target,label,patient_id,scan_id,n_slice,scan_count,all_scan_ids
Normal-2.zip,0,Normal,1740,1050,21,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-10.zip,1,CP,1387,3830,51,2,"[3829, 3830]"
NCP-10.zip,2,NCP,2719,2675,44,1,[2675]
CP-1.zip,1,CP,1065,3104,58,1,[3104]
CP-10.zip,1,CP,1392,3843,62,2,"[3843, 3844]"
CP-13.zip,1,CP,1508,4117,57,3,"[4115, 4116, 4117]"
NCP-22.zip,2,NCP,863,2387,282,2,"[2386, 2387]"
Normal-3.zip,0,Normal,763,198,102,1,[198]
Normal-23.zip,0,Normal,2635,145,27,1,[145]
NCP-20.zip,2,NCP,572,2303,58,2,"[2302, 2303]"
Normal-1.zip,0,Normal,1683,862,65,6,"[861, 862, 864, 865, 868, 869]"
CP-10.zip,1,CP,1398,3856,44,2,"[3856, 3857]"
CP-15.zip,1,CP,1566,4252,54,2,"[4252, 4253]"
NCP-10.zip,2,NCP,280,1707,51,2,"[1706, 1707]"
CP-19.zip,1,CP,1785,3187,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-15.zip,1,CP,1570,4258,22,1,[4258]
CP-10.zip,1,CP,1413,3890,66,2,"[3889, 3890]"
CP-7.zip,1,CP,1303,3618,42,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-19.zip,1,CP,2435,2903,295,3,"[2901, 2902, 2903]"
NCP-22.zip,2,NCP,860,2382,212,2,"[2382, 2383]"
NCP-22.zip,2,NCP,883,2419,52,2,"[2419, 2420]"
Normal-2.zip,0,Normal,1751,1079,61,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
Normal-25.zip,0,Normal,3852,5364,195,1,[5364]
NCP-20.zip,2,NCP,559,2275,127,2,"[2275, 2276]"
NCP-18.zip,2,NCP,498,2153,58,2,"[2152, 2153]"
Normal-27.zip,0,Normal,3911,5448,64,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
Normal-17.zip,0,Normal,2158,613,100,1,[613]
NCP-7.zip,2,NCP,246,1639,58,2,"[1638, 1639]"
NCP-17.zip,2,NCP,473,2102,61,2,"[2101, 2102]"
Normal-2.zip,0,Normal,1732,1025,73,1,[1025]
NCP-10.zip,2,NCP,271,1688,146,2,"[1688, 1689]"
CP-7.zip,1,CP,1303,3627,252,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-11.zip,2,NCP,286,1717,121,2,"[1717, 1718]"
Normal-19.zip,0,Normal,2223,678,95,1,[678]
NCP-22.zip,2,NCP,822,2333,31,2,"[2332, 2333]"
NCP-28.zip,2,NCP,870,2400,47,2,"[2399, 2400]"
NCP-21.zip,2,NCP,75,1284,54,2,"[1283, 1284]"
NCP-17.zip,2,NCP,469,2094,66,2,"[2093, 2094]"
NCP-8.zip,2,NCP,255,1656,139,2,"[1656, 1657]"
NCP-6.zip,2,NCP,211,1566,137,2,"[1566, 1567]"
NCP-25.zip,2,NCP,3966,5476,43,1,[5476]
NCP-21.zip,2,NCP,575,2309,61,2,"[2308, 2309]"
Normal-2.zip,0,Normal,1740,1045,102,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
Normal-1.zip,0,Normal,1681,845,69,1,[845]
NCP-11.zip,2,NCP,310,1768,70,2,"[1767, 1768]"
NCP-22.zip,2,NCP,87,1307,145,2,"[1307, 1308]"
Normal-4.zip,0,Normal,786,221,124,1,[221]
Normal-20.zip,0,Normal,2270,725,86,1,[725]
NCP-18.zip,2,NCP,515,2187,58,2,"[2186, 2187]"
NCP-5.zip,2,NCP,172,1488,139,2,"[1488, 1489]"
NCP-20.zip,2,NCP,551,2260,65,2,"[2259, 2260]"
NCP-21.zip,2,NCP,61,1256,60,2,"[1255, 1256]"
CP-13.zip,1,CP,1508,4116,57,3,"[4115, 4116, 4117]"
NCP-22.zip,2,NCP,863,2386,228,2,"[2386, 2387]"
CP-10.zip,1,CP,1413,3889,67,2,"[3889, 3890]"
Normal-2.zip,0,Normal,1740,1047,60,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-19.zip,1,CP,1785,3188,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
CP-7.zip,1,CP,1303,3624,224,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-1.zip,0,Normal,1683,868,64,6,"[861, 862, 864, 865, 868, 869]"
CP-7.zip,1,CP,1303,3611,257,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3610,51,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-21.zip,2,NCP,61,1255,142,2,"[1255, 1256]"
Normal-1.zip,0,Normal,1683,865,72,6,"[861, 862, 864, 865, 868, 869]"
CP-7.zip,1,CP,1303,3630,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-10.zip,1,CP,1392,3844,62,2,"[3843, 3844]"
CP-15.zip,1,CP,1566,4253,54,2,"[4252, 4253]"
Normal-27.zip,0,Normal,3911,5447,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
Normal-27.zip,0,Normal,3911,5449,64,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
NCP-22.zip,2,NCP,87,1308,61,2,"[1307, 1308]"
Normal-1.zip,0,Normal,1683,861,65,6,"[861, 862, 864, 865, 868, 869]"
CP-7.zip,1,CP,1303,3613,232,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-20.zip,2,NCP,551,2259,154,2,"[2259, 2260]"
CP-19.zip,1,CP,1785,3191,79,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-18.zip,2,NCP,515,2186,139,2,"[2186, 2187]"
Normal-2.zip,0,Normal,1740,1048,60,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-7.zip,1,CP,1303,3626,51,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3606,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-19.zip,1,CP,2435,2902,100,3,"[2901, 2902, 2903]"
CP-7.zip,1,CP,1303,3612,49,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3619,213,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-5.zip,2,NCP,172,1489,59,2,"[1488, 1489]"
CP-7.zip,1,CP,1303,3617,27,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-10.zip,1,CP,1398,3857,44,2,"[3856, 3857]"
CP-7.zip,1,CP,1303,3608,55,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-28.zip,2,NCP,870,2399,247,2,"[2399, 2400]"
NCP-22.zip,2,NCP,883,2420,200,2,"[2419, 2420]"
CP-7.zip,1,CP,1303,3609,271,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-10.zip,1,CP,1387,3829,51,2,"[3829, 3830]"
NCP-8.zip,2,NCP,255,1657,58,2,"[1656, 1657]"
Normal-2.zip,0,Normal,1740,1051,59,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
Normal-2.zip,0,Normal,1751,1081,62,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-17.zip,2,NCP,469,2093,159,2,"[2093, 2094]"
CP-7.zip,1,CP,1303,3621,230,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3607,247,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-27.zip,0,Normal,3911,5452,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
NCP-17.zip,2,NCP,473,2101,145,2,"[2101, 2102]"
CP-19.zip,1,CP,2435,2901,104,3,"[2901, 2902, 2903]"
Normal-2.zip,0,Normal,1740,1049,21,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-19.zip,1,CP,1785,3189,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-10.zip,2,NCP,271,1689,61,2,"[1688, 1689]"
CP-7.zip,1,CP,1303,3629,244,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3631,242,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-27.zip,0,Normal,3911,5451,65,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
NCP-22.zip,2,NCP,822,2332,36,2,"[2332, 2333]"
CP-7.zip,1,CP,1303,3622,28,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-19.zip,1,CP,1785,3190,79,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-11.zip,2,NCP,310,1767,169,2,"[1767, 1768]"
Normal-2.zip,0,Normal,1751,1080,61,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-21.zip,2,NCP,575,2308,144,2,"[2308, 2309]"
CP-19.zip,1,CP,1785,3186,67,6,"[3186, 3187, 3188, 3189, 3190, 3191]"
NCP-21.zip,2,NCP,75,1283,128,2,"[1283, 1284]"
NCP-11.zip,2,NCP,286,1718,51,2,"[1717, 1718]"
CP-7.zip,1,CP,1303,3628,50,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-2.zip,0,Normal,1740,1046,300,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
CP-7.zip,1,CP,1303,3620,45,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3614,27,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-27.zip,0,Normal,3911,5450,68,6,"[5447, 5448, 5449, 5450, 5451, 5452]"
Normal-1.zip,0,Normal,1683,864,72,6,"[861, 862, 864, 865, 868, 869]"
NCP-20.zip,2,NCP,572,2302,138,2,"[2302, 2303]"
Normal-2.zip,0,Normal,1751,1084,67,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
CP-7.zip,1,CP,1303,3625,32,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
Normal-2.zip,0,Normal,1740,1052,59,8,"[1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052]"
NCP-10.zip,2,NCP,280,1706,121,2,"[1706, 1707]"
NCP-18.zip,2,NCP,498,2152,139,2,"[2152, 2153]"
CP-7.zip,1,CP,1303,3623,45,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3615,44,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
CP-7.zip,1,CP,1303,3616,209,26,"[3606, 3607, 3608, 3609, 3610, 3611, 3612, 3613, 3614, 3615, 3616, 3617, 3618, 3619, 3620, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3628, 3629, 3630, 3631]"
NCP-6.zip,2,NCP,211,1567,58,2,"[1566, 1567]"
Normal-2.zip,0,Normal,1751,1083,67,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
NCP-22.zip,2,NCP,860,2383,183,2,"[2382, 2383]"
NCP-20.zip,2,NCP,559,2276,54,2,"[2275, 2276]"
Normal-2.zip,0,Normal,1751,1082,62,6,"[1079, 1080, 1081, 1082, 1083, 1084]"
CP-13.zip,1,CP,1508,4115,57,3,"[4115, 4116, 4117]"
Normal-1.zip,0,Normal,1683,869,64,6,"[861, 862, 864, 865, 868, 869]"
NCP-7.zip,2,NCP,246,1638,139,2,"[1638, 1639]"
Normal-12.zip,0,Normal,2015,470,94,1,[470]
NCP-6.zip,2,NCP,206,1557,58,2,"[1556, 1557]"
CP-1.zip,1,CP,1096,3314,196,1,[3314]
NCP-16.zip,2,NCP,43,1220,65,2,"[1219, 1220]"
NCP-18.zip,2,NCP,499,2155,58,2,"[2154, 2155]"
CP-10.zip,1,CP,1409,3881,66,2,"[3881, 3882]"
Normal-4.zip,0,Normal,777,212,83,1,[212]
NCP-9.zip,2,NCP,2708,2701,59,1,[2701]
CP-11.zip,1,CP,1432,3933,60,2,"[3932, 3933]"
NCP-4.zip,2,NCP,141,1426,129,2,"[1426, 1427]"
CP-23.zip,1,CP,673,3035,76,1,[3035]
NCP-29.zip,2,NCP,879,2414,173,1,[2414]
NCP-19.zip,2,NCP,536,2229,145,2,"[2229, 2230]"
NCP-18.zip,2,NCP,504,2165,65,2,"[2164, 2165]"
Normal-1.zip,0,Normal,1678,829,34,6,"[827, 828, 829, 830, 831, 832]"
NCP-8.zip,2,NCP,264,1674,179,2,"[1674, 1675]"
NCP-4.zip,2,NCP,155,1454,139,2,"[1454, 1455]"
CP-11.zip,1,CP,1418,3900,180,3,"[3900, 3901, 3902]"
NCP-5.zip,2,NCP,194,1532,133,2,"[1532, 1533]"
NCP-13.zip,2,NCP,361,1873,143,2,"[1873, 1874]"
Normal-1.zip,0,Normal,1710,976,78,2,"[975, 976]"
Normal-15.zip,0,Normal,2091,546,106,1,[546]
NCP-19.zip,2,NCP,518,2192,135,2,"[2192, 2193]"
Normal-18.zip,0,Normal,2190,645,90,1,[645]
Normal-12.zip,0,Normal,2013,468,87,1,[468]
NCP-11.zip,2,NCP,302,1751,62,2,"[1750, 1751]"
Normal-15.zip,0,Normal,2109,564,103,1,[564]
NCP-8.zip,2,NCP,264,1675,75,2,"[1674, 1675]"
CP-23.zip,1,CP,653,3015,285,1,[3015]
NCP-7.zip,2,NCP,235,1615,139,2,"[1615, 1616]"
CP-19.zip,1,CP,1786,3194,77,3,"[3192, 3193, 3194]"
CP-1.zip,1,CP,0,3137,37,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-15.zip,2,NCP,423,1999,133,2,"[1999, 2000]"
CP-6.zip,1,CP,1232,3450,91,1,[3450]
CP-14.zip,1,CP,1526,4158,51,3,"[4157, 4158, 4159]"
CP-4.zip,1,CP,1184,3402,193,1,[3402]
NCP-17.zip,2,NCP,483,2122,56,2,"[2121, 2122]"
CP-12.zip,1,CP,1459,3996,69,3,"[3995, 3996, 3997]"
CP-17.zip,1,CP,1637,4325,20,1,[4325]
CP-10.zip,1,CP,1411,3885,66,2,"[3885, 3886]"
NCP-9.zip,2,NCP,2707,2673,44,1,[2673]
NCP-29.zip,2,NCP,892,2431,20,1,[2431]
CP-26.zip,1,CP,3720,5653,243,2,"[5652, 5653]"
Normal-13.zip,0,Normal,2023,478,96,1,[478]
CP-11.zip,1,CP,1439,3947,62,2,"[3946, 3947]"
Normal-6.zip,0,Normal,1801,256,89,1,[256]
NCP-16.zip,2,NCP,442,2038,131,2,"[2038, 2039]"
Normal-9.zip,0,Normal,1920,375,100,1,[375]
CP-13.zip,1,CP,1489,4067,457,4,"[4067, 4068, 4069, 4070]"
CP-9.zip,1,CP,1378,3811,50,2,"[3810, 3811]"
NCP-12.zip,2,NCP,336,1821,50,2,"[1820, 1821]"
NCP-3.zip,2,NCP,1295,2736,61,1,[2736]
Normal-20.zip,0,Normal,2268,723,85,1,[723]
Normal-20.zip,0,Normal,2281,736,84,1,[736]
CP-1.zip,1,CP,1083,3128,71,2,"[3128, 3129]"
CP-14.zip,1,CP,1545,4207,65,2,"[4206, 4207]"
Normal-21.zip,0,Normal,2306,761,103,1,[761]
NCP-13.zip,2,NCP,350,1852,47,2,"[1851, 1852]"
CP-8.zip,1,CP,1326,3688,53,2,"[3688, 3689]"
NCP-7.zip,2,NCP,236,1617,283,2,"[1617, 1618]"
Normal-1.zip,0,Normal,1722,1001,73,2,"[1001, 1002]"
NCP-5.zip,2,NCP,177,1498,139,2,"[1498, 1499]"
Normal-1.zip,0,Normal,1708,971,74,2,"[971, 972]"
NCP-8.zip,2,NCP,2680,2651,46,1,[2651]
NCP-20.zip,2,NCP,570,2298,139,2,"[2298, 2299]"
Normal-1.zip,0,Normal,1723,1004,77,2,"[1003, 1004]"
NCP-10.zip,2,NCP,2723,2679,40,1,[2679]
Normal-21.zip,0,Normal,2302,757,96,1,[757]
Normal-18.zip,0,Normal,2199,654,85,1,[654]
Normal-25.zip,0,Normal,3858,5370,234,1,[5370]
Normal-21.zip,0,Normal,2286,741,84,1,[741]
Normal-1.zip,0,Normal,1720,995,74,2,"[995, 996]"
Normal-3.zip,0,Normal,769,204,138,1,[204]
NCP-9.zip,2,NCP,2687,2654,51,1,[2654]
Normal-16.zip,0,Normal,2124,579,101,1,[579]
NCP-6.zip,2,NCP,206,1556,139,2,"[1556, 1557]"
Normal-20.zip,0,Normal,2256,711,86,1,[711]
CP-10.zip,1,CP,1411,3886,66,2,"[3885, 3886]"
CP-11.zip,1,CP,1418,3901,54,3,"[3900, 3901, 3902]"
NCP-4.zip,2,NCP,155,1455,58,2,"[1454, 1455]"
NCP-19.zip,2,NCP,536,2230,61,2,"[2229, 2230]"
CP-13.zip,1,CP,1489,4068,229,4,"[4067, 4068, 4069, 4070]"
Normal-1.zip,0,Normal,1722,1002,73,2,"[1001, 1002]"
CP-14.zip,1,CP,1526,4157,124,3,"[4157, 4158, 4159]"
CP-13.zip,1,CP,1489,4069,58,4,"[4067, 4068, 4069, 4070]"
CP-1.zip,1,CP,0,3134,37,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-5.zip,2,NCP,177,1499,58,2,"[1498, 1499]"
NCP-13.zip,2,NCP,350,1851,109,2,"[1851, 1852]"
Normal-1.zip,0,Normal,1678,827,58,6,"[827, 828, 829, 830, 831, 832]"
CP-1.zip,1,CP,1083,3129,71,2,"[3128, 3129]"
CP-1.zip,1,CP,0,3140,269,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-9.zip,1,CP,1378,3810,50,2,"[3810, 3811]"
CP-8.zip,1,CP,1326,3689,53,2,"[3688, 3689]"
CP-1.zip,1,CP,0,3133,290,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-4.zip,2,NCP,141,1427,54,2,"[1426, 1427]"
Normal-1.zip,0,Normal,1723,1003,77,2,"[1003, 1004]"
NCP-15.zip,2,NCP,423,2000,56,2,"[1999, 2000]"
NCP-11.zip,2,NCP,302,1750,152,2,"[1750, 1751]"
NCP-20.zip,2,NCP,570,2299,58,2,"[2298, 2299]"
CP-12.zip,1,CP,1459,3995,164,3,"[3995, 3996, 3997]"
NCP-16.zip,2,NCP,442,2039,53,2,"[2038, 2039]"
CP-1.zip,1,CP,0,3136,290,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-1.zip,1,CP,0,3135,269,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-14.zip,1,CP,1526,4159,51,3,"[4157, 4158, 4159]"
CP-1.zip,1,CP,0,3131,285,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
NCP-12.zip,2,NCP,336,1820,117,2,"[1820, 1821]"
NCP-7.zip,2,NCP,235,1616,58,2,"[1615, 1616]"
CP-11.zip,1,CP,1418,3902,54,3,"[3900, 3901, 3902]"
NCP-7.zip,2,NCP,236,1618,119,2,"[1617, 1618]"
CP-11.zip,1,CP,1439,3946,62,2,"[3946, 3947]"
CP-1.zip,1,CP,0,3139,39,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
CP-1.zip,1,CP,0,3132,42,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
Normal-1.zip,0,Normal,1708,972,74,2,"[971, 972]"
CP-12.zip,1,CP,1459,3997,69,3,"[3995, 3996, 3997]"
NCP-18.zip,2,NCP,504,2164,155,2,"[2164, 2165]"
Normal-1.zip,0,Normal,1720,996,74,2,"[995, 996]"
CP-19.zip,1,CP,1786,3192,81,3,"[3192, 3193, 3194]"
Normal-1.zip,0,Normal,1678,830,34,6,"[827, 828, 829, 830, 831, 832]"
CP-26.zip,1,CP,3720,5652,48,2,"[5652, 5653]"
CP-19.zip,1,CP,1786,3193,81,3,"[3192, 3193, 3194]"
CP-13.zip,1,CP,1489,4070,58,4,"[4067, 4068, 4069, 4070]"
NCP-13.zip,2,NCP,361,1874,60,2,"[1873, 1874]"
NCP-17.zip,2,NCP,483,2121,137,2,"[2121, 2122]"
Normal-1.zip,0,Normal,1678,832,62,6,"[827, 828, 829, 830, 831, 832]"
NCP-16.zip,2,NCP,43,1219,156,2,"[1219, 1220]"
NCP-18.zip,2,NCP,499,2154,139,2,"[2154, 2155]"
CP-10.zip,1,CP,1409,3882,66,2,"[3881, 3882]"
NCP-5.zip,2,NCP,194,1533,56,2,"[1532, 1533]"
NCP-19.zip,2,NCP,518,2193,57,2,"[2192, 2193]"
CP-11.zip,1,CP,1432,3932,60,2,"[3932, 3933]"
Normal-1.zip,0,Normal,1678,828,58,6,"[827, 828, 829, 830, 831, 832]"
Normal-1.zip,0,Normal,1678,831,62,6,"[827, 828, 829, 830, 831, 832]"
CP-1.zip,1,CP,0,3138,245,10,"[3131, 3132, 3133, 3134, 3135, 3136, 3137, 3138, 3139, 3140]"
Normal-1.zip,0,Normal,1710,975,78,2,"[975, 976]"
CP-14.zip,1,CP,1545,4206,65,2,"[4206, 4207]"
NCP-5.zip,2,NCP,18,1169,57,2,"[1168, 1169]"
Normal-15.zip,0,Normal,2096,551,93,1,[551]
CP-21.zip,1,CP,2776,3307,31,1,[3307]
NCP-16.zip,2,NCP,449,2053,61,2,"[2052, 2053]"
NCP-15.zip,2,NCP,404,1958,46,2,"[1957, 1958]"
NCP-6.zip,2,NCP,210,1565,55,2,"[1564, 1565]"
CP-3.zip,1,CP,1144,3362,159,1,[3362]
Normal-8.zip,0,Normal,1879,334,88,1,[334]
Normal-1.zip,0,Normal,1721,1000,75,4,"[1000, 997, 998, 999]"
NCP-21.zip,2,NCP,583,2323,147,2,"[2323, 2324]"
NCP-1.zip,2,NCP,1039,2610,45,1,[2610]
Normal-8.zip,0,Normal,1882,337,86,1,[337]
Normal-21.zip,0,Normal,2307,762,80,1,[762]
CP-14.zip,1,CP,1528,4163,61,2,"[4163, 4164]"
CP-11.zip,1,CP,1443,3958,58,3,"[3957, 3958, 3959]"
NCP-18.zip,2,NCP,496,2149,70,2,"[2148, 2149]"
CP-7.zip,1,CP,1270,3489,204,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-7.zip,0,Normal,1834,289,82,1,[289]
NCP-13.zip,2,NCP,351,1853,145,2,"[1853, 1854]"
CP-18.zip,1,CP,1782,3584,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-1.zip,0,Normal,1676,816,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-11.zip,1,CP,1428,3923,221,3,"[3923, 3924, 3925]"
CP-8.zip,1,CP,1330,3699,58,3,"[3698, 3699, 3700]"
Normal-19.zip,0,Normal,2233,688,76,1,[688]
NCP-18.zip,2,NCP,514,2184,160,2,"[2184, 2185]"
Normal-6.zip,0,Normal,1804,259,102,1,[259]
Normal-22.zip,0,Normal,2598,108,38,1,[108]
CP-14.zip,1,CP,1534,4176,58,2,"[4176, 4177]"
CP-5.zip,1,CP,1217,3435,320,1,[3435]
NCP-14.zip,2,NCP,378,1908,168,2,"[1908, 1909]"
CP-18.zip,1,CP,1782,3582,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-25.zip,2,NCP,3963,5474,56,1,[5474]
NCP-22.zip,2,NCP,82,1298,55,2,"[1297, 1298]"
NCP-2.zip,2,NCP,1274,2715,55,1,[2715]
CP-22.zip,1,CP,619,2981,102,1,[2981]
Normal-24.zip,0,Normal,2661,171,31,1,[171]
CP-14.zip,1,CP,1540,4192,58,3,"[4191, 4192, 4193]"
NCP-10.zip,2,NCP,2724,2680,43,1,[2680]
Normal-2.zip,0,Normal,1742,1055,60,1,[1055]
CP-12.zip,1,CP,1486,4060,63,2,"[4059, 4060]"
NCP-19.zip,2,NCP,527,2211,48,2,"[2210, 2211]"
CP-10.zip,1,CP,1393,3846,60,2,"[3845, 3846]"
Normal-1.zip,0,Normal,1721,997,68,4,"[1000, 997, 998, 999]"
Normal-25.zip,0,Normal,3839,5351,220,1,[5351]
Normal-12.zip,0,Normal,1991,446,306,1,[446]
CP-19.zip,1,CP,1794,3595,38,2,"[3594, 3595]"
Normal-1.zip,0,Normal,1669,785,54,5,"[782, 783, 784, 785, 786]"
CP-18.zip,1,CP,1782,3580,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
Normal-11.zip,0,Normal,1963,418,95,1,[418]
CP-19.zip,1,CP,1789,3205,59,4,"[3204, 3205, 3206, 3207]"
CP-11.zip,1,CP,1428,3924,56,3,"[3923, 3924, 3925]"
Normal-9.zip,0,Normal,1918,373,85,1,[373]
CP-4.zip,1,CP,1176,3394,161,1,[3394]
CP-10.zip,1,CP,1397,3855,60,2,"[3854, 3855]"
Normal-16.zip,0,Normal,2118,573,89,1,[573]
CP-16.zip,1,CP,1594,4282,26,1,[4282]
NCP-4.zip,2,NCP,140,1424,128,2,"[1424, 1425]"
CP-1.zip,1,CP,1077,3121,74,2,"[3121, 3122]"
Normal-16.zip,0,Normal,2142,597,84,1,[597]
NCP-15.zip,2,NCP,410,1969,143,2,"[1969, 1970]"
Normal-3.zip,0,Normal,749,184,89,1,[184]
Normal-1.zip,0,Normal,1718,991,66,2,"[991, 992]"
NCP-5.zip,2,NCP,176,1497,53,2,"[1496, 1497]"
CP-29.zip,1,CP,3819,5763,31,1,[5763]
NCP-8.zip,2,NCP,265,1677,50,2,"[1676, 1677]"
CP-7.zip,1,CP,1270,3495,148,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-26.zip,2,NCP,3982,5489,34,1,[5489]
CP-12.zip,1,CP,1468,4016,54,3,"[4015, 4016, 4017]"
CP-3.zip,1,CP,1139,3357,332,1,[3357]
Normal-14.zip,0,Normal,2070,525,104,1,[525]
Normal-1.zip,0,Normal,1672,798,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-11.zip,1,CP,1435,3939,46,2,"[3938, 3939]"
CP-30.zip,1,CP,4019,5568,38,1,[5568]
CP-18.zip,1,CP,1777,3540,67,5,"[3540, 3541, 3542, 3543, 3544]"
CP-23.zip,1,CP,666,3028,192,1,[3028]
Normal-1.zip,0,Normal,1703,959,70,2,"[959, 960]"
CP-3.zip,1,CP,1133,3351,213,1,[3351]
NCP-8.zip,2,NCP,2677,2695,51,1,[2695]
NCP-13.zip,2,NCP,357,1866,63,2,"[1865, 1866]"
NCP-13.zip,2,NCP,346,1843,139,2,"[1843, 1844]"
CP-13.zip,1,CP,1504,4107,64,1,[4107]
Normal-3.zip,0,Normal,745,180,105,1,[180]
Normal-1.zip,0,Normal,1676,820,72,7,"[816, 817, 818, 819, 820, 821, 822]"
Normal-26.zip,0,Normal,3869,5381,27,1,[5381]
CP-18.zip,1,CP,1774,3528,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-14.zip,2,NCP,379,1911,62,2,"[1910, 1911]"
Normal-21.zip,0,Normal,2301,756,88,1,[756]
NCP-1.zip,2,NCP,104,1345,139,2,"[1345, 1346]"
CP-18.zip,1,CP,1771,3519,51,4,"[3518, 3519, 3520, 3521]"
NCP-2.zip,2,NCP,116,1373,127,2,"[1373, 1374]"
CP-22.zip,1,CP,643,3005,126,1,[3005]
NCP-17.zip,2,NCP,466,2087,145,2,"[2087, 2088]"
CP-26.zip,1,CP,3723,5658,43,1,[5658]
CP-11.zip,1,CP,1443,3957,139,3,"[3957, 3958, 3959]"
Normal-8.zip,0,Normal,1884,339,82,1,[339]
CP-15.zip,1,CP,1586,4274,23,1,[4274]
CP-8.zip,1,CP,1349,3743,58,3,"[3742, 3743, 3744]"
Normal-22.zip,0,Normal,2586,96,30,1,[96]
Normal-4.zip,0,Normal,785,220,292,1,[220]
CP-19.zip,1,CP,2428,2887,124,1,[2887]
NCP-5.zip,2,NCP,181,1507,58,2,"[1506, 1507]"
NCP-13.zip,2,NCP,352,1856,58,2,"[1855, 1856]"
NCP-2.zip,2,NCP,109,1355,143,2,"[1355, 1356]"
CP-13.zip,1,CP,1493,4080,125,3,"[4080, 4081, 4082]"
CP-4.zip,1,CP,1191,3409,220,1,[3409]
CP-17.zip,1,CP,1642,4330,25,1,[4330]
CP-7.zip,1,CP,1304,3635,232,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-18.zip,2,NCP,496,2148,168,2,"[2148, 2149]"
NCP-27.zip,2,NCP,1058,2635,46,1,[2635]
Normal-14.zip,0,Normal,2071,526,103,1,[526]
CP-26.zip,1,CP,3719,5650,55,3,"[5649, 5650, 5651]"
Normal-24.zip,0,Normal,2663,173,48,1,[173]
NCP-3.zip,2,NCP,1298,2739,60,1,[2739]
CP-19.zip,1,CP,2430,2891,102,2,"[2891, 2892]"
CP-12.zip,1,CP,1458,3993,69,3,"[3992, 3993, 3994]"
Normal-1.zip,0,Normal,1677,823,64,4,"[823, 824, 825, 826]"
CP-12.zip,1,CP,1469,4018,47,2,"[4018, 4019]"
CP-7.zip,1,CP,1268,3486,336,1,[3486]
Normal-18.zip,0,Normal,2203,658,75,1,[658]
CP-21.zip,1,CP,593,2955,100,1,[2955]
Normal-16.zip,0,Normal,2143,598,87,1,[598]
NCP-20.zip,2,NCP,552,2261,146,2,"[2261, 2262]"
NCP-11.zip,2,NCP,309,1766,69,2,"[1766, 1765]"
NCP-19.zip,2,NCP,520,2197,55,2,"[2196, 2197]"
CP-14.zip,1,CP,1550,4217,64,2,"[4217, 4218]"
NCP-8.zip,2,NCP,265,1676,119,2,"[1676, 1677]"
Normal-1.zip,0,Normal,1669,782,62,5,"[782, 783, 784, 785, 786]"
NCP-26.zip,2,NCP,3976,5484,32,1,[5484]
NCP-31.zip,2,NCP,998,2555,44,1,[2555]
NCP-2.zip,2,NCP,107,1351,146,2,"[1351, 1352]"
Normal-16.zip,0,Normal,2136,591,83,1,[591]
CP-12.zip,1,CP,1463,4006,49,2,"[4005, 4006]"
NCP-4.zip,2,NCP,156,1457,58,2,"[1456, 1457]"
NCP-1.zip,2,NCP,1002,2561,58,1,[2561]
Normal-1.zip,0,Normal,1672,801,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
Normal-14.zip,0,Normal,2078,533,73,1,[533]
NCP-5.zip,2,NCP,185,1514,121,2,"[1514, 1515]"
CP-14.zip,1,CP,1530,4168,60,1,[4168]
NCP-15.zip,2,NCP,413,1976,128,4,"[1975, 1976, 1977, 1979]"
CP-5.zip,1,CP,1224,3442,204,1,[3442]
CP-5.zip,1,CP,1215,3433,165,1,[3433]
Normal-26.zip,0,Normal,3886,5399,76,1,[5399]
Normal-24.zip,0,Normal,2640,150,41,1,[150]
NCP-28.zip,2,NCP,836,2351,52,1,[2351]
NCP-4.zip,2,NCP,146,1436,123,2,"[1436, 1437]"
Normal-17.zip,0,Normal,2155,610,89,1,[610]
CP-30.zip,1,CP,3939,5547,38,1,[5547]
CP-19.zip,1,CP,1784,3590,112,4,"[3590, 3591, 3592, 3593]"
CP-10.zip,1,CP,1399,3859,45,2,"[3858, 3859]"
NCP-19.zip,2,NCP,519,2194,126,2,"[2194, 2195]"
NCP-11.zip,2,NCP,297,1739,144,2,"[1739, 1741]"
NCP-22.zip,2,NCP,88,1309,170,2,"[1309, 1310]"
CP-18.zip,1,CP,1778,3547,65,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-30.zip,2,NCP,968,2511,61,1,[2511]
CP-9.zip,1,CP,1360,3769,67,3,"[3767, 3768, 3769]"
CP-26.zip,1,CP,3638,5597,285,1,[5597]
NCP-13.zip,2,NCP,353,1857,167,2,"[1857, 1858]"
CP-30.zip,1,CP,3932,5634,71,2,"[5634, 5635]"
NCP-21.zip,2,NCP,62,1257,144,2,"[1257, 1258]"
CP-2.zip,1,CP,1127,3345,278,1,[3345]
NCP-12.zip,2,NCP,337,1823,58,2,"[1822, 1823]"
NCP-14.zip,2,NCP,390,1931,53,2,"[1930, 1931]"
NCP-15.zip,2,NCP,417,1988,58,2,"[1987, 1988]"
CP-24.zip,1,CP,689,3051,58,1,[3051]
CP-7.zip,1,CP,1270,3501,420,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-9.zip,1,CP,1377,3808,58,2,"[3808, 3809]"
CP-13.zip,1,CP,1505,4110,54,3,"[4108, 4109, 4110]"
CP-13.zip,1,CP,1492,4078,58,3,"[4077, 4078, 4079]"
NCP-4.zip,2,NCP,159,1463,61,2,"[1462, 1463]"
NCP-6.zip,2,NCP,220,1585,67,2,"[1584, 1585]"
NCP-29.zip,2,NCP,884,2421,23,1,[2421]
Normal-3.zip,0,Normal,757,192,110,1,[192]
CP-21.zip,1,CP,4,3505,298,4,"[3505, 3506, 3507, 3508]"
CP-16.zip,1,CP,1608,4296,23,1,[4296]
CP-4.zip,1,CP,1169,3387,171,1,[3387]
Normal-4.zip,0,Normal,797,232,112,1,[232]
NCP-19.zip,2,NCP,540,2238,54,2,"[2237, 2238]"
Normal-14.zip,0,Normal,2068,523,81,1,[523]
Normal-11.zip,0,Normal,1985,440,96,1,[440]
CP-9.zip,1,CP,1353,3748,140,3,"[3748, 3749, 3750]"
NCP-6.zip,2,NCP,224,1592,136,2,"[1592, 1593]"
CP-10.zip,1,CP,1397,3854,60,2,"[3854, 3855]"
NCP-12.zip,2,NCP,318,1784,63,2,"[1783, 1784]"
NCP-21.zip,2,NCP,59,1251,122,2,"[1251, 1252]"
Normal-17.zip,0,Normal,2184,639,86,1,[639]
NCP-18.zip,2,NCP,493,2143,56,2,"[2142, 2143]"
NCP-25.zip,2,NCP,3954,5467,42,1,[5467]
Normal-2.zip,0,Normal,1763,1137,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
CP-23.zip,1,CP,675,3037,124,1,[3037]
CP-9.zip,1,CP,1365,3780,60,3,"[3779, 3780, 3781]"
CP-6.zip,1,CP,1256,3474,140,1,[3474]
Normal-1.zip,0,Normal,1676,822,69,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-16.zip,2,NCP,441,2037,49,2,"[2036, 2037]"
NCP-7.zip,2,NCP,2484,2643,46,1,[2643]
CP-20.zip,1,CP,2771,3302,37,1,[3302]
NCP-10.zip,2,NCP,2714,2707,53,1,[2707]
Normal-4.zip,0,Normal,772,207,363,1,[207]
NCP-16.zip,2,NCP,440,2035,53,2,"[2034, 2035]"
CP-17.zip,1,CP,1646,4334,26,1,[4334]
NCP-11.zip,2,NCP,284,1713,139,2,"[1713, 1714]"
CP-23.zip,1,CP,656,3018,575,1,[3018]
CP-2.zip,1,CP,1104,3322,164,1,[3322]
NCP-22.zip,2,NCP,85,1303,139,2,"[1303, 1304]"
CP-30.zip,1,CP,3933,5637,38,2,"[5636, 5637]"
Normal-7.zip,0,Normal,1839,294,94,1,[294]
NCP-6.zip,2,NCP,223,1590,132,2,"[1590, 1591]"
CP-2.zip,1,CP,1119,3337,157,1,[3337]
CP-11.zip,1,CP,1431,3931,61,2,"[3930, 3931]"
CP-7.zip,1,CP,1304,3634,47,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-11.zip,2,NCP,299,1745,58,2,"[1744, 1745]"
NCP-15.zip,2,NCP,405,1960,60,2,"[1959, 1960]"
NCP-20.zip,2,NCP,574,2307,58,2,"[2306, 2307]"
CP-10.zip,1,CP,1412,3887,66,2,"[3887, 3888]"
NCP-4.zip,2,NCP,167,1479,60,2,"[1478, 1479]"
NCP-4.zip,2,NCP,157,1459,49,2,"[1458, 1459]"
NCP-13.zip,2,NCP,349,1849,135,2,"[1849, 1850]"
CP-18.zip,1,CP,1771,3520,51,4,"[3518, 3519, 3520, 3521]"
NCP-14.zip,2,NCP,372,1895,109,2,"[1895, 1896]"
NCP-18.zip,2,NCP,503,2162,146,2,"[2162, 2163]"
NCP-6.zip,2,NCP,199,1543,58,2,"[1542, 1543]"
CP-18.zip,1,CP,1662,4350,19,1,[4350]
CP-9.zip,1,CP,1377,3809,57,2,"[3808, 3809]"
Normal-1.zip,0,Normal,1727,1009,63,4,"[1009, 1010, 1011, 1012]"
NCP-20.zip,2,NCP,566,2290,160,2,"[2290, 2291]"
NCP-21.zip,2,NCP,583,2324,62,2,"[2323, 2324]"
CP-29.zip,1,CP,3821,5765,29,1,[5765]
NCP-5.zip,2,NCP,190,1525,64,2,"[1524, 1525]"
Normal-2.zip,0,Normal,1746,1064,68,2,"[1063, 1064]"
CP-27.zip,1,CP,3744,5688,17,1,[5688]
CP-2.zip,1,CP,1111,3329,204,1,[3329]
Normal-10.zip,0,Normal,1948,403,98,1,[403]
NCP-12.zip,2,NCP,338,1824,150,2,"[1824, 1825]"
NCP-13.zip,2,NCP,348,1847,112,2,"[1847, 1848]"
NCP-19.zip,2,NCP,527,2210,114,2,"[2210, 2211]"
CP-24.zip,1,CP,700,3062,86,1,[3062]
NCP-15.zip,2,NCP,404,1957,108,2,"[1957, 1958]"
CP-18.zip,1,CP,1655,4343,23,1,[4343]
CP-27.zip,1,CP,3736,5680,16,1,[5680]
Normal-24.zip,0,Normal,2654,164,31,1,[164]
NCP-13.zip,2,NCP,359,1869,145,2,"[1869, 1870]"
NCP-16.zip,2,NCP,437,2027,142,2,"[2027, 2028]"
CP-27.zip,1,CP,3741,5685,17,1,[5685]
CP-24.zip,1,CP,693,3055,273,1,[3055]
NCP-17.zip,2,NCP,466,2088,61,2,"[2087, 2088]"
CP-24.zip,1,CP,682,3044,149,1,[3044]
Normal-17.zip,0,Normal,2175,630,80,1,[630]
NCP-6.zip,2,NCP,223,1591,56,2,"[1590, 1591]"
NCP-2.zip,2,NCP,1051,2626,178,2,"[2625, 2626]"
CP-11.zip,1,CP,1454,3982,125,3,"[3982, 3983, 3984]"
Normal-20.zip,0,Normal,2253,708,70,1,[708]
NCP-4.zip,2,NCP,140,1425,54,2,"[1424, 1425]"
Normal-20.zip,0,Normal,2252,707,84,1,[707]
Normal-21.zip,0,Normal,2308,763,85,1,[763]
NCP-18.zip,2,NCP,516,2189,57,2,"[2188, 2189]"
NCP-12.zip,2,NCP,313,1774,62,2,"[1773, 1774]"
CP-2.zip,1,CP,1126,3344,204,1,[3344]
Normal-20.zip,0,Normal,2257,712,83,1,[712]
NCP-6.zip,2,NCP,203,1551,59,2,"[1550, 1551]"
CP-13.zip,1,CP,1503,4106,64,3,"[4104, 4105, 4106]"
Normal-20.zip,0,Normal,2280,735,82,1,[735]
CP-19.zip,1,CP,2443,2915,112,3,"[2915, 2916, 2917]"
CP-20.zip,1,CP,2451,2930,136,1,[2930]
CP-1.zip,1,CP,1093,3311,173,1,[3311]
CP-13.zip,1,CP,1518,4138,160,3,"[4138, 4139, 4140]"
CP-20.zip,1,CP,2773,3304,30,1,[3304]
NCP-15.zip,2,NCP,414,1981,51,2,"[1980, 1981]"
NCP-23.zip,2,NCP,96,1328,145,2,"[1328, 1329]"
CP-11.zip,1,CP,1422,3909,59,3,"[3908, 3909, 3910]"
Normal-20.zip,0,Normal,2258,713,74,1,[713]
NCP-29.zip,2,NCP,882,2417,52,2,"[2417, 2418]"
Normal-2.zip,0,Normal,1737,1038,79,4,"[1037, 1038, 1039, 1040]"
Normal-13.zip,0,Normal,2025,480,101,1,[480]
NCP-5.zip,2,NCP,173,1490,139,2,"[1490, 1491]"
CP-6.zip,1,CP,1257,3475,155,1,[3475]
NCP-23.zip,2,NCP,952,2495,379,1,[2495]
Normal-1.zip,0,Normal,1700,954,64,2,"[953, 954]"
NCP-17.zip,2,NCP,465,2085,31,3,"[2084, 2085, 2086]"
Normal-16.zip,0,Normal,2122,577,85,1,[577]
CP-13.zip,1,CP,1502,4102,73,2,"[4102, 4103]"
Normal-17.zip,0,Normal,2153,608,82,1,[608]
Normal-24.zip,0,Normal,2650,160,40,1,[160]
NCP-27.zip,2,NCP,1031,2602,231,2,"[2601, 2602]"
NCP-14.zip,2,NCP,393,1937,62,2,"[1936, 1937]"
CP-5.zip,1,CP,12,3169,233,2,"[3168, 3169]"
NCP-13.zip,2,NCP,346,1844,58,2,"[1843, 1844]"
Normal-11.zip,0,Normal,1986,441,88,1,[441]
CP-19.zip,1,CP,2433,2897,108,1,[2897]
NCP-4.zip,2,NCP,151,1447,54,2,"[1446, 1447]"
NCP-13.zip,2,NCP,370,1891,128,2,"[1891, 1892]"
Normal-17.zip,0,Normal,2168,623,89,1,[623]
NCP-29.zip,2,NCP,880,2415,312,1,[2415]
NCP-12.zip,2,NCP,338,1825,63,2,"[1824, 1825]"
Normal-23.zip,0,Normal,2634,144,37,1,[144]
NCP-14.zip,2,NCP,396,1942,170,2,"[1942, 1943]"
NCP-16.zip,2,NCP,439,2032,162,2,"[2032, 2033]"
NCP-8.zip,2,NCP,266,1678,137,2,"[1678, 1679]"
CP-11.zip,1,CP,1423,3911,204,3,"[3911, 3912, 3913]"
CP-11.zip,1,CP,1454,3984,53,3,"[3982, 3983, 3984]"
CP-28.zip,1,CP,3792,5736,20,1,[5736]
CP-7.zip,1,CP,1270,3494,129,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
Normal-1.zip,0,Normal,1727,1011,66,4,"[1009, 1010, 1011, 1012]"
Normal-19.zip,0,Normal,2234,689,89,1,[689]
NCP-13.zip,2,NCP,35,1203,58,2,"[1202, 1203]"
NCP-18.zip,2,NCP,51,1236,59,2,"[1235, 1236]"
NCP-2.zip,2,NCP,113,1368,58,2,"[1367, 1368]"
Normal-2.zip,0,Normal,1757,1107,68,4,"[1105, 1106, 1107, 1108]"
NCP-12.zip,2,NCP,319,1785,158,2,"[1785, 1787]"
Normal-22.zip,0,Normal,2322,777,88,1,[777]
CP-21.zip,1,CP,584,2946,116,1,[2946]
CP-9.zip,1,CP,1365,3781,60,3,"[3779, 3780, 3781]"
NCP-12.zip,2,NCP,322,1792,120,2,"[1792, 1793]"
Normal-2.zip,0,Normal,1763,1140,75,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-21.zip,2,NCP,59,1252,52,2,"[1251, 1252]"
NCP-5.zip,2,NCP,170,1485,59,2,"[1484, 1485]"
NCP-21.zip,2,NCP,72,1276,129,2,"[1276, 1277]"
NCP-22.zip,2,NCP,887,2425,38,1,[2425]
CP-2.zip,1,CP,1117,3335,155,1,[3335]
Normal-2.zip,0,Normal,1763,1134,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
CP-18.zip,1,CP,1778,3550,64,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
CP-23.zip,1,CP,664,3026,78,1,[3026]
CP-23.zip,1,CP,668,3030,102,1,[3030]
NCP-13.zip,2,NCP,355,1862,53,2,"[1861, 1862]"
NCP-13.zip,2,NCP,358,1867,160,2,"[1867, 1868]"
CP-14.zip,1,CP,1550,4218,64,2,"[4217, 4218]"
CP-26.zip,1,CP,3729,5667,207,3,"[5665, 5666, 5667]"
CP-21.zip,1,CP,603,2965,88,1,[2965]
NCP-13.zip,2,NCP,370,1892,54,2,"[1891, 1892]"
NCP-13.zip,2,NCP,35,1202,139,2,"[1202, 1203]"
CP-3.zip,1,CP,1155,3373,171,1,[3373]
Normal-10.zip,0,Normal,1927,382,99,1,[382]
CP-15.zip,1,CP,1574,4262,26,1,[4262]
CP-13.zip,1,CP,1498,4096,60,2,"[4095, 4096]"
NCP-6.zip,2,NCP,205,1555,53,2,"[1554, 1555]"
NCP-11.zip,2,NCP,301,1748,147,2,"[1748, 1749]"
NCP-11.zip,2,NCP,303,1752,139,2,"[1752, 1753]"
CP-12.zip,1,CP,1468,4017,54,3,"[4015, 4016, 4017]"
Normal-14.zip,0,Normal,2081,536,93,1,[536]
Normal-2.zip,0,Normal,1763,1141,75,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-22.zip,2,NCP,859,2380,299,2,"[2380, 2381]"
Normal-26.zip,0,Normal,3885,5398,63,1,[5398]
CP-13.zip,1,CP,1505,4109,54,3,"[4108, 4109, 4110]"
NCP-1.zip,2,NCP,103,1343,150,2,"[1343, 1344]"
NCP-14.zip,2,NCP,396,1943,71,2,"[1942, 1943]"
NCP-22.zip,2,NCP,871,2402,293,2,"[2401, 2402]"
Normal-10.zip,0,Normal,1951,406,105,1,[406]
CP-11.zip,1,CP,1434,3936,63,2,"[3936, 3937]"
CP-26.zip,1,CP,3724,5659,51,1,[5659]
CP-12.zip,1,CP,1471,4022,56,2,"[4022, 4023]"
Normal-21.zip,0,Normal,2304,759,110,1,[759]
CP-28.zip,1,CP,3777,5721,26,1,[5721]
NCP-28.zip,2,NCP,837,2352,57,1,[2352]
Normal-2.zip,0,Normal,1763,1133,72,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-8.zip,0,Normal,1873,328,104,1,[328]
CP-12.zip,1,CP,1458,3992,165,3,"[3992, 3993, 3994]"
NCP-7.zip,2,NCP,230,1604,139,2,"[1604, 1605]"
CP-30.zip,1,CP,4042,5591,37,1,[5591]
Normal-4.zip,0,Normal,774,209,134,1,[209]
Normal-19.zip,0,Normal,2228,683,85,1,[683]
Normal-18.zip,0,Normal,2206,661,77,1,[661]
CP-17.zip,1,CP,1628,4316,23,1,[4316]
Normal-11.zip,0,Normal,1969,424,90,1,[424]
Normal-20.zip,0,Normal,2259,714,97,1,[714]
CP-17.zip,1,CP,1640,4328,25,1,[4328]
NCP-8.zip,2,NCP,254,1654,139,2,"[1654, 1655]"
Normal-16.zip,0,Normal,2140,595,88,1,[595]
CP-6.zip,1,CP,1249,3467,144,1,[3467]
NCP-23.zip,2,NCP,92,1321,37,2,"[1320, 1321]"
CP-18.zip,1,CP,1657,4345,24,1,[4345]
NCP-17.zip,2,NCP,484,2124,58,2,"[2123, 2124]"
Normal-2.zip,0,Normal,1743,1057,73,2,"[1056, 1057]"
CP-18.zip,1,CP,1778,3545,66,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-30.zip,2,NCP,966,2509,279,1,[2509]
CP-9.zip,1,CP,1376,3807,60,2,"[3806, 3807]"
Normal-1.zip,0,Normal,1716,987,71,2,"[987, 988]"
CP-7.zip,1,CP,1302,3602,42,4,"[3602, 3603, 3604, 3605]"
NCP-18.zip,2,NCP,50,1233,141,2,"[1233, 1234]"
CP-32.zip,1,CP,1781,3572,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-7.zip,1,CP,1270,3497,133,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-5.zip,2,NCP,192,1528,135,2,"[1528, 1529]"
NCP-7.zip,2,NCP,2489,2646,40,1,[2646]
CP-11.zip,1,CP,1434,3937,63,2,"[3936, 3937]"
CP-23.zip,1,CP,645,3007,124,1,[3007]
Normal-10.zip,0,Normal,1941,396,91,1,[396]
Normal-12.zip,0,Normal,2001,456,86,1,[456]
Normal-3.zip,0,Normal,761,196,120,1,[196]
CP-18.zip,1,CP,1782,3579,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-7.zip,1,CP,1265,3483,166,1,[3483]
NCP-3.zip,2,NCP,1287,2728,66,1,[2728]
NCP-28.zip,2,NCP,835,2350,52,2,"[2349, 2350]"
NCP-19.zip,2,NCP,543,2243,128,2,"[2243, 2244]"
CP-21.zip,1,CP,4,3507,259,4,"[3505, 3506, 3507, 3508]"
CP-17.zip,1,CP,1633,4321,26,1,[4321]
NCP-20.zip,2,NCP,565,2289,57,2,"[2288, 2289]"
NCP-22.zip,2,NCP,878,2412,46,2,"[2412, 2413]"
CP-14.zip,1,CP,1520,4144,57,3,"[4143, 4144, 4145]"
Normal-23.zip,0,Normal,2620,130,36,1,[130]
NCP-23.zip,2,NCP,958,2501,133,1,[2501]
CP-13.zip,1,CP,1513,4128,60,2,"[4127, 4128]"
NCP-24.zip,2,NCP,98,1332,139,2,"[1332, 1333]"
CP-9.zip,1,CP,1375,3804,60,2,"[3804, 3805]"
NCP-2.zip,2,NCP,1051,2625,88,2,"[2625, 2626]"
NCP-31.zip,2,NCP,999,2556,41,1,[2556]
CP-18.zip,1,CP,1781,3575,78,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,278,1703,57,2,"[1702, 1703]"
NCP-12.zip,2,NCP,313,1773,147,2,"[1773, 1774]"
NCP-14.zip,2,NCP,381,1915,60,2,"[1914, 1915]"
NCP-11.zip,2,NCP,295,1735,236,2,"[1735, 1736]"
CP-11.zip,1,CP,1440,3948,196,3,"[3948, 3949, 3950]"
CP-19.zip,1,CP,1795,3597,41,2,"[3596, 3597]"
CP-12.zip,1,CP,1467,4013,60,2,"[4013, 4014]"
NCP-12.zip,2,NCP,322,1793,51,2,"[1792, 1793]"
CP-9.zip,1,CP,1353,3750,59,3,"[3748, 3749, 3750]"
CP-19.zip,1,CP,1784,3591,50,4,"[3590, 3591, 3592, 3593]"
NCP-9.zip,2,NCP,2699,2665,51,1,[2665]
NCP-12.zip,2,NCP,331,1810,158,2,"[1810, 1811]"
NCP-12.zip,2,NCP,334,1817,59,2,"[1816, 1817]"
NCP-1.zip,2,NCP,1009,2571,29,2,"[2570, 2571]"
CP-30.zip,1,CP,4041,5590,31,1,[5590]
CP-24.zip,1,CP,705,3067,168,1,[3067]
Normal-24.zip,0,Normal,2665,175,33,1,[175]
NCP-12.zip,2,NCP,332,1813,70,2,"[1812, 1813]"
CP-11.zip,1,CP,1444,3962,58,3,"[3960, 3961, 3962]"
CP-22.zip,1,CP,614,2976,100,1,[2976]
Normal-23.zip,0,Normal,2630,140,38,1,[140]
Normal-8.zip,0,Normal,1876,331,97,1,[331]
NCP-1.zip,2,NCP,1001,2559,141,1,[2559]
NCP-22.zip,2,NCP,845,2361,148,4,"[2360, 2361, 2362, 2363]"
Normal-1.zip,0,Normal,1676,818,65,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-26.zip,1,CP,3646,5606,36,1,[5606]
Normal-9.zip,0,Normal,1907,362,92,1,[362]
Normal-1.zip,0,Normal,1672,800,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-12.zip,2,NCP,333,1815,68,2,"[1814, 1815]"
CP-11.zip,1,CP,1428,3925,56,3,"[3923, 3924, 3925]"
CP-17.zip,1,CP,1634,4322,23,1,[4322]
Normal-12.zip,0,Normal,2009,464,93,1,[464]
CP-7.zip,1,CP,1270,3488,287,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-26.zip,1,CP,3731,5670,215,1,[5670]
Normal-25.zip,0,Normal,3714,5344,22,1,[5344]
Normal-19.zip,0,Normal,2231,686,85,1,[686]
CP-7.zip,1,CP,1270,3500,160,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-23.zip,2,NCP,940,2483,22,1,[2483]
Normal-25.zip,0,Normal,3851,5363,201,1,[5363]
NCP-6.zip,2,NCP,209,1562,139,2,"[1562, 1563]"
NCP-13.zip,2,NCP,347,1846,53,2,"[1845, 1846]"
NCP-11.zip,2,NCP,312,1772,62,2,"[1771, 1772]"
CP-5.zip,1,CP,1196,3414,186,1,[3414]
NCP-21.zip,2,NCP,74,1282,54,2,"[1281, 1282]"
CP-23.zip,1,CP,662,3024,114,1,[3024]
NCP-7.zip,2,NCP,23,1177,151,2,"[1177, 1178]"
CP-16.zip,1,CP,1591,4279,23,1,[4279]
Normal-12.zip,0,Normal,1995,450,95,1,[450]
Normal-20.zip,0,Normal,2264,719,82,1,[719]
NCP-30.zip,2,NCP,948,2491,365,1,[2491]
Normal-12.zip,0,Normal,1998,453,99,1,[453]
NCP-19.zip,2,NCP,522,2201,58,2,"[2200, 2201]"
CP-13.zip,1,CP,1510,4121,60,2,"[4121, 4122]"
NCP-15.zip,2,NCP,406,1962,61,2,"[1961, 1962]"
NCP-4.zip,2,NCP,162,1468,148,2,"[1468, 1469]"
CP-11.zip,1,CP,1431,3930,61,2,"[3930, 3931]"
CP-15.zip,1,CP,1569,4257,20,1,[4257]
CP-9.zip,1,CP,1379,3813,52,2,"[3812, 3813]"
NCP-30.zip,2,NCP,981,2525,40,2,"[2525, 2526]"
NCP-8.zip,2,NCP,2679,2650,42,1,[2650]
NCP-25.zip,2,NCP,3951,5465,43,1,[5465]
NCP-7.zip,2,NCP,2460,2684,36,1,[2684]
CP-25.zip,1,CP,734,3096,106,1,[3096]
NCP-6.zip,2,NCP,209,1563,58,2,"[1562, 1563]"
Normal-22.zip,0,Normal,2593,103,38,1,[103]
NCP-16.zip,2,NCP,438,2029,149,2,"[2029, 2030]"
CP-7.zip,1,CP,1304,3638,43,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
Normal-8.zip,0,Normal,1885,340,101,1,[340]
NCP-17.zip,2,NCP,484,2123,137,2,"[2123, 2124]"
NCP-20.zip,2,NCP,565,2288,135,2,"[2288, 2289]"
NCP-5.zip,2,NCP,185,1515,51,2,"[1514, 1515]"
NCP-29.zip,2,NCP,877,2411,65,1,[2411]
NCP-6.zip,2,NCP,216,1577,58,2,"[1576, 1577]"
Normal-24.zip,0,Normal,2658,168,37,1,[168]
CP-28.zip,1,CP,3779,5723,26,1,[5723]
Normal-15.zip,0,Normal,2090,545,83,1,[545]
Normal-2.zip,0,Normal,1750,1077,69,3,"[1074, 1077, 1078]"
NCP-24.zip,2,NCP,98,1333,58,2,"[1332, 1333]"
CP-5.zip,1,CP,1199,3417,180,1,[3417]
CP-3.zip,1,CP,1146,3364,161,1,[3364]
CP-11.zip,1,CP,1449,3971,50,2,"[3971, 3972]"
Normal-3.zip,0,Normal,1767,1154,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-22.zip,0,Normal,2585,95,41,1,[95]
CP-29.zip,1,CP,3816,5760,29,1,[5760]
NCP-21.zip,2,NCP,62,1258,60,2,"[1257, 1258]"
NCP-2.zip,2,NCP,1056,2632,473,1,[2632]
NCP-19.zip,2,NCP,525,2206,144,2,"[2206, 2207]"
Normal-22.zip,0,Normal,2600,110,41,1,[110]
CP-3.zip,1,CP,1161,3379,310,1,[3379]
NCP-12.zip,2,NCP,316,1779,139,2,"[1779, 1780]"
NCP-28.zip,2,NCP,868,2396,200,2,"[2395, 2396]"
CP-7.zip,1,CP,1301,3600,52,4,"[3598, 3599, 3600, 3601]"
NCP-11.zip,2,NCP,301,1749,62,2,"[1748, 1749]"
Normal-9.zip,0,Normal,1917,372,96,1,[372]
NCP-20.zip,2,NCP,571,2300,163,2,"[2300, 2301]"
Normal-3.zip,0,Normal,1767,1152,68,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-1.zip,0,Normal,1716,988,71,2,"[987, 988]"
NCP-28.zip,2,NCP,842,2357,42,1,[2357]
NCP-27.zip,2,NCP,309,1765,162,2,"[1766, 1765]"
CP-12.zip,1,CP,1479,4040,60,3,"[4039, 4040, 4041]"
NCP-6.zip,2,NCP,22,1175,163,2,"[1175, 1176]"
NCP-28.zip,2,NCP,868,2395,51,2,"[2395, 2396]"
CP-14.zip,1,CP,1532,4171,50,2,"[4171, 4172]"
Normal-11.zip,0,Normal,1984,439,86,1,[439]
Normal-24.zip,0,Normal,2643,153,39,1,[153]
CP-20.zip,1,CP,2765,3296,42,1,[3296]
Normal-2.zip,0,Normal,1763,1132,72,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-2.zip,2,NCP,109,1356,60,2,"[1355, 1356]"
NCP-7.zip,2,NCP,241,1628,55,2,"[1627, 1628]"
Normal-22.zip,0,Normal,2587,97,44,1,[97]
CP-20.zip,1,CP,2753,3284,37,1,[3284]
Normal-1.zip,0,Normal,1670,790,63,6,"[787, 788, 789, 790, 791, 792]"
Normal-15.zip,0,Normal,2103,558,88,1,[558]
CP-13.zip,1,CP,1503,4104,64,3,"[4104, 4105, 4106]"
Normal-21.zip,0,Normal,2313,768,94,1,[768]
CP-9.zip,1,CP,1382,3818,200,3,"[3818, 3819, 3820]"
Normal-2.zip,0,Normal,1756,1102,64,4,"[1101, 1102, 1103, 1104]"
NCP-12.zip,2,NCP,334,1816,140,2,"[1816, 1817]"
CP-13.zip,1,CP,1518,4140,67,3,"[4138, 4139, 4140]"
CP-13.zip,1,CP,1492,4077,139,3,"[4077, 4078, 4079]"
Normal-11.zip,0,Normal,1982,437,99,1,[437]
NCP-6.zip,2,NCP,213,1570,159,2,"[1570, 1571]"
CP-18.zip,1,CP,1779,3551,59,2,"[3551, 3552]"
NCP-12.zip,2,NCP,321,1790,122,2,"[1790, 1791]"
NCP-4.zip,2,NCP,159,1462,144,2,"[1462, 1463]"
CP-24.zip,1,CP,684,3046,161,1,[3046]
CP-29.zip,1,CP,3828,5772,26,1,[5772]
Normal-1.zip,0,Normal,1669,784,196,5,"[782, 783, 784, 785, 786]"
CP-12.zip,1,CP,1462,4004,51,3,"[4002, 4003, 4004]"
Normal-1.zip,0,Normal,1707,969,65,2,"[969, 970]"
CP-24.zip,1,CP,685,3047,168,1,[3047]
NCP-16.zip,2,NCP,444,2043,61,2,"[2042, 2043]"
CP-19.zip,1,CP,2430,2892,106,2,"[2891, 2892]"
Normal-25.zip,0,Normal,3857,5369,222,1,[5369]
CP-28.zip,1,CP,3774,5718,20,1,[5718]
CP-21.zip,1,CP,591,2953,124,1,[2953]
Normal-1.zip,0,Normal,1669,783,62,5,"[782, 783, 784, 785, 786]"
Normal-1.zip,0,Normal,1670,792,66,6,"[787, 788, 789, 790, 791, 792]"
NCP-14.zip,2,NCP,387,1925,54,2,"[1924, 1925]"
CP-10.zip,1,CP,14,3515,115,1,[3515]
NCP-4.zip,2,NCP,16,1164,113,2,"[1164, 1165]"
Normal-17.zip,0,Normal,2162,617,96,1,[617]
CP-13.zip,1,CP,1513,4127,60,2,"[4127, 4128]"
NCP-11.zip,2,NCP,300,1746,139,2,"[1746, 1747]"
NCP-21.zip,2,NCP,577,2312,61,2,"[2311, 2312]"
Normal-8.zip,0,Normal,1875,330,93,1,[330]
Normal-27.zip,0,Normal,3906,5439,62,1,[5439]
NCP-7.zip,2,NCP,249,1645,58,2,"[1644, 1645]"
NCP-20.zip,2,NCP,552,2262,61,2,"[2261, 2262]"
NCP-9.zip,2,NCP,2701,2667,56,1,[2667]
NCP-15.zip,2,NCP,417,1987,139,2,"[1987, 1988]"
CP-18.zip,1,CP,1782,3586,69,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-9.zip,2,NCP,2705,2671,56,1,[2671]
Normal-3.zip,0,Normal,1767,1160,71,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-15.zip,1,CP,1585,4273,23,1,[4273]
CP-27.zip,1,CP,3742,5686,17,1,[5686]
CP-14.zip,1,CP,1521,4146,57,2,"[4146, 4147]"
Normal-1.zip,0,Normal,1703,960,70,2,"[959, 960]"
CP-21.zip,1,CP,6,3510,36,1,[3510]
NCP-19.zip,2,NCP,54,1242,62,2,"[1241, 1242]"
NCP-5.zip,2,NCP,17,1166,143,2,"[1166, 1167]"
NCP-15.zip,2,NCP,413,1977,47,4,"[1975, 1976, 1977, 1979]"
NCP-22.zip,2,NCP,845,2360,53,4,"[2360, 2361, 2362, 2363]"
NCP-2.zip,2,NCP,120,1381,139,2,"[1381, 1382]"
CP-5.zip,1,CP,1207,3425,189,1,[3425]
CP-27.zip,1,CP,3758,5702,23,1,[5702]
CP-16.zip,1,CP,1592,4280,25,1,[4280]
CP-21.zip,1,CP,4,3506,275,4,"[3505, 3506, 3507, 3508]"
NCP-21.zip,2,NCP,72,1277,55,2,"[1276, 1277]"
NCP-17.zip,2,NCP,475,2105,156,2,"[2105, 2106]"
NCP-13.zip,2,NCP,358,1868,67,2,"[1867, 1868]"
Normal-3.zip,0,Normal,764,199,130,1,[199]
Normal-1.zip,0,Normal,1721,998,68,4,"[1000, 997, 998, 999]"
CP-9.zip,1,CP,1358,3763,63,3,"[3761, 3762, 3763]"
Normal-1.zip,0,Normal,1676,817,65,7,"[816, 817, 818, 819, 820, 821, 822]"
NCP-4.zip,2,NCP,169,1483,56,2,"[1482, 1483]"
Normal-1.zip,0,Normal,1707,970,65,2,"[969, 970]"
NCP-18.zip,2,NCP,502,2160,140,2,"[2160, 2161]"
CP-18.zip,1,CP,1781,3568,67,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,2727,2683,44,1,[2683]
CP-26.zip,1,CP,3719,5651,277,3,"[5649, 5650, 5651]"
CP-11.zip,1,CP,1422,3910,58,3,"[3908, 3909, 3910]"
NCP-4.zip,2,NCP,168,1480,139,2,"[1480, 1481]"
CP-8.zip,1,CP,1329,3695,89,3,"[3695, 3696, 3697]"
CP-12.zip,1,CP,1463,4005,49,2,"[4005, 4006]"
Normal-27.zip,0,Normal,3915,5458,70,1,[5458]
Normal-18.zip,0,Normal,2209,664,82,1,[664]
CP-13.zip,1,CP,1492,4079,58,3,"[4077, 4078, 4079]"
CP-30.zip,1,CP,3830,5774,29,1,[5774]
CP-8.zip,1,CP,1329,3696,45,3,"[3695, 3696, 3697]"
Normal-16.zip,0,Normal,2139,594,87,1,[594]
NCP-14.zip,2,NCP,393,1936,149,2,"[1936, 1937]"
CP-21.zip,1,CP,4,3508,290,4,"[3505, 3506, 3507, 3508]"
Normal-2.zip,0,Normal,1737,1037,79,4,"[1037, 1038, 1039, 1040]"
NCP-25.zip,2,NCP,3708,5535,59,1,[5535]
CP-14.zip,1,CP,1540,4193,58,3,"[4191, 4192, 4193]"
CP-7.zip,1,CP,1301,3601,276,4,"[3598, 3599, 3600, 3601]"
NCP-7.zip,2,NCP,249,1644,139,2,"[1644, 1645]"
NCP-12.zip,2,NCP,339,1827,51,2,"[1826, 1827]"
NCP-2.zip,2,NCP,1275,2716,68,1,[2716]
NCP-13.zip,2,NCP,354,1860,73,2,"[1859, 1860]"
Normal-2.zip,0,Normal,1757,1105,71,4,"[1105, 1106, 1107, 1108]"
NCP-27.zip,2,NCP,1016,2582,108,3,"[2580, 2581, 2582]"
CP-18.zip,1,CP,1777,3541,62,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-1.zip,2,NCP,1008,2569,387,1,[2569]
CP-7.zip,1,CP,1315,3665,59,2,"[3665, 3666]"
CP-27.zip,1,CP,3737,5681,17,1,[5681]
Normal-9.zip,0,Normal,1914,369,88,1,[369]
Normal-1.zip,0,Normal,1672,802,75,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-8.zip,2,NCP,25,1181,129,2,"[1181, 1183]"
CP-19.zip,1,CP,1789,3207,64,4,"[3204, 3205, 3206, 3207]"
CP-11.zip,1,CP,1444,3960,139,3,"[3960, 3961, 3962]"
NCP-4.zip,2,NCP,145,1435,58,2,"[1434, 1435]"
Normal-1.zip,0,Normal,1676,821,72,7,"[816, 817, 818, 819, 820, 821, 822]"
CP-23.zip,1,CP,659,3021,594,1,[3021]
Normal-25.zip,0,Normal,3716,5346,31,1,[5346]
Normal-10.zip,0,Normal,1936,391,82,1,[391]
NCP-22.zip,2,NCP,821,2331,30,1,[2331]
CP-13.zip,1,CP,1505,4108,54,3,"[4108, 4109, 4110]"
NCP-15.zip,2,NCP,411,1972,62,2,"[1971, 1972]"
CP-7.zip,1,CP,1304,3633,18,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-27.zip,2,NCP,1048,2621,44,2,"[2620, 2621]"
CP-21.zip,1,CP,595,2957,306,1,[2957]
NCP-22.zip,2,NCP,861,2384,197,1,[2384]
CP-7.zip,1,CP,1302,3604,39,4,"[3602, 3603, 3604, 3605]"
NCP-17.zip,2,NCP,472,2099,151,2,"[2099, 2100]"
NCP-8.zip,2,NCP,26,1185,36,2,"[1184, 1185]"
Normal-1.zip,0,Normal,1676,819,65,7,"[816, 817, 818, 819, 820, 821, 822]"
Normal-27.zip,0,Normal,3903,5435,75,1,[5435]
Normal-25.zip,0,Normal,3840,5352,210,1,[5352]
NCP-8.zip,2,NCP,266,1679,58,2,"[1678, 1679]"
Normal-16.zip,0,Normal,2120,575,84,1,[575]
NCP-22.zip,2,NCP,82,1297,129,2,"[1297, 1298]"
Normal-16.zip,0,Normal,2128,583,76,1,[583]
Normal-1.zip,0,Normal,1718,992,66,2,"[991, 992]"
CP-11.zip,1,CP,1449,3972,50,2,"[3971, 3972]"
CP-7.zip,1,CP,1304,3636,47,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
Normal-22.zip,0,Normal,2597,107,41,1,[107]
NCP-10.zip,2,NCP,2726,2682,50,1,[2682]
Normal-7.zip,0,Normal,1849,304,87,1,[304]
Normal-13.zip,0,Normal,2040,495,95,1,[495]
Normal-16.zip,0,Normal,2125,580,83,1,[580]
CP-25.zip,1,CP,740,3102,193,1,[3102]
NCP-22.zip,2,NCP,871,2401,281,2,"[2401, 2402]"
NCP-9.zip,2,NCP,2704,2670,56,1,[2670]
NCP-12.zip,2,NCP,33,1198,147,2,"[1198, 1199]"
CP-18.zip,1,CP,1663,4351,26,1,[4351]
Normal-3.zip,0,Normal,1767,1157,28,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-2.zip,0,Normal,1735,1031,76,2,"[1030, 1031]"
Normal-10.zip,0,Normal,1938,393,66,1,[393]
NCP-24.zip,2,NCP,975,2518,484,1,[2518]
CP-18.zip,1,CP,1774,3523,65,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-14.zip,2,NCP,381,1914,143,2,"[1914, 1915]"
NCP-12.zip,2,NCP,33,1199,62,2,"[1198, 1199]"
NCP-13.zip,2,NCP,352,1855,138,2,"[1855, 1856]"
NCP-12.zip,2,NCP,333,1814,162,2,"[1814, 1815]"
NCP-23.zip,2,NCP,904,2446,667,1,[2446]
NCP-24.zip,2,NCP,985,2531,508,1,[2531]
NCP-6.zip,2,NCP,228,1600,161,2,"[1600, 1601]"
NCP-15.zip,2,NCP,414,1980,121,2,"[1980, 1981]"
NCP-1.zip,2,NCP,103,1344,63,2,"[1343, 1344]"
Normal-3.zip,0,Normal,1767,1155,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-8.zip,1,CP,1349,3744,58,3,"[3742, 3743, 3744]"
NCP-8.zip,2,NCP,261,1669,65,2,"[1668, 1669]"
Normal-21.zip,0,Normal,2300,755,98,1,[755]
NCP-13.zip,2,NCP,354,1859,177,2,"[1859, 1860]"
CP-23.zip,1,CP,665,3027,116,1,[3027]
CP-15.zip,1,CP,1561,4242,49,2,"[4241, 4242]"
CP-9.zip,1,CP,1376,3806,60,2,"[3806, 3807]"
Normal-1.zip,0,Normal,1727,1012,66,4,"[1009, 1010, 1011, 1012]"
NCP-28.zip,2,NCP,835,2349,46,2,"[2349, 2350]"
CP-8.zip,1,CP,1349,3742,142,3,"[3742, 3743, 3744]"
Normal-20.zip,0,Normal,2277,732,95,1,[732]
NCP-28.zip,2,NCP,876,2409,52,1,[2409]
Normal-15.zip,0,Normal,2101,556,85,1,[556]
CP-11.zip,1,CP,1444,3961,58,3,"[3960, 3961, 3962]"
CP-7.zip,1,CP,1270,3496,154,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-2.zip,2,NCP,1276,2717,61,1,[2717]
Normal-3.zip,0,Normal,1767,1153,68,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-13.zip,0,Normal,2051,506,86,1,[506]
Normal-2.zip,0,Normal,1734,1029,66,2,"[1028, 1029]"
Normal-26.zip,0,Normal,3871,5383,22,1,[5383]
NCP-1.zip,2,NCP,1009,2570,39,2,"[2570, 2571]"
Normal-2.zip,0,Normal,1763,1139,65,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-13.zip,2,NCP,359,1870,61,2,"[1869, 1870]"
Normal-19.zip,0,Normal,2220,675,78,1,[675]
CP-9.zip,1,CP,1382,3819,60,3,"[3818, 3819, 3820]"
CP-20.zip,1,CP,2752,3283,26,1,[3283]
NCP-5.zip,2,NCP,181,1506,139,2,"[1506, 1507]"
CP-13.zip,1,CP,1510,4122,60,2,"[4121, 4122]"
NCP-16.zip,2,NCP,440,2034,125,2,"[2034, 2035]"
CP-12.zip,1,CP,1458,3994,69,3,"[3992, 3993, 3994]"
CP-7.zip,1,CP,1270,3492,137,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-11.zip,2,NCP,284,1714,58,2,"[1713, 1714]"
NCP-11.zip,2,NCP,303,1753,58,2,"[1752, 1753]"
NCP-6.zip,2,NCP,205,1554,126,2,"[1554, 1555]"
CP-14.zip,1,CP,1535,4179,53,2,"[4178, 4179]"
Normal-27.zip,0,Normal,3910,5446,66,2,"[5445, 5446]"
NCP-13.zip,2,NCP,357,1865,150,2,"[1865, 1866]"
Normal-3.zip,0,Normal,742,177,107,1,[177]
Normal-22.zip,0,Normal,2589,99,37,1,[99]
NCP-22.zip,2,NCP,88,1310,71,2,"[1309, 1310]"
CP-14.zip,1,CP,1521,4147,57,2,"[4146, 4147]"
CP-26.zip,1,CP,3729,5666,179,3,"[5665, 5666, 5667]"
CP-28.zip,1,CP,3793,5737,29,1,[5737]
Normal-3.zip,0,Normal,767,202,358,1,[202]
NCP-5.zip,2,NCP,198,1540,144,2,"[1540, 1541]"
CP-27.zip,1,CP,3738,5682,19,1,[5682]
CP-27.zip,1,CP,3750,5694,28,1,[5694]
CP-10.zip,1,CP,1416,3898,58,2,"[3897, 3898]"
CP-8.zip,1,CP,1322,3680,56,2,"[3680, 3681]"
Normal-23.zip,0,Normal,2607,117,38,1,[117]
NCP-3.zip,2,NCP,138,1420,124,2,"[1420, 1421]"
CP-11.zip,1,CP,1425,3916,185,3,"[3916, 3917, 3918]"
CP-15.zip,1,CP,1581,4269,19,1,[4269]
CP-24.zip,1,CP,706,3068,124,1,[3068]
CP-18.zip,1,CP,1666,4354,23,1,[4354]
NCP-4.zip,2,NCP,161,1466,135,2,"[1466, 1467]"
Normal-7.zip,0,Normal,1847,302,102,1,[302]
CP-11.zip,1,CP,1443,3959,58,3,"[3957, 3958, 3959]"
CP-19.zip,1,CP,1784,3593,69,4,"[3590, 3591, 3592, 3593]"
CP-21.zip,1,CP,605,2967,157,1,[2967]
CP-5.zip,1,CP,12,3168,291,2,"[3168, 3169]"
Normal-9.zip,0,Normal,1909,364,102,1,[364]
NCP-22.zip,2,NCP,850,2369,52,1,[2369]
CP-24.zip,1,CP,687,3049,135,1,[3049]
NCP-1.zip,2,NCP,1033,2604,39,1,[2604]
Normal-2.zip,0,Normal,1750,1074,65,3,"[1074, 1077, 1078]"
CP-9.zip,1,CP,1365,3779,200,3,"[3779, 3780, 3781]"
NCP-18.zip,2,NCP,502,2161,59,2,"[2160, 2161]"
Normal-3.zip,0,Normal,1767,1162,76,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-1.zip,0,Normal,1672,799,78,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
Normal-3.zip,0,Normal,747,182,100,1,[182]
NCP-12.zip,2,NCP,319,1787,66,2,"[1785, 1787]"
NCP-15.zip,2,NCP,405,1959,143,2,"[1959, 1960]"
CP-18.zip,1,CP,1781,3574,64,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-21.zip,1,CP,600,2962,202,1,[2962]
CP-12.zip,1,CP,1479,4039,60,3,"[4039, 4040, 4041]"
NCP-27.zip,2,NCP,827,2340,173,1,[2340]
NCP-24.zip,2,NCP,983,2528,67,1,[2528]
Normal-1.zip,0,Normal,1669,786,54,5,"[782, 783, 784, 785, 786]"
CP-11.zip,1,CP,1424,3915,60,2,"[3914, 3915]"
CP-2.zip,1,CP,1105,3323,220,1,[3323]
CP-10.zip,1,CP,1412,3888,66,2,"[3887, 3888]"
NCP-18.zip,2,NCP,495,2147,65,2,"[2146, 2147]"
NCP-3.zip,2,NCP,134,1412,128,2,"[1412, 1413]"
Normal-10.zip,0,Normal,1940,395,74,1,[395]
Normal-17.zip,0,Normal,2163,618,89,1,[618]
CP-9.zip,1,CP,1358,3761,249,3,"[3761, 3762, 3763]"
CP-23.zip,1,CP,658,3020,273,1,[3020]
NCP-12.zip,2,NCP,341,1830,129,3,"[1830, 1832, 1834]"
CP-14.zip,1,CP,1520,4145,57,3,"[4143, 4144, 4145]"
CP-19.zip,1,CP,1783,3588,62,2,"[3588, 3589]"
Normal-3.zip,0,Normal,1767,1158,66,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-7.zip,1,CP,1301,3598,55,4,"[3598, 3599, 3600, 3601]"
Normal-6.zip,0,Normal,1810,265,85,1,[265]
NCP-12.zip,2,NCP,321,1791,51,2,"[1790, 1791]"
NCP-12.zip,2,NCP,341,1834,54,3,"[1830, 1832, 1834]"
NCP-6.zip,2,NCP,210,1564,131,2,"[1564, 1565]"
CP-11.zip,1,CP,1435,3938,46,2,"[3938, 3939]"
Normal-26.zip,0,Normal,3876,5388,30,1,[5388]
Normal-16.zip,0,Normal,2123,578,90,1,[578]
Normal-6.zip,0,Normal,1816,271,76,1,[271]
NCP-26.zip,2,NCP,3992,5516,48,1,[5516]
CP-18.zip,1,CP,1777,3544,66,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-5.zip,2,NCP,173,1491,58,2,"[1490, 1491]"
NCP-11.zip,2,NCP,312,1771,148,2,"[1771, 1772]"
NCP-19.zip,2,NCP,525,2207,61,2,"[2206, 2207]"
Normal-3.zip,0,Normal,752,187,103,1,[187]
NCP-7.zip,2,NCP,23,1178,63,2,"[1177, 1178]"
CP-27.zip,1,CP,3762,5706,26,1,[5706]
CP-18.zip,1,CP,1659,4347,26,1,[4347]
CP-20.zip,1,CP,2667,3248,46,3,"[3246, 3247, 3248]"
Normal-24.zip,0,Normal,2653,163,39,1,[163]
Normal-4.zip,0,Normal,801,236,107,1,[236]
Normal-20.zip,0,Normal,2272,727,79,1,[727]
NCP-30.zip,2,NCP,988,2539,56,2,"[2538, 2539]"
CP-18.zip,1,CP,1774,3527,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-17.zip,0,Normal,2165,620,95,1,[620]
CP-12.zip,1,CP,1479,4041,60,3,"[4039, 4040, 4041]"
Normal-21.zip,0,Normal,2299,754,90,1,[754]
CP-22.zip,1,CP,637,2999,118,1,[2999]
NCP-6.zip,2,NCP,217,1578,139,2,"[1578, 1579]"
CP-30.zip,1,CP,3919,5544,73,4,"[5543, 5544, 5545, 5546]"
CP-13.zip,1,CP,1511,4123,57,2,"[4123, 4124]"
Normal-13.zip,0,Normal,2035,490,82,1,[490]
CP-19.zip,1,CP,1794,3594,38,2,"[3594, 3595]"
NCP-15.zip,2,NCP,410,1970,60,2,"[1969, 1970]"
CP-10.zip,1,CP,1417,3899,59,1,[3899]
NCP-8.zip,2,NCP,261,1668,155,2,"[1668, 1669]"
CP-20.zip,1,CP,2667,3247,92,3,"[3246, 3247, 3248]"
CP-26.zip,1,CP,3636,5595,290,1,[5595]
Normal-2.zip,0,Normal,1763,1136,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-9.zip,0,Normal,1913,368,88,1,[368]
CP-9.zip,1,CP,1375,3805,58,2,"[3804, 3805]"
CP-16.zip,1,CP,1606,4294,26,1,[4294]
CP-18.zip,1,CP,1777,3543,68,5,"[3540, 3541, 3542, 3543, 3544]"
Normal-21.zip,0,Normal,2287,742,77,1,[742]
CP-11.zip,1,CP,1422,3908,140,3,"[3908, 3909, 3910]"
NCP-22.zip,2,NCP,859,2381,268,2,"[2380, 2381]"
Normal-24.zip,0,Normal,2645,155,38,1,[155]
CP-7.zip,1,CP,1302,3605,201,4,"[3602, 3603, 3604, 3605]"
CP-23.zip,1,CP,646,3008,128,1,[3008]
CP-11.zip,1,CP,1425,3918,49,3,"[3916, 3917, 3918]"
CP-18.zip,1,CP,1781,3569,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-16.zip,2,NCP,436,2025,146,2,"[2025, 2026]"
NCP-18.zip,2,NCP,503,2163,61,2,"[2162, 2163]"
NCP-4.zip,2,NCP,167,1478,143,2,"[1478, 1479]"
Normal-26.zip,0,Normal,3880,5392,32,1,[5392]
NCP-25.zip,2,NCP,3709,5536,65,1,[5536]
Normal-2.zip,0,Normal,1734,1028,66,2,"[1028, 1029]"
Normal-17.zip,0,Normal,2169,624,92,1,[624]
NCP-20.zip,2,NCP,546,2249,134,2,"[2249, 2250]"
NCP-14.zip,2,NCP,379,1910,147,2,"[1910, 1911]"
NCP-4.zip,2,NCP,146,1437,52,2,"[1436, 1437]"
NCP-26.zip,2,NCP,3995,5493,47,1,[5493]
CP-20.zip,1,CP,2763,3294,119,1,[3294]
NCP-13.zip,2,NCP,349,1850,57,2,"[1849, 1850]"
CP-26.zip,1,CP,3644,5604,284,1,[5604]
CP-8.zip,1,CP,1327,3690,253,3,"[3690, 3691, 3692]"
CP-20.zip,1,CP,2770,3301,38,1,[3301]
CP-12.zip,1,CP,1471,4023,55,2,"[4022, 4023]"
Normal-27.zip,0,Normal,3912,5453,68,1,[5453]
NCP-23.zip,2,NCP,93,1322,157,2,"[1322, 1323]"
CP-18.zip,1,CP,1781,3576,64,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-13.zip,2,NCP,347,1845,126,2,"[1845, 1846]"
CP-20.zip,1,CP,2454,2935,120,2,"[2935, 2936]"
Normal-1.zip,0,Normal,1670,788,58,6,"[787, 788, 789, 790, 791, 792]"
Normal-8.zip,0,Normal,1880,335,83,1,[335]
Normal-10.zip,0,Normal,1937,392,90,1,[392]
CP-20.zip,1,CP,2768,3299,38,1,[3299]
Normal-18.zip,0,Normal,2212,667,89,1,[667]
Normal-1.zip,0,Normal,1677,826,65,4,"[823, 824, 825, 826]"
CP-26.zip,1,CP,3721,5654,43,2,"[5654, 5655]"
NCP-16.zip,2,NCP,439,2033,66,2,"[2032, 2033]"
Normal-13.zip,0,Normal,2031,486,81,1,[486]
CP-19.zip,1,CP,1783,3589,62,2,"[3588, 3589]"
CP-2.zip,1,CP,1121,3339,156,1,[3339]
CP-22.zip,1,CP,612,2974,84,1,[2974]
Normal-26.zip,0,Normal,3867,5379,29,1,[5379]
NCP-1.zip,2,NCP,102,1342,56,2,"[1341, 1342]"
NCP-18.zip,2,NCP,493,2142,133,2,"[2142, 2143]"
NCP-12.zip,2,NCP,339,1826,120,2,"[1826, 1827]"
Normal-14.zip,0,Normal,2085,540,95,1,[540]
NCP-27.zip,2,NCP,238,1622,57,2,"[1621, 1622]"
Normal-2.zip,0,Normal,1737,1039,80,4,"[1037, 1038, 1039, 1040]"
CP-30.zip,1,CP,3919,5546,70,4,"[5543, 5544, 5545, 5546]"
NCP-1.zip,2,NCP,1012,2576,249,1,[2576]
NCP-17.zip,2,NCP,463,2080,144,2,"[2080, 2081]"
NCP-2.zip,2,NCP,127,1400,58,2,"[1399, 1400]"
Normal-21.zip,0,Normal,2291,746,96,1,[746]
NCP-8.zip,2,NCP,25,1183,45,2,"[1181, 1183]"
CP-9.zip,1,CP,1382,3820,60,3,"[3818, 3819, 3820]"
NCP-30.zip,2,NCP,967,2510,168,1,[2510]
Normal-27.zip,0,Normal,3910,5445,66,2,"[5445, 5446]"
NCP-4.zip,2,NCP,156,1456,138,2,"[1456, 1457]"
CP-12.zip,1,CP,1464,4007,63,2,"[4007, 4008]"
NCP-4.zip,2,NCP,162,1469,62,2,"[1468, 1469]"
CP-13.zip,1,CP,1493,4081,53,3,"[4080, 4081, 4082]"
CP-16.zip,1,CP,1602,4290,17,1,[4290]
NCP-6.zip,2,NCP,216,1576,139,2,"[1576, 1577]"
CP-25.zip,1,CP,723,3085,104,1,[3085]
NCP-15.zip,2,NCP,411,1971,149,2,"[1971, 1972]"
NCP-15.zip,2,NCP,425,2003,139,2,"[2003, 2004]"
CP-7.zip,1,CP,1270,3491,142,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-24.zip,1,CP,688,3050,127,1,[3050]
Normal-13.zip,0,Normal,2033,488,77,1,[488]
NCP-23.zip,2,NCP,96,1329,61,2,"[1328, 1329]"
Normal-5.zip,0,Normal,803,238,343,1,[238]
CP-16.zip,1,CP,1595,4283,23,1,[4283]
NCP-27.zip,2,NCP,238,1621,134,2,"[1621, 1622]"
NCP-19.zip,2,NCP,529,2214,141,3,"[2214, 2215, 2217]"
CP-25.zip,1,CP,710,3072,78,1,[3072]
Normal-19.zip,0,Normal,2243,698,86,1,[698]
CP-11.zip,1,CP,1440,3949,51,3,"[3948, 3949, 3950]"
CP-7.zip,1,CP,1260,3478,235,1,[3478]
Normal-1.zip,0,Normal,1672,797,76,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-26.zip,1,CP,3719,5649,52,3,"[5649, 5650, 5651]"
NCP-23.zip,2,NCP,969,2512,68,1,[2512]
NCP-5.zip,2,NCP,186,1516,113,2,"[1516, 1517]"
CP-13.zip,1,CP,1507,4114,62,2,"[4113, 4114]"
CP-19.zip,1,CP,2443,2916,310,3,"[2915, 2916, 2917]"
CP-13.zip,1,CP,1503,4105,64,3,"[4104, 4105, 4106]"
Normal-10.zip,0,Normal,1934,389,85,1,[389]
CP-20.zip,1,CP,2760,3291,281,1,[3291]
Normal-19.zip,0,Normal,2242,697,86,1,[697]
NCP-22.zip,2,NCP,864,2388,214,2,"[2388, 2389]"
NCP-14.zip,2,NCP,377,1906,147,2,"[1906, 1907]"
CP-29.zip,1,CP,3818,5762,29,1,[5762]
CP-23.zip,1,CP,676,3038,291,1,[3038]
NCP-14.zip,2,NCP,389,1928,150,2,"[1928, 1929]"
CP-27.zip,1,CP,3761,5705,16,1,[5705]
NCP-27.zip,2,NCP,1016,2581,179,3,"[2580, 2581, 2582]"
Normal-22.zip,0,Normal,2321,776,90,1,[776]
CP-7.zip,1,CP,1304,3639,212,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-16.zip,2,NCP,438,2030,62,2,"[2029, 2030]"
NCP-2.zip,2,NCP,107,1352,61,2,"[1351, 1352]"
NCP-11.zip,2,NCP,295,1736,97,2,"[1735, 1736]"
CP-2.zip,1,CP,1122,3340,229,1,[3340]
Normal-25.zip,0,Normal,3849,5361,205,1,[5361]
CP-4.zip,1,CP,1189,3407,284,1,[3407]
Normal-1.zip,0,Normal,1721,999,75,4,"[1000, 997, 998, 999]"
NCP-4.zip,2,NCP,152,1449,61,2,"[1448, 1449]"
Normal-13.zip,0,Normal,2044,499,103,1,[499]
Normal-2.zip,0,Normal,1756,1103,65,4,"[1101, 1102, 1103, 1104]"
CP-9.zip,1,CP,1379,3812,52,2,"[3812, 3813]"
CP-20.zip,1,CP,2454,2936,116,2,"[2935, 2936]"
NCP-3.zip,2,NCP,1294,2735,62,1,[2735]
CP-6.zip,1,CP,1230,3448,37,1,[3448]
CP-14.zip,1,CP,1540,4191,221,3,"[4191, 4192, 4193]"
Normal-5.zip,0,Normal,815,250,120,1,[250]
CP-13.zip,1,CP,1488,4066,66,3,"[4064, 4065, 4066]"
NCP-7.zip,2,NCP,241,1627,131,2,"[1627, 1628]"
NCP-6.zip,2,NCP,220,1584,160,2,"[1584, 1585]"
NCP-30.zip,2,NCP,982,2527,242,1,[2527]
Normal-2.zip,0,Normal,1735,1030,76,2,"[1030, 1031]"
CP-12.zip,1,CP,1486,4059,63,2,"[4059, 4060]"
CP-18.zip,1,CP,1781,3573,65,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-26.zip,1,CP,3642,5601,29,1,[5601]
NCP-5.zip,2,NCP,186,1517,48,2,"[1516, 1517]"
Normal-7.zip,0,Normal,1846,301,105,1,[301]
CP-6.zip,1,CP,1252,3470,180,1,[3470]
NCP-8.zip,2,NCP,254,1655,58,2,"[1654, 1655]"
NCP-17.zip,2,NCP,460,2075,45,2,"[2074, 2075]"
NCP-3.zip,2,NCP,138,1421,52,2,"[1420, 1421]"
CP-29.zip,1,CP,3798,5742,21,1,[5742]
NCP-14.zip,2,NCP,389,1929,63,2,"[1928, 1929]"
NCP-22.zip,2,NCP,858,2379,52,1,[2379]
NCP-10.zip,2,NCP,2721,2677,37,1,[2677]
NCP-29.zip,2,NCP,882,2418,257,2,"[2417, 2418]"
NCP-18.zip,2,NCP,495,2146,156,2,"[2146, 2147]"
Normal-18.zip,0,Normal,2210,665,88,1,[665]
CP-7.zip,1,CP,1304,3632,18,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
NCP-18.zip,2,NCP,512,2180,149,2,"[2180, 2181]"
Normal-1.zip,0,Normal,1672,803,75,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
CP-21.zip,1,CP,2774,3305,31,1,[3305]
CP-9.zip,1,CP,1372,3797,193,3,"[3797, 3798, 3799]"
CP-22.zip,1,CP,615,2977,104,1,[2977]
CP-12.zip,1,CP,1469,4019,47,2,"[4018, 4019]"
CP-18.zip,1,CP,1774,3522,65,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-17.zip,2,NCP,472,2100,63,2,"[2099, 2100]"
Normal-14.zip,0,Normal,2069,524,81,1,[524]
CP-18.zip,1,CP,1774,3529,58,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
NCP-27.zip,2,NCP,1031,2601,216,2,"[2601, 2602]"
NCP-22.zip,2,NCP,857,2378,53,1,[2378]
Normal-3.zip,0,Normal,1767,1156,139,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-24.zip,0,Normal,2641,151,41,1,[151]
NCP-9.zip,2,NCP,2696,2662,44,1,[2662]
CP-17.zip,1,CP,1620,4308,24,1,[4308]
NCP-4.zip,2,NCP,149,1443,66,2,"[1442, 1443]"
CP-13.zip,1,CP,1488,4064,158,3,"[4064, 4065, 4066]"
Normal-22.zip,0,Normal,2315,770,82,1,[770]
NCP-12.zip,2,NCP,316,1780,58,2,"[1779, 1780]"
CP-9.zip,1,CP,1360,3767,67,3,"[3767, 3768, 3769]"
NCP-18.zip,2,NCP,512,2181,62,2,"[2180, 2181]"
NCP-20.zip,2,NCP,547,2252,66,2,"[2251, 2252]"
Normal-10.zip,0,Normal,1942,397,81,1,[397]
NCP-5.zip,2,NCP,198,1541,60,2,"[1540, 1541]"
NCP-6.zip,2,NCP,199,1542,138,2,"[1542, 1543]"
CP-14.zip,1,CP,1528,4164,61,2,"[4163, 4164]"
CP-17.zip,1,CP,1631,4319,23,1,[4319]
NCP-13.zip,2,NCP,353,1858,69,2,"[1857, 1858]"
NCP-17.zip,2,NCP,463,2081,60,2,"[2080, 2081]"
NCP-1.zip,2,NCP,1019,2585,363,1,[2585]
NCP-22.zip,2,NCP,845,2362,48,4,"[2360, 2361, 2362, 2363]"
NCP-15.zip,2,NCP,425,2004,58,2,"[2003, 2004]"
NCP-16.zip,2,NCP,449,2052,145,2,"[2052, 2053]"
NCP-28.zip,2,NCP,873,2405,52,2,"[2405, 2406]"
NCP-4.zip,2,NCP,152,1448,145,2,"[1448, 1449]"
NCP-19.zip,2,NCP,543,2244,54,2,"[2243, 2244]"
Normal-14.zip,0,Normal,2062,517,84,1,[517]
NCP-17.zip,2,NCP,465,2086,61,3,"[2084, 2085, 2086]"
Normal-25.zip,0,Normal,3717,5347,25,1,[5347]
CP-4.zip,1,CP,1178,3396,133,1,[3396]
CP-22.zip,1,CP,620,2982,64,1,[2982]
Normal-1.zip,0,Normal,1677,825,65,4,"[823, 824, 825, 826]"
Normal-9.zip,0,Normal,1908,363,81,1,[363]
CP-30.zip,1,CP,3940,5646,33,1,[5646]
NCP-30.zip,2,NCP,942,2485,45,1,[2485]
CP-18.zip,1,CP,1781,3578,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-9.zip,1,CP,1358,3762,126,3,"[3761, 3762, 3763]"
CP-27.zip,1,CP,3764,5708,23,1,[5708]
NCP-8.zip,2,NCP,2673,2692,48,1,[2692]
NCP-19.zip,2,NCP,534,2226,49,2,"[2225, 2226]"
CP-11.zip,1,CP,1440,3950,51,3,"[3948, 3949, 3950]"
NCP-17.zip,2,NCP,465,2084,145,3,"[2084, 2085, 2086]"
NCP-19.zip,2,NCP,522,2200,137,2,"[2200, 2201]"
CP-12.zip,1,CP,1468,4015,54,3,"[4015, 4016, 4017]"
NCP-13.zip,2,NCP,351,1854,61,2,"[1853, 1854]"
CP-7.zip,1,CP,1270,3498,247,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-13.zip,1,CP,1498,4095,60,2,"[4095, 4096]"
CP-18.zip,1,CP,1778,3548,65,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
Normal-1.zip,0,Normal,1670,791,66,6,"[787, 788, 789, 790, 791, 792]"
CP-16.zip,1,CP,1611,4299,19,1,[4299]
Normal-14.zip,0,Normal,2080,535,100,1,[535]
NCP-25.zip,2,NCP,3968,5477,44,1,[5477]
Normal-3.zip,0,Normal,755,190,107,1,[190]
Normal-16.zip,0,Normal,2151,606,93,1,[606]
NCP-4.zip,2,NCP,168,1481,58,2,"[1480, 1481]"
Normal-21.zip,0,Normal,2289,744,77,1,[744]
NCP-6.zip,2,NCP,224,1593,57,2,"[1592, 1593]"
CP-13.zip,1,CP,1502,4103,73,2,"[4102, 4103]"
NCP-22.zip,2,NCP,865,2390,34,2,"[2390, 2391]"
CP-28.zip,1,CP,3787,5731,27,1,[5731]
NCP-5.zip,2,NCP,170,1484,141,2,"[1484, 1485]"
Normal-20.zip,0,Normal,2271,726,81,1,[726]
NCP-7.zip,2,NCP,2485,2644,46,1,[2644]
NCP-17.zip,2,NCP,475,2106,63,2,"[2105, 2106]"
NCP-21.zip,2,NCP,74,1281,127,2,"[1281, 1282]"
CP-13.zip,1,CP,1507,4113,62,2,"[4113, 4114]"
CP-18.zip,1,CP,1781,3570,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-12.zip,1,CP,1462,4002,193,3,"[4002, 4003, 4004]"
CP-8.zip,1,CP,1330,3698,58,3,"[3698, 3699, 3700]"
Normal-1.zip,0,Normal,1672,796,76,8,"[796, 797, 798, 799, 800, 801, 802, 803]"
NCP-12.zip,2,NCP,337,1822,139,2,"[1822, 1823]"
CP-9.zip,1,CP,1353,3749,60,3,"[3748, 3749, 3750]"
Normal-15.zip,0,Normal,2087,542,83,1,[542]
NCP-12.zip,2,NCP,331,1811,66,2,"[1810, 1811]"
CP-22.zip,1,CP,617,2979,110,1,[2979]
CP-18.zip,1,CP,1771,3518,51,4,"[3518, 3519, 3520, 3521]"
CP-26.zip,1,CP,3730,5668,212,2,"[5668, 5669]"
Normal-24.zip,0,Normal,2660,170,38,1,[170]
Normal-11.zip,0,Normal,1967,422,97,1,[422]
NCP-4.zip,2,NCP,149,1442,159,2,"[1442, 1443]"
CP-30.zip,1,CP,3834,5778,26,1,[5778]
NCP-19.zip,2,NCP,540,2237,127,2,"[2237, 2238]"
Normal-26.zip,0,Normal,3862,5374,188,1,[5374]
Normal-7.zip,0,Normal,1842,297,77,1,[297]
Normal-26.zip,0,Normal,3868,5380,30,1,[5380]
NCP-2.zip,2,NCP,116,1374,54,2,"[1373, 1374]"
Normal-12.zip,0,Normal,2003,458,85,1,[458]
NCP-5.zip,2,NCP,17,1167,58,2,"[1166, 1167]"
NCP-2.zip,2,NCP,117,1375,130,2,"[1375, 1376]"
CP-13.zip,1,CP,1511,4124,57,2,"[4123, 4124]"
CP-18.zip,1,CP,1778,3546,66,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
NCP-19.zip,2,NCP,529,2217,58,3,"[2214, 2215, 2217]"
CP-14.zip,1,CP,1520,4143,57,3,"[4143, 4144, 4145]"
Normal-16.zip,0,Normal,2131,586,95,1,[586]
NCP-28.zip,2,NCP,873,2406,228,2,"[2405, 2406]"
NCP-3.zip,2,NCP,137,1418,126,2,"[1418, 1419]"
NCP-10.zip,2,NCP,279,1705,58,2,"[1704, 1705]"
CP-28.zip,1,CP,3796,5740,28,1,[5740]
NCP-19.zip,2,NCP,54,1241,147,2,"[1241, 1242]"
CP-28.zip,1,CP,3768,5712,19,1,[5712]
NCP-2.zip,2,NCP,120,1382,58,2,"[1381, 1382]"
CP-16.zip,1,CP,1603,4291,22,1,[4291]
CP-2.zip,1,CP,1118,3336,173,1,[3336]
NCP-30.zip,2,NCP,939,2482,49,1,[2482]
Normal-8.zip,0,Normal,1874,329,90,1,[329]
Normal-3.zip,0,Normal,746,181,110,1,[181]
CP-21.zip,1,CP,608,2970,86,1,[2970]
Normal-22.zip,0,Normal,2601,111,37,1,[111]
NCP-4.zip,2,NCP,16,1165,48,2,"[1164, 1165]"
NCP-1.zip,2,NCP,1036,2607,441,1,[2607]
NCP-19.zip,2,NCP,528,2213,59,2,"[2212, 2213]"
NCP-6.zip,2,NCP,217,1579,58,2,"[1578, 1579]"
CP-10.zip,1,CP,1416,3897,58,2,"[3897, 3898]"
CP-30.zip,1,CP,4043,5592,41,1,[5592]
CP-30.zip,1,CP,3933,5636,69,2,"[5636, 5637]"
CP-20.zip,1,CP,2667,3246,24,3,"[3246, 3247, 3248]"
Normal-1.zip,0,Normal,1677,824,64,4,"[823, 824, 825, 826]"
NCP-18.zip,2,NCP,514,2185,67,2,"[2184, 2185]"
CP-18.zip,1,CP,1779,3552,59,2,"[3551, 3552]"
Normal-25.zip,0,Normal,3855,5367,209,1,[5367]
CP-24.zip,1,CP,691,3053,72,1,[3053]
CP-6.zip,1,CP,1239,3457,134,1,[3457]
CP-21.zip,1,CP,602,2964,84,1,[2964]
NCP-1.zip,2,NCP,105,1348,61,2,"[1347, 1348]"
CP-3.zip,1,CP,1151,3369,158,1,[3369]
NCP-15.zip,2,NCP,413,1975,110,4,"[1975, 1976, 1977, 1979]"
CP-8.zip,1,CP,1327,3691,64,3,"[3690, 3691, 3692]"
CP-6.zip,1,CP,1237,3455,178,1,[3455]
Normal-11.zip,0,Normal,1959,414,97,1,[414]
Normal-25.zip,0,Normal,3713,5343,27,1,[5343]
CP-21.zip,1,CP,597,2959,305,1,[2959]
CP-9.zip,1,CP,1356,3757,60,2,"[3756, 3757]"
NCP-7.zip,2,NCP,2483,2686,40,1,[2686]
NCP-27.zip,2,NCP,1048,2620,58,2,"[2620, 2621]"
Normal-3.zip,0,Normal,1767,1159,28,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-5.zip,1,CP,1219,3437,179,1,[3437]
NCP-4.zip,2,NCP,145,1434,139,2,"[1434, 1435]"
CP-15.zip,1,CP,1575,4263,20,1,[4263]
NCP-18.zip,2,NCP,516,2188,135,2,"[2188, 2189]"
CP-9.zip,1,CP,1360,3768,67,3,"[3767, 3768, 3769]"
CP-13.zip,1,CP,1488,4065,66,3,"[4064, 4065, 4066]"
CP-18.zip,1,CP,1782,3587,69,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-1.zip,1,CP,1077,3122,74,2,"[3121, 3122]"
Normal-14.zip,0,Normal,2084,539,92,1,[539]
Normal-3.zip,0,Normal,1767,1163,76,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
Normal-2.zip,0,Normal,1746,1063,68,2,"[1063, 1064]"
NCP-12.zip,2,NCP,332,1812,167,2,"[1812, 1813]"
Normal-12.zip,0,Normal,1990,445,97,1,[445]
CP-7.zip,1,CP,1301,3599,294,4,"[3598, 3599, 3600, 3601]"
CP-1.zip,1,CP,1070,3112,104,1,[3112]
CP-13.zip,1,CP,1493,4082,53,3,"[4080, 4081, 4082]"
CP-8.zip,1,CP,1330,3700,58,3,"[3698, 3699, 3700]"
NCP-19.zip,2,NCP,520,2196,129,2,"[2196, 2197]"
NCP-3.zip,2,NCP,137,1419,53,2,"[1418, 1419]"
NCP-30.zip,2,NCP,937,2479,22,1,[2479]
NCP-22.zip,2,NCP,865,2391,260,2,"[2390, 2391]"
NCP-7.zip,2,NCP,230,1605,58,2,"[1604, 1605]"
CP-7.zip,1,CP,1302,3603,207,4,"[3602, 3603, 3604, 3605]"
CP-16.zip,1,CP,1588,4276,20,1,[4276]
Normal-18.zip,0,Normal,2195,650,79,1,[650]
Normal-17.zip,0,Normal,2173,628,96,1,[628]
NCP-22.zip,2,NCP,878,2413,117,2,"[2412, 2413]"
Normal-18.zip,0,Normal,2188,643,88,1,[643]
CP-18.zip,1,CP,1774,3526,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
Normal-6.zip,0,Normal,1815,270,91,1,[270]
CP-5.zip,1,CP,1208,3426,321,1,[3426]
NCP-6.zip,2,NCP,22,1176,68,2,"[1175, 1176]"
NCP-15.zip,2,NCP,413,1979,54,4,"[1975, 1976, 1977, 1979]"
CP-18.zip,1,CP,1771,3521,51,4,"[3518, 3519, 3520, 3521]"
CP-4.zip,1,CP,1172,3390,195,1,[3390]
NCP-14.zip,2,NCP,378,1909,69,2,"[1908, 1909]"
CP-26.zip,1,CP,3721,5655,206,2,"[5654, 5655]"
NCP-1.zip,2,NCP,104,1346,58,2,"[1345, 1346]"
CP-27.zip,1,CP,3754,5698,21,1,[5698]
CP-19.zip,1,CP,1784,3592,69,4,"[3590, 3591, 3592, 3593]"
CP-9.zip,1,CP,1372,3799,49,3,"[3797, 3798, 3799]"
NCP-2.zip,2,NCP,113,1367,137,2,"[1367, 1368]"
Normal-22.zip,0,Normal,2318,773,105,1,[773]
CP-18.zip,1,CP,1770,3517,57,1,[3517]
Normal-21.zip,0,Normal,2293,748,88,1,[748]
Normal-22.zip,0,Normal,2595,105,43,1,[105]
NCP-18.zip,2,NCP,50,1234,59,2,"[1233, 1234]"
Normal-2.zip,0,Normal,1757,1106,71,4,"[1105, 1106, 1107, 1108]"
CP-8.zip,1,CP,1327,3692,64,3,"[3690, 3691, 3692]"
CP-18.zip,1,CP,1781,3577,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
CP-30.zip,1,CP,3932,5635,67,2,"[5634, 5635]"
NCP-20.zip,2,NCP,566,2291,67,2,"[2290, 2291]"
NCP-10.zip,2,NCP,2715,2708,51,1,[2708]
CP-23.zip,1,CP,660,3022,82,1,[3022]
Normal-9.zip,0,Normal,1916,371,106,1,[371]
CP-20.zip,1,CP,2757,3288,211,1,[3288]
Normal-7.zip,0,Normal,1845,300,99,1,[300]
Normal-13.zip,0,Normal,2050,505,74,1,[505]
CP-14.zip,1,CP,1534,4177,58,2,"[4176, 4177]"
CP-1.zip,1,CP,1092,3310,216,1,[3310]
Normal-2.zip,0,Normal,1763,1135,70,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
Normal-9.zip,0,Normal,1898,353,72,1,[353]
NCP-21.zip,2,NCP,576,2310,124,1,[2310]
Normal-1.zip,0,Normal,1701,956,70,2,"[955, 956]"
Normal-17.zip,0,Normal,2178,633,85,1,[633]
CP-8.zip,1,CP,1322,3681,56,2,"[3680, 3681]"
Normal-6.zip,0,Normal,1802,257,107,1,[257]
NCP-20.zip,2,NCP,547,2251,159,2,"[2251, 2252]"
NCP-3.zip,2,NCP,1285,2726,66,1,[2726]
Normal-7.zip,0,Normal,1828,283,96,1,[283]
NCP-20.zip,2,NCP,546,2250,57,2,"[2249, 2250]"
Normal-2.zip,0,Normal,1750,1078,69,3,"[1074, 1077, 1078]"
Normal-9.zip,0,Normal,1892,347,77,1,[347]
NCP-19.zip,2,NCP,534,2225,115,2,"[2225, 2226]"
CP-29.zip,1,CP,3806,5750,20,1,[5750]
NCP-13.zip,2,NCP,355,1861,125,2,"[1861, 1862]"
Normal-6.zip,0,Normal,1813,268,80,1,[268]
Normal-2.zip,0,Normal,1756,1101,66,4,"[1101, 1102, 1103, 1104]"
CP-20.zip,1,CP,2759,3290,36,1,[3290]
Normal-17.zip,0,Normal,2183,638,110,1,[638]
CP-7.zip,1,CP,1270,3490,237,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-6.zip,2,NCP,228,1601,67,2,"[1600, 1601]"
NCP-5.zip,2,NCP,197,1539,53,2,"[1538, 1539]"
CP-28.zip,1,CP,3766,5710,24,1,[5710]
CP-10.zip,1,CP,1399,3858,45,2,"[3858, 3859]"
Normal-14.zip,0,Normal,2074,529,82,1,[529]
Normal-2.zip,0,Normal,1733,1026,71,2,"[1026, 1027]"
NCP-11.zip,2,NCP,300,1747,58,2,"[1746, 1747]"
CP-17.zip,1,CP,1650,4338,31,1,[4338]
CP-20.zip,1,CP,2455,2937,116,1,[2937]
Normal-20.zip,0,Normal,2279,734,78,1,[734]
CP-8.zip,1,CP,1329,3697,45,3,"[3695, 3696, 3697]"
NCP-16.zip,2,NCP,444,2042,146,2,"[2042, 2043]"
Normal-12.zip,0,Normal,1999,454,78,1,[454]
CP-17.zip,1,CP,1624,4312,20,1,[4312]
NCP-10.zip,2,NCP,2720,2676,45,1,[2676]
CP-2.zip,1,CP,1107,3325,183,1,[3325]
CP-18.zip,1,CP,1777,3542,62,5,"[3540, 3541, 3542, 3543, 3544]"
NCP-15.zip,2,NCP,403,1955,110,2,"[1955, 1956]"
NCP-3.zip,2,NCP,134,1413,54,2,"[1412, 1413]"
CP-13.zip,1,CP,1500,4099,97,1,[4099]
CP-25.zip,1,CP,712,3074,118,1,[3074]
CP-23.zip,1,CP,648,3010,104,1,[3010]
CP-19.zip,1,CP,2443,2917,98,3,"[2915, 2916, 2917]"
NCP-16.zip,2,NCP,441,2036,115,2,"[2036, 2037]"
Normal-23.zip,0,Normal,2628,138,34,1,[138]
CP-19.zip,1,CP,1795,3596,41,2,"[3596, 3597]"
NCP-27.zip,2,NCP,1016,2580,20,3,"[2580, 2581, 2582]"
Normal-24.zip,0,Normal,2659,169,39,1,[169]
CP-17.zip,1,CP,1619,4307,29,1,[4307]
Normal-26.zip,0,Normal,3861,5373,211,1,[5373]
NCP-19.zip,2,NCP,519,2195,53,2,"[2194, 2195]"
NCP-6.zip,2,NCP,213,1571,66,2,"[1570, 1571]"
Normal-25.zip,0,Normal,3860,5372,212,1,[5372]
NCP-5.zip,2,NCP,192,1529,57,2,"[1528, 1529]"
CP-3.zip,1,CP,1153,3371,179,1,[3371]
CP-3.zip,1,CP,1159,3377,287,1,[3377]
NCP-30.zip,2,NCP,931,2473,21,1,[2473]
CP-6.zip,1,CP,1255,3473,107,1,[3473]
CP-7.zip,1,CP,1270,3493,193,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-4.zip,2,NCP,169,1482,133,2,"[1482, 1483]"
NCP-12.zip,2,NCP,340,1828,128,2,"[1828, 1829]"
CP-18.zip,1,CP,1782,3583,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
CP-26.zip,1,CP,3729,5665,36,3,"[5665, 5666, 5667]"
Normal-11.zip,0,Normal,1976,431,74,1,[431]
CP-9.zip,1,CP,1372,3798,49,3,"[3797, 3798, 3799]"
NCP-4.zip,2,NCP,161,1467,57,2,"[1466, 1467]"
CP-22.zip,1,CP,613,2975,78,1,[2975]
NCP-17.zip,2,NCP,460,2074,106,2,"[2074, 2075]"
NCP-21.zip,2,NCP,577,2311,145,2,"[2311, 2312]"
CP-25.zip,1,CP,741,3103,523,1,[3103]
CP-14.zip,1,CP,1532,4172,50,2,"[4171, 4172]"
NCP-11.zip,2,NCP,299,1744,139,2,"[1744, 1745]"
NCP-14.zip,2,NCP,372,1896,45,2,"[1895, 1896]"
CP-9.zip,1,CP,1356,3756,60,2,"[3756, 3757]"
Normal-11.zip,0,Normal,1968,423,96,1,[423]
CP-14.zip,1,CP,1525,4156,60,2,"[4155, 4156]"
CP-7.zip,1,CP,1270,3502,21,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
CP-22.zip,1,CP,618,2980,166,1,[2980]
CP-17.zip,1,CP,1639,4327,26,1,[4327]
Normal-19.zip,0,Normal,2245,700,83,1,[700]
CP-13.zip,1,CP,1518,4139,67,3,"[4138, 4139, 4140]"
NCP-11.zip,2,NCP,29,1190,132,2,"[1190, 1191]"
CP-16.zip,1,CP,1615,4303,29,1,[4303]
CP-29.zip,1,CP,3823,5767,26,1,[5767]
NCP-20.zip,2,NCP,574,2306,139,2,"[2306, 2307]"
NCP-12.zip,2,NCP,340,1829,54,2,"[1828, 1829]"
Normal-21.zip,0,Normal,2285,740,68,1,[740]
NCP-16.zip,2,NCP,455,2065,56,2,"[2064, 2065]"
NCP-16.zip,2,NCP,436,2026,61,2,"[2025, 2026]"
NCP-14.zip,2,NCP,383,1918,139,2,"[1918, 1919]"
NCP-30.zip,2,NCP,988,2538,287,2,"[2538, 2539]"
NCP-7.zip,2,NCP,247,1641,66,2,"[1640, 1641]"
CP-15.zip,1,CP,1571,4259,16,1,[4259]
Normal-16.zip,0,Normal,2137,592,94,1,[592]
CP-7.zip,1,CP,1304,3637,218,8,"[3632, 3633, 3634, 3635, 3636, 3637, 3638, 3639]"
CP-6.zip,1,CP,1235,3453,155,1,[3453]
Normal-4.zip,0,Normal,776,211,353,1,[211]
CP-7.zip,1,CP,1270,3499,363,15,"[3488, 3489, 3490, 3491, 3492, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502]"
NCP-5.zip,2,NCP,18,1168,135,2,"[1168, 1169]"
Normal-18.zip,0,Normal,2189,644,82,1,[644]
Normal-6.zip,0,Normal,1799,254,97,1,[254]
Normal-15.zip,0,Normal,2113,568,93,1,[568]
CP-3.zip,1,CP,1131,3349,157,1,[3349]
Normal-6.zip,0,Normal,1819,274,91,1,[274]
CP-18.zip,1,CP,1781,3571,62,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-16.zip,2,NCP,455,2064,132,2,"[2064, 2065]"
Normal-8.zip,0,Normal,1888,343,99,1,[343]
NCP-20.zip,2,NCP,571,2301,68,2,"[2300, 2301]"
NCP-7.zip,2,NCP,247,1640,159,2,"[1640, 1641]"
CP-3.zip,1,CP,1137,3355,147,1,[3355]
CP-11.zip,1,CP,1423,3913,53,3,"[3911, 3912, 3913]"
NCP-1.zip,2,NCP,105,1347,145,2,"[1347, 1348]"
NCP-14.zip,2,NCP,377,1907,62,2,"[1906, 1907]"
CP-14.zip,1,CP,1535,4178,53,2,"[4178, 4179]"
Normal-9.zip,0,Normal,1900,355,93,1,[355]
CP-2.zip,1,CP,1125,3343,115,1,[3343]
CP-6.zip,1,CP,1243,3461,176,1,[3461]
NCP-6.zip,2,NCP,203,1550,140,2,"[1550, 1551]"
Normal-1.zip,0,Normal,1670,789,63,6,"[787, 788, 789, 790, 791, 792]"
NCP-29.zip,2,NCP,909,2451,401,1,[2451]
NCP-25.zip,2,NCP,3949,5463,35,1,[5463]
Normal-26.zip,0,Normal,3879,5391,28,1,[5391]
NCP-11.zip,2,NCP,29,1191,56,2,"[1190, 1191]"
CP-3.zip,1,CP,1129,3347,158,1,[3347]
NCP-15.zip,2,NCP,406,1961,146,2,"[1961, 1962]"
NCP-4.zip,2,NCP,151,1446,129,2,"[1446, 1447]"
CP-19.zip,1,CP,1789,3206,64,4,"[3204, 3205, 3206, 3207]"
NCP-6.zip,2,NCP,227,1599,61,2,"[1598, 1599]"
CP-12.zip,1,CP,1462,4003,51,3,"[4002, 4003, 4004]"
CP-3.zip,1,CP,1147,3365,164,1,[3365]
Normal-23.zip,0,Normal,2629,139,36,1,[139]
Normal-1.zip,0,Normal,1700,953,64,2,"[953, 954]"
CP-15.zip,1,CP,1561,4241,49,2,"[4241, 4242]"
NCP-16.zip,2,NCP,437,2028,60,2,"[2027, 2028]"
CP-18.zip,1,CP,1654,4342,23,1,[4342]
Normal-20.zip,0,Normal,2273,728,75,1,[728]
Normal-14.zip,0,Normal,2067,522,94,1,[522]
NCP-29.zip,2,NCP,911,2453,48,1,[2453]
Normal-2.zip,0,Normal,1756,1104,65,4,"[1101, 1102, 1103, 1104]"
Normal-11.zip,0,Normal,1989,444,105,1,[444]
NCP-15.zip,2,NCP,403,1956,47,2,"[1955, 1956]"
NCP-13.zip,2,NCP,348,1848,48,2,"[1847, 1848]"
NCP-28.zip,2,NCP,844,2359,594,1,[2359]
NCP-18.zip,2,NCP,51,1235,141,2,"[1235, 1236]"
CP-28.zip,1,CP,3789,5733,26,1,[5733]
Normal-2.zip,0,Normal,1763,1138,65,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-10.zip,2,NCP,278,1702,137,2,"[1702, 1703]"
CP-28.zip,1,CP,3770,5714,23,1,[5714]
CP-18.zip,1,CP,1782,3585,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-23.zip,2,NCP,93,1323,66,2,"[1322, 1323]"
NCP-14.zip,2,NCP,390,1930,126,2,"[1930, 1931]"
NCP-8.zip,2,NCP,26,1184,82,2,"[1184, 1185]"
Normal-2.zip,0,Normal,1763,1142,71,11,"[1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142]"
NCP-6.zip,2,NCP,201,1547,62,2,"[1546, 1547]"
NCP-5.zip,2,NCP,176,1496,126,2,"[1496, 1497]"
Normal-23.zip,0,Normal,2626,136,33,1,[136]
NCP-25.zip,2,NCP,3707,5534,50,1,[5534]
Normal-21.zip,0,Normal,2305,760,104,1,[760]
Normal-6.zip,0,Normal,1818,273,87,1,[273]
CP-22.zip,1,CP,641,3003,136,1,[3003]
Normal-7.zip,0,Normal,1836,291,104,1,[291]
Normal-27.zip,0,Normal,3894,5417,287,1,[5417]
NCP-30.zip,2,NCP,981,2526,23,2,"[2525, 2526]"
NCP-1.zip,2,NCP,102,1341,132,2,"[1341, 1342]"
CP-18.zip,1,CP,1782,3581,71,9,"[3579, 3580, 3581, 3582, 3583, 3584, 3585, 3586, 3587]"
NCP-14.zip,2,NCP,387,1924,128,2,"[1924, 1925]"
NCP-2.zip,2,NCP,117,1376,55,2,"[1375, 1376]"
NCP-5.zip,2,NCP,190,1524,152,2,"[1524, 1525]"
CP-26.zip,1,CP,3639,5598,241,1,[5598]
Normal-1.zip,0,Normal,1670,787,58,6,"[787, 788, 789, 790, 791, 792]"
Normal-2.zip,0,Normal,1757,1108,68,4,"[1105, 1106, 1107, 1108]"
CP-10.zip,1,CP,1393,3845,60,2,"[3845, 3846]"
Normal-13.zip,0,Normal,2043,498,84,1,[498]
CP-2.zip,1,CP,1099,3317,198,1,[3317]
CP-7.zip,1,CP,1318,3673,56,1,[3673]
Normal-9.zip,0,Normal,1899,354,88,1,[354]
CP-12.zip,1,CP,1467,4014,60,2,"[4013, 4014]"
NCP-5.zip,2,NCP,197,1538,124,2,"[1538, 1539]"
CP-26.zip,1,CP,3730,5669,202,2,"[5668, 5669]"
NCP-22.zip,2,NCP,845,2363,428,4,"[2360, 2361, 2362, 2363]"
NCP-2.zip,2,NCP,127,1399,139,2,"[1399, 1400]"
Normal-26.zip,0,Normal,3893,5416,63,1,[5416]
NCP-8.zip,2,NCP,2669,2689,37,1,[2689]
CP-18.zip,1,CP,1778,3549,64,6,"[3545, 3546, 3547, 3548, 3549, 3550]"
CP-25.zip,1,CP,722,3084,70,1,[3084]
NCP-4.zip,2,NCP,157,1458,114,2,"[1458, 1459]"
NCP-23.zip,2,NCP,92,1320,87,2,"[1320, 1321]"
CP-11.zip,1,CP,1424,3914,60,2,"[3914, 3915]"
NCP-19.zip,2,NCP,529,2215,33,3,"[2214, 2215, 2217]"
CP-24.zip,1,CP,704,3066,417,1,[3066]
NCP-6.zip,2,NCP,201,1546,149,2,"[1546, 1547]"
Normal-17.zip,0,Normal,2177,632,88,1,[632]
NCP-14.zip,2,NCP,383,1919,58,2,"[1918, 1919]"
Normal-2.zip,0,Normal,1737,1040,80,4,"[1037, 1038, 1039, 1040]"
Normal-26.zip,0,Normal,3881,5393,22,1,[5393]
Normal-3.zip,0,Normal,1767,1161,71,12,"[1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163]"
CP-14.zip,1,CP,1525,4155,60,2,"[4155, 4156]"
NCP-12.zip,2,NCP,341,1832,55,3,"[1830, 1832, 1834]"
Normal-13.zip,0,Normal,2034,489,91,1,[489]
NCP-26.zip,2,NCP,3978,5485,49,1,[5485]
NCP-22.zip,2,NCP,864,2389,221,2,"[2388, 2389]"
NCP-9.zip,2,NCP,2682,2652,47,1,[2652]
NCP-7.zip,2,NCP,2461,2642,42,1,[2642]
Normal-21.zip,0,Normal,2303,758,110,1,[758]
NCP-8.zip,2,NCP,2670,2690,41,1,[2690]
CP-7.zip,1,CP,1315,3666,59,2,"[3665, 3666]"
CP-19.zip,1,CP,2449,2927,118,1,[2927]
CP-19.zip,1,CP,1789,3204,59,4,"[3204, 3205, 3206, 3207]"
Normal-6.zip,0,Normal,1803,258,100,1,[258]
Normal-1.zip,0,Normal,1675,812,73,1,[812]
NCP-25.zip,2,NCP,3705,5532,63,1,[5532]
Normal-1.zip,0,Normal,1727,1010,63,4,"[1009, 1010, 1011, 1012]"
NCP-3.zip,2,NCP,1283,2724,70,1,[2724]
CP-18.zip,1,CP,1774,3524,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
CP-18.zip,1,CP,1774,3525,66,8,"[3522, 3523, 3524, 3525, 3526, 3527, 3528, 3529]"
CP-30.zip,1,CP,3919,5543,66,4,"[5543, 5544, 5545, 5546]"
NCP-22.zip,2,NCP,85,1304,58,2,"[1303, 1304]"
Normal-18.zip,0,Normal,2192,647,79,1,[647]
CP-30.zip,1,CP,3935,5641,70,1,[5641]
NCP-6.zip,2,NCP,227,1598,146,2,"[1598, 1599]"
Normal-20.zip,0,Normal,2250,705,76,1,[705]
CP-12.zip,1,CP,1464,4008,63,2,"[4007, 4008]"
CP-29.zip,1,CP,3807,5751,20,1,[5751]
Normal-12.zip,0,Normal,1993,448,97,1,[448]
NCP-19.zip,2,NCP,528,2212,140,2,"[2212, 2213]"
NCP-26.zip,2,NCP,3987,5511,60,1,[5511]
NCP-25.zip,2,NCP,3969,5478,50,1,[5478]
CP-17.zip,1,CP,1638,4326,25,1,[4326]
CP-17.zip,1,CP,1643,4331,24,1,[4331]
CP-17.zip,1,CP,1629,4317,23,1,[4317]
CP-11.zip,1,CP,1423,3912,53,3,"[3911, 3912, 3913]"
Normal-2.zip,0,Normal,1743,1056,73,2,"[1056, 1057]"
Normal-9.zip,0,Normal,1915,370,91,1,[370]
Normal-22.zip,0,Normal,2590,100,41,1,[100]
NCP-11.zip,2,NCP,297,1741,60,2,"[1739, 1741]"
CP-30.zip,1,CP,3919,5545,70,4,"[5543, 5544, 5545, 5546]"
NCP-25.zip,2,NCP,3971,5480,50,1,[5480]
CP-11.zip,1,CP,1454,3983,53,3,"[3982, 3983, 3984]"
Normal-21.zip,0,Normal,2282,737,69,1,[737]
NCP-12.zip,2,NCP,318,1783,150,2,"[1783, 1784]"
NCP-10.zip,2,NCP,279,1704,139,2,"[1704, 1705]"
CP-2.zip,1,CP,1108,3326,135,1,[3326]
Normal-2.zip,0,Normal,1733,1027,71,2,"[1026, 1027]"
CP-18.zip,1,CP,1781,3567,67,12,"[3568, 3569, 3570, 3571, 3573, 3574, 3575, 3576, 3577, 3578, 3567, 3572]"
NCP-10.zip,2,NCP,2725,2681,51,1,[2681]
CP-11.zip,1,CP,1425,3917,49,3,"[3916, 3917, 3918]"
Normal-1.zip,0,Normal,1701,955,70,2,"[955, 956]"
CP-19.zip,1,CP,1787,3195,59,1,[3195]
================================================
FILE: Finetune/CC-CCII/csv/CC_CCII_metadata.csv
================================================
patient_id,scan_id,Age,Sex(Male1/Female2),Critical_illness,Liver_function,Lung_function,Progression (Days)
1399,127,57,1,1,5,2,0.08
1297,82,55,1,1,3,2,0.88
2255,549,3,1,1,1,2,0.02
1184,26,5,2,1,0,2,0.02
1186,27,2,2,1,2,2,0.02
1181,25,0,1,1,2,2,0.03
1610,233,0,1,1,3,2,0.02
2245,544,69,1,1,1,3,0.09
1316,90,8,2,1,1,2,0.02
1268,68,47,2,1,4,4,6.08
1328,96,74,1,1,1,4,4.81
1948,399,44,1,0,0,1,10.25
1950,400,17,2,0,1,2,10.25
1897,373,80,2,1,1,3,0.03
2292,567,64,1,1,1,4,0.08
1785,319,45,1,1,0,2,0.32
1733,294,51,2,1,1,2,3.58
1320,92,9,1,1,1,3,0.17
1318,91,7,1,1,1,2,0.04
1908,378,66,1,1,2,4,3.64
1312,89,34,1,1,4,1,0.73
1387,123,83,1,1,3,4,0.07
1446,151,86,2,1,1,3,0.01
2156,500,44,1,0,0,1,7.32
2113,479,32,2,0,0,1,8.22
2091,468,38,1,0,1,2,8.38
1684,269,75,1,1,2,4,0.07
2060,453,39,2,0,0,2,9.5
2048,447,40,2,0,0,2,9.58
1281,74,63,1,1,1,5,0.84
2210,527,70,2,1,2,4,0.07
2150,497,57,2,1,0,1,1.16
1700,277,85,1,1,2,3,0.05
2313,578,87,2,1,1,3,0.03
1895,372,8,2,1,0,2,0.04
2146,495,61,1,0,0,3,7.41
2311,577,77,1,0,0,3,8.3
2144,494,31,1,1,0,1,0.03
1369,114,57,2,1,1,1,0.06
1572,214,26,1,1,1,1,0.81
1536,196,35,2,1,0,2,0.92
1692,273,48,2,1,1,1,1.03
2302,572,49,1,1,0,1,0.03
1504,180,62,2,1,0,4,1.01
1578,217,40,2,1,5,2,0.9
1458,157,49,2,1,2,1,1.13
1982,415,31,1,1,0,2,0.06
1967,409,26,2,0,0,2,10.14
2054,450,2,2,1,1,2,0.06
2056,451,0,2,1,1,2,0.05
1922,385,0,2,1,2,2,0.03
2310,576,77,2,1,0,4,0.02
1412,134,71,2,1,1,3,0.07
1566,211,48,2,0,1,2,13.03
1923,386,0,2,1,3,2,0.03
2059,452,0,2,1,1,3,0.04
2204,524,73,1,1,5,3,0.06
2031,439,51,1,0,0,2,9.99
1920,384,45,2,1,2,2,8.48
1179,24,62,2,0,1,3,6.24
1634,244,43,1,0,1,2,12.39
1482,169,61,1,1,1,4,1.05
1580,218,44,1,1,3,2,0.87
1476,166,57,2,1,0,2,1.09
1474,165,57,1,1,0,2,7.04
1332,98,9,2,1,1,3,0.11
2190,517,73,2,1,2,3,0.99
1341,102,29,2,1,1,2,0.92
1629,242,75,1,1,2,3,5.07
1590,223,42,1,1,1,1,0.09
1804,328,71,2,1,1,4,0.05
1175,22,82,2,1,1,3,0.05
1287,77,44,1,1,2,2,0.07
1524,190,40,1,1,0,3,0.01
1508,182,54,1,1,0,2,0.02
2296,569,56,1,0,0,1,10.98
2280,561,63,2,1,2,3,6.59
1800,326,69,2,1,0,4,0.08
2074,460,9,1,1,2,2,0.02
2123,484,51,1,1,1,2,0.03
2101,473,46,1,1,5,1,0.03
2214,529,25,2,1,0,2,0.04
1410,133,9,2,1,1,3,0.09
1615,235,47,2,0,0,2,12.51
1877,363,50,2,0,0,1,11.22
1704,279,53,2,0,1,2,12.1
1584,220,56,1,1,0,2,0.04
2192,518,44,1,1,2,2,0.02
1942,396,53,1,0,1,1,10.39
1625,240,51,1,0,1,2,10.45
2087,466,51,1,0,2,3,9
2180,512,46,2,0,1,1,6.12
2202,523,63,1,1,0,4,2.82
2194,519,68,2,0,1,4,5.19
2188,516,65,2,0,0,4,6.12
1436,146,32,1,0,0,1,14
2253,548,37,1,0,1,2,14
1604,230,59,1,1,0,1,0.05
1295,81,26,1,0,4,3,14
1426,141,38,2,1,1,2,0.08
1227,47,24,2,1,2,2,0.94
1798,325,64,2,1,0,4,2.73
1744,299,56,1,1,1,2,5.54
1924,387,65,2,1,0,4,0.92
1932,391,62,2,0,0,3,10.4
1309,88,61,1,1,3,3,0.87
1194,31,41,1,1,0,1,0.99
1654,254,66,2,0,1,4,12.36
1243,55,38,2,1,1,1,0.86
1276,72,41,2,1,5,1,0.08
1690,272,79,1,1,2,4,0.03
1349,106,54,1,1,0,1,0.95
2011,429,58,2,1,0,1,4.54
1987,417,51,2,1,1,2,0.14
2206,525,79,1,0,0,3,5.16
2300,571,51,1,0,1,2,5.18
2243,543,68,2,1,0,4,0.02
2072,459,73,1,1,1,3,0.05
1674,264,40,1,0,0,2,12.32
1906,377,73,1,0,0,4,11.11
1715,285,37,2,0,0,2,12.08
1520,188,68,2,1,0,4,4.68
1901,375,69,2,0,0,4,11.12
1881,365,63,2,0,0,4,11.22
2015,431,28,1,0,0,3,10.09
1783,318,32,1,0,0,3,11.94
1855,352,59,1,1,1,2,0.02
1859,354,29,1,0,0,2,11.35
1710,282,68,2,1,0,3,7.19
1788,320,57,2,1,0,1,4.6
1351,107,49,1,1,1,2,6.83
1867,358,54,1,0,2,2,11.27
1377,118,34,1,1,1,2,6.77
1965,408,54,2,1,0,2,6.12
1808,330,76,1,1,0,5,0.05
1928,389,40,2,0,1,2,10.41
1748,301,29,2,0,0,2,12.02
1608,232,46,2,0,1,2,12.63
1739,297,61,1,1,0,3,8.46
2142,493,33,2,1,0,2,0.59
2241,542,62,1,1,1,4,0.83
2046,446,51,1,1,0,3,7.22
2212,528,29,2,1,1,1,0.24
2170,507,55,2,1,4,1,1.57
2148,496,30,1,0,0,2,7.41
1552,204,48,1,1,4,2,0.03
1779,316,50,2,0,0,1,11.96
2119,482,42,2,1,0,2,1.93
2220,531,49,1,0,2,2,4.21
1210,39,70,2,1,0,5,5.81
1251,59,52,2,0,0,3,14
1405,130,53,2,1,0,3,0.07
1259,63,29,1,1,0,3,5.64
1530,193,48,2,1,2,2,0.02
2222,532,74,1,0,0,3,4.19
2134,489,48,1,1,1,3,0.05
1863,356,56,2,0,4,2,11.3
2005,426,77,1,0,0,4,10.11
1168,18,76,1,0,1,4,10.26
1995,421,55,1,0,3,2,10.11
1891,370,55,2,1,3,2,5.54
2282,562,34,2,0,2,2,7.51
1752,303,57,1,0,0,2,11.98
1558,207,63,1,1,3,4,7.43
1973,412,53,1,1,0,3,9.53
1771,312,24,1,0,4,2,11.97
1794,323,72,2,1,2,5,8.34
2064,455,39,2,0,0,2,9.42
1170,19,57,2,1,0,2,0.04
2174,509,71,2,0,0,3,6.47
2200,522,32,2,0,1,1,5.17
2154,499,34,1,0,0,1,7.35
2198,521,57,1,0,0,1,5.18
2196,520,54,2,0,0,1,5.18
1980,414,69,2,1,2,3,0.87
2288,565,83,1,1,2,4,0.01
1468,162,33,1,1,0,3,0.02
2275,559,75,2,1,2,4,5.01
1375,117,67,2,1,2,4,3.95
2164,504,56,2,1,0,2,1.86
2111,478,35,2,0,0,2,14
1223,45,75,2,0,0,3,10.11
1938,394,84,1,1,0,3,0.88
2237,540,58,2,1,1,2,7.07
1725,290,59,2,0,1,1,12.06
1837,343,52,2,1,1,2,0.04
2082,464,65,2,0,0,4,9.02
2029,438,31,1,0,0,2,10.03
2109,477,16,1,1,0,3,0.03
2040,443,72,2,1,1,3,3.37
1991,419,70,2,0,4,3,10.11
2036,441,50,2,0,0,2,9.59
1961,406,36,1,0,0,2,10.16
2052,449,25,1,0,0,2,9.58
2218,530,61,2,1,0,4,0.04
1235,51,38,1,1,5,2,6
1767,310,35,1,0,0,2,11.97
1177,23,63,1,0,0,4,11.79
2231,537,80,1,1,0,3,0.02
1621,238,78,1,1,4,4,4.06
2224,533,29,1,0,0,1,3.27
1385,122,84,1,0,0,3,2.07
1488,172,40,2,1,0,2,0.03
1596,226,59,2,1,0,1,0.05
1265,66,23,1,0,0,2,14
2321,582,63,1,1,3,3,0.98
1516,186,68,2,1,2,3,0.03
2127,486,48,1,1,2,2,0.02
1682,268,68,1,1,0,3,0.04
2138,491,49,1,1,0,1,0.02
2095,470,65,1,1,5,4,0.04
3988,1456,77,2,1,0,5,0.02
1871,360,46,1,1,0,2,1.47
2308,575,52,1,1,1,2,0.1
1420,138,62,2,1,2,4,0.04
2235,539,65,1,1,0,5,9.63
1642,248,83,1,1,1,4,0.05
1305,86,77,2,1,1,3,0.6
2325,816,74,1,0,1,2,14
2326,817,52,1,1,0,0,7
2327,818,56,2,0,1,1,7
2330,820,40,1,0,3,2,9
2331,821,24,2,0,0,1,8
2332,822,57,1,1,0,0,8
2334,823,46,2,0,0,1,11
2335,824,35,1,0,2,2,9
2338,825,46,2,0,0,1,10
2339,826,24,1,0,0,1,8
2341,828,41,1,0,1,2,7
2343,830,27,1,0,3,1,12
2345,832,27,1,0,1,1,8
2346,833,33,1,0,0,1,8
2350,835,40,2,0,0,1,10
2351,836,35,1,0,0,2,10
2352,837,51,1,0,0,1,10
2353,838,58,2,0,1,1,10
2354,839,43,1,0,1,1,8
2355,840,18,1,0,1,1,10
2356,841,49,2,0,1,1,10
2359,844,68,2,0,1,2,10
2364,846,41,2,0,1,0,9
2365,847,61,2,0,0,3,8
2368,849,22,1,0,0,1,8
2369,850,29,1,0,0,1,9
2370,851,34,1,0,1,2,11
2371,852,39,1,0,0,0,10
2373,853,42,2,0,0,1,10
2374,854,28,2,0,1,0,9
2377,856,56,1,0,1,0,11
2378,857,41,2,0,1,0,10
2379,858,41,1,0,0,0,10
2380,859,39,1,0,0,1,10
2384,861,47,2,0,1,0,11
2387,863,50,2,0,0,0,10
2389,864,39,2,0,0,1,10
2393,866,30,1,1,1,1,10
2394,867,60,2,0,1,1,9
2397,869,31,1,0,0,1,11
2399,870,31,2,0,1,1,14
2402,871,43,1,0,0,0,9
2403,872,26,1,0,1,1,14
2407,874,23,1,0,3,1,12
2408,875,38,2,0,0,1,9
2410,876,54,1,0,0,2,9
2411,877,31,1,0,0,1,8
2413,878,18,2,0,0,1,7
2414,879,52,2,0,1,1,9
2415,880,54,1,0,0,1,7
2420,883,36,2,0,1,1,9
2421,884,56,2,0,0,0,8
2423,885,46,1,0,0,1,11
2424,886,51,1,0,0,0,14
2426,888,51,1,0,1,2,9
2428,889,48,2,0,1,1,8
2429,890,52,1,0,1,1,6
2431,892,29,1,0,0,2,8
2432,893,62,2,0,5,4,14
2435,895,36,1,0,1,1,8
2437,896,53,2,0,1,1,7
2438,897,22,1,0,3,2,7
2439,898,41,1,0,3,0,8
2441,899,32,1,0,1,2,12
2443,901,56,2,0,1,1,14
2444,902,26,2,0,1,1,8
2446,904,59,2,0,1,1,9
2448,906,49,2,0,2,1,14
2449,907,59,1,0,1,0,8
2450,908,34,2,0,0,2,9
2451,909,63,1,1,1,2,9
2452,910,48,1,1,0,3,9
2453,911,28,1,1,0,2,10
2454,912,44,2,0,1,1,14
2455,913,45,1,0,1,2,12
2457,915,45,2,0,0,1,8
2458,916,57,1,0,1,2,7
2459,917,62,1,0,2,3,11
2463,921,40,2,0,1,1,14
2464,922,62,1,0,1,3,9
2465,923,55,1,0,2,2,11
2466,924,60,2,0,0,1,12
2467,925,33,1,0,0,1,10
2468,926,32,1,0,2,1,10
2472,930,58,2,0,2,1,10
2473,931,47,1,0,2,1,13
2475,933,50,2,0,0,0,14
2476,934,39,2,0,0,2,12
2477,935,46,1,0,1,0,10
2479,937,67,2,0,0,3,10
2480,938,38,1,0,1,1,14
2482,939,23,1,0,0,0,8
2483,940,45,1,0,1,1,9
2484,941,27,2,0,0,1,8
2486,943,63,1,0,1,3,14
2487,944,47,1,0,0,2,7
2488,945,29,1,1,1,0,11
2489,946,30,2,0,1,2,9
2490,947,60,2,0,0,1,13
2492,949,40,2,0,1,1,6
2493,950,28,1,0,1,0,8
2494,951,30,1,0,0,1,8
2495,952,50,1,0,0,2,14
2497,954,48,2,0,1,1,10
2501,958,33,2,0,5,2,9
2502,959,28,1,0,1,1,9
2503,960,50,1,0,1,2,10
2505,962,12,1,0,0,2,9
2506,963,47,2,0,1,4,13
2507,965,47,1,0,1,2,14
2510,967,33,1,0,1,1,8
2511,968,71,2,0,1,3,14
2512,969,44,1,0,1,1,14
2513,970,36,1,0,1,2,12
2514,971,42,1,0,1,2,12
2515,972,37,1,0,1,1,14
2516,973,49,2,0,3,1,9
2519,976,50,1,0,1,0,9
2521,977,47,2,0,0,3,9
2522,978,63,1,0,1,2,9
2523,979,44,2,0,1,2,6
2525,981,47,2,0,1,0,9
2527,982,35,2,0,0,1,10
2529,984,28,1,0,2,1,11
2531,985,30,1,0,1,1,10
2535,986,44,2,0,1,0,12
2537,987,35,1,0,1,1,8
2541,989,40,2,0,1,1,14
2543,990,57,1,1,1,0,7
2544,991,46,2,0,1,1,8
2546,993,47,2,0,2,2,10
2547,994,46,1,0,5,2,14
2549,995,72,2,0,1,2,9
2550,996,34,1,1,2,1,9
2555,998,62,1,0,1,3,7
2556,999,33,2,0,0,2,7
2560,1001,43,1,1,2,2,9
2561,1002,39,1,0,0,1,8
2562,1003,47,1,0,2,2,10
2564,1004,51,2,0,0,0,9
2565,1005,56,2,1,2,1,12
2568,1007,19,1,0,1,1,10
2569,1008,58,1,0,1,1,9
2570,1009,50,2,0,0,0,9
2572,1010,71,1,1,0,2,10
2574,1011,65,1,1,1,3,10
2576,1012,47,1,0,1,2,8
2577,1013,47,1,0,1,2,10
2579,1015,23,2,0,0,0,7
2583,1017,64,1,0,1,3,9
2585,1019,57,2,0,5,1,9
2586,1020,35,1,0,1,1,6
2588,1021,45,2,0,1,1,14
2591,1022,9,1,0,1,3,9
2593,1023,52,1,0,2,1,9
2595,1025,31,1,0,1,1,9
2596,1026,39,2,0,0,1,9
2597,1027,28,1,0,1,1,12
2598,1028,47,2,0,0,2,9
2599,1029,58,1,0,2,0,8
2600,1030,50,1,0,1,1,6
2602,1031,40,1,1,0,1,11
2603,1032,47,1,0,0,1,6
2604,1033,73,1,0,1,4,11
2605,1034,27,1,0,2,0,7
2606,1035,33,2,0,1,0,9
2607,1036,24,1,0,1,1,8
2609,1038,19,1,0,1,1,8
2610,1039,42,1,0,1,2,10
2611,1040,61,2,0,2,3,6
2612,1041,63,1,0,1,3,6
2614,1042,47,2,0,0,3,8
2616,1044,55,2,1,0,1,8
2617,1045,81,1,0,0,4,14
2618,1046,36,1,0,0,1,8
2619,1047,41,1,0,1,1,9
2622,1049,45,2,0,0,0,8
2626,1051,52,1,0,1,1,8
2627,1052,47,1,0,0,1,8
2629,1053,28,1,0,1,1,10
2631,1055,52,2,0,0,1,14
2632,1056,51,2,0,1,1,9
2633,1057,45,1,0,1,1,10
2634,1058,68,2,1,0,2,10
2636,1059,48,2,0,0,1,8
2637,1060,69,2,0,0,3,11
2638,1061,51,2,0,1,1,12
2639,1062,43,1,0,0,2,12
2640,1063,29,1,0,1,1,10
2641,1064,47,2,0,1,1,14
================================================
FILE: Finetune/CC-CCII/dataset/__init__.py
================================================
================================================
FILE: Finetune/CC-CCII/eval.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
from torch.cuda.amp import GradScaler, autocast
from utils.data_utils import get_loader
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
# Run evaluation on a single GPU; rendezvous settings for torch.distributed.
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
# Command-line interface for evaluation.
# NOTE(review): "boolean" options below (--save_checkpoint, --use_checkpoint,
# --use_ssl_pretrained) use default=True without type=..., so any value given
# on the command line arrives as a non-empty (truthy) string — they cannot be
# switched off from the CLI.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs_384/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--csv_list", default="./csv/", type=str, help="csv directory")
parser.add_argument("--fold", default=0, type=int, help="fold")
parser.add_argument("--data_dir", default="/data/jiaxin/data/CC-CCII_public/data/", type=str, help="dataset directory")
parser.add_argument(
    "--pretrained_model_name",
    default="model.pt",
    type=str,
    help="pretrained model name",
)
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
parser.add_argument("--max_epochs", default=100, type=int, help="max number of training epochs")
parser.add_argument("--batch_size", default=4, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=1, type=int, help="number of sliding window batch size")
parser.add_argument("--optim_lr", default=1e-4, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=1e-5, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", action="store_true", help="do NOT use amp for training")
parser.add_argument("--val_every", default=5, type=int, help="validation frequency")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=4, type=int, help="number of workers")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=3, type=int, help="number of output channels")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
# warmup is important !!!
parser.add_argument("--warmup_epochs", default=5, type=int, help="number of warmup epochs")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=True, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Evaluate a fine-tuned Swin classifier on the CC-CCII test split.

    Loads the checkpoint ``--pretrained_dir/--pretrained_model_name``, runs
    the model over the test loader and prints a running classification
    accuracy after each batch.
    """
    args = parser.parse_args()
    args.test_mode = True
    _, loader = get_loader(args)
    pretrained_dir = args.pretrained_dir
    model_name = args.pretrained_model_name
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    pretrained_pth = os.path.join(pretrained_dir, model_name)
    from model import Swin
    model = Swin(args)
    # map_location="cpu" makes GPU-saved checkpoints loadable on CPU-only
    # machines (previously torch.load would fail there, despite the CPU
    # fallback computed for `device` above).
    model_dict = torch.load(pretrained_pth, map_location="cpu")["state_dict"]
    model.load_state_dict(model_dict, strict=True)
    model.eval()
    model.to(device)
    with torch.no_grad():
        num_correct = 0.0
        metric_count = 0
        for idx, batch_data in enumerate(loader):
            if isinstance(batch_data, list):
                data, target = batch_data
            else:
                data, target = batch_data["image"], batch_data["label"]
            # Move batches to the model's device; unlike the previous
            # unconditional .cuda(args.rank), this also works without a GPU.
            data, target = data.to(device), target.to(device)
            logits = model(data)
            # Per-sample correctness of the argmax prediction.
            value = torch.eq(logits.argmax(dim=1), target)
            metric_count += len(value)
            num_correct += value.sum().item()
            # Running accuracy over all batches seen so far.
            metric = num_correct / metric_count
            print(
                "Val {}/{}".format(idx, len(loader)),
                "acc",
                metric,
            )
# Script entry point.
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/CC-CCII/main.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn.parallel
import torch.utils.data.distributed
from optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR
from trainer import run_training
from utils.data_utils import get_loader
from monai.inferers import sliding_window_inference
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
# Pin training to GPU 2; rendezvous settings for torch.distributed.
os.environ['CUDA_VISIBLE_DEVICES'] = "2"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
# Raise the soft open-file limit: many DataLoader workers can exhaust the
# default number of file descriptors.
import resource
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# Command-line interface for training.
# NOTE(review): "boolean" options below (--save_checkpoint, --use_checkpoint,
# --use_ssl_pretrained) use default=True without type=..., so any value given
# on the command line arrives as a truthy string — they cannot be disabled
# from the CLI.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./pretrained_models/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument(
    "--pretrained_checkpoint", default="VoCo_10k.pt", type=str, help="VoCo_10k pretrained model")
parser.add_argument("--csv_list", default="./csv/", type=str, help="csv directory")
parser.add_argument("--fold", default=0, type=int, help="fold")
parser.add_argument("--data_dir", default="/data/jiaxin/data/CC-CCII_public/data/", type=str, help="dataset directory")
parser.add_argument(
    "--pretrained_model_name",
    default="model_bestVal_big.pt",
    type=str,
    help="pretrained model name",
)
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
parser.add_argument("--max_epochs", default=100, type=int, help="max number of training epochs")
parser.add_argument("--batch_size", default=4, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=1, type=int, help="number of sliding window batch size")
parser.add_argument("--optim_lr", default=3e-4, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=1e-5, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", action="store_true", help="do NOT use amp for training")
parser.add_argument("--val_every", default=5, type=int, help="validation frequency")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=4, type=int, help="number of workers")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=3, type=int, help="number of output channels")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
parser.add_argument("--warmup_epochs", default=5, type=int, help="number of warmup epochs")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=True, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Parse CLI arguments and launch training, single- or multi-process."""
    args = parser.parse_args()
    args.amp = not args.noamp  # AMP is on unless --noamp was given
    if not args.distributed:
        # Single-process path: run everything on GPU 0 of this process.
        main_worker(gpu=0, args=args)
        return
    # Distributed path: one worker process per visible GPU.
    args.ngpus_per_node = torch.cuda.device_count()
    print("Found total gpus", args.ngpus_per_node)
    args.world_size = args.ngpus_per_node * args.world_size
    mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))
def main_worker(gpu, args):
    """Per-process training entry point.

    Builds the Swin classifier, optionally loads SSL-pretrained or resumed
    weights, sets up the optimizer/scheduler and hands off to run_training.

    Args:
        gpu: local GPU index for this process (0 when not distributed).
        args: parsed command-line namespace (mutated in place).
    """
    if args.distributed:
        torch.multiprocessing.set_start_method("fork", force=True)
    np.set_printoptions(formatter={"float": "{: 0.3f}".format}, suppress=True)
    args.gpu = gpu
    if args.distributed:
        # Global rank = node rank * gpus-per-node + local gpu index.
        args.rank = args.rank * args.ngpus_per_node + gpu
        dist.init_process_group(
            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank
        )
    torch.cuda.set_device(args.gpu)
    torch.backends.cudnn.benchmark = True
    args.test_mode = False
    loader = get_loader(args)
    print(args.rank, " gpu", args.gpu)
    if args.rank == 0:
        print("Batch size is:", args.batch_size, "epochs", args.max_epochs)
    if args.rank == 0:
        os.makedirs(args.logdir, exist_ok=True)
    logger = init_log('global', logging.INFO)
    logger.propagate = 0
    pretrained_dir = args.pretrained_dir
    from model import Swin
    model = Swin(args)
    # from densenet import densenet3d
    # model = densenet3d()
    if args.resume_ckpt:
        # Resume from a previously fine-tuned checkpoint (strict load).
        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))["state_dict"]
        model.load_state_dict(model_dict)
        print("Use pretrained weights")
    if args.use_ssl_pretrained:
        try:
            # model_VoCoEMA.pt
            # model_dict = torch.load("./pretrained_models/supervised_suprem_swinunetr_2100.pth", map_location=torch.device('cpu'))
            # model_dict = torch.load("./pretrained_models/model_VoCoEMA.pt", map_location=torch.device('cpu'))
            model_dict = torch.load(args.pretrained_checkpoint,
                                    map_location=torch.device('cpu'))
            state_dict = model_dict
            # fix potential differences in state dict keys from pre-training to
            # fine-tuning
            if "module." in list(state_dict.keys())[0]:
                print("Tag 'module.' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("module.", "")] = state_dict.pop(key)
            if "swin_vit" in list(state_dict.keys())[0]:
                print("Tag 'swin_vit' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
            # We now load model weights, setting param `strict` to False, i.e.:
            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves
            # the decoder weights untouched (CNN UNet decoder).
            model.load_state_dict(state_dict, strict=False)
            print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
        except ValueError:
            # NOTE(review): args defines no `model_name` attribute, so if this
            # branch ever fires it raises AttributeError instead — confirm.
            raise ValueError("Self-supervised pre-trained weights not available for" + str(args.model_name))
    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print("Total parameters count", pytorch_total_params)
    best_acc = 0
    start_epoch = 0
    if args.checkpoint is not None:
        # Resume training state; strip the "backbone." prefix written by the
        # pre-training wrapper before loading.
        checkpoint = torch.load(args.checkpoint, map_location="cpu")
        from collections import OrderedDict
        new_state_dict = OrderedDict()
        for k, v in checkpoint["state_dict"].items():
            new_state_dict[k.replace("backbone.", "")] = v
        model.load_state_dict(new_state_dict, strict=False)
        if "epoch" in checkpoint:
            start_epoch = checkpoint["epoch"]
        if "best_acc" in checkpoint:
            best_acc = checkpoint["best_acc"]
        print("=> loaded checkpoint '{}' (epoch {}) (bestacc {})".format(args.checkpoint, start_epoch, best_acc))
    model.cuda(args.gpu)
    if args.distributed:
        torch.cuda.set_device(args.gpu)
        if args.norm_name == "batch":
            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model.cuda(args.gpu)
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)
    if args.optim_name == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "adamw":
        # NOTE(review): reg_weight is not passed here, so AdamW keeps its
        # default weight decay — confirm this asymmetry with adam/sgd is intended.
        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)
    elif args.optim_name == "sgd":
        optimizer = torch.optim.SGD(
            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight
        )
    else:
        raise ValueError("Unsupported Optimization Procedure: " + str(args.optim_name))
    if args.lrschedule == "warmup_cosine":
        # Scheduler is stepped per iteration, so convert epochs to steps.
        max_steps = args.max_epochs * len(loader[0])
        warmup_steps = args.warmup_epochs * len(loader[0])
        scheduler = LinearWarmupCosineAnnealingLR(
            optimizer, warmup_epochs=warmup_steps, max_epochs=max_steps
        )
    elif args.lrschedule == "cosine_anneal":
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)
        if args.checkpoint is not None:
            # Fast-forward the schedule to the resumed epoch.
            scheduler.step(epoch=start_epoch)
    else:
        scheduler = None
    accuracy = run_training(
        model=model,
        train_loader=loader[0],
        val_loader=loader[1],
        optimizer=optimizer,
        args=args,
        scheduler=scheduler,
        start_epoch=start_epoch,
    )
    return accuracy
# Registry of (name, level) pairs already configured, to avoid attaching
# duplicate handlers when init_log is called more than once.
logs = set()


def init_log(name, level=logging.INFO):
    """Return a configured logger, attaching its handler at most once.

    Args:
        name: logger name passed to logging.getLogger.
        level: logging level for both the logger and its stream handler.

    Returns:
        The logging.Logger for ``name``. Under SLURM, only rank 0 emits
        records (a filter silences other ranks).

    Bug fix: previously a repeat call with the same (name, level) returned
    None, which crashed callers doing ``init_log(...).propagate = 0``; now
    the already-configured logger is returned instead.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        return logger
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        rank = int(os.environ["SLURM_PROCID"])
        # Drop all records on non-zero ranks to avoid duplicated log lines.
        logger.addFilter(lambda record: rank == 0)
    else:
        rank = 0
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
# Script entry point.
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/CC-CCII/model.py
================================================
import torch
import torch.nn as nn
import numpy as np
from monai.networks.nets.swin_unetr import *
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
import argparse
import torch.nn.functional as F
class Swin(nn.Module):
    """Swin-UNETR architecture re-used as a 3-class volume classifier.

    The encoder/decoder mirrors MONAI's SwinUNETR (Swin-ViT encoder plus
    UNETR-style convolutional blocks with skip connections), but instead of a
    segmentation head, the full-resolution decoder output is globally
    average-pooled and fed to a linear layer with 3 outputs
    (CC-CCII classes: Normal / CP / NCP — presumably; confirm against labels).
    """

    def __init__(self, args):
        super(Swin, self).__init__()
        # Patch/window size replicated once per spatial dimension (3D here).
        patch_size = ensure_tuple_rep(2, args.spatial_dims)
        window_size = ensure_tuple_rep(7, args.spatial_dims)
        self.swinViT = SwinViT(
            in_chans=args.in_channels,
            embed_dim=args.feature_size,
            window_size=window_size,
            patch_size=patch_size,
            depths=[2, 2, 2, 2],
            num_heads=[3, 6, 12, 24],
            mlp_ratio=4.0,
            qkv_bias=True,
            drop_rate=0.0,
            attn_drop_rate=0.0,
            drop_path_rate=args.dropout_path_rate,
            norm_layer=torch.nn.LayerNorm,
            use_checkpoint=args.use_checkpoint,
            spatial_dims=args.spatial_dims,
            use_v2=True
        )
        norm_name = 'instance'
        # Encoder blocks refine the raw input and each Swin stage's features.
        self.encoder1 = UnetrBasicBlock(
            spatial_dims=args.spatial_dims,
            in_channels=args.in_channels,
            out_channels=args.feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        self.encoder2 = UnetrBasicBlock(
            spatial_dims=args.spatial_dims,
            in_channels=args.feature_size,
            out_channels=args.feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        self.encoder3 = UnetrBasicBlock(
            spatial_dims=args.spatial_dims,
            in_channels=2 * args.feature_size,
            out_channels=2 * args.feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        self.encoder4 = UnetrBasicBlock(
            spatial_dims=args.spatial_dims,
            in_channels=4 * args.feature_size,
            out_channels=4 * args.feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        # Bottleneck block on the deepest (16x channels) Swin features.
        self.encoder10 = UnetrBasicBlock(
            spatial_dims=args.spatial_dims,
            in_channels=16 * args.feature_size,
            out_channels=16 * args.feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        # Decoder blocks: each upsamples 2x and fuses a skip connection.
        self.decoder5 = UnetrUpBlock(
            spatial_dims=args.spatial_dims,
            in_channels=16 * args.feature_size,
            out_channels=8 * args.feature_size,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder4 = UnetrUpBlock(
            spatial_dims=args.spatial_dims,
            in_channels=args.feature_size * 8,
            out_channels=args.feature_size * 4,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder3 = UnetrUpBlock(
            spatial_dims=args.spatial_dims,
            in_channels=args.feature_size * 4,
            out_channels=args.feature_size * 2,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder2 = UnetrUpBlock(
            spatial_dims=args.spatial_dims,
            in_channels=args.feature_size * 2,
            out_channels=args.feature_size,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder1 = UnetrUpBlock(
            spatial_dims=args.spatial_dims,
            in_channels=args.feature_size,
            out_channels=args.feature_size,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        # Classification head: pooled decoder features -> 3 class logits.
        self.head = nn.Linear(args.feature_size, 3)

    def forward(self, x_in):
        # Batch size, used to flatten pooled features for the linear head.
        b = x_in.size()[0]
        # Duplicate the volume along the depth axis (dim=2), doubling depth.
        # NOTE(review): concatenating the same volume twice looks like a
        # shape workaround for the encoder's expected depth — confirm intent.
        x_in = torch.cat([x_in, x_in], dim=2)
        hidden_states_out = self.swinViT(x_in)
        enc0 = self.encoder1(x_in)
        enc1 = self.encoder2(hidden_states_out[0])
        enc2 = self.encoder3(hidden_states_out[1])
        enc3 = self.encoder4(hidden_states_out[2])
        dec4 = self.encoder10(hidden_states_out[4])
        # U-Net style decoding with skip connections from the encoder stages.
        dec3 = self.decoder5(dec4, hidden_states_out[3])
        dec2 = self.decoder4(dec3, enc3)
        dec1 = self.decoder3(dec2, enc2)
        dec0 = self.decoder2(dec1, enc1)
        out = self.decoder1(dec0, enc0)
        # Global average pool -> (b, feature_size) -> 3-way logits.
        out = F.adaptive_avg_pool3d(out, (1, 1, 1))
        out = self.head(out.view(b, -1))
        return out
# Standalone smoke test: build the classifier with default hyper-parameters
# and push a random (batch=2, channel=1, 32x128x128) volume through it.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="PyTorch Training")
    parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
    parser.add_argument("--feature_size", default=48, type=int, help="embedding size")
    parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
    parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
    parser.add_argument("--use_checkpoint", action="store_true", help="use gradient checkpointing to save memory")
    parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
    args = parser.parse_args()
    x = torch.rand(2, 1, 32, 128, 128)
    model = Swin(args)
    y = model(x)
    print(y.shape)
================================================
FILE: Finetune/CC-CCII/optimizers/__init__.py
================================================
================================================
FILE: Finetune/CC-CCII/optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
# Public API of this module. Previously this omitted WarmupCosineSchedule and
# LinearWarmupCosineAnnealingLR even though both are defined (and used) here.
__all__ = ["LinearLR", "ExponentialLR", "WarmupCosineSchedule", "LinearWarmupCosineAnnealingLR"]
class _LRSchedulerMONAI(_LRScheduler):
    """Shared base for schedulers that sweep the learning rate between two
    boundaries over a fixed number of iterations.

    Subclasses implement ``get_lr`` using ``self.end_lr`` and
    ``self.num_iter``.
    """

    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            end_lr: the final learning rate.
            num_iter: the number of iterations over which the test occurs.
            last_epoch: the index of last epoch.
        """
        # Record the sweep parameters before the base class triggers the
        # first get_lr() call inside its __init__.
        self.end_lr = end_lr
        self.num_iter = num_iter
        super().__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
"""Linearly increases the learning rate between two boundaries over a number of
iterations.
"""
def get_lr(self):
r = self.last_epoch / (self.num_iter - 1)
return [base_lr + r * (self.end_lr - base_lr) for base_lr in self.base_lrs]
class ExponentialLR(_LRSchedulerMONAI):
"""Exponentially increases the learning rate between two boundaries over a number of
iterations.
"""
def get_lr(self):
r = self.last_epoch / (self.num_iter - 1)
return [base_lr * (self.end_lr / base_lr) ** r for base_lr in self.base_lrs]
class WarmupCosineSchedule(LambdaLR):
"""Linear warmup and then cosine decay.
Based on https://huggingface.co/ implementation.
"""
def __init__(
self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
) -> None:
"""
Args:
optimizer: wrapped optimizer.
warmup_steps: number of warmup iterations.
t_total: total number of training iterations.
cycles: cosine cycles parameter.
last_epoch: the index of last epoch.
Returns:
None
"""
self.warmup_steps = warmup_steps
self.t_total = t_total
self.cycles = cycles
super(WarmupCosineSchedule, self).__init__(optimizer, self.lr_lambda, last_epoch)
def lr_lambda(self, step):
if step < self.warmup_steps:
return float(step) / float(max(1.0, self.warmup_steps))
progress = float(step - self.warmup_steps) / float(max(1, self.t_total - self.warmup_steps))
return max(0.0, 0.5 * (1.0 + math.cos(math.pi * float(self.cycles) * 2.0 * progress)))
class LinearWarmupCosineAnnealingLR(_LRScheduler):
def __init__(
self,
optimizer: Optimizer,
warmup_epochs: int,
max_epochs: int,
warmup_start_lr: float = 0.0,
eta_min: float = 0.0,
last_epoch: int = -1,
) -> None:
"""
Args:
optimizer (Optimizer): Wrapped optimizer.
warmup_epochs (int): Maximum number of iterations for linear warmup
max_epochs (int): Maximum number of iterations
warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
eta_min (float): Minimum learning rate. Default: 0.
last_epoch (int): The index of last epoch. Default: -1.
"""
self.warmup_epochs = warmup_epochs
self.max_epochs = max_epochs
self.warmup_start_lr = warmup_start_lr
self.eta_min = eta_min
super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)
def get_lr(self) -> List[float]:
"""
Compute learning rate using chainable form of the scheduler
"""
if not self._get_lr_called_within_step:
warnings.warn(
"To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
)
if self.last_epoch == 0:
return [self.warmup_start_lr] * len(self.base_lrs)
elif self.last_epoch < self.warmup_epochs:
return [
group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
]
elif self.last_epoch == self.warmup_epochs:
return self.base_lrs
elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
return [
group["lr"]
+ (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
]
return [
(1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
/ (
1
+ math.cos(
math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
)
)
* (group["lr"] - self.eta_min)
+ self.eta_min
for group in self.optimizer.param_groups
]
def _get_closed_form_lr(self) -> List[float]:
"""
Called when epoch is passed as a param to the `step` function of the scheduler.
"""
if self.last_epoch < self.warmup_epochs:
return [
self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
for base_lr in self.base_lrs
]
return [
self.eta_min
+ 0.5
* (base_lr - self.eta_min)
* (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
for base_lr in self.base_lrs
]
================================================
FILE: Finetune/CC-CCII/train.sh
================================================
now=$(date +"%Y%m%d_%H%M%S")
logdir=runs/logs
mkdir -p $logdir
torchrun --master_port=25584 main.py \
--logdir $logdir | tee $logdir/$now.txt
================================================
FILE: Finetune/CC-CCII/trainer.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import numpy as np
import torch
import torch.nn.parallel
import torch.utils.data.distributed
from tensorboardX import SummaryWriter
from torch.cuda.amp import GradScaler, autocast
from utils.utils import AverageMeter, distributed_all_gather
import torch.nn.functional as F
from monai.data import decollate_batch
def resize(img):
size = 256
b, _, c, h, w = img.size()
new_img = []
for i in range(b):
im = img[i, :, :, :, :]
im = F.interpolate(im, size=[size, size], mode='bilinear', align_corners=True)
new_img.append(im.unsqueeze(0))
new_img = torch.cat(new_img, dim=0)
return new_img
def train_epoch(model, loader, optimizer, scheduler, scaler, epoch, args):
model.train()
start_time = time.time()
run_loss = AverageMeter()
loss_func = torch.nn.CrossEntropyLoss()
for idx, batch_data in enumerate(loader):
if isinstance(batch_data, list):
data, target = batch_data
else:
data, target = batch_data["image"], batch_data["label"]
data = resize(data)
data, target = data.cuda(args.rank), target.cuda(args.rank)
for param in model.parameters():
param.grad = None
logits = model(data)
loss = loss_func(logits, target)
# print(logits.argmax(1)[0].item(), target[0].item())
loss.backward()
optimizer.step()
run_loss.update(loss.item(), n=args.batch_size)
lr = optimizer.param_groups[0]["lr"]
if scheduler is not None:
scheduler.step()
length = len(loader) // 4
if args.rank == 0 and (idx + 1) % length == 0:
print(
"Epoch {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
"loss: {:.4f}".format(run_loss.avg),
"lr: {:.8f}".format(lr),
"time {:.2f}s".format(time.time() - start_time),
)
start_time = time.time()
for param in model.parameters():
param.grad = None
return run_loss.avg
def val_epoch(model, loader, epoch, args):
model.eval()
start_time = time.time()
with torch.no_grad():
num_correct = 0.0
metric_count = 0
for idx, batch_data in enumerate(loader):
if isinstance(batch_data, list):
data, target = batch_data
else:
data, target = batch_data["image"], batch_data["label"]
data = resize(data)
data, target = data.cuda(args.rank), target.cuda(args.rank)
with autocast(enabled=args.amp):
logits = model(data)
value = torch.eq(logits.argmax(dim=1), target)
metric_count += len(value)
num_correct += value.sum().item()
metric = num_correct / metric_count
if args.rank == 0:
print(
"Val {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
"acc",
metric,
"time {:.2f}s".format(time.time() - start_time),
)
return metric
def save_checkpoint(model, epoch, args, filename="model.pt", best_acc=0, optimizer=None, scheduler=None):
state_dict = model.state_dict() if not args.distributed else model.module.state_dict()
save_dict = {"epoch": epoch, "best_acc": best_acc, "state_dict": state_dict}
if optimizer is not None:
save_dict["optimizer"] = optimizer.state_dict()
if scheduler is not None:
save_dict["scheduler"] = scheduler.state_dict()
filename = os.path.join(args.logdir, filename)
torch.save(save_dict, filename)
print("Saving checkpoint", filename)
def run_training(
model,
train_loader,
val_loader,
optimizer,
args,
scheduler=None,
start_epoch=0,
):
writer = None
if args.logdir is not None and args.rank == 0:
writer = SummaryWriter(log_dir=args.logdir)
if args.rank == 0:
print("Writing Tensorboard logs to ", args.logdir)
scaler = None
if args.amp:
scaler = GradScaler()
val_acc_max = 0.0
for epoch in range(start_epoch, args.max_epochs):
if args.distributed:
train_loader.sampler.set_epoch(epoch)
torch.distributed.barrier()
print(args.rank, time.ctime(), "Epoch:", epoch)
epoch_time = time.time()
train_loss = train_epoch(
model, train_loader, optimizer, scheduler, scaler=scaler, epoch=epoch, args=args
)
if args.rank == 0:
print(
"Final training {}/{}".format(epoch, args.max_epochs - 1),
"loss: {:.4f}".format(train_loss),
"time {:.2f}s".format(time.time() - epoch_time),
)
if args.rank == 0 and writer is not None:
writer.add_scalar("train_loss", train_loss, epoch)
b_new_best = False
if (epoch + 1) % args.val_every == 0:
if args.distributed:
torch.distributed.barrier()
epoch_time = time.time()
val_avg_acc = val_epoch(
model,
val_loader,
epoch=epoch,
args=args,
)
val_avg_acc = np.mean(val_avg_acc)
if args.rank == 0:
print(
"Final validation {}/{}".format(epoch, args.max_epochs - 1),
"acc",
val_avg_acc,
"time {:.2f}s".format(time.time() - epoch_time),
)
if writer is not None:
writer.add_scalar("val_acc", val_avg_acc, epoch)
if val_avg_acc > val_acc_max:
print("new best ({:.6f} --> {:.6f}). ".format(val_acc_max, val_avg_acc))
val_acc_max = val_avg_acc
b_new_best = True
if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
save_checkpoint(
model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler
)
if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename="model_final.pt")
if b_new_best:
print("Copying to model.pt new best model!!!!")
shutil.copyfile(os.path.join(args.logdir, "model_final.pt"), os.path.join(args.logdir, "model.pt"))
if scheduler is not None:
scheduler.step()
print("Training Finished !, Best Accuracy: ", val_acc_max)
return val_acc_max
================================================
FILE: Finetune/CC-CCII/utils/__init__.py
================================================
================================================
FILE: Finetune/CC-CCII/utils/data_utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
from monai import data, transforms
from monai.data import *
import pandas as pd
import random
def get_loader(args):
'''Get the dataloader for the CCII dataset.'''
# Transforms
def __transforms__(augmentation=True, npy=None, args=None):
RANDOM_BRIGHTNESS = 7
RANDOM_CONTRAST = 5
pre_size = 420
final_size = 384
spatial_limit = int((pre_size-final_size)/2.0)
# pre_top_left = int((512-pre_size)/2.0)
final_top_left = int((512-final_size)/2.0)
npy_normalized = npy.astype(np.float32) / 255.0 # cast to float
if augmentation:
# random flip
if random.uniform(0, 1) < 0.5: #horizontal flip
npy_normalized = np.flipud(npy_normalized)
# color jitter
br = random.randint(-RANDOM_BRIGHTNESS, RANDOM_BRIGHTNESS) / 100.
npy_normalized = npy_normalized + br
# Random contrast
cr = 1.0 + random.randint(-RANDOM_CONTRAST, RANDOM_CONTRAST) / 100.
npy_normalized = npy_normalized * cr
# clip values to 0-1 range
npy_normalized = np.clip(npy_normalized, 0, 1.0)
# random crop
offset_x = random.randint(-spatial_limit, spatial_limit)
offset_y = random.randint(-spatial_limit, spatial_limit)
npy_normalized = npy_normalized[
:,
final_top_left+offset_x : final_top_left+final_size+offset_x,
final_top_left+offset_y : final_top_left+final_size+offset_y
]
else:
npy_normalized = npy_normalized[
:,
final_top_left : final_top_left+final_size,
final_top_left : final_top_left+final_size
]
return npy_normalized
train_files_name = os.path.join(args.csv_list, f'CC_CCII_fold{args.fold}_train.csv')
val_files_name = os.path.join(args.csv_list, f'CC_CCII_fold{args.fold}_valid.csv')
train_files = pd.read_csv(train_files_name)
val_files = pd.read_csv(val_files_name)
train_ds = CC_CCII(data=train_files, transforms=__transforms__, augmentation=True, args=args)
print(f'=>Train len {len(train_ds)}')
train_loader = torch.utils.data.DataLoader(
train_ds, batch_size=args.batch_size, shuffle=True,
num_workers=8, pin_memory=True, persistent_workers=True,
)
val_ds = CC_CCII(data=val_files, transforms=__transforms__, augmentation=False,args=args)
print(f'=>Val len {len(val_ds)}')
val_loader = torch.utils.data.DataLoader(
val_ds, batch_size=1, shuffle=False, num_workers=1, pin_memory=True, persistent_workers=True)
return train_loader, val_loader
class CC_CCII(torch.utils.data.Dataset):
'''CC_CCII Covid-19 classification dataset.
This dataset is used for Covid-19 classification.
It loads the data from the given directory and csv file.
The data is preprocessed and augmented using various techniques.
http://ncov-ai.big.ac.cn/download?lang=en
'''
def __init__(self, data=None, transforms=None, augmentation=True, args=None):
super().__init__()
self.augmentation = augmentation
self.df_meta = pd.read_csv(os.path.join(args.csv_list, 'CC_CCII_metadata.csv'))
df = data
self.patients = df['patient_id']
self.scans = df['scan_id']
self.targets = df['target']
self.transforms = transforms
self.args = args
def __getitem__(self, index):
target = int(self.targets[index])
npy = np.load(
os.path.join(
self.args.data_dir,
'p'+str(self.patients[index])+'-s'+str(self.scans[index])+'.npy'
)
)
meta = self.df_meta[(self.df_meta['patient_id'] == self.patients[index])]
covariates = [
'Age',
'Sex(Male1/Female2)',
'Critical_illness',
'Liver_function',
'Lung_function',
'Progression (Days)'
]
if meta.size == 0:
meta = np.array([47, 1.5, 0, 1, 2, 6.89],dtype='f8')
else:
meta = meta.sample(frac=1.0, replace=True, weights=None, random_state=0, axis=0)
meta = np.squeeze(meta[covariates].to_numpy(), axis=0)
meta[0] = np.clip(meta[0] / 100, 0.25, 0.95)
meta[1] = meta[1] - 1
meta[3] = meta[3] / 5
meta[4] = meta[4] / 5
meta[-1] = meta[-1] / 14
npy_normalized = self.transforms(self.augmentation, npy, self.args)
npy_normalized = npy_normalized[np.newaxis,]
return {
'image': npy_normalized,
'label': target
}
def __len__(self):
return len(self.targets)
================================================
FILE: Finetune/CC-CCII/utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
def resample_3d(img, target_size):
imx, imy, imz = img.shape
tx, ty, tz = target_size
zoom_ratio = (float(tx) / float(imx), float(ty) / float(imy), float(tz) / float(imz))
img_resampled = ndimage.zoom(img, zoom_ratio, order=0, prefilter=False)
return img_resampled
def dice(x, y):
intersect = np.sum(np.sum(np.sum(x * y)))
y_sum = np.sum(np.sum(np.sum(y)))
if y_sum == 0:
return 0.0
x_sum = np.sum(np.sum(np.sum(x)))
return 2 * intersect / (x_sum + y_sum)
class AverageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)
def distributed_all_gather(
tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
if world_size is None:
world_size = torch.distributed.get_world_size()
if valid_batch_size is not None:
valid_batch_size = min(valid_batch_size, world_size)
elif is_valid is not None:
is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
if not no_barrier:
torch.distributed.barrier()
tensor_list_out = []
with torch.no_grad():
if is_valid is not None:
is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
torch.distributed.all_gather(is_valid_list, is_valid)
is_valid = [x.item() for x in is_valid_list]
for tensor in tensor_list:
gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
torch.distributed.all_gather(gather_list, tensor)
if valid_batch_size is not None:
gather_list = gather_list[:valid_batch_size]
elif is_valid is not None:
gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
if out_numpy:
gather_list = [t.cpu().numpy() for t in gather_list]
tensor_list_out.append(gather_list)
return tensor_list_out
================================================
FILE: Finetune/Flare22/__init__.py
================================================
================================================
FILE: Finetune/Flare22/dataset/__init__.py
================================================
================================================
FILE: Finetune/Flare22/dataset/dataset.json
================================================
{
"description": "0",
"labels": {
"0": "background",
"1": "Liver",
"10": "Esophagus",
"11": "Stomach",
"12": "Duodenum",
"13": "Left Kidney",
"2": "Right kidney",
"3": "Spleen",
"4": "Pancreas",
"5": "Aorta",
"6": "Inferior vena cava",
"7": "Right adrenal gland",
"8": "Left adrenal gland",
"9": "Gallbladder"
},
"licence": "hands off!",
"modality": {
"0": "CT"
},
"name": "FLARE22",
"numTest": 200,
"numTraining": 50,
"reference": "0",
"release": "0.0",
"tensorImageSize": "4D",
"test": [
"./imagesTs/FLARETs_0001_0000.nii.gz",
"./imagesTs/FLARETs_0002_0000.nii.gz",
"./imagesTs/FLARETs_0003_0000.nii.gz",
"./imagesTs/FLARETs_0004_0000.nii.gz",
"./imagesTs/FLARETs_0005_0000.nii.gz",
"./imagesTs/FLARETs_0006_0000.nii.gz",
"./imagesTs/FLARETs_0007_0000.nii.gz",
"./imagesTs/FLARETs_0008_0000.nii.gz",
"./imagesTs/FLARETs_0009_0000.nii.gz",
"./imagesTs/FLARETs_0010_0000.nii.gz",
"./imagesTs/FLARETs_0011_0000.nii.gz",
"./imagesTs/FLARETs_0012_0000.nii.gz",
"./imagesTs/FLARETs_0013_0000.nii.gz",
"./imagesTs/FLARETs_0014_0000.nii.gz",
"./imagesTs/FLARETs_0015_0000.nii.gz",
"./imagesTs/FLARETs_0016_0000.nii.gz",
"./imagesTs/FLARETs_0017_0000.nii.gz",
"./imagesTs/FLARETs_0018_0000.nii.gz",
"./imagesTs/FLARETs_0019_0000.nii.gz",
"./imagesTs/FLARETs_0020_0000.nii.gz",
"./imagesTs/FLARETs_0021_0000.nii.gz",
"./imagesTs/FLARETs_0022_0000.nii.gz",
"./imagesTs/FLARETs_0023_0000.nii.gz",
"./imagesTs/FLARETs_0024_0000.nii.gz",
"./imagesTs/FLARETs_0025_0000.nii.gz",
"./imagesTs/FLARETs_0026_0000.nii.gz",
"./imagesTs/FLARETs_0027_0000.nii.gz",
"./imagesTs/FLARETs_0028_0000.nii.gz",
"./imagesTs/FLARETs_0029_0000.nii.gz",
"./imagesTs/FLARETs_0030_0000.nii.gz",
"./imagesTs/FLARETs_0031_0000.nii.gz",
"./imagesTs/FLARETs_0032_0000.nii.gz",
"./imagesTs/FLARETs_0033_0000.nii.gz",
"./imagesTs/FLARETs_0034_0000.nii.gz",
"./imagesTs/FLARETs_0035_0000.nii.gz",
"./imagesTs/FLARETs_0036_0000.nii.gz",
"./imagesTs/FLARETs_0037_0000.nii.gz",
"./imagesTs/FLARETs_0038_0000.nii.gz",
"./imagesTs/FLARETs_0039_0000.nii.gz",
"./imagesTs/FLARETs_0040_0000.nii.gz",
"./imagesTs/FLARETs_0041_0000.nii.gz",
"./imagesTs/FLARETs_0042_0000.nii.gz",
"./imagesTs/FLARETs_0043_0000.nii.gz",
"./imagesTs/FLARETs_0044_0000.nii.gz",
"./imagesTs/FLARETs_0045_0000.nii.gz",
"./imagesTs/FLARETs_0046_0000.nii.gz",
"./imagesTs/FLARETs_0047_0000.nii.gz",
"./imagesTs/FLARETs_0048_0000.nii.gz",
"./imagesTs/FLARETs_0049_0000.nii.gz",
"./imagesTs/FLARETs_0050_0000.nii.gz",
"./imagesTs/FLARETs_0051_0000.nii.gz",
"./imagesTs/FLARETs_0052_0000.nii.gz",
"./imagesTs/FLARETs_0053_0000.nii.gz",
"./imagesTs/FLARETs_0054_0000.nii.gz",
"./imagesTs/FLARETs_0055_0000.nii.gz",
"./imagesTs/FLARETs_0056_0000.nii.gz",
"./imagesTs/FLARETs_0057_0000.nii.gz",
"./imagesTs/FLARETs_0058_0000.nii.gz",
"./imagesTs/FLARETs_0059_0000.nii.gz",
"./imagesTs/FLARETs_0060_0000.nii.gz",
"./imagesTs/FLARETs_0061_0000.nii.gz",
"./imagesTs/FLARETs_0062_0000.nii.gz",
"./imagesTs/FLARETs_0063_0000.nii.gz",
"./imagesTs/FLARETs_0064_0000.nii.gz",
"./imagesTs/FLARETs_0065_0000.nii.gz",
"./imagesTs/FLARETs_0066_0000.nii.gz",
"./imagesTs/FLARETs_0067_0000.nii.gz",
"./imagesTs/FLARETs_0068_0000.nii.gz",
"./imagesTs/FLARETs_0069_0000.nii.gz",
"./imagesTs/FLARETs_0070_0000.nii.gz",
"./imagesTs/FLARETs_0071_0000.nii.gz",
"./imagesTs/FLARETs_0072_0000.nii.gz",
"./imagesTs/FLARETs_0073_0000.nii.gz",
"./imagesTs/FLARETs_0074_0000.nii.gz",
"./imagesTs/FLARETs_0075_0000.nii.gz",
"./imagesTs/FLARETs_0076_0000.nii.gz",
"./imagesTs/FLARETs_0077_0000.nii.gz",
"./imagesTs/FLARETs_0078_0000.nii.gz",
"./imagesTs/FLARETs_0079_0000.nii.gz",
"./imagesTs/FLARETs_0080_0000.nii.gz",
"./imagesTs/FLARETs_0081_0000.nii.gz",
"./imagesTs/FLARETs_0082_0000.nii.gz",
"./imagesTs/FLARETs_0083_0000.nii.gz",
"./imagesTs/FLARETs_0084_0000.nii.gz",
"./imagesTs/FLARETs_0085_0000.nii.gz",
"./imagesTs/FLARETs_0086_0000.nii.gz",
"./imagesTs/FLARETs_0087_0000.nii.gz",
"./imagesTs/FLARETs_0088_0000.nii.gz",
"./imagesTs/FLARETs_0089_0000.nii.gz",
"./imagesTs/FLARETs_0090_0000.nii.gz",
"./imagesTs/FLARETs_0091_0000.nii.gz",
"./imagesTs/FLARETs_0092_0000.nii.gz",
"./imagesTs/FLARETs_0093_0000.nii.gz",
"./imagesTs/FLARETs_0094_0000.nii.gz",
"./imagesTs/FLARETs_0095_0000.nii.gz",
"./imagesTs/FLARETs_0096_0000.nii.gz",
"./imagesTs/FLARETs_0097_0000.nii.gz",
"./imagesTs/FLARETs_0098_0000.nii.gz",
"./imagesTs/FLARETs_0099_0000.nii.gz",
"./imagesTs/FLARETs_0100_0000.nii.gz",
"./imagesTs/FLARETs_0101_0000.nii.gz",
"./imagesTs/FLARETs_0102_0000.nii.gz",
"./imagesTs/FLARETs_0103_0000.nii.gz",
"./imagesTs/FLARETs_0104_0000.nii.gz",
"./imagesTs/FLARETs_0105_0000.nii.gz",
"./imagesTs/FLARETs_0106_0000.nii.gz",
"./imagesTs/FLARETs_0107_0000.nii.gz",
"./imagesTs/FLARETs_0108_0000.nii.gz",
"./imagesTs/FLARETs_0109_0000.nii.gz",
"./imagesTs/FLARETs_0110_0000.nii.gz",
"./imagesTs/FLARETs_0111_0000.nii.gz",
"./imagesTs/FLARETs_0112_0000.nii.gz",
"./imagesTs/FLARETs_0113_0000.nii.gz",
"./imagesTs/FLARETs_0114_0000.nii.gz",
"./imagesTs/FLARETs_0115_0000.nii.gz",
"./imagesTs/FLARETs_0116_0000.nii.gz",
"./imagesTs/FLARETs_0117_0000.nii.gz",
"./imagesTs/FLARETs_0118_0000.nii.gz",
"./imagesTs/FLARETs_0119_0000.nii.gz",
"./imagesTs/FLARETs_0120_0000.nii.gz",
"./imagesTs/FLARETs_0121_0000.nii.gz",
"./imagesTs/FLARETs_0122_0000.nii.gz",
"./imagesTs/FLARETs_0123_0000.nii.gz",
"./imagesTs/FLARETs_0124_0000.nii.gz",
"./imagesTs/FLARETs_0125_0000.nii.gz",
"./imagesTs/FLARETs_0126_0000.nii.gz",
"./imagesTs/FLARETs_0127_0000.nii.gz",
"./imagesTs/FLARETs_0128_0000.nii.gz",
"./imagesTs/FLARETs_0129_0000.nii.gz",
"./imagesTs/FLARETs_0130_0000.nii.gz",
"./imagesTs/FLARETs_0131_0000.nii.gz",
"./imagesTs/FLARETs_0132_0000.nii.gz",
"./imagesTs/FLARETs_0133_0000.nii.gz",
"./imagesTs/FLARETs_0134_0000.nii.gz",
"./imagesTs/FLARETs_0135_0000.nii.gz",
"./imagesTs/FLARETs_0136_0000.nii.gz",
"./imagesTs/FLARETs_0137_0000.nii.gz",
"./imagesTs/FLARETs_0138_0000.nii.gz",
"./imagesTs/FLARETs_0139_0000.nii.gz",
"./imagesTs/FLARETs_0140_0000.nii.gz",
"./imagesTs/FLARETs_0141_0000.nii.gz",
"./imagesTs/FLARETs_0142_0000.nii.gz",
"./imagesTs/FLARETs_0143_0000.nii.gz",
"./imagesTs/FLARETs_0144_0000.nii.gz",
"./imagesTs/FLARETs_0145_0000.nii.gz",
"./imagesTs/FLARETs_0146_0000.nii.gz",
"./imagesTs/FLARETs_0147_0000.nii.gz",
"./imagesTs/FLARETs_0148_0000.nii.gz",
"./imagesTs/FLARETs_0149_0000.nii.gz",
"./imagesTs/FLARETs_0150_0000.nii.gz",
"./imagesTs/FLARETs_0151_0000.nii.gz",
"./imagesTs/FLARETs_0152_0000.nii.gz",
"./imagesTs/FLARETs_0153_0000.nii.gz",
"./imagesTs/FLARETs_0154_0000.nii.gz",
"./imagesTs/FLARETs_0155_0000.nii.gz",
"./imagesTs/FLARETs_0156_0000.nii.gz",
"./imagesTs/FLARETs_0157_0000.nii.gz",
"./imagesTs/FLARETs_0158_0000.nii.gz",
"./imagesTs/FLARETs_0159_0000.nii.gz",
"./imagesTs/FLARETs_0160_0000.nii.gz",
"./imagesTs/FLARETs_0161_0000.nii.gz",
"./imagesTs/FLARETs_0162_0000.nii.gz",
"./imagesTs/FLARETs_0163_0000.nii.gz",
"./imagesTs/FLARETs_0164_0000.nii.gz",
"./imagesTs/FLARETs_0165_0000.nii.gz",
"./imagesTs/FLARETs_0166_0000.nii.gz",
"./imagesTs/FLARETs_0167_0000.nii.gz",
"./imagesTs/FLARETs_0168_0000.nii.gz",
"./imagesTs/FLARETs_0169_0000.nii.gz",
"./imagesTs/FLARETs_0170_0000.nii.gz",
"./imagesTs/FLARETs_0171_0000.nii.gz",
"./imagesTs/FLARETs_0172_0000.nii.gz",
"./imagesTs/FLARETs_0173_0000.nii.gz",
"./imagesTs/FLARETs_0174_0000.nii.gz",
"./imagesTs/FLARETs_0175_0000.nii.gz",
"./imagesTs/FLARETs_0176_0000.nii.gz",
"./imagesTs/FLARETs_0177_0000.nii.gz",
"./imagesTs/FLARETs_0178_0000.nii.gz",
"./imagesTs/FLARETs_0179_0000.nii.gz",
"./imagesTs/FLARETs_0180_0000.nii.gz",
"./imagesTs/FLARETs_0181_0000.nii.gz",
"./imagesTs/FLARETs_0182_0000.nii.gz",
"./imagesTs/FLARETs_0183_0000.nii.gz",
"./imagesTs/FLARETs_0184_0000.nii.gz",
"./imagesTs/FLARETs_0185_0000.nii.gz",
"./imagesTs/FLARETs_0186_0000.nii.gz",
"./imagesTs/FLARETs_0187_0000.nii.gz",
"./imagesTs/FLARETs_0188_0000.nii.gz",
"./imagesTs/FLARETs_0189_0000.nii.gz",
"./imagesTs/FLARETs_0190_0000.nii.gz",
"./imagesTs/FLARETs_0191_0000.nii.gz",
"./imagesTs/FLARETs_0192_0000.nii.gz",
"./imagesTs/FLARETs_0193_0000.nii.gz",
"./imagesTs/FLARETs_0194_0000.nii.gz",
"./imagesTs/FLARETs_0195_0000.nii.gz",
"./imagesTs/FLARETs_0196_0000.nii.gz",
"./imagesTs/FLARETs_0197_0000.nii.gz",
"./imagesTs/FLARETs_0198_0000.nii.gz",
"./imagesTs/FLARETs_0199_0000.nii.gz",
"./imagesTs/FLARETs_0200_0000.nii.gz"
],
"validation": [{
"image": "./imagesTr/FLARE22_Tr_0001_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0001.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0002_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0002.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0003_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0003.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0004_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0004.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0005_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0005.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0006_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0006.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0007_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0007.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0008_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0008.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0009_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0009.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0010_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0010.nii.gz"
}
],
"training": [
{
"image": "./imagesTr/FLARE22_Tr_0011_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0011.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0012_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0012.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0013_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0013.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0014_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0014.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0015_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0015.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0016_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0016.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0017_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0017.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0018_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0018.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0019_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0019.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0020_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0020.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0021_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0021.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0022_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0022.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0023_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0023.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0024_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0024.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0025_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0025.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0026_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0026.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0027_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0027.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0028_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0028.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0029_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0029.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0030_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0030.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0031_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0031.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0032_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0032.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0033_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0033.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0034_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0034.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0035_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0035.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0036_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0036.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0037_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0037.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0038_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0038.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0039_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0039.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0040_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0040.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0041_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0041.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0042_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0042.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0043_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0043.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0044_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0044.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0045_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0045.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0046_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0046.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0047_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0047.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0048_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0048.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0049_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0049.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0050_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0050.nii.gz"
}
]
}
================================================
FILE: Finetune/Flare22/dataset/dataset_test50.json
================================================
{
"description": "0",
"labels": {
"0": "background",
"1": "Liver",
"10": "Esophagus",
"11": "Stomach",
"12": "Duodenum",
"13": "Left Kidney",
"2": "Right kidney",
"3": "Spleen",
"4": "Pancreas",
"5": "Aorta",
"6": "Inferior vena cava",
"7": "Right adrenal gland",
"8": "Left adrenal gland",
"9": "Gallbladder"
},
"licence": "hands off!",
"modality": {
"0": "CT"
},
"name": "FLARE22",
"numTest": 200,
"numTraining": 50,
"reference": "0",
"release": "0.0",
"tensorImageSize": "4D",
"test": [
"./imagesTs/FLARETs_0001_0000.nii.gz",
"./imagesTs/FLARETs_0002_0000.nii.gz",
"./imagesTs/FLARETs_0003_0000.nii.gz",
"./imagesTs/FLARETs_0004_0000.nii.gz",
"./imagesTs/FLARETs_0005_0000.nii.gz",
"./imagesTs/FLARETs_0006_0000.nii.gz",
"./imagesTs/FLARETs_0007_0000.nii.gz",
"./imagesTs/FLARETs_0008_0000.nii.gz",
"./imagesTs/FLARETs_0009_0000.nii.gz",
"./imagesTs/FLARETs_0010_0000.nii.gz",
"./imagesTs/FLARETs_0011_0000.nii.gz",
"./imagesTs/FLARETs_0012_0000.nii.gz",
"./imagesTs/FLARETs_0013_0000.nii.gz",
"./imagesTs/FLARETs_0014_0000.nii.gz",
"./imagesTs/FLARETs_0015_0000.nii.gz",
"./imagesTs/FLARETs_0016_0000.nii.gz",
"./imagesTs/FLARETs_0017_0000.nii.gz",
"./imagesTs/FLARETs_0018_0000.nii.gz",
"./imagesTs/FLARETs_0019_0000.nii.gz",
"./imagesTs/FLARETs_0020_0000.nii.gz",
"./imagesTs/FLARETs_0021_0000.nii.gz",
"./imagesTs/FLARETs_0022_0000.nii.gz",
"./imagesTs/FLARETs_0023_0000.nii.gz",
"./imagesTs/FLARETs_0024_0000.nii.gz",
"./imagesTs/FLARETs_0025_0000.nii.gz",
"./imagesTs/FLARETs_0026_0000.nii.gz",
"./imagesTs/FLARETs_0027_0000.nii.gz",
"./imagesTs/FLARETs_0028_0000.nii.gz",
"./imagesTs/FLARETs_0029_0000.nii.gz",
"./imagesTs/FLARETs_0030_0000.nii.gz",
"./imagesTs/FLARETs_0031_0000.nii.gz",
"./imagesTs/FLARETs_0032_0000.nii.gz",
"./imagesTs/FLARETs_0033_0000.nii.gz",
"./imagesTs/FLARETs_0034_0000.nii.gz",
"./imagesTs/FLARETs_0035_0000.nii.gz",
"./imagesTs/FLARETs_0036_0000.nii.gz",
"./imagesTs/FLARETs_0037_0000.nii.gz",
"./imagesTs/FLARETs_0038_0000.nii.gz",
"./imagesTs/FLARETs_0039_0000.nii.gz",
"./imagesTs/FLARETs_0040_0000.nii.gz",
"./imagesTs/FLARETs_0041_0000.nii.gz",
"./imagesTs/FLARETs_0042_0000.nii.gz",
"./imagesTs/FLARETs_0043_0000.nii.gz",
"./imagesTs/FLARETs_0044_0000.nii.gz",
"./imagesTs/FLARETs_0045_0000.nii.gz",
"./imagesTs/FLARETs_0046_0000.nii.gz",
"./imagesTs/FLARETs_0047_0000.nii.gz",
"./imagesTs/FLARETs_0048_0000.nii.gz",
"./imagesTs/FLARETs_0049_0000.nii.gz",
"./imagesTs/FLARETs_0050_0000.nii.gz"
],
"validation": [{
"image": "./imagesTr/FLARE22_Tr_0001_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0001.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0002_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0002.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0003_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0003.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0004_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0004.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0005_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0005.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0006_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0006.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0007_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0007.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0008_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0008.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0009_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0009.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0010_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0010.nii.gz"
}
],
"training": [
{
"image": "./imagesTr/FLARE22_Tr_0011_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0011.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0012_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0012.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0013_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0013.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0014_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0014.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0015_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0015.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0016_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0016.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0017_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0017.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0018_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0018.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0019_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0019.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0020_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0020.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0021_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0021.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0022_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0022.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0023_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0023.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0024_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0024.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0025_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0025.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0026_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0026.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0027_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0027.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0028_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0028.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0029_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0029.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0030_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0030.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0031_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0031.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0032_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0032.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0033_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0033.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0034_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0034.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0035_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0035.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0036_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0036.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0037_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0037.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0038_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0038.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0039_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0039.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0040_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0040.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0041_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0041.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0042_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0042.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0043_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0043.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0044_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0044.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0045_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0045.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0046_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0046.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0047_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0047.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0048_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0048.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0049_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0049.nii.gz"
},
{
"image": "./imagesTr/FLARE22_Tr_0050_0000.nii.gz",
"label": "./labelsTr/FLARE22_Tr_0050.nii.gz"
}
]
}
================================================
FILE: Finetune/Flare22/inferers.py
================================================
"""Multiview inferer."""
import warnings
from typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union
import torch
import torch.nn.functional as F
from monai.data.utils import compute_importance_map, dense_patch_slices, get_valid_patch_size
from monai.transforms import Resize
from monai.utils import (
BlendMode,
PytorchPadMode,
convert_data_type,
ensure_tuple,
fall_back_tuple,
look_up_option,
optional_import,
)
from monai.inferers.utils import _get_scan_interval
# from utils import view_ops
# from utils import view_transforms
tqdm, _ = optional_import("tqdm", name="tqdm")
def double_sliding_window_inference(
    inputs: torch.Tensor,
    view: int,
    roi_size: Union[Sequence[int], int],
    sw_batch_size: int,
    predictor: Callable[..., Union[torch.Tensor, Sequence[torch.Tensor], Dict[Any, torch.Tensor]]],
    overlap: float = 0.25,
    mode: Union[BlendMode, str] = BlendMode.CONSTANT,
    sigma_scale: Union[Sequence[float], float] = 0.125,
    padding_mode: Union[PytorchPadMode, str] = PytorchPadMode.CONSTANT,
    cval: float = 0.0,
    sw_device: Union[torch.device, str, None] = None,
    device: Union[torch.device, str, None] = None,
    progress: bool = False,
    roi_weight_map: Union[torch.Tensor, None] = None,
    *args: Any,
    **kwargs: Any,
) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[Any, torch.Tensor]]:
    """
    Sliding window inference on two `inputs` with `predictor`.

    Each window is fed to the predictor under two permuted views: `view` itself and
    `(view + 1) % num_permutations`; the two predictions are un-permuted and
    accumulated into two separate full-size output volumes, which are both returned.

    The outputs of `predictor` could be a tensor, a tuple, or a dictionary of tensors.
    Each output in the tuple or dict value is allowed to have different resolutions with respect to the input.
    e.g., the input patch spatial size is [128,128,128], the output (a tuple of two patches) patch sizes
    could be ([128,64,256], [64,32,128]).
    In this case, the parameter `overlap` and `roi_size` need to be carefully chosen to ensure the output ROI is still
    an integer. If the predictor's input and output spatial sizes are not equal, we recommend choosing the parameters
    so that `overlap*roi_size*output_size/input_size` is an integer (for each spatial dimension).
    When roi_size is larger than the inputs' spatial size, the input image are padded during inference.
    To maintain the same spatial sizes, the output image will be cropped to the original input size.

    Args:
        inputs: input image to be processed (assuming NCHW[D])
        view: index of the primary permuted view; the secondary view is the next
            permutation index modulo the number of available permutations.
        roi_size: the spatial window size for inferences.
            When its components have None or non-positives, the corresponding inputs dimension will be used.
            if the components of the `roi_size` are non-positive values, the transform will use the
            corresponding components of img size. For example, `roi_size=(32, -1)` will be adapted
            to `(32, 64)` if the second spatial dimension size of img is `64`.
        sw_batch_size: the batch size to run window slices.
        predictor: given input tensor ``patch_data`` in shape NCHW[D],
            The outputs of the function call ``predictor(patch_data)`` should be a tensor, a tuple, or a dictionary
            with Tensor values. Each output in the tuple or dict value should have the same batch_size, i.e. NM'H'W'[D'];
            where H'W'[D'] represents the output patch's spatial size, M is the number of output channels,
            N is `sw_batch_size`, e.g., the input shape is (7, 1, 128,128,128),
            the output could be a tuple of two tensors, with shapes: ((7, 5, 128, 64, 256), (7, 4, 64, 32, 128)).
            In this case, the parameter `overlap` and `roi_size` need to be carefully chosen
            to ensure the scaled output ROI sizes are still integers.
            If the `predictor`'s input and output spatial sizes are different,
            we recommend choosing the parameters so that ``overlap*roi_size*zoom_scale`` is an integer for each dimension.
        overlap: Amount of overlap between scans.
        mode: {``"constant"``, ``"gaussian"``}
            How to blend output of overlapping windows. Defaults to ``"constant"``.
            - ``"constant``": gives equal weight to all predictions.
            - ``"gaussian``": gives less weight to predictions on edges of windows.
        sigma_scale: the standard deviation coefficient of the Gaussian window when `mode` is ``"gaussian"``.
            Default: 0.125. Actual window sigma is ``sigma_scale`` * ``dim_size``.
            When sigma_scale is a sequence of floats, the values denote sigma_scale at the corresponding
            spatial dimensions.
        padding_mode: {``"constant"``, ``"reflect"``, ``"replicate"``, ``"circular"``}
            Padding mode for ``inputs``, when ``roi_size`` is larger than inputs. Defaults to ``"constant"``
            See also: https://pytorch.org/docs/stable/generated/torch.nn.functional.pad.html
        cval: fill value for 'constant' padding mode. Default: 0
        sw_device: device for the window data.
            By default the device (and accordingly the memory) of the `inputs` is used.
            Normally `sw_device` should be consistent with the device where `predictor` is defined.
        device: device for the stitched output prediction.
            By default the device (and accordingly the memory) of the `inputs` is used. If for example
            set to device=torch.device('cpu') the gpu memory consumption is less and independent of the
            `inputs` and `roi_size`. Output is on the `device`.
        progress: whether to print a `tqdm` progress bar.
        roi_weight_map: pre-computed (non-negative) weight map for each ROI.
            If not given, and ``mode`` is not `constant`, this map will be computed on the fly.
        args: optional args to be passed to ``predictor``.
        kwargs: optional keyword args to be passed to ``predictor``.

    Note:
        - input must be channel-first and have a batch dim, supports N-D sliding window.
    """
    compute_dtype = inputs.dtype
    num_spatial_dims = len(inputs.shape) - 2
    if overlap < 0 or overlap >= 1:
        raise ValueError("overlap must be >= 0 and < 1.")
    # determine image spatial size and batch size
    # Note: all input images must have the same image size and batch size
    batch_size, _, *image_size_ = inputs.shape
    if device is None:
        device = inputs.device
    if sw_device is None:
        sw_device = inputs.device
    roi_size = fall_back_tuple(roi_size, image_size_)
    # in case that image size is smaller than roi size
    image_size = tuple(max(image_size_[i], roi_size[i]) for i in range(num_spatial_dims))
    # Symmetric padding per spatial dim so every dim is at least roi_size; pad_size is
    # built last-dim-first, which is the order F.pad expects.
    pad_size = []
    for k in range(len(inputs.shape) - 1, 1, -1):
        diff = max(roi_size[k - 2] - inputs.shape[k], 0)
        half = diff // 2
        pad_size.extend([half, diff - half])
    inputs = F.pad(inputs, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)
    # inputs2 = F.pad(inputs2, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)
    scan_interval = _get_scan_interval(image_size, roi_size, num_spatial_dims, overlap)
    # Store all slices in list
    slices = dense_patch_slices(image_size, roi_size, scan_interval)
    num_win = len(slices)  # number of windows per image
    total_slices = num_win * batch_size  # total number of windows
    # Create window-level importance map (blending weights for overlapping windows)
    valid_patch_size = get_valid_patch_size(image_size, roi_size)
    if valid_patch_size == roi_size and (roi_weight_map is not None):
        importance_map = roi_weight_map
    else:
        try:
            importance_map = compute_importance_map(valid_patch_size, mode=mode, sigma_scale=sigma_scale, device=device)
        except BaseException as e:
            raise RuntimeError(
                "Seems to be OOM. Please try smaller patch size or mode='constant' instead of mode='gaussian'."
            ) from e
    importance_map = convert_data_type(importance_map, torch.Tensor, device, compute_dtype)[0]  # type: ignore
    # handle non-positive weights: clamp so the later division by the count map cannot hit zero
    min_non_zero = max(importance_map[importance_map != 0].min().item(), 1e-3)
    importance_map = torch.clamp(importance_map.to(torch.float32), min=min_non_zero).to(compute_dtype)
    # Perform predictions
    dict_key, output_image_list_1, output_image_list_2, count_map_list = None, [], [], []
    _initialized_ss = -1
    is_tensor_output = True  # whether the predictor's output is a tensor (instead of dict/tuple)
    # for each patch
    for slice_g in tqdm(range(0, total_slices, sw_batch_size)) if progress else range(0, total_slices, sw_batch_size):
        slice_range = range(slice_g, min(slice_g + sw_batch_size, total_slices))
        # idx // num_win selects the batch item; idx % num_win selects the spatial window
        unravel_slice = [
            [slice(int(idx / num_win), int(idx / num_win) + 1), slice(None)] + list(slices[idx % num_win])
            for idx in slice_range
        ]
        window_data = torch.cat([inputs[win_slice] for win_slice in unravel_slice]).to(sw_device)
        # NOTE(review): `view_transforms` and `view_ops` are used here but their imports
        # are commented out at the top of this file; the transform tables/functions are
        # defined later in this module under those names' contents — confirm which
        # binding is intended before running this path.
        view_list = [view, (view + 1) % len(view_transforms.permutation_transforms)]
        window_data_list = [view_ops.get_permute_transform(0, dst)(window_data) for dst in view_list]
        # window_data_2 = torch.cat([inputs2[win_slice] for win_slice in unravel_slice]).to(sw_device)
        seg_prob_out_1, seg_prob_out_2 = predictor(window_data_list[0], window_data_list[1], view_list, *args, **kwargs)  # batched patch segmentation
        # Map both predictions back to the canonical (un-permuted) view before stitching.
        seg_prob_out_1, seg_prob_out_2 = view_ops.permute_inverse([seg_prob_out_1, seg_prob_out_2], view_list)
        # convert seg_prob_out to tuple seg_prob_tuple, this does not allocate new memory.
        seg_prob_tuple_1: Tuple[torch.Tensor, ...]
        seg_prob_tuple_2: Tuple[torch.Tensor, ...]
        if isinstance(seg_prob_out_1, torch.Tensor):
            seg_prob_tuple_1 = (seg_prob_out_1,)
            seg_prob_tuple_2 = (seg_prob_out_2,)
        elif isinstance(seg_prob_out_1, Mapping):
            if dict_key is None:
                dict_key = sorted(seg_prob_out_1.keys())  # track predictor's output keys
            seg_prob_tuple_1 = tuple(seg_prob_out_1[k] for k in dict_key)
            seg_prob_tuple_2 = tuple(seg_prob_out_2[k] for k in dict_key)
            is_tensor_output = False
        else:
            seg_prob_tuple_1 = ensure_tuple(seg_prob_out_1)
            seg_prob_tuple_2 = ensure_tuple(seg_prob_out_2)
            is_tensor_output = False
        # for each output in multi-output list
        for ss in range(len(seg_prob_tuple_1)):
            seg_prob_1 = seg_prob_tuple_1[ss].to(device)  # BxCxMxNxP or BxCxMxN
            seg_prob_2 = seg_prob_tuple_2[ss].to(device)
            # compute zoom scale: out_roi_size/in_roi_size
            zoom_scale = []
            for axis, (img_s_i, out_w_i, in_w_i) in enumerate(
                zip(image_size, seg_prob_1.shape[2:], window_data.shape[2:])
            ):
                _scale = out_w_i / float(in_w_i)
                if not (img_s_i * _scale).is_integer():
                    warnings.warn(
                        f"For spatial axis: {axis}, output[{ss}] will have non-integer shape. Spatial "
                        f"zoom_scale between output[{ss}] and input is {_scale}. Please pad inputs."
                    )
                zoom_scale.append(_scale)
            if _initialized_ss < ss:  # init. the ss-th buffer at the first iteration
                # construct multi-resolution outputs
                output_classes = seg_prob_1.shape[1]
                output_shape = [batch_size, output_classes] + [
                    int(image_size_d * zoom_scale_d) for image_size_d, zoom_scale_d in zip(image_size, zoom_scale)
                ]
                # allocate memory to store the full output and the count for overlapping parts
                output_image_list_1.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))
                output_image_list_2.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))
                count_map_list.append(torch.zeros([1, 1] + output_shape[2:], dtype=compute_dtype, device=device))
                _initialized_ss += 1
            # resizing the importance_map so its spatial size matches the (possibly zoomed) output patch
            resizer = Resize(spatial_size=seg_prob_1.shape[2:], mode="nearest", anti_aliasing=False)
            # store the result in the proper location of the full output. Apply weights from importance map.
            for idx, original_idx in zip(slice_range, unravel_slice):
                # zoom roi
                original_idx_zoom = list(original_idx)  # 4D for 2D image, 5D for 3D image
                for axis in range(2, len(original_idx_zoom)):
                    zoomed_start = original_idx[axis].start * zoom_scale[axis - 2]
                    zoomed_end = original_idx[axis].stop * zoom_scale[axis - 2]
                    if not zoomed_start.is_integer() or (not zoomed_end.is_integer()):
                        warnings.warn(
                            f"For axis-{axis-2} of output[{ss}], the output roi range is not int. "
                            f"Input roi range is ({original_idx[axis].start}, {original_idx[axis].stop}). "
                            f"Spatial zoom_scale between output[{ss}] and input is {zoom_scale[axis - 2]}. "
                            f"Corresponding output roi range is ({zoomed_start}, {zoomed_end}).\n"
                            f"Please change overlap ({overlap}) or roi_size ({roi_size[axis-2]}) for axis-{axis-2}. "
                            "Tips: if overlap*roi_size*zoom_scale is an integer, it usually works."
                        )
                    original_idx_zoom[axis] = slice(int(zoomed_start), int(zoomed_end), None)
                importance_map_zoom = resizer(importance_map.unsqueeze(0))[0].to(compute_dtype)
                # store results and weights
                output_image_list_1[ss][original_idx_zoom] += importance_map_zoom * seg_prob_1[idx - slice_g]
                output_image_list_2[ss][original_idx_zoom] += importance_map_zoom * seg_prob_2[idx - slice_g]
                count_map_list[ss][original_idx_zoom] += (
                    importance_map_zoom.unsqueeze(0).unsqueeze(0).expand(count_map_list[ss][original_idx_zoom].shape)
                )
    # account for any overlapping sections: normalize by the accumulated blending weights
    for ss in range(len(output_image_list_1)):
        count_map_pop = count_map_list.pop(0)
        output_image_list_1[ss] = (output_image_list_1[ss] / count_map_pop).to(compute_dtype)
        output_image_list_2[ss] = (output_image_list_2[ss] / count_map_pop).to(compute_dtype)
    # remove padding if image_size smaller than roi_size
    for ss in range(len(output_image_list_1)):
        output_i_1, output_i_2 = output_image_list_1[ss], output_image_list_2[ss]
        if torch.isnan(output_i_1).any() or torch.isinf(output_i_1).any():
            warnings.warn("Sliding window inference results contain NaN or Inf.")
        if torch.isnan(output_i_2).any() or torch.isinf(output_i_2).any():
            warnings.warn("Sliding window inference results contain NaN or Inf.")
        zoom_scale = [
            seg_prob_map_shape_d / roi_size_d for seg_prob_map_shape_d, roi_size_d in zip(output_i_1.shape[2:], roi_size)
        ]
        # Build the crop slices (scaled by zoom) that undo the earlier symmetric padding.
        final_slicing: List[slice] = []
        for sp in range(num_spatial_dims):
            slice_dim = slice(pad_size[sp * 2], image_size_[num_spatial_dims - sp - 1] + pad_size[sp * 2])
            slice_dim = slice(
                int(round(slice_dim.start * zoom_scale[num_spatial_dims - sp - 1])),
                int(round(slice_dim.stop * zoom_scale[num_spatial_dims - sp - 1])),
            )
            final_slicing.insert(0, slice_dim)
        while len(final_slicing) < len(output_i_1.shape):
            final_slicing.insert(0, slice(None))
        output_image_list_1[ss] = output_i_1[final_slicing]
        output_image_list_2[ss] = output_i_2[final_slicing]
    if dict_key is not None:  # if output of predictor is a dict
        final_output_1 = dict(zip(dict_key, output_image_list_1))
        final_output_2 = dict(zip(dict_key, output_image_list_2))
    else:
        final_output_1 = tuple(output_image_list_1)  # type: ignore
        final_output_2 = tuple(output_image_list_2)  # type: ignore
    # unwrap single-tensor outputs back to a bare tensor
    final_output_1 = final_output_1[0] if is_tensor_output else final_output_1  # type: ignore
    final_output_2 = final_output_2[0] if is_tensor_output else final_output_2  # type: ignore
    return final_output_1, final_output_2
def one_hot(labels: torch.Tensor, num_classes: int, dtype: torch.dtype = torch.float, dim: int = 1) -> torch.Tensor:
    """
    Convert an integer label tensor to one-hot format along dimension `dim`.

    The `dim`-th axis of `labels` must have length 1 (or be absent, in which case
    trailing singleton axes are appended); it is expanded to length `num_classes`,
    with a 1 at each label's index and 0 elsewhere. Note that this includes the
    background label, so a binary mask should be treated as having two classes.

    Args:
        labels: tensor of integer class indices; internally cast via `labels.long()`.
        num_classes: length of the one-hot axis in the output.
        dtype: data type of the returned one-hot tensor.
        dim: non-negative axis to expand from length 1 to `num_classes`.

    Example:
        For `labels` of shape [B]1[spatial_dims] and `dim=1`, the result has shape
        [B]N[spatial_dims] where N == num_classes.

    .. code-block:: python

        from monai.networks.utils import one_hot
        import torch

        a = torch.randint(0, 2, size=(1, 2, 2, 2))
        out = one_hot(a, num_classes=2, dim=0)
        print(out.shape)  # torch.Size([2, 2, 2, 2])

        a = torch.randint(0, 2, size=(2, 1, 2, 2, 2))
        out = one_hot(a, num_classes=2, dim=1)
        print(out.shape)  # torch.Size([2, 2, 2, 2, 2])
    """
    # If `dim` lies beyond the current rank, append singleton axes until it exists.
    missing_axes = dim + 1 - labels.ndim
    if missing_axes > 0:
        labels = labels.reshape(list(labels.shape) + [1] * missing_axes)
    out_shape = list(labels.shape)
    if out_shape[dim] != 1:
        raise AssertionError("labels should have a channel with length equal to one.")
    out_shape[dim] = num_classes
    # scatter_ writes a 1 at each label index along `dim`; it returns the mutated tensor.
    encoded = torch.zeros(size=out_shape, dtype=dtype, device=labels.device)
    return encoded.scatter_(dim=dim, index=labels.long(), value=1)
"""View operations."""
from typing import Sequence, Tuple
"""View operations.
Input format: [B, C, X, Y, Z, ...]
NOTE(meijieru): 0 is reserved for identify transform.
"""
from typing import Callable, Sequence, Union
import enum
import torch
RotateType = int
PermuteType = int
TransformFuncType = Callable[[torch.Tensor], torch.Tensor]
# A composition of multiple view transoforms.
TransformsType = Sequence[Union[PermuteType, RotateType]]
class GroupName(enum.Enum):
ROTATE = 1
PERMUTE = 2
DEFAULT_ORDER = (GroupName.ROTATE, GroupName.PERMUTE)
rotation_transforms = {
0: lambda x: x,
1: lambda x: x.rot90(1, (3, 4)),
2: lambda x: x.rot90(2, (3, 4)),
3: lambda x: x.rot90(3, (3, 4)),
}
rotation_inverse_transforms = {
0: lambda x: x,
1: lambda x: x.rot90(3, (3, 4)),
2: lambda x: x.rot90(2, (3, 4)),
3: lambda x: x.rot90(1, (3, 4)),
}
permutation_transforms = {
0: lambda x: x,
1: lambda x: x.permute(0, 1, 3, 2, 4),
2: lambda x: x.permute(0, 1, 4, 3, 2),
}
permutation_inverse_transforms = {
0: lambda x: x,
1: lambda x: x.permute(0, 1, 3, 2, 4),
2: lambda x: x.permute(0, 1, 4, 3, 2),
}
all_forward_transforms = {
GroupName.ROTATE: rotation_transforms,
GroupName.PERMUTE: permutation_transforms,
}
all_backward_transforms = {
GroupName.ROTATE: rotation_inverse_transforms,
GroupName.PERMUTE: permutation_inverse_transforms,
}
def get_transforms_func(views: TransformsType,
                        orders: Sequence[GroupName] = DEFAULT_ORDER,
                        inverse: bool = False) -> TransformFuncType:
    """Gets a function that applies the given view transforms sequentially.

    Args:
        views: one transform index per group in `orders`.
        orders: the transform groups to apply, in forward application order.
        inverse: if True, use the inverse transform tables and apply them in
            reverse order (inverses must be undone last-to-first).

    Returns:
        A callable mapping a tensor to its (inverse-)transformed version.

    Raises:
        ValueError: if `views` and `orders` have different lengths.
    """
    if len(views) != len(orders):
        # Previously raised a bare ValueError(); include the mismatch for debuggability.
        raise ValueError(
            f"views and orders must have the same length, got {len(views)} and {len(orders)}."
        )
    all_transforms = (all_forward_transforms
                      if not inverse else all_backward_transforms)
    funcs = [
        all_transforms[group_name][view]
        for view, group_name in zip(views, orders)
    ]
    funcs = funcs if not inverse else funcs[::-1]

    def aux(val):
        for func in funcs:
            val = func(val)
        return val

    return aux
import torch
import numpy as np
def get_permute_transform(view_src: PermuteType,
                          view_dst: PermuteType) -> TransformFuncType:
    """Gets transform function from view src to view dst.

    The returned callable first undoes the source permutation (back to the
    canonical view 0), then applies the destination permutation, returning a
    contiguous tensor.
    """
    def transform(x: torch.Tensor) -> torch.Tensor:
        # Fix: the original referenced `view_transforms.*`, but that import is
        # commented out at the top of this file; the permutation tables are
        # defined at module level here, so use them directly.
        x_view_0 = permutation_inverse_transforms[view_src](x)
        return permutation_transforms[view_dst](x_view_0).contiguous()
    return transform
def permute_inverse(xs: Sequence[torch.Tensor],
                    views: Sequence[PermuteType]) -> Sequence[torch.Tensor]:
    """Transforms data back to origin view."""
    restored = []
    for tensor, src_view in zip(xs, views):
        # Map each tensor from its permuted view back to canonical view 0.
        restored.append(get_permute_transform(src_view, 0)(tensor))
    return restored
def permute_rand(
    x: torch.Tensor,
    num_samples: int = 2
) -> Tuple[Sequence[torch.Tensor], Sequence[PermuteType]]:
    """Samples `num_samples` distinct permuted views of `x`.

    Args:
        x: tensor in the canonical view 0.
        num_samples: number of distinct views to draw (without replacement).

    Returns:
        A pair of (transformed tensors, sampled view indices).

    Raises:
        ValueError: if `num_samples` exceeds the number of available permutations.
    """
    # Fix: the original referenced `view_transforms.permutation_transforms`, but
    # that import is commented out at the top of this file; the table is defined
    # at module level here.
    num_permutes = len(permutation_transforms)
    if num_samples > num_permutes:
        raise ValueError('Duplicate samples.')
    view_dsts = np.random.permutation(num_permutes)[:num_samples].tolist()
    return [get_permute_transform(0, view)(x) for view in view_dsts], view_dsts
================================================
FILE: Finetune/Flare22/main.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn.parallel
import torch.utils.data.distributed
from optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR
from trainer import run_training
from utils.data_utils import get_loader
import torch.nn as nn
from monai.inferers import sliding_window_inference
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
# --- Process environment setup ---
# NOTE(review): GPU selection and the rendezvous port are hard-coded here; they
# override any values exported by the launching shell.
os.environ['CUDA_VISIBLE_DEVICES'] = "4"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
import resource
# Raise the open-file soft limit (data loader workers can exhaust the default).
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# --- Command-line interface ---
# NOTE(review): several boolean options below (e.g. --save_checkpoint, --noamp,
# --use_normal_dataset, --use_checkpoint, --use_ssl_pretrained) are declared with
# default=... but no action="store_true"/type; passing any value on the command
# line yields a non-empty string, which is always truthy — confirm these are only
# ever toggled by editing the defaults.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./pretrained_models/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/data/linshan/CTs/Flare22/", type=str, help="dataset directory")
parser.add_argument("--json_list", default="dataset.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_checkpoint",default="VoCo_10k.pt", type=str, help="VoCo_10k pretrained model")
parser.add_argument(
    "--pretrained_model_name",
    default="model_bestVal.pt",
    type=str,
    help="pretrained model name",
)
# Shared cubic ROI edge length used for the three --roi_* defaults below.
roi = 96
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
parser.add_argument("--max_epochs", default=3000, type=int, help="max number of training epochs")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=16, type=int, help="number of sliding window batch size")
parser.add_argument("--optim_lr", default=3e-4, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=0.005, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", default=False, help="do NOT use amp for training")
parser.add_argument("--val_every", default=50, type=int, help="validation frequency")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
# 14 output channels: 13 FLARE22 organ labels plus background (labels 0-13 in dataset json).
parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
# Intensity window: HU range [a_min, a_max] is rescaled to [b_min, b_max].
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
parser.add_argument("--warmup_epochs", default=100, type=int, help="number of warmup epochs")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--smooth_dr", default=1e-6, type=float, help="constant added to dice denominator to avoid nan")
parser.add_argument("--smooth_nr", default=0.0, type=float, help="constant added to dice numerator to avoid zero")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=True, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Parse CLI arguments and launch training.

    When ``--distributed`` is set, one worker process is spawned per visible
    GPU; otherwise training runs in-process on GPU 0.
    """
    args = parser.parse_args()
    args.amp = not args.noamp
    if not args.distributed:
        # Single-process path.
        main_worker(gpu=0, args=args)
        return
    args.ngpus_per_node = torch.cuda.device_count()
    print("Found total gpus", args.ngpus_per_node)
    # world_size was given per-node on the CLI; scale it to total processes.
    args.world_size = args.ngpus_per_node * args.world_size
    mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))
def main_worker(gpu, args):
    """Per-process training worker.

    Sets up (optionally distributed) process state, builds the SwinUNETR
    model, loads SSL-pretrained / resume weights, then hands off to
    ``run_training``.

    Args:
        gpu: local GPU index assigned to this process (0 when not distributed).
        args: parsed argparse namespace; mutated in place (``gpu``, ``rank``,
            ``test_mode`` are set here).

    Returns:
        Best validation accuracy reported by ``run_training``.
    """
    if args.distributed:
        torch.multiprocessing.set_start_method("fork", force=True)
    np.set_printoptions(formatter={"float": "{: 0.3f}".format}, suppress=True)
    args.gpu = gpu
    if args.distributed:
        # Global rank = node rank * gpus-per-node + local gpu index.
        args.rank = args.rank * args.ngpus_per_node + gpu
        dist.init_process_group(
            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank
        )
    # Bug fix: this was torch.cuda.set_device(0), which pointed every spawned
    # worker at GPU 0; each process must bind to its own device.
    torch.cuda.set_device(args.gpu)
    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.benchmark = True
    args.test_mode = False
    loader = get_loader(args)
    print(args.rank, " gpu", args.gpu)
    if args.rank == 0:
        print("Batch size is:", args.batch_size, "epochs", args.max_epochs)
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    if args.rank == 0:
        os.makedirs(args.logdir, exist_ok=True)
    logger = init_log('global', logging.INFO)
    logger.propagate = 0
    pretrained_dir = args.pretrained_dir
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=args.dropout_path_rate,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    if args.resume_ckpt:
        # Resume the full model (encoder + decoder) from a finetuned checkpoint.
        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))["state_dict"]
        model.load_state_dict(model_dict)
        print("Use pretrained weights")
    if args.use_ssl_pretrained:
        try:
            model_dict = torch.load(args.pretrained_checkpoint,
                                    map_location=torch.device('cpu'))
            state_dict = model_dict
            # Fix potential differences in state-dict keys between the
            # pre-training checkpoint and this fine-tuning model.
            if "module." in list(state_dict.keys())[0]:
                print("Tag 'module.' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("module.", "")] = state_dict.pop(key)
            if "swin_vit" in list(state_dict.keys())[0]:
                print("Tag 'swin_vit' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
            # strict=False: loads the SSL pre-trained encoder (Swin-ViT)
            # weights and leaves the CNN decoder weights untouched.
            model.load_state_dict(state_dict, strict=False)
            print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
        except ValueError:
            raise ValueError("Self-supervised pre-trained weights not available for" + str(args.model_name))
    if args.squared_dice:
        dice_loss = DiceCELoss(
            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr
        )
    else:
        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)
    post_label = AsDiscrete(to_onehot=args.out_channels)
    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print("Total parameters count", pytorch_total_params)
    best_acc = 0
    start_epoch = 0
    if args.checkpoint is not None:
        # Resume an interrupted run; strip any "backbone." key prefix.
        checkpoint = torch.load(args.checkpoint, map_location="cpu")
        from collections import OrderedDict
        new_state_dict = OrderedDict()
        for k, v in checkpoint["state_dict"].items():
            new_state_dict[k.replace("backbone.", "")] = v
        model.load_state_dict(new_state_dict, strict=False)
        if "epoch" in checkpoint:
            start_epoch = checkpoint["epoch"]
        if "best_acc" in checkpoint:
            best_acc = checkpoint["best_acc"]
        print("=> loaded checkpoint '{}' (epoch {}) (bestacc {})".format(args.checkpoint, start_epoch, best_acc))
    model.cuda(args.gpu)
    if args.distributed:
        torch.cuda.set_device(args.gpu)
        if args.norm_name == "batch":
            # SyncBatchNorm keeps batch statistics consistent across ranks.
            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model.cuda(args.gpu)
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)
    if args.optim_name == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "adamw":
        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "sgd":
        optimizer = torch.optim.SGD(
            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight
        )
    else:
        raise ValueError("Unsupported Optimization Procedure: " + str(args.optim_name))
    if args.lrschedule == "warmup_cosine":
        scheduler = LinearWarmupCosineAnnealingLR(
            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs
        )
    elif args.lrschedule == "cosine_anneal":
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)
        if args.checkpoint is not None:
            # Fast-forward the schedule to the resumed epoch.
            scheduler.step(epoch=start_epoch)
    else:
        scheduler = None
    accuracy = run_training(
        model=model,
        train_loader=loader[0],
        val_loader=loader[1],
        optimizer=optimizer,
        loss_func=dice_loss,
        acc_func=dice_acc,
        args=args,
        model_inferer=model_inferer,
        scheduler=scheduler,
        start_epoch=start_epoch,
        post_label=post_label,
        post_pred=post_pred,
    )
    return accuracy
# Memoizes (name, level) pairs that already received a handler.
logs = set()


def init_log(name, level=logging.INFO):
    """Create (or fetch) a stream logger for ``name`` at ``level``.

    A handler is attached only on the first call for a given (name, level)
    pair. Previously a repeat call returned None (bare ``return``), which
    crashed callers that immediately dereference the result, e.g.
    ``init_log('global').propagate = 0``; the already-configured logger is
    now returned instead.

    Returns:
        The configured ``logging.Logger``.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        return logger  # already configured; do not attach a second handler
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        # Under SLURM, only rank 0 emits log records.
        rank = int(os.environ["SLURM_PROCID"])
        logger.addFilter(lambda record: rank == 0)
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
# Script entry point: parse args and launch (possibly multi-process) training.
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/Flare22/optimizers/__init__.py
================================================
================================================
FILE: Finetune/Flare22/optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
__all__ = ["LinearLR", "ExponentialLR"]
class _LRSchedulerMONAI(_LRScheduler):
    """Base scheduler that sweeps each group's learning rate from its base LR
    toward ``end_lr`` over ``num_iter`` iterations (subclasses define the
    sweep shape via ``get_lr``)."""

    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            end_lr: learning rate reached at the end of the sweep.
            num_iter: number of iterations the sweep spans.
            last_epoch: index of the last epoch (-1 means a fresh start).

        Returns:
            None
        """
        self.num_iter = num_iter
        self.end_lr = end_lr
        super().__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
    """Ramps the learning rate linearly from each base LR to ``end_lr``."""

    def get_lr(self):
        # Fraction of the sweep completed so far.
        frac = self.last_epoch / (self.num_iter - 1)
        return [lr0 + frac * (self.end_lr - lr0) for lr0 in self.base_lrs]
class ExponentialLR(_LRSchedulerMONAI):
    """Grows the learning rate geometrically from each base LR to ``end_lr``."""

    def get_lr(self):
        # Fraction of the sweep completed so far.
        frac = self.last_epoch / (self.num_iter - 1)
        return [lr0 * (self.end_lr / lr0) ** frac for lr0 in self.base_lrs]
class WarmupCosineSchedule(LambdaLR):
    """Linear warmup and then cosine decay.
    Based on https://huggingface.co/ implementation.
    """

    def __init__(
        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
    ) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            warmup_steps: number of warmup iterations.
            t_total: total number of training iterations.
            cycles: cosine cycles parameter.
            last_epoch: the index of last epoch.

        Returns:
            None
        """
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super().__init__(optimizer, self.lr_lambda, last_epoch)

    def lr_lambda(self, step):
        """Multiplicative LR factor for the given step."""
        if step < self.warmup_steps:
            # Warmup phase: factor ramps linearly from 0 toward 1.
            return float(step) / float(max(1.0, self.warmup_steps))
        # Decay phase: cosine from 1 down to 0 (clamped at 0).
        decay_span = max(1, self.t_total - self.warmup_steps)
        progress = float(step - self.warmup_steps) / float(decay_span)
        cosine = math.cos(math.pi * float(self.cycles) * 2.0 * progress)
        return max(0.0, 0.5 * (1.0 + cosine))
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    # Linear warmup from ``warmup_start_lr`` to each base LR over
    # ``warmup_epochs`` steps, then cosine annealing down to ``eta_min``
    # by ``max_epochs``. ``get_lr`` uses the chainable (recurrence) form;
    # ``_get_closed_form_lr`` gives the direct formula used when an epoch is
    # passed explicitly to ``step``.

    def __init__(
        self,
        optimizer: Optimizer,
        warmup_epochs: int,
        max_epochs: int,
        warmup_start_lr: float = 0.0,
        eta_min: float = 0.0,
        last_epoch: int = -1,
    ) -> None:
        """
        Args:
            optimizer (Optimizer): Wrapped optimizer.
            warmup_epochs (int): Maximum number of iterations for linear warmup
            max_epochs (int): Maximum number of iterations
            warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
            eta_min (float): Minimum learning rate. Default: 0.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min

        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        """
        Compute learning rate using chainable form of the scheduler
        """
        if not self._get_lr_called_within_step:
            warnings.warn(
                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
            )

        # First step: start of the warmup ramp.
        if self.last_epoch == 0:
            return [self.warmup_start_lr] * len(self.base_lrs)
        # During warmup: add a constant linear increment each step.
        elif self.last_epoch < self.warmup_epochs:
            return [
                group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        # Exactly at the end of warmup: land on the base LRs.
        elif self.last_epoch == self.warmup_epochs:
            return self.base_lrs
        # First step of a cosine period: apply the initial cosine decrement
        # directly (the general ratio below would divide by zero here).
        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
            return [
                group["lr"]
                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        # General chainable cosine step: scale the previous LR (offset by
        # eta_min) by the ratio of consecutive cosine factors.
        return [
            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            / (
                1
                + math.cos(
                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
                )
            )
            * (group["lr"] - self.eta_min)
            + self.eta_min
            for group in self.optimizer.param_groups
        ]

    def _get_closed_form_lr(self) -> List[float]:
        """
        Called when epoch is passed as a param to the `step` function of the scheduler.
        """
        # Warmup: straight line from warmup_start_lr to base_lr.
        if self.last_epoch < self.warmup_epochs:
            return [
                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr in self.base_lrs
            ]
        # Cosine annealing from base_lr down to eta_min.
        return [
            self.eta_min
            + 0.5
            * (base_lr - self.eta_min)
            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            for base_lr in self.base_lrs
        ]
================================================
FILE: Finetune/Flare22/train.sh
================================================
# Timestamp used to name this run's console log.
now=$(date +"%Y%m%d_%H%M%S")
# Checkpoints and tensorboard logs go under runs/logs.
logdir=runs/logs
mkdir -p $logdir
# Launch fine-tuning; tee mirrors stdout into a timestamped log file.
torchrun --master_port=21198 main.py \
    --logdir $logdir | tee $logdir/$now.txt
================================================
FILE: Finetune/Flare22/trainer.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import numpy as np
import torch
import torch.nn.parallel
import torch.utils.data.distributed
from tensorboardX import SummaryWriter
from torch.cuda.amp import GradScaler, autocast
from utils.utils import AverageMeter, distributed_all_gather
from monai.data import decollate_batch
def train_epoch(model, loader, optimizer, scaler, epoch, loss_func, args):
    """Run one training epoch and return the running mean loss.

    Supports AMP (via ``scaler``) and distributed loss averaging across
    ranks; prints per-iteration progress on rank 0.
    """
    model.train()
    run_loss = AverageMeter()
    tic = time.time()
    for step, batch in enumerate(loader):
        if isinstance(batch, list):
            data, target = batch
        else:
            data, target = batch["image"], batch["label"]
        data, target = data.cuda(), target.cuda()
        # Drop gradients entirely instead of zeroing them.
        for p in model.parameters():
            p.grad = None
        with autocast(enabled=args.amp):
            loss = loss_func(model(data), target)
        if args.amp:
            scaler.scale(loss).backward()
            scaler.step(optimizer)
            scaler.update()
        else:
            loss.backward()
            optimizer.step()
        if args.distributed:
            # Average the loss over all ranks (padded samples excluded).
            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=step < loader.sampler.valid_length)
            run_loss.update(
                np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size
            )
        else:
            run_loss.update(loss.item(), n=args.batch_size)
        lr = optimizer.param_groups[0]["lr"]
        if args.rank == 0:
            print(
                "Epoch {}/{} {}/{}".format(epoch, args.max_epochs, step, len(loader)),
                "loss: {:.4f}".format(run_loss.avg),
                "lr: {:.8f}".format(lr),
                "time {:.2f}s".format(time.time() - tic),
            )
        tic = time.time()
    # Free gradient memory before validation.
    for p in model.parameters():
        p.grad = None
    return run_loss.avg
def val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):
    """Run one validation epoch and return the running mean accuracy.

    Uses ``model_inferer`` (sliding-window) when given, otherwise calls the
    model directly; accuracies are gathered across ranks when distributed.
    """
    model.eval()
    run_acc = AverageMeter()
    tic = time.time()
    with torch.no_grad():
        for step, batch in enumerate(loader):
            if isinstance(batch, list):
                data, target = batch
            else:
                data, target = batch["image"], batch["label"]
            data, target = data.cuda(), target.cuda()
            with autocast(enabled=args.amp):
                infer = model_inferer if model_inferer is not None else model
                logits = infer(data)
            if not logits.is_cuda:
                target = target.cpu()
            # One-hot encode labels and argmax'd predictions per sample.
            labels = [post_label(t) for t in decollate_batch(target)]
            preds = [post_pred(p) for p in decollate_batch(logits)]
            acc_func.reset()
            acc_func(y_pred=preds, y=labels)
            acc, not_nans = acc_func.aggregate()
            acc = acc.cuda(args.rank)
            if args.distributed:
                acc_list, not_nans_list = distributed_all_gather(
                    [acc, not_nans], out_numpy=True, is_valid=step < loader.sampler.valid_length
                )
                for gathered_acc, gathered_n in zip(acc_list, not_nans_list):
                    run_acc.update(gathered_acc, n=gathered_n)
            else:
                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            if args.rank == 0:
                print(
                    "Val {}/{} {}/{}".format(epoch, args.max_epochs, step, len(loader)),
                    "acc",
                    np.mean(run_acc.avg),
                    "time {:.2f}s".format(time.time() - tic),
                )
            tic = time.time()
    return run_acc.avg
def save_checkpoint(model, epoch, args, filename="model.pt", best_acc=0, optimizer=None, scheduler=None):
    """Serialize model (plus optional optimizer/scheduler) state to args.logdir."""
    # Unwrap DDP so keys are stored without the "module." prefix.
    if args.distributed:
        state_dict = model.module.state_dict()
    else:
        state_dict = model.state_dict()
    save_dict = {"epoch": epoch, "best_acc": best_acc, "state_dict": state_dict}
    if optimizer is not None:
        save_dict["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        save_dict["scheduler"] = scheduler.state_dict()
    path = os.path.join(args.logdir, filename)
    torch.save(save_dict, path)
    print("Saving checkpoint", path)
def run_training(
    model,
    train_loader,
    val_loader,
    optimizer,
    loss_func,
    acc_func,
    args,
    model_inferer=None,
    scheduler=None,
    start_epoch=0,
    post_label=None,
    post_pred=None,
):
    """Main train/validate loop.

    Trains for ``args.max_epochs`` epochs (resuming at ``start_epoch``),
    validates every ``args.val_every`` epochs, logs to TensorBoard on rank 0,
    and keeps two checkpoints in ``args.logdir``: ``model_final.pt`` (latest)
    and ``model.pt`` (best validation accuracy so far).

    Returns:
        The best mean validation accuracy observed.
    """
    writer = None
    if args.logdir is not None and args.rank == 0:
        writer = SummaryWriter(log_dir=args.logdir)
        if args.rank == 0:
            print("Writing Tensorboard logs to ", args.logdir)
    scaler = None
    if args.amp:
        scaler = GradScaler()
    val_acc_max = 0.0
    for epoch in range(start_epoch, args.max_epochs):
        if args.distributed:
            # Re-seed the sampler shuffle per epoch, then sync ranks.
            train_loader.sampler.set_epoch(epoch)
            torch.distributed.barrier()
        print(args.rank, time.ctime(), "Epoch:", epoch)
        epoch_time = time.time()
        train_loss = train_epoch(
            model, train_loader, optimizer, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args
        )
        if args.rank == 0:
            print(
                "Final training {}/{}".format(epoch, args.max_epochs - 1),
                "loss: {:.4f}".format(train_loss),
                "time {:.2f}s".format(time.time() - epoch_time),
            )
        if args.rank == 0 and writer is not None:
            writer.add_scalar("train_loss", train_loss, epoch)
        b_new_best = False
        if (epoch + 1) % args.val_every == 0:
            if args.distributed:
                torch.distributed.barrier()
            epoch_time = time.time()
            val_avg_acc = val_epoch(
                model,
                val_loader,
                epoch=epoch,
                acc_func=acc_func,
                model_inferer=model_inferer,
                args=args,
                post_label=post_label,
                post_pred=post_pred,
            )
            # Collapse the per-class accuracies to a single scalar.
            val_avg_acc = np.mean(val_avg_acc)
            if args.rank == 0:
                print(
                    "Final validation {}/{}".format(epoch, args.max_epochs - 1),
                    "acc",
                    val_avg_acc,
                    "time {:.2f}s".format(time.time() - epoch_time),
                )
                if writer is not None:
                    writer.add_scalar("val_acc", val_avg_acc, epoch)
                if val_avg_acc > val_acc_max:
                    print("new best ({:.6f} --> {:.6f}). ".format(val_acc_max, val_avg_acc))
                    val_acc_max = val_avg_acc
                    b_new_best = True
                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                        # Full checkpoint (with optimizer/scheduler) for resuming.
                        save_checkpoint(
                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler
                        )
            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                # Always refresh the "latest" checkpoint ...
                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename="model_final.pt")
                if b_new_best:
                    # ... and promote it to model.pt when it is the new best.
                    print("Copying to model.pt new best model!!!!")
                    shutil.copyfile(os.path.join(args.logdir, "model_final.pt"), os.path.join(args.logdir, "model.pt"))
        if scheduler is not None:
            scheduler.step()
    print("Training Finished !, Best Accuracy: ", val_acc_max)
    return val_acc_max
================================================
FILE: Finetune/Flare22/utils/__init__.py
================================================
================================================
FILE: Finetune/Flare22/utils/data_test.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler that shards dataset indices across ranks.

    With ``make_even`` the index list is padded so every rank draws the same
    number of samples; ``valid_length`` records how many of this rank's
    indices are real (non-padding) so callers can mask out the padded tail.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None or rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            if num_replicas is None:
                num_replicas = torch.distributed.get_world_size()
            if rank is None:
                rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        # Per-rank sample count, rounded up so total covers the dataset.
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        indices = list(range(len(self.dataset)))
        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            # Deterministic shuffle keyed on the epoch so all ranks agree.
            g = torch.Generator()
            g.manual_seed(self.epoch)
            order = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            order = list(range(len(self.dataset)))
        if self.make_even:
            shortage = self.total_size - len(order)
            if shortage > 0:
                if shortage < len(order):
                    order += order[:shortage]
                else:
                    extra = np.random.randint(low=0, high=len(order), size=shortage)
                    order += [order[i] for i in extra]
            assert len(order) == self.total_size
        # Strided slice assigns every num_replicas-th index to this rank.
        order = order[self.rank : self.total_size : self.num_replicas]
        self.num_samples = len(order)
        return iter(order)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
def get_loader(args):
    """Build the Flare22 test-split DataLoader.

    Loads the "test" split of ``args.json_list`` (resolved under
    ``args.data_dir``), applies deterministic preprocessing only (no
    augmentation), and wraps it in a PersistentDataset so preprocessed
    volumes are cached on disk between runs.

    Returns:
        (loader, transform): the DataLoader and the preprocessing transform.
    """
    data_dir = args.data_dir
    datalist_json = os.path.join(data_dir, args.json_list)
    transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image"]),
            transforms.EnsureChannelFirstd(keys=["image"]),
            transforms.Orientationd(keys=["image"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image"], source_key="image"),
        ]
    )
    datalist = load_decathlon_datalist(datalist_json, True, "test", base_dir=data_dir)
    print('use persistent')
    # The cache directory used to be hard-coded to a user-specific path;
    # honor args.cache_dir when provided and fall back to the old default
    # for backward compatibility.
    cache_dir = getattr(args, "cache_dir", None) or '/data/linshan/cache/flare22_test'
    ds = PersistentDataset(data=datalist,
                           transform=transform,
                           pickle_protocol=pickle.HIGHEST_PROTOCOL,
                           cache_dir=cache_dir)
    sampler = Sampler(ds) if args.distributed else None
    loader = data.DataLoader(
        ds,
        batch_size=args.batch_size,
        shuffle=(sampler is None),
        num_workers=args.workers,
        sampler=sampler,
        pin_memory=True,
    )
    return loader, transform
================================================
FILE: Finetune/Flare22/utils/data_utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
class Sampler(torch.utils.data.Sampler):
    # Distributed sampler: shards dataset indices across ranks, optionally
    # padding ("make_even") so every rank draws the same number of samples.
    # valid_length records how many of this rank's indices are real
    # (non-padding) so training code can mask out the padded tail.
    # NOTE(review): duplicate of the Sampler in utils/data_test.py — consider
    # sharing one implementation.

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.shuffle = shuffle
        self.make_even = make_even
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.epoch = 0
        # Per-rank sample count, rounded up so total_size covers the dataset.
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        indices = list(range(len(self.dataset)))
        self.valid_length = len(indices[self.rank : self.total_size : self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            # Deterministic shuffle keyed on the epoch so all ranks agree.
            g = torch.Generator()
            g.manual_seed(self.epoch)
            indices = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            indices = list(range(len(self.dataset)))
        if self.make_even:
            if len(indices) < self.total_size:
                # Pad by repeating the head (small shortage) or by random
                # resampling (shortage larger than the dataset itself).
                if self.total_size - len(indices) < len(indices):
                    indices += indices[: (self.total_size - len(indices))]
                else:
                    extra_ids = np.random.randint(low=0, high=len(indices), size=self.total_size - len(indices))
                    indices += [indices[ids] for ids in extra_ids]
            assert len(indices) == self.total_size
        # Strided slice assigns every num_replicas-th index to this rank.
        indices = indices[self.rank : self.total_size : self.num_replicas]
        self.num_samples = len(indices)
        return iter(indices)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        # Called by run_training each epoch to reshuffle deterministically.
        self.epoch = epoch
def get_loader(args):
    # Builds the Flare22 DataLoader(s).
    # - args.test_mode: returns a single loader over the "validation" split.
    # - otherwise: returns [train_loader, val_loader].
    # Preprocessing: orient to RAS, resample to (space_x, space_y, space_z),
    # window intensities to [b_min, b_max], crop to the foreground; training
    # additionally random-crops ROI patches and applies flip/rotate90.
    data_dir = args.data_dir
    datalist_json = os.path.join(data_dir, args.json_list)
    train_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            # pos=9, neg=1: 90% of sampled patches are centered on foreground.
            transforms.RandCropByPosNegLabeld(
                keys=["image", "label"],
                label_key="label",
                spatial_size=(args.roi_x, args.roi_y, args.roi_z),
                pos=9,
                neg=1,
                num_samples=args.sw_batch_size,
                image_key="image",
                image_threshold=0,
            ),
            # transforms.RandCropByLabelClassesd(
            #     keys=["image", "label"],
            #     image_key="image",
            #     label_key="label",
            #     spatial_size=(args.roi_x, args.roi_y, args.roi_z),
            #     num_classes=args.out_channels,
            #     ratios=[0, *it.repeat(1, args.out_channels-1)],
            #     num_samples=args.sw_batch_size,
            #     image_threshold=0,
            #     warn=False,
            # ),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=0),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=1),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=2),
            transforms.RandRotate90d(keys=["image", "label"], prob=args.RandRotate90d_prob, max_k=3),
            #transforms.RandShiftIntensityd(keys="image", offsets=0.1, prob=args.RandShiftIntensityd_prob),
            transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    # Validation/test: deterministic preprocessing only (no augmentation).
    val_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    if args.test_mode:
        test_files = load_decathlon_datalist(datalist_json, True, "validation", base_dir=data_dir)
        # NOTE(review): cache_dir is hard-coded to a user-specific path —
        # TODO make it configurable for other machines.
        test_ds = PersistentDataset(data=test_files,
                                    transform=val_transform,
                                    pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                    cache_dir='/data/linshan/cache/flare22')
        test_sampler = Sampler(test_ds, shuffle=False) if args.distributed else None
        test_loader = data.DataLoader(
            test_ds,
            batch_size=1,
            shuffle=False,
            num_workers=args.workers,
            sampler=test_sampler,
            pin_memory=True,
            persistent_workers=True,
        )
        loader = test_loader
    else:
        datalist = load_decathlon_datalist(datalist_json, True, "training", base_dir=data_dir)
        if args.use_normal_dataset:
            # Despite the flag name, this branch uses a disk-backed
            # PersistentDataset rather than a plain Dataset.
            print('use persistent')
            train_ds = PersistentDataset(data=datalist,
                                         transform=train_transform,
                                         pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                         cache_dir='/data/linshan/cache/flare22')
            # train_ds = data.Dataset(data=datalist, transform=train_transform)
        else:
            train_ds = data.CacheDataset(
                data=datalist, transform=train_transform, cache_num=24, cache_rate=1.0, num_workers=args.workers
            )
        train_sampler = Sampler(train_ds) if args.distributed else None
        train_loader = data.DataLoader(
            train_ds,
            batch_size=args.batch_size,
            shuffle=(train_sampler is None),
            num_workers=args.workers,
            sampler=train_sampler,
            pin_memory=True,
        )
        val_files = load_decathlon_datalist(datalist_json, True, "validation", base_dir=data_dir)
        # val_ds = data.Dataset(data=val_files, transform=val_transform)
        val_ds = PersistentDataset(data=val_files,
                                   transform=val_transform,
                                   pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                   cache_dir='/data/linshan/cache/flare22')
        val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None
        val_loader = data.DataLoader(
            val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=False
        )
        loader = [train_loader, val_loader]

    return loader
================================================
FILE: Finetune/Flare22/utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
import os
def resample_3d(img, target_size):
    """Resample a 3-D volume to ``target_size`` with nearest-neighbour zoom."""
    sx, sy, sz = img.shape
    tx, ty, tz = target_size
    scale = (tx / float(sx), ty / float(sy), tz / float(sz))
    # order=0 keeps label maps intact (no interpolation between class ids).
    return ndimage.zoom(img, scale, order=0, prefilter=False)
def dice(x, y):
    """Binary Dice overlap between masks ``x`` and ``y``.

    Returns 0.0 when ``y`` is empty (avoids a 0/0 when both masks are empty).
    """
    y_sum = np.sum(y)
    if y_sum == 0:
        return 0.0
    intersect = np.sum(x * y)
    x_sum = np.sum(x)
    return 2 * intersect / (x_sum + y_sum)
class AverageMeter(object):
    """Tracks the latest value plus a running, weight-aware average.

    Works with scalars and with numpy arrays (per-class accuracies).
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record ``val`` with weight ``n`` and refresh the running average."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        # np.where keeps this elementwise-safe when val/n are arrays and
        # guards against division by a zero count.
        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)
def distributed_all_gather(
    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
    # All-gathers each tensor in tensor_list across ranks and returns, per
    # input tensor, the list of gathered copies (optionally as numpy arrays).
    # valid_batch_size truncates the gathered list; alternatively is_valid
    # (a per-rank bool) filters out ranks whose contribution is padding.
    # Requires an initialized torch.distributed process group.
    if world_size is None:
        world_size = torch.distributed.get_world_size()
    if valid_batch_size is not None:
        valid_batch_size = min(valid_batch_size, world_size)
    elif is_valid is not None:
        # Promote the flag to a tensor on the same device so it can be gathered.
        is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
    if not no_barrier:
        torch.distributed.barrier()
    tensor_list_out = []
    with torch.no_grad():
        if is_valid is not None:
            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
            torch.distributed.all_gather(is_valid_list, is_valid)
            is_valid = [x.item() for x in is_valid_list]
        for tensor in tensor_list:
            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
            torch.distributed.all_gather(gather_list, tensor)
            if valid_batch_size is not None:
                gather_list = gather_list[:valid_batch_size]
            elif is_valid is not None:
                # Keep only contributions from ranks that flagged themselves valid.
                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
            if out_numpy:
                gather_list = [t.cpu().numpy() for t in gather_list]
            tensor_list_out.append(gather_list)
    return tensor_list_out
def color_map(dataset='pascal'):
    """Return a 256x3 uint8 RGB palette for the given dataset.

    'pascal'/'coco' use the standard bit-interleaved VOC coloring;
    'cityscapes' uses its fixed 19-class palette (index 255 = void).
    """
    cmap = np.zeros((256, 3), dtype='uint8')

    if dataset in ('pascal', 'coco'):
        def bit(value, pos):
            # True iff bit ``pos`` of ``value`` is set.
            return (value & (1 << pos)) != 0

        for idx in range(256):
            r = g = b = 0
            label = idx
            # Spread the label's bits across the top bits of each channel.
            for shift in range(8):
                r |= bit(label, 0) << (7 - shift)
                g |= bit(label, 1) << (7 - shift)
                b |= bit(label, 2) << (7 - shift)
                label >>= 3
            cmap[idx] = np.array([r, g, b])
    elif dataset == 'cityscapes':
        palette = [
            (128, 64, 128), (244, 35, 232), (70, 70, 70), (102, 102, 156),
            (190, 153, 153), (153, 153, 153), (250, 170, 30), (220, 220, 0),
            (107, 142, 35), (152, 251, 152), (70, 130, 180), (220, 20, 60),
            (255, 0, 0), (0, 0, 142), (0, 0, 70), (0, 60, 100),
            (0, 80, 100), (0, 0, 230), (119, 11, 32), (0, 0, 0),
        ]
        for idx, rgb in enumerate(palette):
            cmap[idx] = np.array(rgb)
        cmap[255] = np.array([0, 0, 0])

    return cmap
def check_dir(dir):
    """Create directory ``dir`` (and any missing parents) if it does not exist.

    Uses ``exist_ok=True`` instead of an ``os.path.exists`` pre-check: the
    check-then-create pattern is racy (another worker/process can create the
    directory between the two calls and make ``makedirs`` raise).
    """
    os.makedirs(dir, exist_ok=True)
================================================
FILE: Finetune/Flare22/val.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
import torch.nn.functional as F
from torch.cuda.amp import GradScaler, autocast
from utils.data_utils import get_loader
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
# Pin this process to a single GPU and set single-node rendezvous defaults
# for torch.distributed (only used when --distributed is passed).
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs_scratch_v2/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/data/linshan/CTs/BTCV/", type=str, help="dataset directory")
parser.add_argument("--exp_name", default="BTCV_0.8451", type=str, help="experiment name")
parser.add_argument("--json_list", default="dataset_0.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_model_name",
    default="model_0.8451.pt",
    type=str,
    help="pretrained model name",
)
# Cubic sliding-window patch edge length (voxels), shared by the --roi_* flags.
roi = 96
# NOTE(review): boolean-ish flags below use default=True without type=/action=,
# so any command-line value is a non-empty string and therefore truthy.
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
# CT intensity window (HU) mapped to [b_min, b_max] by ScaleIntensityRanged.
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
# Target voxel spacing (mm) for resampling.
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
# Augmentation probabilities (consumed by get_loader; unused at pure test time).
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
def main():
    """Run sliding-window validation of a SwinUNETR checkpoint and print Dice."""
    args = parser.parse_args()
    args.test_mode = True  # makes get_loader return the validation split
    output_directory = "./outputs/" + args.exp_name
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)
    val_loader = get_loader(args)
    pretrained_dir = args.pretrained_dir
    model_name = args.pretrained_model_name
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    pretrained_pth = os.path.join(pretrained_dir, model_name)
    # SwinUNETR-v2 backbone; out_channels must match the checkpoint being loaded.
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=0.0,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    # Sliding-window wrapper so whole volumes can be inferred patch-by-patch.
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    model_dict = torch.load(pretrained_pth)["state_dict"]
    model.load_state_dict(model_dict, strict=True)
    model.eval()
    model.to(device)
    acc_func = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
    run_acc = AverageMeter()
    # One-hot encoders: labels directly, predictions via argmax first.
    post_label = AsDiscrete(to_onehot=args.out_channels)
    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
    with torch.no_grad():
        # all_dice / num / dice_list_case feed the per-organ evaluation that is
        # currently commented out below; kept for easy re-enabling.
        all_dice = None
        num = np.zeros(13)
        dice_list_case = []
        for idx, batch_data in enumerate(val_loader):
            img_name = batch_data["image_meta_dict"]["filename_or_obj"][0].split("/")[-1]
            if isinstance(batch_data, list):
                data, target = batch_data
            else:
                data, target = batch_data["image"], batch_data["label"]
            data, target = data.cuda(), target.cuda()
            print(data.shape, target.shape)
            z = data.shape[-1]
            # NOTE(review): the in-plane size is hard-coded to 263x218 before
            # inference — presumably to match a fixed training resolution;
            # confirm against the data pipeline. Labels use 'nearest' to stay
            # integer-valued.
            data = F.interpolate(data, size=(263, 218, z), mode='trilinear')
            target = F.interpolate(target, size=(263, 218, z), mode='nearest')
            print(data.shape, target.shape)
            with autocast(enabled=True):
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            if not logits.is_cuda:
                target = target.cpu()
            val_labels_list = decollate_batch(target)
            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
            val_outputs_list = decollate_batch(logits)
            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
            # Reset per case so the printed value is this case's Dice, while
            # run_acc accumulates the running mean over cases.
            acc_func.reset()
            acc_func(y_pred=val_output_convert, y=val_labels_convert)
            acc, not_nans = acc_func.aggregate()
            run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            print(np.mean(run_acc.avg))
            # # save predict
            # print(logits.shape)
            # val_outputs = torch.argmax(logits, 1).cpu().numpy()
            # np.save(os.path.join(output_directory, 'pre'+ img_name[3:-7]+'.npy'), val_outputs.astype(np.uint8)[0])
            # # save label
            # val_labels = target.cpu().numpy()
            # np.save(os.path.join(output_directory, 'label' + img_name[3:-7] + '.npy'), val_labels.astype(np.uint8)[0][0])
            #
            # # save input
            # img = data.cpu().numpy()
            # img = img * 255
            # print(np.max(img))
            # np.save(os.path.join(output_directory, 'img' + img_name[3:-7] + '.npy'), img.astype(np.uint8)[0][0])


if __name__ == "__main__":
    main()
# outputs = torch.argmax(logits, 1).cpu().numpy()
# outputs = outputs.astype(np.uint8)[0]
# val_labels = target.cpu().numpy()[0, 0, :, :, :]
#
# len_class = len(list(np.unique(val_labels))) - 1
# dice_list_sub = []
# for i in range(1, 14):
# # judge this class exist or not, ignore background
# num[i - 1] += (np.sum(val_labels == i) > 0).astype(np.uint8)
# organ_Dice = dice(outputs == i, val_labels == i)
# dice_list_sub.append(organ_Dice)
#
# mean_dice = np.sum(dice_list_sub) / len_class
# print("Mean Organ Dice: {}".format(mean_dice))
#
# # acc of each organ
# print("Organ Dice:", dice_list_sub)
#
# if all_dice is None:
# all_dice = (np.asarray(dice_list_sub)).copy()
# else:
# all_dice = all_dice + np.asarray(dice_list_sub)
# print("Organ Dice accumulate:", all_dice*100 / num)
#
# dice_list_case.append(mean_dice)
# print("Overall Mean Dice: {}".format(100*np.mean(dice_list_case)))
================================================
FILE: Finetune/MM-WHS/dataset.json
================================================
{
"description": "0",
"labels": {
"0": "background",
"1": "Left Ventricle",
"2": "whole aorta",
"3": "Right Ventricle",
"4": "Left Atrium",
"5": "myocardium of Left Ventricle",
"6": "Right Atrium",
"7": "Pulmonary Artery"
},
"licence": "hands off!",
"modality": {
"0": "CT"
},
"name": "MM-WHS",
"numTest": 0,
"numTraining": 20,
"reference": "0",
"release": "0.0",
"tensorImageSize": "4D",
"test": [],
"validation": [{
"image": "./images/ct_train_1001_image.nii.gz",
"label": "./labels/ct_train_1001_label.nii.gz"
},
{
"image": "./images/ct_train_1002_image.nii.gz",
"label": "./labels/ct_train_1002_label.nii.gz"
},
{
"image": "./images/ct_train_1003_image.nii.gz",
"label": "./labels/ct_train_1003_label.nii.gz"
},
{
"image": "./images/ct_train_1004_image.nii.gz",
"label": "./labels/ct_train_1004_label.nii.gz"
}
],
"training": [
{
"image": "./images/ct_train_1005_image.nii.gz",
"label": "./labels/ct_train_1005_label.nii.gz"
},
{
"image": "./images/ct_train_1006_image.nii.gz",
"label": "./labels/ct_train_1006_label.nii.gz"
},
{
"image": "./images/ct_train_1007_image.nii.gz",
"label": "./labels/ct_train_1007_label.nii.gz"
},
{
"image": "./images/ct_train_1008_image.nii.gz",
"label": "./labels/ct_train_1008_label.nii.gz"
},
{
"image": "./images/ct_train_1009_image.nii.gz",
"label": "./labels/ct_train_1009_label.nii.gz"
},
{
"image": "./images/ct_train_1010_image.nii.gz",
"label": "./labels/ct_train_1010_label.nii.gz"
},
{
"image": "./images/ct_train_1011_image.nii.gz",
"label": "./labels/ct_train_1011_label.nii.gz"
},
{
"image": "./images/ct_train_1012_image.nii.gz",
"label": "./labels/ct_train_1012_label.nii.gz"
},
{
"image": "./images/ct_train_1013_image.nii.gz",
"label": "./labels/ct_train_1013_label.nii.gz"
},
{
"image": "./images/ct_train_1014_image.nii.gz",
"label": "./labels/ct_train_1014_label.nii.gz"
},
{
"image": "./images/ct_train_1015_image.nii.gz",
"label": "./labels/ct_train_1015_label.nii.gz"
},
{
"image": "./images/ct_train_1016_image.nii.gz",
"label": "./labels/ct_train_1016_label.nii.gz"
},
{
"image": "./images/ct_train_1017_image.nii.gz",
"label": "./labels/ct_train_1017_label.nii.gz"
},
{
"image": "./images/ct_train_1018_image.nii.gz",
"label": "./labels/ct_train_1018_label.nii.gz"
},
{
"image": "./images/ct_train_1019_image.nii.gz",
"label": "./labels/ct_train_1019_label.nii.gz"
},
{
"image": "./images/ct_train_1020_image.nii.gz",
"label": "./labels/ct_train_1020_label.nii.gz"
}
]
}
================================================
FILE: Finetune/MM-WHS/inferers.py
================================================
"""Multiview inferer."""
import warnings
from typing import Any, Callable, Dict, List, Mapping, Sequence, Tuple, Union
import torch
import torch.nn.functional as F
from monai.data.utils import compute_importance_map, dense_patch_slices, get_valid_patch_size
from monai.transforms import Resize
from monai.utils import (
BlendMode,
PytorchPadMode,
convert_data_type,
ensure_tuple,
fall_back_tuple,
look_up_option,
optional_import,
)
from monai.inferers.utils import _get_scan_interval
# from utils import view_ops
# from utils import view_transforms
tqdm, _ = optional_import("tqdm", name="tqdm")
def double_sliding_window_inference(
    inputs: torch.Tensor,
    view: int,
    roi_size: Union[Sequence[int], int],
    sw_batch_size: int,
    predictor: Callable[..., Union[torch.Tensor, Sequence[torch.Tensor], Dict[Any, torch.Tensor]]],
    overlap: float = 0.25,
    mode: Union[BlendMode, str] = BlendMode.CONSTANT,
    sigma_scale: Union[Sequence[float], float] = 0.125,
    padding_mode: Union[PytorchPadMode, str] = PytorchPadMode.CONSTANT,
    cval: float = 0.0,
    sw_device: Union[torch.device, str, None] = None,
    device: Union[torch.device, str, None] = None,
    progress: bool = False,
    roi_weight_map: Union[torch.Tensor, None] = None,
    *args: Any,
    **kwargs: Any,
) -> Union[torch.Tensor, Tuple[torch.Tensor, ...], Dict[Any, torch.Tensor]]:
    """
    Two-view sliding window inference on `inputs` with `predictor`.

    Like MONAI's `sliding_window_inference`, except each window is presented to
    `predictor` twice — once permuted into view `view` and once into the next
    permutation view — and two stitched output volumes (both mapped back to the
    original view) are returned.

    Args:
        inputs: input image to be processed (assuming NCHW[D]).
        view: id of the first permutation view; the second is
            `(view + 1) % num_permutation_views`.
        roi_size: the spatial window size for inferences. None/non-positive
            components fall back to the corresponding input dimension.
        sw_batch_size: the batch size to run window slices.
        predictor: called as `predictor(win_view_a, win_view_b, view_list, ...)`;
            must return two outputs (each a tensor, tuple, or dict of tensors).
        overlap: amount of overlap between scans, in [0, 1).
        mode: {"constant", "gaussian"} blending of overlapping windows.
        sigma_scale: Gaussian sigma coefficient(s) when mode="gaussian".
        padding_mode: padding used when `roi_size` exceeds the input size.
        cval: fill value for "constant" padding.
        sw_device: device for the window data (defaults to `inputs.device`).
        device: device for the stitched output (defaults to `inputs.device`).
        progress: whether to print a `tqdm` progress bar.
        roi_weight_map: pre-computed (non-negative) weight map for each ROI.
        args/kwargs: forwarded to `predictor`.

    Note:
        - input must be channel-first and have a batch dim, supports N-D sliding window.
        - NOTE(review): this function calls `view_ops` / `view_transforms`, whose
          imports are commented out at the top of this file — confirm they are
          available at runtime (equivalent tables/functions appear later in
          this file at module level).
    """
    compute_dtype = inputs.dtype
    num_spatial_dims = len(inputs.shape) - 2
    if overlap < 0 or overlap >= 1:
        raise ValueError("overlap must be >= 0 and < 1.")
    # determine image spatial size and batch size
    # Note: all input images must have the same image size and batch size
    batch_size, _, *image_size_ = inputs.shape
    if device is None:
        device = inputs.device
    if sw_device is None:
        sw_device = inputs.device
    roi_size = fall_back_tuple(roi_size, image_size_)
    # in case that image size is smaller than roi size
    image_size = tuple(max(image_size_[i], roi_size[i]) for i in range(num_spatial_dims))
    pad_size = []
    # Symmetric padding per spatial dim (built in F.pad's reversed-dim order).
    for k in range(len(inputs.shape) - 1, 1, -1):
        diff = max(roi_size[k - 2] - inputs.shape[k], 0)
        half = diff // 2
        pad_size.extend([half, diff - half])
    inputs = F.pad(inputs, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)
    # inputs2 = F.pad(inputs2, pad=pad_size, mode=look_up_option(padding_mode, PytorchPadMode).value, value=cval)
    scan_interval = _get_scan_interval(image_size, roi_size, num_spatial_dims, overlap)
    # Store all slices in list
    slices = dense_patch_slices(image_size, roi_size, scan_interval)
    num_win = len(slices)  # number of windows per image
    total_slices = num_win * batch_size  # total number of windows
    # Create window-level importance map
    valid_patch_size = get_valid_patch_size(image_size, roi_size)
    if valid_patch_size == roi_size and (roi_weight_map is not None):
        importance_map = roi_weight_map
    else:
        try:
            importance_map = compute_importance_map(valid_patch_size, mode=mode, sigma_scale=sigma_scale, device=device)
        except BaseException as e:
            raise RuntimeError(
                "Seems to be OOM. Please try smaller patch size or mode='constant' instead of mode='gaussian'."
            ) from e
    importance_map = convert_data_type(importance_map, torch.Tensor, device, compute_dtype)[0]  # type: ignore
    # handle non-positive weights
    min_non_zero = max(importance_map[importance_map != 0].min().item(), 1e-3)
    importance_map = torch.clamp(importance_map.to(torch.float32), min=min_non_zero).to(compute_dtype)
    # Perform predictions
    dict_key, output_image_list_1, output_image_list_2, count_map_list = None, [], [], []
    _initialized_ss = -1
    is_tensor_output = True  # whether the predictor's output is a tensor (instead of dict/tuple)
    # for each patch
    for slice_g in tqdm(range(0, total_slices, sw_batch_size)) if progress else range(0, total_slices, sw_batch_size):
        slice_range = range(slice_g, min(slice_g + sw_batch_size, total_slices))
        unravel_slice = [
            [slice(int(idx / num_win), int(idx / num_win) + 1), slice(None)] + list(slices[idx % num_win])
            for idx in slice_range
        ]
        window_data = torch.cat([inputs[win_slice] for win_slice in unravel_slice]).to(sw_device)
        # The two views fed to the predictor: `view` and the next permutation.
        view_list = [view, (view + 1) % len(view_transforms.permutation_transforms)]
        window_data_list = [view_ops.get_permute_transform(0, dst)(window_data) for dst in view_list]
        # window_data_2 = torch.cat([inputs2[win_slice] for win_slice in unravel_slice]).to(sw_device)
        seg_prob_out_1, seg_prob_out_2 = predictor(window_data_list[0], window_data_list[1], view_list, *args, **kwargs)  # batched patch segmentation
        # Map both predictions back to the canonical (view 0) orientation.
        seg_prob_out_1, seg_prob_out_2 = view_ops.permute_inverse([seg_prob_out_1, seg_prob_out_2], view_list)
        # convert seg_prob_out to tuple seg_prob_tuple, this does not allocate new memory.
        seg_prob_tuple_1: Tuple[torch.Tensor, ...]
        seg_prob_tuple_2: Tuple[torch.Tensor, ...]
        if isinstance(seg_prob_out_1, torch.Tensor):
            seg_prob_tuple_1 = (seg_prob_out_1,)
            seg_prob_tuple_2 = (seg_prob_out_2,)
        elif isinstance(seg_prob_out_1, Mapping):
            if dict_key is None:
                dict_key = sorted(seg_prob_out_1.keys())  # track predictor's output keys
            seg_prob_tuple_1 = tuple(seg_prob_out_1[k] for k in dict_key)
            seg_prob_tuple_2 = tuple(seg_prob_out_2[k] for k in dict_key)
            is_tensor_output = False
        else:
            seg_prob_tuple_1 = ensure_tuple(seg_prob_out_1)
            seg_prob_tuple_2 = ensure_tuple(seg_prob_out_2)
            is_tensor_output = False
        # for each output in multi-output list
        for ss in range(len(seg_prob_tuple_1)):
            seg_prob_1 = seg_prob_tuple_1[ss].to(device)  # BxCxMxNxP or BxCxMxN
            seg_prob_2 = seg_prob_tuple_2[ss].to(device)
            # compute zoom scale: out_roi_size/in_roi_size
            zoom_scale = []
            for axis, (img_s_i, out_w_i, in_w_i) in enumerate(
                zip(image_size, seg_prob_1.shape[2:], window_data.shape[2:])
            ):
                _scale = out_w_i / float(in_w_i)
                if not (img_s_i * _scale).is_integer():
                    warnings.warn(
                        f"For spatial axis: {axis}, output[{ss}] will have non-integer shape. Spatial "
                        f"zoom_scale between output[{ss}] and input is {_scale}. Please pad inputs."
                    )
                zoom_scale.append(_scale)
            if _initialized_ss < ss:  # init. the ss-th buffer at the first iteration
                # construct multi-resolution outputs
                output_classes = seg_prob_1.shape[1]
                output_shape = [batch_size, output_classes] + [
                    int(image_size_d * zoom_scale_d) for image_size_d, zoom_scale_d in zip(image_size, zoom_scale)
                ]
                # allocate memory to store the full output and the count for overlapping parts
                output_image_list_1.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))
                output_image_list_2.append(torch.zeros(output_shape, dtype=compute_dtype, device=device))
                count_map_list.append(torch.zeros([1, 1] + output_shape[2:], dtype=compute_dtype, device=device))
                _initialized_ss += 1
            # resizing the importance_map
            resizer = Resize(spatial_size=seg_prob_1.shape[2:], mode="nearest", anti_aliasing=False)
            # store the result in the proper location of the full output. Apply weights from importance map.
            for idx, original_idx in zip(slice_range, unravel_slice):
                # zoom roi
                original_idx_zoom = list(original_idx)  # 4D for 2D image, 5D for 3D image
                for axis in range(2, len(original_idx_zoom)):
                    zoomed_start = original_idx[axis].start * zoom_scale[axis - 2]
                    zoomed_end = original_idx[axis].stop * zoom_scale[axis - 2]
                    if not zoomed_start.is_integer() or (not zoomed_end.is_integer()):
                        warnings.warn(
                            f"For axis-{axis-2} of output[{ss}], the output roi range is not int. "
                            f"Input roi range is ({original_idx[axis].start}, {original_idx[axis].stop}). "
                            f"Spatial zoom_scale between output[{ss}] and input is {zoom_scale[axis - 2]}. "
                            f"Corresponding output roi range is ({zoomed_start}, {zoomed_end}).\n"
                            f"Please change overlap ({overlap}) or roi_size ({roi_size[axis-2]}) for axis-{axis-2}. "
                            "Tips: if overlap*roi_size*zoom_scale is an integer, it usually works."
                        )
                    original_idx_zoom[axis] = slice(int(zoomed_start), int(zoomed_end), None)
                importance_map_zoom = resizer(importance_map.unsqueeze(0))[0].to(compute_dtype)
                # store results and weights
                output_image_list_1[ss][original_idx_zoom] += importance_map_zoom * seg_prob_1[idx - slice_g]
                output_image_list_2[ss][original_idx_zoom] += importance_map_zoom * seg_prob_2[idx - slice_g]
                count_map_list[ss][original_idx_zoom] += (
                    importance_map_zoom.unsqueeze(0).unsqueeze(0).expand(count_map_list[ss][original_idx_zoom].shape)
                )
    # account for any overlapping sections
    for ss in range(len(output_image_list_1)):
        count_map_pop = count_map_list.pop(0)
        output_image_list_1[ss] = (output_image_list_1[ss] / count_map_pop).to(compute_dtype)
        output_image_list_2[ss] = (output_image_list_2[ss] / count_map_pop).to(compute_dtype)
    # remove padding if image_size smaller than roi_size
    for ss in range(len(output_image_list_1)):
        output_i_1, output_i_2 = output_image_list_1[ss], output_image_list_2[ss]
        if torch.isnan(output_i_1).any() or torch.isinf(output_i_1).any():
            warnings.warn("Sliding window inference results contain NaN or Inf.")
        if torch.isnan(output_i_2).any() or torch.isinf(output_i_2).any():
            warnings.warn("Sliding window inference results contain NaN or Inf.")
        zoom_scale = [
            seg_prob_map_shape_d / roi_size_d for seg_prob_map_shape_d, roi_size_d in zip(output_i_1.shape[2:], roi_size)
        ]
        final_slicing: List[slice] = []
        for sp in range(num_spatial_dims):
            slice_dim = slice(pad_size[sp * 2], image_size_[num_spatial_dims - sp - 1] + pad_size[sp * 2])
            slice_dim = slice(
                int(round(slice_dim.start * zoom_scale[num_spatial_dims - sp - 1])),
                int(round(slice_dim.stop * zoom_scale[num_spatial_dims - sp - 1])),
            )
            final_slicing.insert(0, slice_dim)
        while len(final_slicing) < len(output_i_1.shape):
            final_slicing.insert(0, slice(None))
        output_image_list_1[ss] = output_i_1[final_slicing]
        output_image_list_2[ss] = output_i_2[final_slicing]
    if dict_key is not None:  # if output of predictor is a dict
        final_output_1 = dict(zip(dict_key, output_image_list_1))
        final_output_2 = dict(zip(dict_key, output_image_list_2))
    else:
        final_output_1 = tuple(output_image_list_1)  # type: ignore
        final_output_2 = tuple(output_image_list_2)  # type: ignore
    final_output_1 = final_output_1[0] if is_tensor_output else final_output_1  # type: ignore
    final_output_2 = final_output_2[0] if is_tensor_output else final_output_2  # type: ignore
    return final_output_1, final_output_2
def one_hot(labels: torch.Tensor, num_classes: int, dtype: torch.dtype = torch.float, dim: int = 1) -> torch.Tensor:
    """Convert integer labels into a one-hot encoding along dimension `dim`.

    `labels` must have (or be reshapeable to) a singleton channel at `dim`;
    that channel is expanded to length `num_classes` with a 1 at each label's
    index and 0 elsewhere. The background label is encoded too, so a binary
    mask yields two channels.

    Args:
        labels: tensor of integers (converted internally via `.long()`).
        num_classes: length of the one-hot channel in the output.
        dtype: dtype of the returned tensor.
        dim: non-negative dimension holding the singleton channel.

    Returns:
        Tensor shaped like `labels` but with size `num_classes` at `dim`.

    Raises:
        AssertionError: if the channel at `dim` is not of length one.
    """
    needed_ndim = dim + 1
    if labels.ndim < needed_ndim:
        # Append trailing singleton dims until `dim` is a valid axis.
        padded_shape = list(labels.shape) + [1] * (needed_ndim - labels.ndim)
        labels = torch.reshape(labels, padded_shape)
    out_shape = list(labels.shape)
    if out_shape[dim] != 1:
        raise AssertionError("labels should have a channel with length equal to one.")
    out_shape[dim] = num_classes
    result = torch.zeros(size=out_shape, dtype=dtype, device=labels.device)
    result.scatter_(dim=dim, index=labels.long(), value=1)
    return result
"""View operations."""
from typing import Sequence, Tuple
"""View operations.
Input format: [B, C, X, Y, Z, ...]
NOTE(meijieru): 0 is reserved for identify transform.
"""
from typing import Callable, Sequence, Union
import enum
import torch
RotateType = int
PermuteType = int
TransformFuncType = Callable[[torch.Tensor], torch.Tensor]
# A composition of multiple view transoforms.
TransformsType = Sequence[Union[PermuteType, RotateType]]
class GroupName(enum.Enum):
ROTATE = 1
PERMUTE = 2
DEFAULT_ORDER = (GroupName.ROTATE, GroupName.PERMUTE)
rotation_transforms = {
0: lambda x: x,
1: lambda x: x.rot90(1, (3, 4)),
2: lambda x: x.rot90(2, (3, 4)),
3: lambda x: x.rot90(3, (3, 4)),
}
rotation_inverse_transforms = {
0: lambda x: x,
1: lambda x: x.rot90(3, (3, 4)),
2: lambda x: x.rot90(2, (3, 4)),
3: lambda x: x.rot90(1, (3, 4)),
}
permutation_transforms = {
0: lambda x: x,
1: lambda x: x.permute(0, 1, 3, 2, 4),
2: lambda x: x.permute(0, 1, 4, 3, 2),
}
permutation_inverse_transforms = {
0: lambda x: x,
1: lambda x: x.permute(0, 1, 3, 2, 4),
2: lambda x: x.permute(0, 1, 4, 3, 2),
}
all_forward_transforms = {
GroupName.ROTATE: rotation_transforms,
GroupName.PERMUTE: permutation_transforms,
}
all_backward_transforms = {
GroupName.ROTATE: rotation_inverse_transforms,
GroupName.PERMUTE: permutation_inverse_transforms,
}
def get_transforms_func(views: TransformsType,
                        orders: Sequence[GroupName] = DEFAULT_ORDER,
                        inverse: bool = False) -> TransformFuncType:
    """Build a function composing one transform per group in `orders`.

    Args:
        views: one transform id per group in `orders`.
        orders: the transform groups; the forward composition applies them in
            this order.
        inverse: if True, use each group's inverse transform and apply the
            chain in reversed order (undoing the forward composition).

    Returns:
        A callable applying the composed transform to a tensor.

    Raises:
        ValueError: if `views` and `orders` have different lengths.
    """
    if len(views) != len(orders):
        raise ValueError()
    lookup = all_backward_transforms if inverse else all_forward_transforms
    chain = [lookup[group][view_id] for view_id, group in zip(views, orders)]
    if inverse:
        chain.reverse()

    def _composed(val):
        for step in chain:
            val = step(val)
        return val

    return _composed
import torch
import numpy as np
def get_permute_transform(view_src: PermuteType,
                          view_dst: PermuteType) -> TransformFuncType:
    """Build a function mapping a tensor from `view_src` to `view_dst`.

    The returned transform first undoes the source permutation (back to the
    canonical view 0), then applies the destination permutation, and returns a
    contiguous tensor.
    """
    def transform(x: torch.Tensor) -> torch.Tensor:
        # NOTE(review): `view_transforms` is not imported in this file (its
        # import near the top is commented out); identical tables exist at
        # module level here (`permutation_inverse_transforms` /
        # `permutation_transforms`) — confirm the intended module layout.
        x_view_0 = view_transforms.permutation_inverse_transforms[view_src](x)
        return view_transforms.permutation_transforms[view_dst](
            x_view_0).contiguous()
    return transform
def permute_inverse(xs: Sequence[torch.Tensor],
                    views: Sequence[PermuteType]) -> Sequence[torch.Tensor]:
    """Map each tensor from its permuted view back to the canonical view 0."""
    restored = []
    for tensor, src_view in zip(xs, views):
        restored.append(get_permute_transform(src_view, 0)(tensor))
    return restored
def permute_rand(
    x: torch.Tensor,
    num_samples: int = 2
) -> Tuple[Sequence[torch.Tensor], Sequence[PermuteType]]:
    """Sample `num_samples` distinct permuted views of `x`.

    Returns:
        (transformed tensors, sampled view ids), where each tensor is `x`
        mapped from view 0 into the corresponding sampled view. Uses
        `np.random`, so results follow numpy's global RNG state.

    Raises:
        ValueError: if `num_samples` exceeds the number of available views
            (distinct views cannot be duplicated).
    """
    # NOTE(review): `view_transforms` is not imported in this file (its import
    # near the top is commented out); the equivalent module-level table here
    # is `permutation_transforms` — confirm the intended module layout.
    num_permutes = len(view_transforms.permutation_transforms)
    if num_samples > num_permutes:
        raise ValueError('Duplicate samples.')
    view_dsts = np.random.permutation(num_permutes)[:num_samples].tolist()
    return [get_permute_transform(0, view)(x) for view in view_dsts], view_dsts
================================================
FILE: Finetune/MM-WHS/main.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn.parallel
import torch.utils.data.distributed
from optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR
from trainer import run_training
from utils.data_utils import get_loader
from monai.inferers import sliding_window_inference
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
# --- Runtime environment ------------------------------------------------------
# Pin this process to a single physical GPU and configure the rendezvous
# endpoint used by torch.distributed for single-node runs.
os.environ['CUDA_VISIBLE_DEVICES'] = "1"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
import resource
# Raise the soft limit on open file descriptors: multi-worker dataloaders can
# exceed the default per-process quota.
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# --- Command-line interface ----------------------------------------------------
# NOTE(review): the "boolean" options declared with default=True and no
# type/action (--save_checkpoint, --noamp, --use_normal_dataset,
# --use_checkpoint, --use_ssl_pretrained) receive any command-line value as a
# non-empty string, which is truthy; they effectively cannot be disabled from
# the command line. Confirm whether action="store_true"/"store_false" was
# intended.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./pretrained_models/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/data/jiaxin/data/MM-WHS/ct_train/", type=str, help="dataset directory")
parser.add_argument("--json_list", default="./dataset.json", type=str, help="dataset json file")
parser.add_argument(
    "--pretrained_checkpoint",default="VoCo_10k.pt", type=str, help="VoCo_10k pretrained model")
parser.add_argument(
    "--pretrained_model_name",
    default="model_bestVal.pt",
    type=str,
    help="pretrained model name",
)
# Cubic ROI edge length shared by --roi_x/--roi_y/--roi_z defaults below.
roi = 64
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
parser.add_argument("--max_epochs", default=1000, type=int, help="max number of training epochs")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
parser.add_argument("--optim_lr", default=1e-3, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=1e-5, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", default=True, help="do NOT use amp for training")
parser.add_argument("--val_every", default=50, type=int, help="validation frequency")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=4, type=int, help="number of workers")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=8, type=int, help="number of output channels")
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
# Intensity windowing / resampling parameters for the preprocessing pipeline.
parser.add_argument("--a_min", default=0.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=1700.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
parser.add_argument("--warmup_epochs", default=50, type=int, help="number of warmup epochs")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--smooth_dr", default=1e-6, type=float, help="constant added to dice denominator to avoid nan")
parser.add_argument("--smooth_nr", default=0.0, type=float, help="constant added to dice numerator to avoid zero")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=True, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Entry point: parse CLI arguments and launch single- or multi-GPU training."""
    args = parser.parse_args()
    # `amp` is the positive flag used throughout training; --noamp is its inverse.
    args.amp = not args.noamp
    if not args.distributed:
        # Single-process path: run directly on GPU 0.
        main_worker(gpu=0, args=args)
        return
    # Multi-process path: one worker per visible GPU on this node.
    args.ngpus_per_node = torch.cuda.device_count()
    print("Found total gpus", args.ngpus_per_node)
    args.world_size = args.ngpus_per_node * args.world_size
    mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))
def main_worker(gpu, args):
    """Per-process training worker.

    Builds dataloaders, constructs a SwinUNETR model (optionally initializing
    the encoder from SSL-pretrained weights), sets up loss/metric/optimizer/
    scheduler, and runs the training loop.

    Args:
        gpu: local GPU index for this process.
        args: parsed command-line namespace (see parser at module level).

    Returns:
        The accuracy value returned by ``run_training``.
    """
    if args.distributed:
        torch.multiprocessing.set_start_method("fork", force=True)
    np.set_printoptions(formatter={"float": "{: 0.3f}".format}, suppress=True)
    args.gpu = gpu
    if args.distributed:
        # Global rank = node rank * GPUs per node + local GPU index.
        args.rank = args.rank * args.ngpus_per_node + gpu
        dist.init_process_group(
            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank
        )
    torch.cuda.set_device(args.gpu)
    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.benchmark = True
    args.test_mode = False
    loader = get_loader(args)
    print(args.rank, " gpu", args.gpu)
    if args.rank == 0:
        print("Batch size is:", args.batch_size, "epochs", args.max_epochs)
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    if args.rank == 0:
        os.makedirs(args.logdir, exist_ok=True)
    logger = init_log('global', logging.INFO)
    logger.propagate = 0
    pretrained_dir = args.pretrained_dir
    # NOTE(review): test.py builds SwinUNETR with use_v2=True; here it is left
    # at the default — confirm train/inference model configs should match.
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=args.dropout_path_rate,
        use_checkpoint=args.use_checkpoint,
    )
    if args.resume_ckpt:
        # Resume full model weights saved by a previous fine-tuning run.
        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))["state_dict"]
        model.load_state_dict(model_dict)
        print("Use pretrained weights")
    if args.use_ssl_pretrained:
        try:
            model_dict = torch.load(args.pretrained_checkpoint,
                                    map_location=torch.device('cpu'))
            state_dict = model_dict
            # Fix potential key-name differences between pre-training and
            # fine-tuning checkpoints.
            if "module." in list(state_dict.keys())[0]:
                print("Tag 'module.' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("module.", "")] = state_dict.pop(key)
            if "swin_vit" in list(state_dict.keys())[0]:
                print("Tag 'swin_vit' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
            # strict=False loads the encoder (Swin-ViT, SSL pre-trained) weights
            # while leaving the CNN UNet decoder weights untouched.
            model.load_state_dict(state_dict, strict=False)
            print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
        except ValueError:
            # Bug fix: the original message used args.model_name, which is never
            # defined by the parser and would raise AttributeError while
            # reporting the error. Report the checkpoint path instead.
            raise ValueError(
                "Self-supervised pre-trained weights not available for" + str(args.pretrained_checkpoint)
            )
    if args.squared_dice:
        dice_loss = DiceCELoss(
            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr
        )
    else:
        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)
    # Per-sample post-transforms for validation: one-hot labels / argmax preds.
    post_label = AsDiscrete(to_onehot=args.out_channels)
    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
    # Sliding-window inference over the full volume at validation time.
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print("Total parameters count", pytorch_total_params)
    best_acc = 0
    start_epoch = 0
    if args.checkpoint is not None:
        # Resume from an explicit checkpoint: strip any "backbone." prefix,
        # then restore epoch/best-accuracy counters if present.
        checkpoint = torch.load(args.checkpoint, map_location="cpu")
        from collections import OrderedDict
        new_state_dict = OrderedDict()
        for k, v in checkpoint["state_dict"].items():
            new_state_dict[k.replace("backbone.", "")] = v
        model.load_state_dict(new_state_dict, strict=False)
        if "epoch" in checkpoint:
            start_epoch = checkpoint["epoch"]
        if "best_acc" in checkpoint:
            best_acc = checkpoint["best_acc"]
        print("=> loaded checkpoint '{}' (epoch {}) (bestacc {})".format(args.checkpoint, start_epoch, best_acc))
    model.cuda(args.gpu)
    if args.distributed:
        torch.cuda.set_device(args.gpu)
        if args.norm_name == "batch":
            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model.cuda(args.gpu)
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)
    if args.optim_name == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "adamw":
        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, amsgrad=True)
    elif args.optim_name == "sgd":
        optimizer = torch.optim.SGD(
            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight
        )
    else:
        raise ValueError("Unsupported Optimization Procedure: " + str(args.optim_name))
    if args.lrschedule == "warmup_cosine":
        scheduler = LinearWarmupCosineAnnealingLR(
            optimizer, warmup_epochs=args.warmup_epochs, max_epochs=args.max_epochs
        )
    elif args.lrschedule == "cosine_anneal":
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)
        if args.checkpoint is not None:
            # Fast-forward the schedule to the resumed epoch.
            scheduler.step(epoch=start_epoch)
    else:
        scheduler = None
    accuracy = run_training(
        model=model,
        train_loader=loader[0],
        val_loader=loader[1],
        optimizer=optimizer,
        loss_func=dice_loss,
        acc_func=dice_acc,
        args=args,
        model_inferer=model_inferer,
        scheduler=scheduler,
        start_epoch=start_epoch,
        post_label=post_label,
        post_pred=post_pred,
    )
    return accuracy
# Registry of (name, level) pairs already configured, so repeated calls do not
# attach duplicate handlers to the same logger.
logs = set()


def init_log(name, level=logging.INFO):
    """Return a configured logger, creating its handler only on first call.

    On the first call for a given (name, level) pair, attaches a formatted
    StreamHandler; under SLURM, a filter silences all ranks except rank 0.

    Bug fix: previously a repeated call returned None (bare ``return``), which
    crashed callers that use the result (e.g. ``logger.propagate = 0``). Now
    the already-configured logger is returned instead.

    Args:
        name: logger name passed to ``logging.getLogger``.
        level: logging level for both the logger and its handler.

    Returns:
        The configured ``logging.Logger`` instance.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        # Already configured: return it without adding another handler.
        return logger
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        # Only rank 0 emits log records in multi-process SLURM jobs.
        rank = int(os.environ["SLURM_PROCID"])
        logger.addFilter(lambda record: rank == 0)
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
if __name__ == "__main__":
main()
================================================
FILE: Finetune/MM-WHS/optimizers/__init__.py
================================================
================================================
FILE: Finetune/MM-WHS/optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
# Public API of this module. Fix: WarmupCosineSchedule and
# LinearWarmupCosineAnnealingLR (imported by main.py) were missing, so
# `from optimizers.lr_scheduler import *` would not expose them.
__all__ = ["LinearLR", "ExponentialLR", "WarmupCosineSchedule", "LinearWarmupCosineAnnealingLR"]
class _LRSchedulerMONAI(_LRScheduler):
"""Base class for increasing the learning rate between two boundaries over a number
of iterations"""
def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
"""
Args:
optimizer: wrapped optimizer.
end_lr: the final learning rate.
num_iter: the number of iterations over which the test occurs.
last_epoch: the index of last epoch.
Returns:
None
"""
self.end_lr = end_lr
self.num_iter = num_iter
super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
    """Ramps the learning rate linearly from each group's base LR to
    ``end_lr`` over ``num_iter`` iterations.
    """

    def get_lr(self):
        # Fraction of the sweep completed so far (reaches 1 at the last step).
        fraction = self.last_epoch / (self.num_iter - 1)
        return [lr0 + fraction * (self.end_lr - lr0) for lr0 in self.base_lrs]
class ExponentialLR(_LRSchedulerMONAI):
    """Grows the learning rate geometrically from each group's base LR to
    ``end_lr`` over ``num_iter`` iterations.
    """

    def get_lr(self):
        # Fraction of the sweep completed so far (reaches 1 at the last step).
        fraction = self.last_epoch / (self.num_iter - 1)
        return [lr0 * (self.end_lr / lr0) ** fraction for lr0 in self.base_lrs]
class WarmupCosineSchedule(LambdaLR):
    """Linear warmup followed by cosine decay.
    Based on https://huggingface.co/ implementation.
    """

    def __init__(
        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
    ) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            warmup_steps: number of warmup iterations.
            t_total: total number of training iterations.
            cycles: cosine cycles parameter.
            last_epoch: the index of last epoch.
        Returns:
            None
        """
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super().__init__(optimizer, self.lr_lambda, last_epoch)

    def lr_lambda(self, step):
        # Warmup phase: multiplier rises linearly from 0 to 1.
        if step < self.warmup_steps:
            return float(step) / float(max(1.0, self.warmup_steps))
        # Decay phase: cosine curve from 1 down to 0 over the remaining steps,
        # clamped at 0 so the multiplier never goes negative.
        remaining = float(max(1, self.t_total - self.warmup_steps))
        progress = float(step - self.warmup_steps) / remaining
        cosine = math.cos(math.pi * float(self.cycles) * 2.0 * progress)
        return max(0.0, 0.5 * (1.0 + cosine))
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    """Linear warmup from ``warmup_start_lr`` up to each group's base LR over
    ``warmup_epochs`` epochs, then cosine annealing down to ``eta_min`` until
    ``max_epochs``."""

    def __init__(
        self,
        optimizer: Optimizer,
        warmup_epochs: int,
        max_epochs: int,
        warmup_start_lr: float = 0.0,
        eta_min: float = 0.0,
        last_epoch: int = -1,
    ) -> None:
        """
        Args:
            optimizer (Optimizer): Wrapped optimizer.
            warmup_epochs (int): Maximum number of iterations for linear warmup.
                NOTE(review): warmup_epochs == 1 would divide by zero in the
                ``(self.warmup_epochs - 1)`` denominators below — confirm
                callers always pass a value > 1.
            max_epochs (int): Maximum number of iterations
            warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
            eta_min (float): Minimum learning rate. Default: 0.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min
        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        """
        Compute learning rate using chainable form of the scheduler.

        Each branch derives the new LR incrementally from the group's current
        ``lr`` rather than from the base LR, so statement order matters.
        """
        if not self._get_lr_called_within_step:
            warnings.warn(
                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
            )
        if self.last_epoch == 0:
            # Very first step: start the warmup ramp.
            return [self.warmup_start_lr] * len(self.base_lrs)
        elif self.last_epoch < self.warmup_epochs:
            # Warmup: add one linear increment per epoch on top of current lr.
            return [
                group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        elif self.last_epoch == self.warmup_epochs:
            # Warmup complete: land exactly on the base learning rates.
            return self.base_lrs
        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
            # First epoch of a new cosine period (restart boundary).
            return [
                group["lr"]
                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        # General cosine step: ratio of consecutive cosine factors applied to
        # the current lr (chainable update, see CosineAnnealingLR).
        return [
            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            / (
                1
                + math.cos(
                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
                )
            )
            * (group["lr"] - self.eta_min)
            + self.eta_min
            for group in self.optimizer.param_groups
        ]

    def _get_closed_form_lr(self) -> List[float]:
        """
        Called when epoch is passed as a param to the `step` function of the scheduler.

        Computes the LR directly from ``last_epoch`` (non-chainable form):
        linear interpolation during warmup, cosine annealing afterwards.
        """
        if self.last_epoch < self.warmup_epochs:
            return [
                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr in self.base_lrs
            ]
        return [
            self.eta_min
            + 0.5
            * (base_lr - self.eta_min)
            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            for base_lr in self.base_lrs
        ]
================================================
FILE: Finetune/MM-WHS/pretrained_models/__init__.py
================================================
================================================
FILE: Finetune/MM-WHS/test.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import nibabel as nib
import numpy as np
import torch
from torch.cuda.amp import GradScaler, autocast
from utils.data_utils import get_loader
from utils.utils import dice, resample_3d
from utils.utils import AverageMeter, distributed_all_gather
from monai.inferers import sliding_window_inference
from monai.data import decollate_batch
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR
from monai.transforms import *
from monai.utils.enums import MetricReduction
from monai import data, transforms
from monai.data import *
from utils.utils import *
import resource
# Raise the soft limit on open file descriptors; multi-worker dataloaders can
# exceed the default per-process quota.
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# Pin inference to a single GPU and set the (unused in single-process
# inference, but harmless) rendezvous endpoint.
os.environ['CUDA_VISIBLE_DEVICES'] = "2"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
# Command-line interface for test-time inference.
# NOTE(review): intensity window here is a_min=-1000/a_max=1000 while main.py
# trains with a_min=0/a_max=1700 — confirm preprocessing should differ between
# training and inference.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
parser.add_argument(
    "--pretrained_dir", default="./runs/logs_0.9054/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data_dir", default="/data/jiaxin/data/MM-WHS/ct_train/", type=str, help="dataset directory")
parser.add_argument("--exp_name", default="MMWHS", type=str, help="experiment name")
parser.add_argument(
    "--trained_pth", default="./runs/logs/model.pt", type=str, help="your trained checkpoint directory")
parser.add_argument(
    "--save_prediction_path", default="./pred/MM-WHS/", type=str, help="test_prediction_path")
parser.add_argument(
    "--pretrained_model_name",
    default="model_0.9054.pt",
    type=str,
    help="pretrained model name",
)
# Cubic ROI edge length shared by --roi_x/--roi_y/--roi_z defaults below.
roi=64
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=2, type=int, help="number of sliding window batch size")
parser.add_argument("--infer_overlap", default=0.7, type=float, help="sliding window inference overlap")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=8, type=int, help="number of output channels")
parser.add_argument("--a_min", default=-1000.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=1000.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.1, type=float, help="RandShiftIntensityd aug probability")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
# NOTE(review): default=True with no action, so any value passed on the CLI is
# a truthy string — this flag cannot be disabled from the command line.
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
def get_test_loader(args):
    """
    Builds the test-time preprocessing pipeline and a dataloader over every
    ``*_image.nii.gz`` volume in ``args.data_dir`` that does not already have
    a saved prediction.

    Args:
        args: Command line arguments containing dataset paths and hyperparameters.

    Returns:
        (test_loader, test_transforms) — the transforms are also returned so
        the caller can invert them on the predictions.
    """
    test_transforms = transforms.Compose([
        LoadImaged(keys=["image"]),
        EnsureChannelFirstd(keys=["image"]),
        Orientationd(keys=["image"], axcodes="RAS"),
        Spacingd(keys=["image"], pixdim=(args.space_x, args.space_y, args.space_z),
                 mode=("bilinear")),
        ScaleIntensityRanged(
            keys=["image"],
            a_min=args.a_min,
            a_max=args.a_max,
            b_min=0.0,
            b_max=1.0,
            clip=True,
        ),
        CropForegroundd(keys=["image"], source_key="image"),
        SpatialPadd(keys=["image"], spatial_size=(args.roi_x, args.roi_y, args.roi_z),
                    mode='constant'),
    ])
    # Skip cases whose prediction file already exists in the output directory.
    check_dir(args.save_prediction_path)
    finished = set(os.listdir(args.save_prediction_path))
    data_dicts_test = []
    for candidate in os.listdir(args.data_dir):
        if candidate.endswith('_image.nii.gz') and candidate not in finished:
            data_dicts_test.append({
                'image': os.path.join(args.data_dir, candidate),
                'name': candidate,
            })
    print('test len {}'.format(len(data_dicts_test)))
    test_ds = Dataset(data=data_dicts_test, transform=test_transforms)
    test_loader = DataLoader(
        test_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=None, pin_memory=True
    )
    return test_loader, test_transforms
def main():
    """Run sliding-window inference on MM-WHS test volumes and save predictions.

    Loads the trained SwinUNETR checkpoint, predicts a label map for every
    image yielded by ``get_test_loader``, inverts the preprocessing transforms
    so the prediction matches the original image geometry, and writes one
    NIfTI file per case into ``args.save_prediction_path``.
    """
    args = parser.parse_args()
    test_loader, test_transforms = get_test_loader(args)
    # NOTE(review): use_v2=True here, but main.py trains with the default —
    # confirm the loaded checkpoint was produced with a matching config
    # (load_state_dict below uses strict=True).
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=0.0,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    # Sliding-window inference tiles the full volume with ROI-sized patches.
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model_dict = torch.load(args.trained_pth)["state_dict"]
    model.load_state_dict(model_dict, strict=True)
    model.eval()
    model.to(device)
    # enable cuDNN benchmark
    torch.backends.cudnn.benchmark = True
    # Invertd maps the prediction back through the test transforms (undoing
    # spacing/orientation/crop/pad); SaveImaged then writes it to disk.
    post_transforms = Compose([EnsureTyped(keys=["pred"]),
                               Invertd(keys=["pred"],
                                       transform=test_transforms,
                                       orig_keys="image",
                                       meta_keys="pred_meta_dict",
                                       orig_meta_keys="image_meta_dict",
                                       meta_key_postfix="meta_dict",
                                       nearest_interp=True,
                                       to_tensor=True),
                               AsDiscreted(keys="pred", argmax=False, to_onehot=None),
                               SaveImaged(keys="pred", meta_keys="pred_meta_dict", output_dir=args.save_prediction_path,
                                          separate_folder=False, folder_layout=None,
                                          resample=False),
                               ])
    with torch.no_grad():
        for idx, batch_data in enumerate(test_loader):
            torch.cuda.empty_cache()
            data = batch_data["image"]
            data = data.cuda()
            name = batch_data['name'][0]
            with autocast(enabled=True):
                logits = model_inferer(data)
            # Collapse per-class logits to a single-channel label map.
            logits = logits.argmax(1)
            output = logits
            print(torch.unique(output))
            # Restore the channel dimension expected by the post-transforms.
            batch_data['pred'] = output.unsqueeze(1)
            batch_data = [post_transforms(i) for i in
                          decollate_batch(batch_data)]
            # SaveImaged appends "_trans" to the file stem; rename the output
            # to the original case name. NOTE(review): assumes exactly this
            # suffix — confirm against the installed MONAI version.
            os.rename(os.path.join(args.save_prediction_path, name[:-7]+'_trans.nii.gz'),
                      os.path.join(args.save_prediction_path, name))
if __name__ == "__main__":
main()
================================================
FILE: Finetune/MM-WHS/train.sh
================================================
# Timestamp used to name this run's log file.
now=$(date +"%Y%m%d_%H%M%S")
logdir=runs/logs
mkdir -p $logdir
# Launch training with torchrun and tee stdout into a dated log file.
# NOTE(review): torchrun's documented flag is --max-restarts (plural);
# --max-restart only resolves via argparse prefix abbreviation — confirm it
# is accepted by the installed torch version.
torchrun --master_port=21120 --max-restart=10 main.py \
--logdir $logdir | tee $logdir/$now.txt
================================================
FILE: Finetune/MM-WHS/trainer.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import numpy as np
import torch
import torch.nn.parallel
import torch.utils.data.distributed
from tensorboardX import SummaryWriter
from torch.cuda.amp import GradScaler, autocast
from utils.utils import AverageMeter, distributed_all_gather
from monai.data import decollate_batch
def train_epoch(model, loader, optimizer, scaler, epoch, loss_func, args):
    """Run one training epoch and return the running-average loss.

    Args:
        model: network to train (already on GPU / wrapped in DDP if needed).
        loader: training dataloader yielding dicts with "image"/"label"
            (or [image, label] lists).
        optimizer: optimizer stepping the model parameters.
        scaler: GradScaler used when args.amp is set, else unused.
        epoch: current epoch index (for logging only).
        loss_func: loss callable applied to (logits, target).
        args: run configuration namespace (amp, distributed, rank, ...).
    """
    model.train()
    tic = time.time()
    loss_meter = AverageMeter()
    for step, batch in enumerate(loader):
        if isinstance(batch, list):
            data, target = batch
        else:
            data, target = batch["image"], batch["label"]
        data, target = data.cuda(), target.cuda()
        # Detach gradients entirely instead of zeroing them.
        for p in model.parameters():
            p.grad = None
        with autocast(enabled=args.amp):
            loss = loss_func(model(data), target)
        if args.amp:
            # Scaled backward/step to avoid fp16 underflow.
            scaler.scale(loss).backward()
            scaler.step(optimizer)
            scaler.update()
        else:
            loss.backward()
            optimizer.step()
        if args.distributed:
            # Average the loss across ranks before updating the meter.
            gathered = distributed_all_gather([loss], out_numpy=True, is_valid=step < loader.sampler.valid_length)
            loss_meter.update(
                np.mean(np.mean(np.stack(gathered, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size
            )
        else:
            loss_meter.update(loss.item(), n=args.batch_size)
        lr = optimizer.param_groups[0]["lr"]
        if args.rank == 0:
            print(
                "Epoch {}/{} {}/{}".format(epoch, args.max_epochs, step, len(loader)),
                "loss: {:.4f}".format(loss_meter.avg),
                "lr: {:.8f}".format(lr),
                "time {:.2f}s".format(time.time() - tic),
            )
        tic = time.time()
    # Free gradient memory before validation/checkpointing.
    for p in model.parameters():
        p.grad = None
    return loss_meter.avg
def val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):
    """Run one validation pass and return the running mean accuracy.

    Args:
        model: network under evaluation.
        loader: validation dataloader yielding dicts with "image"/"label"
            (or [image, label] lists).
        epoch: current epoch index (for logging only).
        acc_func: metric aggregator (DiceMetric-style: call, aggregate, reset).
        args: run configuration namespace (amp, distributed, rank, ...).
        model_inferer: optional sliding-window inferer; plain forward pass
            when None.
        post_label: per-sample label post-transform (e.g. one-hot).
        post_pred: per-sample prediction post-transform (e.g. argmax+one-hot).
    """
    model.eval()
    run_acc = AverageMeter()
    start_time = time.time()
    with torch.no_grad():
        for idx, batch_data in enumerate(loader):
            if isinstance(batch_data, list):
                data, target = batch_data
            else:
                data, target = batch_data["image"], batch_data["label"]
            data, target = data.cuda(), target.cuda()
            with autocast(enabled=args.amp):
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            # Keep predictions and targets on the same device for the metric.
            if not logits.is_cuda:
                target = target.cpu()
            val_labels_list = decollate_batch(target)
            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
            val_outputs_list = decollate_batch(logits)
            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
            # Reset per batch so aggregate() reflects only the current samples;
            # the cross-batch running mean lives in run_acc instead.
            acc_func.reset()
            acc_func(y_pred=val_output_convert, y=val_labels_convert)
            acc, not_nans = acc_func.aggregate()
            acc = acc.cuda(args.rank)
            if args.distributed:
                # Gather per-rank accuracy and valid-sample counts, then fold
                # them into the running average weighted by not_nans.
                acc_list, not_nans_list = distributed_all_gather(
                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length
                )
                for al, nl in zip(acc_list, not_nans_list):
                    run_acc.update(al, n=nl)
            else:
                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            if args.rank == 0:
                avg_acc = np.mean(run_acc.avg)
                print(
                    "Val {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
                    "acc",
                    avg_acc,
                    "time {:.2f}s".format(time.time() - start_time),
                )
            start_time = time.time()
    return run_acc.avg
def save_checkpoint(model, epoch, args, filename="model.pt", best_acc=0, optimizer=None, scheduler=None):
    """Serialize model (and optionally optimizer/scheduler) state into args.logdir.

    Args:
        model: network whose weights are saved; unwrapped from DDP when
            args.distributed is set so keys have no "module." prefix.
        epoch: epoch index stored alongside the weights.
        args: namespace providing `distributed` and `logdir`.
        filename: checkpoint file name inside args.logdir.
        best_acc: best validation accuracy recorded so far.
        optimizer/scheduler: their state dicts are included when not None.
    """
    if args.distributed:
        state_dict = model.module.state_dict()
    else:
        state_dict = model.state_dict()
    save_dict = {"epoch": epoch, "best_acc": best_acc, "state_dict": state_dict}
    if optimizer is not None:
        save_dict["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        save_dict["scheduler"] = scheduler.state_dict()
    target_path = os.path.join(args.logdir, filename)
    torch.save(save_dict, target_path)
    print("Saving checkpoint", target_path)
def run_training(
    model,
    train_loader,
    val_loader,
    optimizer,
    loss_func,
    acc_func,
    args,
    model_inferer=None,
    scheduler=None,
    start_epoch=0,
    post_label=None,
    post_pred=None,
):
    """Main train/validate loop; returns the best validation accuracy seen.

    Every epoch calls ``train_epoch``; every ``args.val_every`` epochs runs
    ``val_epoch``, tracks the best mean accuracy, and (rank 0 only) writes
    Tensorboard scalars and checkpoints to ``args.logdir``.

    Args:
        model: network to train.
        train_loader / val_loader: dataloaders (train sampler re-seeded per
            epoch when distributed).
        optimizer: optimizer passed through to ``train_epoch``.
        loss_func: training loss passed through to ``train_epoch``.
        acc_func: validation metric passed through to ``val_epoch``.
        args: namespace providing logdir, rank, amp, distributed, max_epochs,
            val_every, save_checkpoint.
        model_inferer: optional inferer used during validation.
        scheduler: optional LR scheduler, stepped once per epoch.
        start_epoch: epoch to resume from.
        post_label / post_pred: validation post-processing transforms.

    Returns:
        Best (maximum) mean validation accuracy observed.
    """
    writer = None
    if args.logdir is not None and args.rank == 0:
        writer = SummaryWriter(log_dir=args.logdir)
        if args.rank == 0:
            print("Writing Tensorboard logs to ", args.logdir)
    scaler = None
    if args.amp:
        scaler = GradScaler()
    val_acc_max = 0.0
    for epoch in range(start_epoch, args.max_epochs):
        if args.distributed:
            # Re-seed the shuffle per epoch and sync ranks before training.
            train_loader.sampler.set_epoch(epoch)
            torch.distributed.barrier()
        print(args.rank, time.ctime(), "Epoch:", epoch)
        epoch_time = time.time()
        train_loss = train_epoch(
            model, train_loader, optimizer, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args
        )
        if args.rank == 0:
            print(
                "Final training {}/{}".format(epoch, args.max_epochs - 1),
                "loss: {:.4f}".format(train_loss),
                "time {:.2f}s".format(time.time() - epoch_time),
            )
        if args.rank == 0 and writer is not None:
            writer.add_scalar("train_loss", train_loss, epoch)
        b_new_best = False
        if (epoch + 1) % args.val_every == 0:
            if args.distributed:
                torch.distributed.barrier()
            epoch_time = time.time()
            val_avg_acc = val_epoch(
                model,
                val_loader,
                epoch=epoch,
                acc_func=acc_func,
                model_inferer=model_inferer,
                args=args,
                post_label=post_label,
                post_pred=post_pred,
            )
            # Collapse per-class accuracies into a single mean score.
            val_avg_acc = np.mean(val_avg_acc)
            if args.rank == 0:
                print(
                    "Final validation {}/{}".format(epoch, args.max_epochs - 1),
                    "acc",
                    val_avg_acc,
                    "time {:.2f}s".format(time.time() - epoch_time),
                )
                if writer is not None:
                    writer.add_scalar("val_acc", val_avg_acc, epoch)
                if val_avg_acc > val_acc_max:
                    print("new best ({:.6f} --> {:.6f}). ".format(val_acc_max, val_avg_acc))
                    val_acc_max = val_avg_acc
                    b_new_best = True
                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                        save_checkpoint(
                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler
                        )
            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                # Always save the latest weights; promote them to model.pt
                # when this epoch produced a new best score.
                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename="model_final.pt")
                if b_new_best:
                    print("Copying to model.pt new best model!!!!")
                    shutil.copyfile(os.path.join(args.logdir, "model_final.pt"), os.path.join(args.logdir, "model.pt"))
        if scheduler is not None:
            scheduler.step()
    print("Training Finished !, Best Accuracy: ", val_acc_max)
    return val_acc_max
================================================
FILE: Finetune/MM-WHS/utils/__init__.py
================================================
================================================
FILE: Finetune/MM-WHS/utils/data_utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
from monai.transforms import *
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler that optionally pads the index list ("make_even")
    so every replica draws exactly the same number of samples per epoch.

    ``valid_length`` records how many *real* (non-padding) samples this rank
    owns, which callers can use to ignore padded batches in metrics.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.shuffle = shuffle
        self.make_even = make_even
        self.epoch = 0
        # Ceil-divide so the padded total splits evenly across replicas.
        self.num_samples = int(math.ceil(len(self.dataset) / float(self.num_replicas)))
        self.total_size = self.num_samples * self.num_replicas
        all_indices = list(range(len(self.dataset)))
        self.valid_length = len(all_indices[self.rank: self.total_size: self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            gen = torch.Generator()
            gen.manual_seed(self.epoch)  # deterministic shuffle per epoch
            indices = torch.randperm(len(self.dataset), generator=gen).tolist()
        else:
            indices = list(range(len(self.dataset)))
        if self.make_even:
            shortfall = self.total_size - len(indices)
            if shortfall > 0:
                if shortfall < len(indices):
                    # Pad by repeating the head of the index list.
                    indices += indices[:shortfall]
                else:
                    # Dataset smaller than the shortfall: pad with random picks.
                    filler = np.random.randint(low=0, high=len(indices), size=shortfall)
                    indices += [indices[j] for j in filler]
            assert len(indices) == self.total_size
        # Round-robin split of the (padded) index list across replicas.
        indices = indices[self.rank: self.total_size: self.num_replicas]
        self.num_samples = len(indices)
        return iter(indices)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        """Set the epoch used to seed the per-epoch shuffle."""
        self.epoch = epoch
def get_loader(args):
    """Build the MM-WHS train and validation dataloaders.

    Fix: the persistent-cache directory was hard-coded (twice) to a
    machine-specific path; it is now read from ``args.cache_dir`` when
    present, with the original literal kept as the default so existing
    callers behave identically.

    Args:
        args: namespace providing data_dir, json_list, spacing/intensity
            parameters, roi sizes, augmentation probabilities, sw_batch_size,
            batch_size, workers and distributed.

    Returns:
        ``[train_loader, val_loader]``.
    """
    data_dir = args.data_dir
    datalist_json = args.json_list
    # Configurable cache location for PersistentDataset (backward-compatible default).
    cache_dir = getattr(args, "cache_dir", "/data/linshan/cache/MM-WHS")
    train_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            # Remap the raw MM-WHS intensity labels (205, 420, ...) to 1..7.
            Convert_WHS_label(keys="label"),
            transforms.RandCropByPosNegLabeld(
                keys=["image", "label"],
                label_key="label",
                spatial_size=(args.roi_x, args.roi_y, args.roi_z),
                pos=9,  # 9:1 pos/neg ratio strongly favors foreground-centered crops
                neg=1,
                num_samples=args.sw_batch_size,
                image_key="image",
                image_threshold=0,
            ),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=0),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=1),
            transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=2),
            transforms.RandRotate90d(keys=["image", "label"], prob=args.RandRotate90d_prob, max_k=3),
            transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    val_transform = transforms.Compose(
        [
            transforms.LoadImaged(keys=["image", "label"]),
            transforms.EnsureChannelFirstd(keys=["image", "label"]),
            transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
            transforms.Spacingd(
                keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
            ),
            transforms.ScaleIntensityRanged(
                keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
            ),
            transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
            Convert_WHS_label(keys="label"),
            transforms.ToTensord(keys=["image", "label"]),
        ]
    )
    datalist = load_decathlon_datalist(datalist_json, True, "training", base_dir=data_dir)
    print('use persistent')
    # PersistentDataset caches deterministic transform results on disk.
    train_ds = PersistentDataset(
        data=datalist,
        transform=train_transform,
        pickle_protocol=pickle.HIGHEST_PROTOCOL,
        cache_dir=cache_dir,
    )
    train_sampler = Sampler(train_ds) if args.distributed else None
    train_loader = data.DataLoader(
        train_ds,
        batch_size=args.batch_size,
        shuffle=(train_sampler is None),
        num_workers=args.workers,
        sampler=train_sampler,
        pin_memory=True,
    )
    val_files = load_decathlon_datalist(datalist_json, True, "validation", base_dir=data_dir)
    val_ds = PersistentDataset(
        data=val_files,
        transform=val_transform,
        pickle_protocol=pickle.HIGHEST_PROTOCOL,
        cache_dir=cache_dir,
    )
    val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None
    val_loader = data.DataLoader(
        val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=False
    )
    loader = [train_loader, val_loader]
    return loader
class Convert_WHS_label(MapTransform):
    """Remap raw MM-WHS label intensities to contiguous class indices 1..7."""

    # Raw MM-WHS intensity value -> contiguous class index.
    WHS_LABEL_MAP = {205: 1, 420: 2, 500: 3, 550: 4, 600: 5, 820: 6, 850: 7}

    def __call__(self, data):
        d = dict(data)
        for key in self.keys:
            src = d[key]
            # Clone first so comparisons below always see the original values.
            remapped = src.clone()
            for raw_value, class_idx in self.WHS_LABEL_MAP.items():
                remapped[src == raw_value] = class_idx
            d[key] = remapped.float()
        return d
================================================
FILE: Finetune/MM-WHS/utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
import os
def resample_3d(img, target_size):
    """Nearest-neighbour resample of a 3-D volume to ``target_size``.

    Args:
        img: 3-D array to resample.
        target_size: desired (x, y, z) output shape.

    Returns:
        The resampled array with shape ``target_size``.
    """
    # Per-axis zoom factors; order=0 keeps label values intact (no interpolation).
    ratios = tuple(float(t) / float(s) for s, t in zip(img.shape, target_size))
    return ndimage.zoom(img, ratios, order=0, prefilter=False)
def dice(x, y):
    """Dice overlap coefficient between two binary masks.

    Args:
        x: predicted binary mask.
        y: ground-truth binary mask.

    Returns:
        2*|x ∩ y| / (|x| + |y|), or 0.0 when ``y`` has no foreground.
    """
    y_sum = np.sum(y)
    # Empty ground truth: score 0 by convention here.
    if y_sum == 0:
        return 0.0
    intersect = np.sum(x * y)
    x_sum = np.sum(x)
    return 2 * intersect / (x_sum + y_sum)
class AverageMeter(object):
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)
def distributed_all_gather(
    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
    """All-gather every tensor in ``tensor_list`` across distributed ranks.

    Args:
        tensor_list: tensors to gather; each is gathered independently.
        valid_batch_size: if given, keep only the first ``valid_batch_size``
            gathered entries (capped at ``world_size``).
        out_numpy: convert gathered tensors to numpy arrays before returning.
        world_size: number of ranks; queried from torch.distributed when None.
        no_barrier: skip the synchronizing barrier before gathering.
        is_valid: this rank's validity flag; entries from ranks whose flag is
            False are dropped. Only converted to a tensor when
            ``valid_batch_size`` is None (see NOTE below).

    Returns:
        A list with one entry per input tensor, each a list of the values
        gathered from the (kept) ranks.
    """
    if world_size is None:
        world_size = torch.distributed.get_world_size()
    if valid_batch_size is not None:
        valid_batch_size = min(valid_batch_size, world_size)
    elif is_valid is not None:
        # Promote the local flag to a bool tensor so it can be all-gathered.
        # NOTE(review): because this is an elif, passing BOTH valid_batch_size
        # and is_valid would leave is_valid a plain bool and break the
        # zeros_like call below — callers appear to pass only one of them.
        is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
    if not no_barrier:
        torch.distributed.barrier()
    tensor_list_out = []
    with torch.no_grad():
        if is_valid is not None:
            # Exchange validity flags so every rank filters identically.
            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
            torch.distributed.all_gather(is_valid_list, is_valid)
            is_valid = [x.item() for x in is_valid_list]
        for tensor in tensor_list:
            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
            torch.distributed.all_gather(gather_list, tensor)
            if valid_batch_size is not None:
                gather_list = gather_list[:valid_batch_size]
            elif is_valid is not None:
                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
            if out_numpy:
                gather_list = [t.cpu().numpy() for t in gather_list]
            tensor_list_out.append(gather_list)
    return tensor_list_out
def color_map(dataset='pascal'):
    """Return a 256x3 uint8 color palette for the given dataset name.

    'pascal'/'coco' use the standard PASCAL VOC bit-interleaved palette;
    'cityscapes' uses its fixed 19-class palette (index 19 and 255 are black).
    """
    cmap = np.zeros((256, 3), dtype='uint8')
    if dataset in ('pascal', 'coco'):
        # Standard VOC palette: spread the low 3*8 bits of the class id
        # across the high bits of the R/G/B channels.
        for i in range(256):
            rgb = [0, 0, 0]
            c = i
            for j in range(8):
                for channel in range(3):
                    rgb[channel] |= ((c >> channel) & 1) << (7 - j)
                c >>= 3
            cmap[i] = np.array(rgb)
    elif dataset == 'cityscapes':
        palette = [
            (128, 64, 128), (244, 35, 232), (70, 70, 70), (102, 102, 156),
            (190, 153, 153), (153, 153, 153), (250, 170, 30), (220, 220, 0),
            (107, 142, 35), (152, 251, 152), (70, 130, 180), (220, 20, 60),
            (255, 0, 0), (0, 0, 142), (0, 0, 70), (0, 60, 100),
            (0, 80, 100), (0, 0, 230), (119, 11, 32), (0, 0, 0),
        ]
        for idx, rgb in enumerate(palette):
            cmap[idx] = np.array(rgb)
        cmap[255] = np.array([0, 0, 0])  # ignore-index color
    return cmap
def check_dir(dir):
    """Create directory ``dir`` (and any missing parents) if it does not exist.

    Fix: the original ``os.path.exists`` check followed by ``os.makedirs``
    races when several processes (e.g. dataloader workers or DDP ranks)
    create the same directory; ``exist_ok=True`` makes the call atomic and
    idempotent with identical observable behavior otherwise.
    """
    os.makedirs(dir, exist_ok=True)
def load(model, model_dict):
    """Load pretrained weights into ``model``, fixing common key prefixes.

    The checkpoint dict may wrap its weights under "state_dict",
    "network_weights" or "net"; keys may carry "module." / "backbone."
    prefixes or use "swin_vit" instead of "swinViT". Parameters whose name or
    shape does not match the current model keep the model's own values.

    Args:
        model: target torch module.
        model_dict: loaded checkpoint dictionary.

    Returns:
        ``model`` with the merged weights loaded (strict load on the merged dict).
    """
    # Unwrap the checkpoint container to reach the raw parameter mapping.
    for container in ("state_dict", "network_weights", "net"):
        if container in model_dict.keys():
            state_dict = model_dict[container]
            break
    else:
        state_dict = model_dict
    # Each fixup inspects the (possibly already rewritten) first key, matching
    # the original behavior of chained prefix repairs.
    if "module." in list(state_dict.keys())[0]:
        print("Tag 'module.' found in state dict - fixing!")
        for key in list(state_dict.keys()):
            state_dict[key.replace("module.", "")] = state_dict.pop(key)
    if "backbone." in list(state_dict.keys())[0]:
        print("Tag 'backbone.' found in state dict - fixing!")
        for key in list(state_dict.keys()):
            state_dict[key.replace("backbone.", "")] = state_dict.pop(key)
    if "swin_vit" in list(state_dict.keys())[0]:
        print("Tag 'swin_vit' found in state dict - fixing!")
        for key in list(state_dict.keys()):
            state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
    current_model_dict = model.state_dict()
    merged = {}
    for name, current in current_model_dict.items():
        candidate = state_dict.get(name)
        # Take the pretrained tensor only when present and shape-compatible.
        if candidate is not None and candidate.size() == current.size():
            merged[name] = candidate
        else:
            merged[name] = current
    model.load_state_dict(merged, strict=True)
    print("Using VoCo pretrained backbone weights !!!!!!!")
    return model
================================================
FILE: Finetune/Word/dataset/__init__.py
================================================
================================================
FILE: Finetune/Word/dataset/dataset_word.json
================================================
{
"name": "WORD-V0.1.0",
"description": "Whole abdomen ORgan segmentation Dataset (WORD), just for research use !!!",
"reference": "WORD: Revisiting Organs Segmentation in the Whole Abdominal Region, link:https://arxiv.org/pdf/2111.02403.pdf, https://github.com/HiLab-git/WORD",
"licence": "GNU General Public License v3.0",
"release": "v0.1.0 10/11/2021",
"tensorImageSize": "3D",
"modality": {
"0": "CT"
},
"labels": {
"0": "background",
"1": "liver",
"2": "spleen",
"3": "left_kidney",
"4": "right_kidney",
"5": "stomach",
"6": "gallbladder",
"7": "esophagus",
"8": "pancreas",
"9": "duodenum",
"10": "colon",
"11": "intestine",
"12": "adrenal",
"13": "rectum",
"14": "bladder",
"15": "Head_of_femur_L",
"16": "Head_of_femur_R"
},
"numTraining": 100,
"numValidation": 20,
"numTest": 30,
"training": [
{
"image": "./imagesTr/word_0096.nii.gz",
"label": "./labelsTr/word_0096.nii.gz"
},
{
"image": "./imagesTr/word_0010.nii.gz",
"label": "./labelsTr/word_0010.nii.gz"
},
{
"image": "./imagesTr/word_0078.nii.gz",
"label": "./labelsTr/word_0078.nii.gz"
},
{
"image": "./imagesTr/word_0109.nii.gz",
"label": "./labelsTr/word_0109.nii.gz"
},
{
"image": "./imagesTr/word_0051.nii.gz",
"label": "./labelsTr/word_0051.nii.gz"
},
{
"image": "./imagesTr/word_0067.nii.gz",
"label": "./labelsTr/word_0067.nii.gz"
},
{
"image": "./imagesTr/word_0107.nii.gz",
"label": "./labelsTr/word_0107.nii.gz"
},
{
"image": "./imagesTr/word_0105.nii.gz",
"label": "./labelsTr/word_0105.nii.gz"
},
{
"image": "./imagesTr/word_0065.nii.gz",
"label": "./labelsTr/word_0065.nii.gz"
},
{
"image": "./imagesTr/word_0144.nii.gz",
"label": "./labelsTr/word_0144.nii.gz"
},
{
"image": "./imagesTr/word_0118.nii.gz",
"label": "./labelsTr/word_0118.nii.gz"
},
{
"image": "./imagesTr/word_0140.nii.gz",
"label": "./labelsTr/word_0140.nii.gz"
},
{
"image": "./imagesTr/word_0002.nii.gz",
"label": "./labelsTr/word_0002.nii.gz"
},
{
"image": "./imagesTr/word_0091.nii.gz",
"label": "./labelsTr/word_0091.nii.gz"
},
{
"image": "./imagesTr/word_0009.nii.gz",
"label": "./labelsTr/word_0009.nii.gz"
},
{
"image": "./imagesTr/word_0100.nii.gz",
"label": "./labelsTr/word_0100.nii.gz"
},
{
"image": "./imagesTr/word_0032.nii.gz",
"label": "./labelsTr/word_0032.nii.gz"
},
{
"image": "./imagesTr/word_0040.nii.gz",
"label": "./labelsTr/word_0040.nii.gz"
},
{
"image": "./imagesTr/word_0130.nii.gz",
"label": "./labelsTr/word_0130.nii.gz"
},
{
"image": "./imagesTr/word_0101.nii.gz",
"label": "./labelsTr/word_0101.nii.gz"
},
{
"image": "./imagesTr/word_0018.nii.gz",
"label": "./labelsTr/word_0018.nii.gz"
},
{
"image": "./imagesTr/word_0090.nii.gz",
"label": "./labelsTr/word_0090.nii.gz"
},
{
"image": "./imagesTr/word_0071.nii.gz",
"label": "./labelsTr/word_0071.nii.gz"
},
{
"image": "./imagesTr/word_0042.nii.gz",
"label": "./labelsTr/word_0042.nii.gz"
},
{
"image": "./imagesTr/word_0126.nii.gz",
"label": "./labelsTr/word_0126.nii.gz"
},
{
"image": "./imagesTr/word_0135.nii.gz",
"label": "./labelsTr/word_0135.nii.gz"
},
{
"image": "./imagesTr/word_0138.nii.gz",
"label": "./labelsTr/word_0138.nii.gz"
},
{
"image": "./imagesTr/word_0116.nii.gz",
"label": "./labelsTr/word_0116.nii.gz"
},
{
"image": "./imagesTr/word_0070.nii.gz",
"label": "./labelsTr/word_0070.nii.gz"
},
{
"image": "./imagesTr/word_0084.nii.gz",
"label": "./labelsTr/word_0084.nii.gz"
},
{
"image": "./imagesTr/word_0056.nii.gz",
"label": "./labelsTr/word_0056.nii.gz"
},
{
"image": "./imagesTr/word_0148.nii.gz",
"label": "./labelsTr/word_0148.nii.gz"
},
{
"image": "./imagesTr/word_0132.nii.gz",
"label": "./labelsTr/word_0132.nii.gz"
},
{
"image": "./imagesTr/word_0102.nii.gz",
"label": "./labelsTr/word_0102.nii.gz"
},
{
"image": "./imagesTr/word_0082.nii.gz",
"label": "./labelsTr/word_0082.nii.gz"
},
{
"image": "./imagesTr/word_0062.nii.gz",
"label": "./labelsTr/word_0062.nii.gz"
},
{
"image": "./imagesTr/word_0073.nii.gz",
"label": "./labelsTr/word_0073.nii.gz"
},
{
"image": "./imagesTr/word_0046.nii.gz",
"label": "./labelsTr/word_0046.nii.gz"
},
{
"image": "./imagesTr/word_0146.nii.gz",
"label": "./labelsTr/word_0146.nii.gz"
},
{
"image": "./imagesTr/word_0113.nii.gz",
"label": "./labelsTr/word_0113.nii.gz"
},
{
"image": "./imagesTr/word_0006.nii.gz",
"label": "./labelsTr/word_0006.nii.gz"
},
{
"image": "./imagesTr/word_0127.nii.gz",
"label": "./labelsTr/word_0127.nii.gz"
},
{
"image": "./imagesTr/word_0095.nii.gz",
"label": "./labelsTr/word_0095.nii.gz"
},
{
"image": "./imagesTr/word_0058.nii.gz",
"label": "./labelsTr/word_0058.nii.gz"
},
{
"image": "./imagesTr/word_0128.nii.gz",
"label": "./labelsTr/word_0128.nii.gz"
},
{
"image": "./imagesTr/word_0111.nii.gz",
"label": "./labelsTr/word_0111.nii.gz"
},
{
"image": "./imagesTr/word_0049.nii.gz",
"label": "./labelsTr/word_0049.nii.gz"
},
{
"image": "./imagesTr/word_0029.nii.gz",
"label": "./labelsTr/word_0029.nii.gz"
},
{
"image": "./imagesTr/word_0086.nii.gz",
"label": "./labelsTr/word_0086.nii.gz"
},
{
"image": "./imagesTr/word_0123.nii.gz",
"label": "./labelsTr/word_0123.nii.gz"
},
{
"image": "./imagesTr/word_0011.nii.gz",
"label": "./labelsTr/word_0011.nii.gz"
},
{
"image": "./imagesTr/word_0005.nii.gz",
"label": "./labelsTr/word_0005.nii.gz"
},
{
"image": "./imagesTr/word_0036.nii.gz",
"label": "./labelsTr/word_0036.nii.gz"
},
{
"image": "./imagesTr/word_0114.nii.gz",
"label": "./labelsTr/word_0114.nii.gz"
},
{
"image": "./imagesTr/word_0145.nii.gz",
"label": "./labelsTr/word_0145.nii.gz"
},
{
"image": "./imagesTr/word_0136.nii.gz",
"label": "./labelsTr/word_0136.nii.gz"
},
{
"image": "./imagesTr/word_0055.nii.gz",
"label": "./labelsTr/word_0055.nii.gz"
},
{
"image": "./imagesTr/word_0047.nii.gz",
"label": "./labelsTr/word_0047.nii.gz"
},
{
"image": "./imagesTr/word_0093.nii.gz",
"label": "./labelsTr/word_0093.nii.gz"
},
{
"image": "./imagesTr/word_0026.nii.gz",
"label": "./labelsTr/word_0026.nii.gz"
},
{
"image": "./imagesTr/word_0044.nii.gz",
"label": "./labelsTr/word_0044.nii.gz"
},
{
"image": "./imagesTr/word_0061.nii.gz",
"label": "./labelsTr/word_0061.nii.gz"
},
{
"image": "./imagesTr/word_0125.nii.gz",
"label": "./labelsTr/word_0125.nii.gz"
},
{
"image": "./imagesTr/word_0064.nii.gz",
"label": "./labelsTr/word_0064.nii.gz"
},
{
"image": "./imagesTr/word_0087.nii.gz",
"label": "./labelsTr/word_0087.nii.gz"
},
{
"image": "./imagesTr/word_0013.nii.gz",
"label": "./labelsTr/word_0013.nii.gz"
},
{
"image": "./imagesTr/word_0104.nii.gz",
"label": "./labelsTr/word_0104.nii.gz"
},
{
"image": "./imagesTr/word_0008.nii.gz",
"label": "./labelsTr/word_0008.nii.gz"
},
{
"image": "./imagesTr/word_0079.nii.gz",
"label": "./labelsTr/word_0079.nii.gz"
},
{
"image": "./imagesTr/word_0030.nii.gz",
"label": "./labelsTr/word_0030.nii.gz"
},
{
"image": "./imagesTr/word_0094.nii.gz",
"label": "./labelsTr/word_0094.nii.gz"
},
{
"image": "./imagesTr/word_0022.nii.gz",
"label": "./labelsTr/word_0022.nii.gz"
},
{
"image": "./imagesTr/word_0134.nii.gz",
"label": "./labelsTr/word_0134.nii.gz"
},
{
"image": "./imagesTr/word_0063.nii.gz",
"label": "./labelsTr/word_0063.nii.gz"
},
{
"image": "./imagesTr/word_0117.nii.gz",
"label": "./labelsTr/word_0117.nii.gz"
},
{
"image": "./imagesTr/word_0142.nii.gz",
"label": "./labelsTr/word_0142.nii.gz"
},
{
"image": "./imagesTr/word_0081.nii.gz",
"label": "./labelsTr/word_0081.nii.gz"
},
{
"image": "./imagesTr/word_0053.nii.gz",
"label": "./labelsTr/word_0053.nii.gz"
},
{
"image": "./imagesTr/word_0106.nii.gz",
"label": "./labelsTr/word_0106.nii.gz"
},
{
"image": "./imagesTr/word_0003.nii.gz",
"label": "./labelsTr/word_0003.nii.gz"
},
{
"image": "./imagesTr/word_0072.nii.gz",
"label": "./labelsTr/word_0072.nii.gz"
},
{
"image": "./imagesTr/word_0119.nii.gz",
"label": "./labelsTr/word_0119.nii.gz"
},
{
"image": "./imagesTr/word_0068.nii.gz",
"label": "./labelsTr/word_0068.nii.gz"
},
{
"image": "./imagesTr/word_0027.nii.gz",
"label": "./labelsTr/word_0027.nii.gz"
},
{
"image": "./imagesTr/word_0121.nii.gz",
"label": "./labelsTr/word_0121.nii.gz"
},
{
"image": "./imagesTr/word_0147.nii.gz",
"label": "./labelsTr/word_0147.nii.gz"
},
{
"image": "./imagesTr/word_0020.nii.gz",
"label": "./labelsTr/word_0020.nii.gz"
},
{
"image": "./imagesTr/word_0133.nii.gz",
"label": "./labelsTr/word_0133.nii.gz"
},
{
"image": "./imagesTr/word_0108.nii.gz",
"label": "./labelsTr/word_0108.nii.gz"
},
{
"image": "./imagesTr/word_0004.nii.gz",
"label": "./labelsTr/word_0004.nii.gz"
},
{
"image": "./imagesTr/word_0038.nii.gz",
"label": "./labelsTr/word_0038.nii.gz"
},
{
"image": "./imagesTr/word_0089.nii.gz",
"label": "./labelsTr/word_0089.nii.gz"
},
{
"image": "./imagesTr/word_0059.nii.gz",
"label": "./labelsTr/word_0059.nii.gz"
},
{
"image": "./imagesTr/word_0041.nii.gz",
"label": "./labelsTr/word_0041.nii.gz"
},
{
"image": "./imagesTr/word_0150.nii.gz",
"label": "./labelsTr/word_0150.nii.gz"
},
{
"image": "./imagesTr/word_0122.nii.gz",
"label": "./labelsTr/word_0122.nii.gz"
},
{
"image": "./imagesTr/word_0012.nii.gz",
"label": "./labelsTr/word_0012.nii.gz"
},
{
"image": "./imagesTr/word_0115.nii.gz",
"label": "./labelsTr/word_0115.nii.gz"
},
{
"image": "./imagesTr/word_0143.nii.gz",
"label": "./labelsTr/word_0143.nii.gz"
},
{
"image": "./imagesTr/word_0028.nii.gz",
"label": "./labelsTr/word_0028.nii.gz"
}
],
"validation": ["imagesVal", "labelsVal"],
"testing": ["imagesTs", "unseen"],
"addition_validation_from_LiTS":["addition_validation_from_LiTS/imagesTs", "addition_validation_from_LiTS/labelsTs"]
}
================================================
FILE: Finetune/Word/main.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from functools import partial
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn.parallel
import torch.utils.data.distributed
from optimizers.lr_scheduler import LinearWarmupCosineAnnealingLR
from trainer import run_training
from utils.data_utils import get_loader_word
import torch.nn as nn
from monai.inferers import sliding_window_inference
from monai.losses import DiceCELoss
from monai.metrics import DiceMetric
from monai.networks.nets import SwinUNETR, UNETR
from monai.transforms import Activations, AsDiscrete, Compose
from monai.utils.enums import MetricReduction
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
# Pin training to a single visible GPU and set the rendezvous address/port
# used by torch.distributed initialization.
os.environ['CUDA_VISIBLE_DEVICES'] = "4"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
import resource
# Raise the soft open-file limit to 8192 (persistent-cache datasets and many
# dataloader workers can exhaust the default limit).
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
# Command-line interface for Swin UNETR fine-tuning on the WORD dataset.
parser = argparse.ArgumentParser(description="Swin UNETR segmentation pipeline")
# --- Checkpoint / pretrained-weight locations ---
parser.add_argument("--checkpoint", default=None, help="start training from saved checkpoint")
parser.add_argument("--logdir", default="logs_amos", type=str, help="directory to save the tensorboard logs")
parser.add_argument(
    "--pretrained_dir", default="./pretrained_models/", type=str, help="pretrained checkpoint directory"
)
parser.add_argument("--data", default="word", type=str, help="dataset name")
parser.add_argument("--out_channels", default=17, type=int, help="number of output channels")
parser.add_argument(
    "--pretrained_checkpoint", default="VoCo_10k.pt", type=str, help="VoCo_10k pretrained model")
parser.add_argument(
    "--pretrained_model_name",
    default="model_bestVal.pt",
    type=str,
    help="pretrained model name",
)
# Shared ROI edge length reused for roi_x / roi_y / roi_z defaults below.
roi = 96
# --- Training schedule and optimization ---
parser.add_argument("--save_checkpoint", default=True, help="save checkpoint during training")
parser.add_argument("--max_epochs", default=200, type=int, help="max number of training epochs")
parser.add_argument("--warmup_epochs", default=5, type=int, help="number of warmup epochs")
parser.add_argument("--batch_size", default=1, type=int, help="number of batch size")
parser.add_argument("--sw_batch_size", default=4, type=int, help="number of sliding window batch size")
parser.add_argument("--optim_lr", default=3e-4, type=float, help="optimization learning rate")
parser.add_argument("--optim_name", default="adamw", type=str, help="optimization algorithm")
parser.add_argument("--reg_weight", default=1e-5, type=float, help="regularization weight")
parser.add_argument("--momentum", default=0.99, type=float, help="momentum")
parser.add_argument("--noamp", default=False, help="do NOT use amp for training")
parser.add_argument("--val_every", default=10, type=int, help="validation frequency")
# --- Distributed training ---
parser.add_argument("--distributed", action="store_true", help="start distributed training")
parser.add_argument("--world_size", default=1, type=int, help="number of nodes for distributed training")
parser.add_argument("--rank", default=0, type=int, help="node rank for distributed training")
parser.add_argument("--dist-url", default="tcp://127.0.0.1:23456", type=str, help="distributed url")
parser.add_argument("--dist-backend", default="nccl", type=str, help="distributed backend")
# --- Model architecture ---
parser.add_argument("--norm_name", default="instance", type=str, help="normalization name")
parser.add_argument("--workers", default=8, type=int, help="number of workers")
parser.add_argument("--feature_size", default=96, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--use_normal_dataset", default=True, help="use monai Dataset class")
# --- Preprocessing: intensity windowing and voxel spacing ---
parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
# --- Regularization and augmentation probabilities ---
parser.add_argument("--dropout_rate", default=0.0, type=float, help="dropout rate")
parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
parser.add_argument("--RandFlipd_prob", default=0.2, type=float, help="RandFlipd aug probability")
parser.add_argument("--RandRotate90d_prob", default=0.2, type=float, help="RandRotate90d aug probability")
parser.add_argument("--RandScaleIntensityd_prob", default=0.1, type=float, help="RandScaleIntensityd aug probability")
parser.add_argument("--RandShiftIntensityd_prob", default=0.5, type=float, help="RandShiftIntensityd aug probability")
# --- Inference, scheduler and loss options ---
parser.add_argument("--infer_overlap", default=0.75, type=float, help="sliding window inference overlap")
parser.add_argument("--lrschedule", default="warmup_cosine", type=str, help="type of learning rate scheduler")
parser.add_argument("--resume_ckpt", action="store_true", help="resume training from pretrained checkpoint")
parser.add_argument("--smooth_dr", default=1e-6, type=float, help="constant added to dice denominator to avoid nan")
parser.add_argument("--smooth_nr", default=0.0, type=float, help="constant added to dice numerator to avoid zero")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--use_ssl_pretrained", default=False, help="use self-supervised pretrained weights")
parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
parser.add_argument("--squared_dice", action="store_true", help="use squared Dice")
def main():
    """Entry point: parse CLI arguments and launch single- or multi-GPU training."""
    args = parser.parse_args()
    args.amp = not args.noamp
    if not args.distributed:
        # Single-process path: run directly on GPU 0.
        main_worker(gpu=0, args=args)
        return
    args.ngpus_per_node = torch.cuda.device_count()
    print("Found total gpus", args.ngpus_per_node)
    # One process per local GPU across all nodes.
    args.world_size = args.ngpus_per_node * args.world_size
    mp.spawn(main_worker, nprocs=args.ngpus_per_node, args=(args,))
def main_worker(gpu, args):
    """Per-process training worker.

    Builds the WORD data loaders, constructs a SwinUNETR-v2 model (optionally
    initialised from supervised or self-supervised checkpoints), then hands
    off to ``run_training``.

    Args:
        gpu: local GPU index of this process (0 when not distributed).
        args: parsed argument namespace produced by ``main``.

    Returns:
        Best validation accuracy reported by ``run_training``.
    """
    if args.distributed:
        torch.multiprocessing.set_start_method("fork", force=True)
    np.set_printoptions(formatter={"float": "{: 0.3f}".format}, suppress=True)
    args.gpu = gpu
    if args.distributed:
        # Global rank = node rank * gpus-per-node + local gpu index.
        args.rank = args.rank * args.ngpus_per_node + gpu
        dist.init_process_group(
            backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank
        )
        # Bug fix: bind each worker to its own device. This was previously
        # hard-coded to torch.cuda.set_device(0), which placed every spawned
        # process on GPU 0.
        torch.cuda.set_device(args.gpu)
    torch.backends.cudnn.enabled = True
    torch.backends.cudnn.benchmark = True
    args.test_mode = False
    loader = get_loader_word(args)
    print(args.rank, " gpu", args.gpu)
    if args.rank == 0:
        print("Batch size is:", args.batch_size, "epochs", args.max_epochs)
    inf_size = [args.roi_x, args.roi_y, args.roi_z]
    if args.rank == 0:
        os.makedirs(args.logdir, exist_ok=True)
        logger = init_log('global', logging.INFO)
        logger.propagate = 0
    pretrained_dir = args.pretrained_dir
    model = SwinUNETR(
        img_size=(args.roi_x, args.roi_y, args.roi_z),
        in_channels=args.in_channels,
        out_channels=args.out_channels,
        feature_size=args.feature_size,
        drop_rate=0.0,
        attn_drop_rate=0.0,
        dropout_path_rate=args.dropout_path_rate,
        use_checkpoint=args.use_checkpoint,
        use_v2=True
    )

    if args.resume_ckpt:
        # Resume from a fully-supervised checkpoint (keys must match exactly).
        model_dict = torch.load(os.path.join(pretrained_dir, args.pretrained_model_name))["state_dict"]
        model.load_state_dict(model_dict)
        print("Use pretrained weights")

    if args.use_ssl_pretrained:
        try:
            model_dict = torch.load(args.pretrained_checkpoint,
                                    map_location=torch.device('cpu'))
            # Bug fix: ``state_dict`` was previously undefined here (its
            # assignment ``state_dict = model_dict['net']`` was commented
            # out), raising NameError for every --use_ssl_pretrained run.
            # Unwrap the common checkpoint containers, falling back to the
            # raw loaded dict.
            state_dict = model_dict
            if isinstance(model_dict, dict):
                for wrapper_key in ("state_dict", "net", "model"):
                    if wrapper_key in model_dict:
                        state_dict = model_dict[wrapper_key]
                        break
            # Fix potential differences in state dict keys from pre-training
            # to fine-tuning.
            if "module." in list(state_dict.keys())[0]:
                print("Tag 'module.' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("module.", "")] = state_dict.pop(key)
            if "swin_vit" in list(state_dict.keys())[0]:
                print("Tag 'swin_vit' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
            # We now load model weights, setting param `strict` to False, i.e.:
            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves
            # the decoder weights untouched (CNN UNet decoder).
            model.load_state_dict(state_dict, strict=False)
            print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
        except ValueError:
            raise ValueError("Self-supervised pre-trained weights not available for" + str(args.model_name))

    if args.squared_dice:
        dice_loss = DiceCELoss(
            to_onehot_y=True, softmax=True, squared_pred=True, smooth_nr=args.smooth_nr, smooth_dr=args.smooth_dr
        )
    else:
        dice_loss = DiceCELoss(include_background=False, to_onehot_y=True, softmax=True)
    # Post transforms: one-hot labels and argmax-ed one-hot predictions for Dice.
    post_label = AsDiscrete(to_onehot=args.out_channels)
    post_pred = AsDiscrete(argmax=True, to_onehot=args.out_channels)
    dice_acc = DiceMetric(include_background=False, reduction=MetricReduction.MEAN, get_not_nans=True)
    model_inferer = partial(
        sliding_window_inference,
        roi_size=inf_size,
        sw_batch_size=args.sw_batch_size,
        predictor=model,
        overlap=args.infer_overlap,
    )
    pytorch_total_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    print("Total parameters count", pytorch_total_params)

    best_acc = 0
    start_epoch = 0
    if args.checkpoint is not None:
        # Warm-start (strict=False) from an arbitrary checkpoint, stripping any
        # "backbone." prefix, and restore epoch / best accuracy when present.
        checkpoint = torch.load(args.checkpoint, map_location="cpu")
        from collections import OrderedDict
        new_state_dict = OrderedDict()
        for k, v in checkpoint["state_dict"].items():
            new_state_dict[k.replace("backbone.", "")] = v
        model.load_state_dict(new_state_dict, strict=False)
        if "epoch" in checkpoint:
            start_epoch = checkpoint["epoch"]
        if "best_acc" in checkpoint:
            best_acc = checkpoint["best_acc"]
        print("=> loaded checkpoint '{}' (epoch {}) (bestacc {})".format(args.checkpoint, start_epoch, best_acc))

    model.cuda(args.gpu)
    if args.distributed:
        torch.cuda.set_device(args.gpu)
        if args.norm_name == "batch":
            model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model.cuda(args.gpu)
        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], output_device=args.gpu)

    if args.optim_name == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "adamw":
        optimizer = torch.optim.AdamW(model.parameters(), lr=args.optim_lr, weight_decay=args.reg_weight)
    elif args.optim_name == "sgd":
        optimizer = torch.optim.SGD(
            model.parameters(), lr=args.optim_lr, momentum=args.momentum, nesterov=True, weight_decay=args.reg_weight
        )
    else:
        raise ValueError("Unsupported Optimization Procedure: " + str(args.optim_name))

    if args.lrschedule == "warmup_cosine":
        # The scheduler is stepped once per iteration in train_epoch, so
        # convert epoch counts into step counts here.
        max_steps = args.max_epochs * len(loader[0])
        warmup_steps = args.warmup_epochs * len(loader[0])
        scheduler = LinearWarmupCosineAnnealingLR(
            optimizer, warmup_epochs=warmup_steps, max_epochs=max_steps
        )
    elif args.lrschedule == "cosine_anneal":
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.max_epochs)
        if args.checkpoint is not None:
            scheduler.step(epoch=start_epoch)
    else:
        scheduler = None

    accuracy = run_training(
        model=model,
        train_loader=loader[0],
        val_loader=loader[1],
        optimizer=optimizer,
        loss_func=dice_loss,
        acc_func=dice_acc,
        args=args,
        model_inferer=model_inferer,
        scheduler=scheduler,
        start_epoch=start_epoch,
        post_label=post_label,
        post_pred=post_pred,
    )
    return accuracy
# Registry of (name, level) pairs already configured, to avoid attaching
# duplicate stream handlers to the same logger.
logs = set()


def init_log(name, level=logging.INFO):
    """Return a configured logger, attaching a stream handler exactly once.

    Bug fix: the previous version returned ``None`` when called a second time
    with the same ``(name, level)``, which crashed callers that immediately
    accessed attributes on the result (e.g. ``logger.propagate = 0``). Now the
    existing logger is returned instead.

    Args:
        name: logger name passed to ``logging.getLogger``.
        level: logging level applied to both the logger and its handler.

    Returns:
        The (possibly already configured) ``logging.Logger`` instance.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        # Already configured: return the existing logger rather than None.
        return logger
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        # Under SLURM, suppress records on every rank except 0 so multi-process
        # jobs do not duplicate log lines.
        rank = int(os.environ["SLURM_PROCID"])
        logger.addFilter(lambda record: rank == 0)
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
# Script entry point: parse CLI arguments and start (distributed) training.
if __name__ == "__main__":
    main()
================================================
FILE: Finetune/Word/optimizers/__init__.py
================================================
================================================
FILE: Finetune/Word/optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
__all__ = ["LinearLR", "ExponentialLR"]
class _LRSchedulerMONAI(_LRScheduler):
    """Base class for increasing the learning rate between two boundaries over a number
    of iterations"""

    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            end_lr: the final learning rate.
            num_iter: the number of iterations over which the test occurs.
            last_epoch: the index of last epoch.

        Returns:
            None
        """
        # Stash the target lr and iteration budget before the base class
        # triggers the first get_lr() call from its constructor.
        self.num_iter = num_iter
        self.end_lr = end_lr
        super().__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
    """Linearly increases the learning rate between two boundaries over a number of
    iterations.
    """

    def get_lr(self):
        # Fraction of the schedule completed so far (reaches 1.0 on the final
        # iteration), then linearly interpolate each base lr towards end_lr.
        progress = self.last_epoch / (self.num_iter - 1)
        return [lr0 + progress * (self.end_lr - lr0) for lr0 in self.base_lrs]
class ExponentialLR(_LRSchedulerMONAI):
    """Exponentially increases the learning rate between two boundaries over a number of
    iterations.
    """

    def get_lr(self):
        # Geometric interpolation: each base lr is scaled by the ratio
        # (end_lr / base_lr) raised to the completed fraction of the schedule.
        progress = self.last_epoch / (self.num_iter - 1)
        return [lr0 * (self.end_lr / lr0) ** progress for lr0 in self.base_lrs]
class WarmupCosineSchedule(LambdaLR):
    """Linear warmup and then cosine decay.
    Based on https://huggingface.co/ implementation.
    """

    def __init__(
        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
    ) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            warmup_steps: number of warmup iterations.
            t_total: total number of training iterations.
            cycles: cosine cycles parameter.
            last_epoch: the index of last epoch.

        Returns:
            None
        """
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super().__init__(optimizer, self.lr_lambda, last_epoch)

    def lr_lambda(self, step):
        # Warmup phase: multiplier ramps linearly from 0 up to 1.
        if step < self.warmup_steps:
            return float(step) / float(max(1.0, self.warmup_steps))
        # Decay phase: cosine from 1 down to 0 over the remaining steps,
        # clamped at 0 so extra steps past t_total never go negative.
        remaining = float(max(1, self.t_total - self.warmup_steps))
        progress = float(step - self.warmup_steps) / remaining
        cosine = math.cos(math.pi * float(self.cycles) * 2.0 * progress)
        return max(0.0, 0.5 * (1.0 + cosine))
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    """Linear warmup from ``warmup_start_lr`` to the base lr over ``warmup_epochs``
    steps, then cosine annealing down to ``eta_min`` at ``max_epochs``.

    Despite the parameter names, callers in this repo pass *iteration* counts
    (see ``main_worker``) and step the scheduler once per batch.
    """

    def __init__(
        self,
        optimizer: Optimizer,
        warmup_epochs: int,
        max_epochs: int,
        warmup_start_lr: float = 0.0,
        eta_min: float = 0.0,
        last_epoch: int = -1,
    ) -> None:
        """
        Args:
            optimizer (Optimizer): Wrapped optimizer.
            warmup_epochs (int): Maximum number of iterations for linear warmup
            max_epochs (int): Maximum number of iterations
            warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
            eta_min (float): Minimum learning rate. Default: 0.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min

        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        """
        Compute learning rate using chainable form of the scheduler
        """
        if not self._get_lr_called_within_step:
            warnings.warn(
                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
            )

        if self.last_epoch == 0:
            # First step: every param group starts at the warmup floor.
            return [self.warmup_start_lr] * len(self.base_lrs)
        elif self.last_epoch < self.warmup_epochs:
            # Warmup: add the same linear increment to the previous lr each step.
            return [
                group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        elif self.last_epoch == self.warmup_epochs:
            # Warmup just finished: land exactly on the base learning rates.
            return self.base_lrs
        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
            # Boundary of a cosine period (only hit when stepping past max_epochs).
            return [
                group["lr"]
                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]

        # Chainable cosine annealing: scale the previous lr by the ratio of
        # consecutive cosine factors rather than recomputing from base_lr.
        return [
            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            / (
                1
                + math.cos(
                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
                )
            )
            * (group["lr"] - self.eta_min)
            + self.eta_min
            for group in self.optimizer.param_groups
        ]

    def _get_closed_form_lr(self) -> List[float]:
        """
        Called when epoch is passed as a param to the `step` function of the scheduler.
        """
        if self.last_epoch < self.warmup_epochs:
            # Closed-form linear warmup.
            return [
                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr in self.base_lrs
            ]

        # Closed-form cosine annealing between base_lr and eta_min.
        return [
            self.eta_min
            + 0.5
            * (base_lr - self.eta_min)
            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            for base_lr in self.base_lrs
        ]
================================================
FILE: Finetune/Word/train.sh
================================================
# Timestamp used to give each run a unique log file name.
now=$(date +"%Y%m%d_%H%M%S")
# Output directory for checkpoints and TensorBoard/console logs.
logdir=runs/logs_swin_large_scratch
mkdir -p $logdir
# Launch training and mirror stdout into a timestamped log file.
torchrun --master_port=20482 main.py \
    --logdir $logdir | tee $logdir/$now.txt
================================================
FILE: Finetune/Word/train.slurm
================================================
#!/bin/bash

# NOTE: Lines starting with "#SBATCH" are valid SLURM commands or statements,
# while those starting with "#" and "##SBATCH" are comments.

# Job name shown in the queue
#SBATCH -J Omni_btcv
#SBATCH -t 72:00:00 #Maximum runtime of 72 hours

# Enable email notifications when job begins and ends
#SBATCH --mail-user=lwubf@connect.ust.hk #Update your email address
#SBATCH --mail-type=begin
#SBATCH --mail-type=end

# Choose partition (queue) with "gpu"
#SBATCH -p project

# To use 16 cpu cores and 1 gpu device in a node
#SBATCH -N 1 -n 16 --gres=gpu:1

# Setup runtime environment if necessary
source ~/.bashrc
source activate nnunet

# Go to the job submission directory and run your application
cd /home/lwubf/SwinUNETR/Omni/
sh train.sh
================================================
FILE: Finetune/Word/trainer.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import time
import numpy as np
import torch
import torch.nn.parallel
import torch.utils.data.distributed
from tensorboardX import SummaryWriter
from torch.cuda.amp import GradScaler, autocast
from utils.utils import AverageMeter, distributed_all_gather
from monai.data import decollate_batch
def train_epoch(model, loader, optimizer, scheduler, scaler, epoch, loss_func, args):
    """Run one training epoch and return the average loss.

    Args:
        model: network under training (already on GPU, possibly DDP-wrapped).
        loader: training DataLoader yielding dicts with "image"/"label" keys
            or [image, label] lists.
        optimizer: optimizer; its first param group supplies the reported lr.
        scheduler: per-iteration lr scheduler, stepped once per batch (may be None).
        scaler: ``GradScaler`` used when args.amp is set.
        epoch: current epoch index (logging only).
        loss_func: criterion applied to (logits, target).
        args: namespace with amp, distributed, rank, batch_size, world_size,
            max_epochs.

    Returns:
        Mean training loss over the epoch.
    """
    model.train()
    start_time = time.time()
    run_loss = AverageMeter()
    for idx, batch_data in enumerate(loader):
        if isinstance(batch_data, list):
            data, target = batch_data
        else:
            data, target = batch_data["image"], batch_data["label"]
        data, target = data.cuda(), target.cuda()
        # Clear gradients by dropping the tensors (equivalent to
        # optimizer.zero_grad(set_to_none=True) but cheaper than zero-filling).
        for param in model.parameters():
            param.grad = None
        with autocast(enabled=args.amp):
            logits = model(data)
            loss = loss_func(logits, target)
        if args.amp:
            # Scale the loss to avoid fp16 gradient underflow; the scaler
            # unscales before stepping and adapts its factor afterwards.
            scaler.scale(loss).backward()
            scaler.step(optimizer)
            scaler.update()
        else:
            loss.backward()
            optimizer.step()
        if args.distributed:
            # Average the loss across ranks; is_valid masks out the padded
            # duplicate samples the even-sized Sampler appends on the last batch.
            loss_list = distributed_all_gather([loss], out_numpy=True, is_valid=idx < loader.sampler.valid_length)
            run_loss.update(
                np.mean(np.mean(np.stack(loss_list, axis=0), axis=0), axis=0), n=args.batch_size * args.world_size
            )
        else:
            run_loss.update(loss.item(), n=args.batch_size)
        lr = optimizer.param_groups[0]["lr"]
        if scheduler is not None:
            # The warmup-cosine schedule is stepped per iteration, not per epoch.
            scheduler.step()
        if args.rank == 0 and (idx + 1) % 100 == 0:
            print(
                "Epoch {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
                "loss: {:.4f}".format(run_loss.avg),
                "lr: {:.8f}".format(lr),
                "time {:.2f}s".format(time.time() - start_time),
            )
        start_time = time.time()
    # Release gradient tensors so validation/checkpointing does not hold
    # on to the extra memory.
    for param in model.parameters():
        param.grad = None
    return run_loss.avg
def val_epoch(model, loader, epoch, acc_func, args, model_inferer=None, post_label=None, post_pred=None):
    """Run validation over ``loader`` and return the running mean accuracy.

    Args:
        model: network to evaluate.
        loader: validation DataLoader (batch size 1 in this repo).
        epoch: current epoch index (logging only).
        acc_func: metric object (DiceMetric) called with decollated tensors.
        args: namespace with amp, distributed, rank, max_epochs.
        model_inferer: optional sliding-window inferer; falls back to a plain
            forward pass when None.
        post_label: transform applied to each ground-truth label (one-hot).
        post_pred: transform applied to each prediction (argmax + one-hot).

    Returns:
        Running average of the metric (per-class numpy array or scalar).
    """
    model.eval()
    run_acc = AverageMeter()
    start_time = time.time()
    with torch.no_grad():
        for idx, batch_data in enumerate(loader):
            if isinstance(batch_data, list):
                data, target = batch_data
            else:
                data, target = batch_data["image"], batch_data["label"]
            data, target = data.cuda(), target.cuda()
            with autocast(enabled=args.amp):
                # Sliding-window inference when available; whole-volume otherwise.
                if model_inferer is not None:
                    logits = model_inferer(data)
                else:
                    logits = model(data)
            # Keep predictions and labels on the same device before decollating.
            if not logits.is_cuda:
                target = target.cpu()
            val_labels_list = decollate_batch(target)
            val_labels_convert = [post_label(val_label_tensor) for val_label_tensor in val_labels_list]
            val_outputs_list = decollate_batch(logits)
            val_output_convert = [post_pred(val_pred_tensor) for val_pred_tensor in val_outputs_list]
            # Reset per case so aggregate() reflects only the current volume;
            # the AverageMeter below accumulates across cases.
            acc_func.reset()
            acc_func(y_pred=val_output_convert, y=val_labels_convert)
            acc, not_nans = acc_func.aggregate()
            acc = acc.cuda(args.rank)
            if args.distributed:
                acc_list, not_nans_list = distributed_all_gather(
                    [acc, not_nans], out_numpy=True, is_valid=idx < loader.sampler.valid_length
                )
                # Weight each rank's accuracy by its count of non-NaN classes.
                for al, nl in zip(acc_list, not_nans_list):
                    run_acc.update(al, n=nl)
            else:
                run_acc.update(acc.cpu().numpy(), n=not_nans.cpu().numpy())
            if args.rank == 0:
                avg_acc = np.mean(run_acc.avg)
                print(
                    "Val {}/{} {}/{}".format(epoch, args.max_epochs, idx, len(loader)),
                    "acc",
                    avg_acc,
                    "time {:.2f}s".format(time.time() - start_time),
                )
            start_time = time.time()
    # Free sliding-window buffers between validation passes.
    torch.cuda.empty_cache()
    return run_acc.avg
def save_checkpoint(model, epoch, args, filename="model.pt", best_acc=0, optimizer=None, scheduler=None):
    """Serialize model weights (plus optional optimizer/scheduler state) to
    ``args.logdir/filename`` together with the epoch and best accuracy."""
    # Unwrap DDP: the distributed wrapper keeps the real model under .module.
    weights = model.module.state_dict() if args.distributed else model.state_dict()
    payload = {"epoch": epoch, "best_acc": best_acc, "state_dict": weights}
    if optimizer is not None:
        payload["optimizer"] = optimizer.state_dict()
    if scheduler is not None:
        payload["scheduler"] = scheduler.state_dict()
    target_path = os.path.join(args.logdir, filename)
    torch.save(payload, target_path)
    print("Saving checkpoint", target_path)
def run_training(
    model,
    train_loader,
    val_loader,
    optimizer,
    loss_func,
    acc_func,
    args,
    model_inferer=None,
    scheduler=None,
    start_epoch=0,
    post_label=None,
    post_pred=None,
):
    """Main train/validate loop.

    Trains for ``args.max_epochs`` epochs (resuming from ``start_epoch``),
    validating every ``args.val_every`` epochs. Rank 0 writes TensorBoard
    scalars to ``args.logdir``, refreshes ``model_final.pt`` after each
    validation, and copies it to ``model.pt`` whenever the mean validation
    accuracy improves.

    Returns:
        Best mean validation accuracy observed during the run.
    """
    writer = None
    if args.logdir is not None and args.rank == 0:
        writer = SummaryWriter(log_dir=args.logdir)
        if args.rank == 0:
            print("Writing Tensorboard logs to ", args.logdir)
    scaler = None
    if args.amp:
        scaler = GradScaler()
    val_acc_max = 0.0
    for epoch in range(start_epoch, args.max_epochs):
        if args.distributed:
            # Re-seed the sampler's shuffle so each epoch sees a new ordering,
            # then sync ranks before starting the epoch.
            train_loader.sampler.set_epoch(epoch)
            torch.distributed.barrier()
        print(args.rank, time.ctime(), "Epoch:", epoch)
        epoch_time = time.time()
        train_loss = train_epoch(
            model, train_loader, optimizer, scheduler, scaler=scaler, epoch=epoch, loss_func=loss_func, args=args
        )
        if args.rank == 0:
            print(
                "Final training {}/{}".format(epoch, args.max_epochs - 1),
                "loss: {:.4f}".format(train_loss),
                "time {:.2f}s".format(time.time() - epoch_time),
            )
        if args.rank == 0 and writer is not None:
            writer.add_scalar("train_loss", train_loss, epoch)
        b_new_best = False
        if (epoch + 1) % args.val_every == 0:
            if args.distributed:
                torch.distributed.barrier()
            epoch_time = time.time()
            val_avg_acc = val_epoch(
                model,
                val_loader,
                epoch=epoch,
                acc_func=acc_func,
                model_inferer=model_inferer,
                args=args,
                post_label=post_label,
                post_pred=post_pred,
            )
            # Collapse per-class accuracies into a single scalar for ranking.
            val_avg_acc = np.mean(val_avg_acc)
            if args.rank == 0:
                print(
                    "Final validation {}/{}".format(epoch, args.max_epochs - 1),
                    "acc",
                    val_avg_acc,
                    "time {:.2f}s".format(time.time() - epoch_time),
                )
                if writer is not None:
                    writer.add_scalar("val_acc", val_avg_acc, epoch)
                if val_avg_acc > val_acc_max:
                    print("new best ({:.6f} --> {:.6f}). ".format(val_acc_max, val_avg_acc))
                    val_acc_max = val_avg_acc
                    b_new_best = True
                    if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                        save_checkpoint(
                            model, epoch, args, best_acc=val_acc_max, optimizer=optimizer, scheduler=scheduler
                        )
            if args.rank == 0 and args.logdir is not None and args.save_checkpoint:
                # Always refresh the rolling "final" checkpoint; promote it to
                # model.pt only when this epoch set a new best.
                save_checkpoint(model, epoch, args, best_acc=val_acc_max, filename="model_final.pt")
                if b_new_best:
                    print("Copying to model.pt new best model!!!!")
                    shutil.copyfile(os.path.join(args.logdir, "model_final.pt"), os.path.join(args.logdir, "model.pt"))

    print("Training Finished !, Best Accuracy: ", val_acc_max)

    return val_acc_max
================================================
FILE: Finetune/Word/utils/__init__.py
================================================
================================================
FILE: Finetune/Word/utils/data_utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import os
import pickle
import numpy as np
import torch
import itertools as it
from monai import data, transforms
from monai.data import *
from torch.utils.data import DataLoader, ConcatDataset
from monai.transforms import *
class Sampler(torch.utils.data.Sampler):
    """Distributed sampler that deals each dataset index to one replica.

    With ``make_even`` the index list is padded (by repetition or random
    duplication) so every replica receives exactly the same number of samples;
    ``valid_length`` records how many of this rank's samples are genuine.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, make_even=True):
        if num_replicas is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = torch.distributed.get_world_size()
        if rank is None:
            if not torch.distributed.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = torch.distributed.get_rank()
        self.dataset = dataset
        self.num_replicas = num_replicas
        self.rank = rank
        self.shuffle = shuffle
        self.make_even = make_even
        self.epoch = 0
        # Every replica gets ceil(N / replicas) samples after padding.
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        all_ids = list(range(len(self.dataset)))
        # Count of real (unpadded) samples this rank will see.
        self.valid_length = len(all_ids[self.rank : self.total_size : self.num_replicas])

    def __iter__(self):
        if self.shuffle:
            # Seed with the epoch so all replicas agree on the permutation.
            gen = torch.Generator()
            gen.manual_seed(self.epoch)
            order = torch.randperm(len(self.dataset), generator=gen).tolist()
        else:
            order = list(range(len(self.dataset)))
        if self.make_even:
            shortfall = self.total_size - len(order)
            if shortfall > 0:
                if shortfall < len(order):
                    # Pad by repeating the head of the list.
                    order += order[:shortfall]
                else:
                    # Dataset smaller than the shortfall: pad with random picks.
                    extra = np.random.randint(low=0, high=len(order), size=shortfall)
                    order += [order[i] for i in extra]
            assert len(order) == self.total_size
        # Round-robin deal: this rank takes every num_replicas-th index.
        order = order[self.rank : self.total_size : self.num_replicas]
        self.num_samples = len(order)
        return iter(order)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        self.epoch = epoch
def get_loader_word(args):
    """Build train/val DataLoaders for the WORD dataset.

    Reads the decathlon-style ``dataset.json`` split file, applies MONAI
    preprocessing/augmentation pipelines, and caches deterministic transform
    results on disk via PersistentDataset. The last 20 listed cases are held
    out for validation; the rest are used for training.

    Args:
        args: namespace providing voxel spacing (space_x/y/z), intensity
            window (a_min/a_max/b_min/b_max), crop size (roi_x/y/z),
            augmentation probabilities, batch_size, sw_batch_size, workers
            and the distributed flag.

    Returns:
        ``[train_loader, val_loader]``.
    """
    # NOTE(review): hard-coded data and cache paths — adjust for your machine.
    datadir = "/data/linshan/CTs/WORD/"
    splits = "dataset.json"
    jsonlist = os.path.join(datadir, splits)
    datalist = load_decathlon_datalist(jsonlist, True, "training", base_dir=datadir)
    print("Dataset 4 WORD: number of data: {}".format(len(datalist)))
    print("Dataset all training and val: number of data: {}".format(len(datalist)))

    train_trans = [
        transforms.LoadImaged(keys=["image", "label"]),
        transforms.EnsureChannelFirstd(keys=["image", "label"]),
        transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
        # Resample to a common voxel spacing; nearest-neighbour for the labels.
        transforms.Spacingd(
            keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
        ),
        # Clip intensities to [a_min, a_max] and rescale to [b_min, b_max].
        transforms.ScaleIntensityRanged(
            keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
        ),
        transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
        # pos=9, neg=1: ~90% of sampled crops are centred on foreground voxels.
        transforms.RandCropByPosNegLabeld(
            keys=["image", "label"],
            label_key="label",
            spatial_size=(args.roi_x, args.roi_y, args.roi_z),
            pos=9,
            neg=1,
            num_samples=args.sw_batch_size,
            image_key="image",
            image_threshold=0,
        ),
        transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=0),
        transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=1),
        transforms.RandFlipd(keys=["image", "label"], prob=args.RandFlipd_prob, spatial_axis=2),
        transforms.RandRotate90d(keys=["image", "label"], prob=args.RandRotate90d_prob, max_k=3),
        transforms.RandScaleIntensityd(keys="image", factors=0.1, prob=args.RandScaleIntensityd_prob),
        transforms.RandShiftIntensityd(keys="image", offsets=0.1, prob=args.RandShiftIntensityd_prob),
    ]
    # Validation: deterministic preprocessing only, no random augmentation.
    val_transform = [
        transforms.LoadImaged(keys=["image", "label"]),
        transforms.EnsureChannelFirstd(keys=["image", "label"]),
        transforms.Orientationd(keys=["image", "label"], axcodes="RAS"),
        transforms.Spacingd(
            keys=["image", "label"], pixdim=(args.space_x, args.space_y, args.space_z), mode=("bilinear", "nearest")
        ),
        transforms.ScaleIntensityRanged(
            keys=["image"], a_min=args.a_min, a_max=args.a_max, b_min=args.b_min, b_max=args.b_max, clip=True
        ),
        transforms.CropForegroundd(keys=["image", "label"], source_key="image"),
    ]

    print('use persistent')
    # PersistentDataset caches the deterministic transform outputs on disk so
    # later epochs skip loading/resampling.
    train_ds = PersistentDataset(data=datalist[:-20],
                                 transform=train_trans,
                                 pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                 cache_dir='/data/linshan/cache/word')
    train_sampler = Sampler(train_ds) if args.distributed else None
    train_loader = data.DataLoader(
        train_ds,
        batch_size=args.batch_size,
        shuffle=(train_sampler is None),
        num_workers=args.workers,
        sampler=train_sampler,
        pin_memory=True,
    )
    val_ds = PersistentDataset(data=datalist[-20:],
                               transform=val_transform,
                               pickle_protocol=pickle.HIGHEST_PROTOCOL,
                               cache_dir='/data/linshan/cache/word')
    val_sampler = Sampler(val_ds, shuffle=False) if args.distributed else None
    val_loader = data.DataLoader(
        val_ds, batch_size=1, shuffle=False, num_workers=args.workers, sampler=val_sampler, pin_memory=True
    )
    loader = [train_loader, val_loader]

    return loader
================================================
FILE: Finetune/Word/utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
import os
def resample_3d(img, target_size):
    """Resample a 3-D array to ``target_size`` using nearest-neighbour zoom."""
    src_x, src_y, src_z = img.shape
    dst_x, dst_y, dst_z = target_size
    # Per-axis scale factors; order=0 keeps label values intact (no blending).
    zoom = (float(dst_x) / float(src_x), float(dst_y) / float(src_y), float(dst_z) / float(src_z))
    return ndimage.zoom(img, zoom, order=0, prefilter=False)
def dice(x, y):
    """Return the Dice coefficient 2|x∩y| / (|x|+|y|) of two binary masks.

    Args:
        x: predicted binary mask (array of 0/1).
        y: ground-truth binary mask (array of 0/1).

    Returns:
        Dice score in [0, 1]; 0.0 when the ground truth ``y`` is empty.
    """
    # np.sum already reduces over all axes — the original triple-nested
    # np.sum(np.sum(np.sum(...))) was redundant.
    intersect = np.sum(x * y)
    y_sum = np.sum(y)
    if y_sum == 0:
        return 0.0
    x_sum = np.sum(x)
    return 2 * intersect / (x_sum + y_sum)
class AverageMeter(object):
    """Tracks the latest value plus a running count-weighted average."""

    def __init__(self):
        self.reset()

    def reset(self):
        # Clear all accumulated statistics.
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        # np.where keeps this well-defined element-wise when val/n are arrays
        # with zero counts in some entries (falls back to the raw sum there).
        self.avg = np.where(self.count > 0, self.sum / self.count, self.sum)
def distributed_all_gather(
    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
    """All-gather each tensor in ``tensor_list`` across every rank.

    Requires an initialized torch.distributed process group.

    Args:
        tensor_list: tensors to gather; all must live on this rank's device.
        valid_batch_size: if given, keep only the first N gathered entries.
        out_numpy: convert the gathered tensors to numpy arrays.
        world_size: number of ranks; defaults to the process-group world size.
        no_barrier: skip the synchronising barrier before gathering.
        is_valid: bool marking whether this rank's contribution is genuine
            (used to drop the padded duplicate samples that the even-sized
            Sampler appends to the final batch).

    Returns:
        One list per input tensor, containing the per-rank gathered values
        (filtered by valid_batch_size or the gathered validity flags).
    """
    if world_size is None:
        world_size = torch.distributed.get_world_size()
    if valid_batch_size is not None:
        valid_batch_size = min(valid_batch_size, world_size)
    elif is_valid is not None:
        # Promote the python bool to a device tensor so it can be all-gathered.
        is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
    if not no_barrier:
        torch.distributed.barrier()
    tensor_list_out = []
    with torch.no_grad():
        if is_valid is not None:
            # Share every rank's validity flag so all ranks filter consistently.
            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
            torch.distributed.all_gather(is_valid_list, is_valid)
            is_valid = [x.item() for x in is_valid_list]
        for tensor in tensor_list:
            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
            torch.distributed.all_gather(gather_list, tensor)
            if valid_batch_size is not None:
                gather_list = gather_list[:valid_batch_size]
            elif is_valid is not None:
                # Keep only contributions from ranks whose flag was true.
                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
            if out_numpy:
                gather_list = [t.cpu().numpy() for t in gather_list]
            tensor_list_out.append(gather_list)
    return tensor_list_out
def color_map(dataset='pascal'):
cmap = np.zeros((256, 3), dtype='uint8')
if dataset == 'pascal' or dataset == 'coco':
def bitget(byteval, idx):
return (byteval & (1 << idx)) != 0
for i in range(256):
r = g = b = 0
c = i
for j in range(8):
r = r | (bitget(c, 0) << 7-j)
g = g | (bitget(c, 1) << 7-j)
b = b | (bitget(c, 2) << 7-j)
c = c >> 3
cmap[i] = np.array([r, g, b])
elif dataset == 'cityscapes':
cmap[0] = np.array([128, 64, 128])
cmap[1] = np.array([244, 35, 232])
cmap[2] = np.array([70, 70, 70])
cmap[3] = np.array([102, 102, 156])
cmap[4] = np.array([190, 153, 153])
cmap[5] = np.array([153, 153, 153])
cmap[6] = np.array([250, 170, 30])
cmap[7] = np.array([220, 220, 0])
cmap[8] = np.array([107, 142, 35])
cmap[9] = np.array([152, 251, 152])
cmap[10] = np.array([70, 130, 180])
cmap[11] = np.array([220, 20, 60])
cmap[12] = np.array([255, 0, 0])
cmap[13] = np.array([0, 0, 142])
cmap[14] = np.array([0, 0, 70])
cmap[15] = np.array([0, 60, 100])
cmap[16] = np.array([0, 80, 100])
cmap[17] = np.array([0, 0, 230])
cmap[18] = np.array([119, 11, 32])
cmap[19] = np.array([0, 0, 0])
cmap[255] = np.array([0, 0, 0])
return cmap
def check_dir(dir):
    """Ensure directory `dir` exists, creating parent directories as needed.

    Uses `exist_ok=True` instead of a separate `os.path.exists` check to
    avoid the race where another process creates the directory between the
    check and `makedirs` (which would raise FileExistsError) — a common
    failure when several DDP workers start simultaneously.

    Args:
        dir: path of the directory to create (name kept for backward
            compatibility even though it shadows the builtin).
    """
    os.makedirs(dir, exist_ok=True)
================================================
FILE: Finetune/nnUNet/LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [2019] [Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: Finetune/nnUNet/documentation/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/documentation/benchmarking.md
================================================
# nnU-Netv2 benchmarks
Does your system run like it should? Is your epoch time longer than expected? What epoch times should you expect?
Look no further for we have the solution here!
## What does the nnU-netv2 benchmark do?
nnU-Net's benchmark trains models for 5 epochs. At the end, the fastest epoch will
be noted down, along with the GPU name, torch version and cudnn version. You can find the benchmark output in the
corresponding nnUNet_results subfolder (see example below). Don't worry, we also provide scripts to collect your
results. Or you just start a benchmark and look at the console output. Everything is possible. Nothing is forbidden.
The benchmark implementation revolves around two trainers:
- `nnUNetTrainerBenchmark_5epochs` runs a regular training for 5 epochs. When completed, writes a .json file with the fastest
epoch time as well as the GPU used and the torch and cudnn versions. Useful for speed testing the entire pipeline
(data loading, augmentation, GPU training)
- `nnUNetTrainerBenchmark_5epochs_noDataLoading` is the same, but it doesn't do any data loading or augmentation. It
just presents dummy arrays to the GPU. Useful for checking pure GPU speed.
## How to run the nnU-Netv2 benchmark?
It's quite simple, actually. It looks just like a regular nnU-Net training.
We provide reference numbers for some of the Medical Segmentation Decathlon datasets because they are easily
accessible: [download here](https://drive.google.com/drive/folders/1HqEgzS8BV2c7xYNrZdEAnrHk7osJJ--2). If it needs to be
quick and dirty, focus on Tasks 2 and 4. Download and extract the data and convert them to the nnU-Net format with
`nnUNetv2_convert_MSD_dataset`.
Run `nnUNetv2_plan_and_preprocess` for them.
Then, for each dataset, run the following commands (only one per GPU! Or one after the other):
```bash
nnUNetv2_train DATASET_ID 2d 0 -tr nnUNetTrainerBenchmark_5epochs
nnUNetv2_train DATASET_ID 3d_fullres 0 -tr nnUNetTrainerBenchmark_5epochs
nnUNetv2_train DATASET_ID 2d 0 -tr nnUNetTrainerBenchmark_5epochs_noDataLoading
nnUNetv2_train DATASET_ID 3d_fullres 0 -tr nnUNetTrainerBenchmark_5epochs_noDataLoading
```
If you want to inspect the outcome manually, check (for example!) your
`nnUNet_results/DATASET_NAME/nnUNetTrainerBenchmark_5epochs__nnUNetPlans__3d_fullres/fold_0/` folder for the `benchmark_result.json` file.
Note that there can be multiple entries in this file if the benchmark was run on different GPU types, torch versions or cudnn versions!
If you want to summarize your results like we did in our [results](#results), check the
[summary script](../nnunetv2/batch_running/benchmarking/summarize_benchmark_results.py). Here you need to change the
torch version, cudnn version and dataset you want to summarize, then execute the script. You can find the exact
values you need to put there in one of your `benchmark_result.json` files.
## Results
We have tested a variety of GPUs and summarized the results in a
[spreadsheet](https://docs.google.com/spreadsheets/d/12Cvt_gr8XU2qWaE0XJk5jJlxMEESPxyqW0CWbQhTNNY/edit?usp=sharing).
Note that you can select the torch and cudnn versions at the bottom! There may be comments in this spreadsheet. Read them!
## Result interpretation
Results are shown as epoch time in seconds. Lower is better (duh). Epoch times can fluctuate between runs, so as
long as you are within like 5-10% of the numbers we report, everything should be dandy.
If not, here is how you can try to find the culprit!
The first thing to do is to compare the performance between the `nnUNetTrainerBenchmark_5epochs_noDataLoading` and
`nnUNetTrainerBenchmark_5epochs` trainers. If the difference is about the same as we report in our spreadsheet, but
both your numbers are worse, the problem is with your GPU:
- Are you certain you compare the correct GPU? (duh)
- If yes, then you might want to install PyTorch in a different way. Never `pip install torch`! Go to the
[PyTorch installation](https://pytorch.org/get-started/locally/) page, select the most recent cuda version your
system supports and only then copy and execute the correct command! Either pip or conda should work
- If the problem is still not fixed, we recommend you try
[compiling pytorch from source](https://github.com/pytorch/pytorch#from-source). It's more difficult but that's
how we roll here at the DKFZ (at least the cool kids here).
- Another thing to consider is to try exactly the same torch + cudnn version as we did in our spreadsheet.
Sometimes newer versions can actually degrade performance and there might be bugs from time to time. Older versions
are also often a lot slower!
- Finally, some very basic things that could impact your GPU performance:
- Is the GPU cooled adequately? Check the temperature with `nvidia-smi`. Hot GPUs throttle performance in order to not self-destruct
- Is your OS using the GPU for displaying your desktop at the same time? If so then you can expect a performance
penalty (I dunno like 10% !?). That's expected and OK.
- Are other users using the GPU as well?
If you see a large performance difference between `nnUNetTrainerBenchmark_5epochs_noDataLoading` (fast) and
`nnUNetTrainerBenchmark_5epochs` (slow) then the problem might be related to data loading and augmentation. As a
reminder, nnU-net does not use pre-augmented images (offline augmentation) but instead generates augmented training
samples on the fly during training (no, you cannot switch it to offline). This requires that your system can do partial
reads of the image files fast enough (SSD storage required!) and that your CPU is powerful enough to run the augmentations.
Check the following:
- [CPU bottleneck] How many CPU threads are running during the training? nnU-Net uses 12 processes for data augmentation by default.
If you see those 12 running constantly during training, consider increasing the number of processes used for data
augmentation (provided there is headroom on your CPU!). Increase the number until you see less active workers than
you configured (or just set the number to 32 and forget about it). You can do so by setting the `nnUNet_n_proc_DA`
environment variable (Linux: `export nnUNet_n_proc_DA=24`). Read [here](set_environment_variables.md) on how to do this.
If your CPU does not support more processes (setting more processes than your CPU has threads makes
no sense!) you are out of luck and in desperate need of a system upgrade!
- [I/O bottleneck] If you don't see 12 (or nnUNet_n_proc_DA if you set it) processes running but your training times
are still slow then open up `top` (sorry, Windows users. I don't know how to do this on Windows) and look at the value
left of 'wa' in the row that begins
with '%Cpu (s)'. If this is >1.0 (arbitrarily set threshold here, essentially look for unusually high 'wa'. In a
healthy training 'wa' will be almost 0) then your storage cannot keep up with data loading. Make sure to set
nnUNet_preprocessed to a folder that is located on an SSD. nvme is preferred over SATA. PCIe3 is enough. 3000MB/s
sequential read recommended.
- [funky stuff] Sometimes there is funky stuff going on, especially when batch sizes are large, files are small and
patch sizes are small as well. As part of the data loading process, nnU-Net needs to open and close a file for each
training sample. Now imagine a dataset like Dataset004_Hippocampus where for the 2d config we have a batch size of
366 and we run 250 iterations in <10s on an A100. That's a lotta files per second (366 * 250 / 10 = 9150 files per second).
Oof. If the files are on some network drive (even if it's nvme) then (probably) good night. The good news: nnU-Net
has got you covered: add `export nnUNet_keep_files_open=True` to your .bashrc and the problem goes away. The neat
part: it causes new problems if you are not allowed to have enough open files. You may have to increase the number
of allowed open files. `ulimit -n` gives your current limit (Linux only). It should not be something like 1024.
Increasing that to 65535 works well for me. See here for how to change these limits:
[Link](https://kupczynski.info/posts/ubuntu-18-10-ulimits/)
(works for Ubuntu 18, google for your OS!).
================================================
FILE: Finetune/nnUNet/documentation/changelog.md
================================================
# What is different in v2?
- We now support **hierarchical labels** (named regions in nnU-Net). For example, instead of training BraTS with the
'edema', 'necrosis' and 'enhancing tumor' labels you can directly train it on the target areas 'whole tumor',
'tumor core' and 'enhancing tumor'. See [here](region_based_training.md) for a detailed description + also have a look at the
[BraTS 2021 conversion script](../nnunetv2/dataset_conversion/Dataset137_BraTS21.py).
- Cross-platform support. Cuda, mps (Apple M1/M2) and of course CPU support! Simply select the device with
`-device` in `nnUNetv2_train` and `nnUNetv2_predict`.
- Unified trainer class: nnUNetTrainer. No messing around with cascaded trainer, DDP trainer, region-based trainer,
ignore trainer etc. All default functionality is in there!
- Supports more input/output data formats through ImageIO classes.
- I/O formats can be extended by implementing new Adapters based on `BaseReaderWriter`.
- The nnUNet_raw_cropped folder no longer exists -> saves disk space at no performance penalty. magic! (no jk the
saving of cropped npz files was really slow, so it's actually faster to crop on the fly).
- Preprocessed data and segmentation are stored in different files when unpacked. Seg is stored as int8 and thus
takes 1/4 of the disk space per pixel (and I/O throughput) as in v1.
- Native support for multi-GPU (DDP) TRAINING.
Multi-GPU INFERENCE should still be run with `CUDA_VISIBLE_DEVICES=X nnUNetv2_predict [...] -num_parts Y -part_id X`.
There is no cross-GPU communication in inference, so it doesn't make sense to add additional complexity with DDP.
- All nnU-Net functionality is now also accessible via API. Check the corresponding entry point in `setup.py` to see
what functions you need to call.
- Dataset fingerprint is now explicitly created and saved in a json file (see nnUNet_preprocessed).
- Complete overhaul of plans files (read also [this](explanation_plans_files.md)):
- Plans are now .json and can be opened and read more easily
- Configurations are explicitly named ("3d_fullres" , ...)
- Configurations can inherit from each other to make manual experimentation easier
- A ton of additional functionality is now included in and can be changed through the plans, for example normalization strategy, resampling etc.
- Stages of the cascade are now explicitly listed in the plans. 3d_lowres has 'next_stage' (which can also be a
list of configurations!). 3d_cascade_fullres has a 'previous_stage' entry. By manually editing plans files you can
now connect anything you want, for example 2d with 3d_fullres or whatever. Be wild! (But don't create cycles!)
- Multiple configurations can point to the same preprocessed data folder to save disk space. Careful! Only
configurations that use the same spacing, resampling, normalization etc. should share a data source! By default,
3d_fullres and 3d_cascade_fullres share the same data
- Any number of configurations can be added to the plans (remember to give them a unique "data_identifier"!)
Folder structures are different and more user-friendly:
- nnUNet_preprocessed
- By default, preprocessed data is now saved as: `nnUNet_preprocessed/DATASET_NAME/PLANS_IDENTIFIER_CONFIGURATION` to clearly link them to their corresponding plans and configuration
- Name of the folder containing the preprocessed images can be adapted with the `data_identifier` key.
- nnUNet_results
- Results are now sorted as follows: DATASET_NAME/TRAINERCLASS__PLANSIDENTIFIER__CONFIGURATION/FOLD
## What other changes are planned and not yet implemented?
- Integration into MONAI (together with our friends at Nvidia)
- New pretrained weights for a large number of datasets (coming very soon)
[//]: # (- nnU-Net now also natively supports an **ignore label**. Pixels with this label will not contribute to the loss. )
[//]: # (Use this to learn from sparsely annotated data, or excluding irrelevant areas from training. Read more [here](ignore_label.md).)
================================================
FILE: Finetune/nnUNet/documentation/competitions/AutoPETII.md
================================================
# Look Ma, no code: fine tuning nnU-Net for the AutoPET II challenge by only adjusting its JSON plans
Please cite our paper :-*
```text
COMING SOON
```
## Intro
See the [Challenge Website](https://autopet-ii.grand-challenge.org/) for details on the challenge.
Our solution to this challenge requires no code changes at all. All we do is optimize nnU-Net's hyperparameters
(architecture, batch size, patch size) through modifying the nnUNetplans.json file.
## Prerequisites
Use the latest pytorch version!
We recommend you use the latest nnU-Net version as well! We ran our trainings with commit 913705f which you can try in case something doesn't work as expected:
`pip install git+https://github.com/MIC-DKFZ/nnUNet.git@913705f`
## How to reproduce our trainings
### Download and convert the data
1. Download and extract the AutoPET II dataset
2. Convert it to nnU-Net format by running `python nnunetv2/dataset_conversion/Dataset221_AutoPETII_2023.py FOLDER` where folder is the extracted AutoPET II dataset.
### Experiment planning and preprocessing
We deviate a little from the standard nnU-Net procedure because all our experiments are based on just the 3d_fullres configuration
Run the following commands:
- `nnUNetv2_extract_fingerprint -d 221` extracts the dataset fingerprint
- `nnUNetv2_plan_experiment -d 221` does the planning for the plain unet
- `nnUNetv2_plan_experiment -d 221 -pl ResEncUNetPlanner` does the planning for the residual encoder unet
- `nnUNetv2_preprocess -d 221 -c 3d_fullres` runs all the preprocessing we need
### Modification of plans files
Please read the [information on how to modify plans files](../explanation_plans_files.md) first!!!
It is easier to have everything in one plans file, so the first thing we do is transfer the ResEnc UNet to the
default plans file. We use the configuration inheritance feature of nnU-Net to make it use the same data as the
3d_fullres configuration.
Add the following to the 'configurations' dict in 'nnUNetPlans.json':
```json
"3d_fullres_resenc": {
"inherits_from": "3d_fullres",
"UNet_class_name": "ResidualEncoderUNet",
"n_conv_per_stage_encoder": [
1,
3,
4,
6,
6,
6
],
"n_conv_per_stage_decoder": [
1,
1,
1,
1,
1
]
},
```
(these values are basically just copied from the 'nnUNetResEncUNetPlans.json' file! With everything redundant being omitted thanks to inheritance from 3d_fullres)
Now we crank up the patch and batch sizes. Add the following configurations:
```json
"3d_fullres_resenc_bs80": {
"inherits_from": "3d_fullres_resenc",
"batch_size": 80
},
"3d_fullres_resenc_192x192x192_b24": {
"inherits_from": "3d_fullres_resenc",
"patch_size": [
192,
192,
192
],
"batch_size": 24
}
```
Save the file (and check for potential Syntax Errors!)
### Run trainings
Training each model requires 8 Nvidia A100 40GB GPUs. Expect training to run for 5-7 days. You'll need a really good
CPU to handle the data augmentation! 128C/256T are a must! If you have less threads available, scale down nnUNet_n_proc_DA accordingly.
```bash
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 0 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 1 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 2 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 3 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_bs80 4 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 0 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 1 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 2 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 3 -num_gpus 8
nnUNet_compile=T nnUNet_n_proc_DA=28 nnUNetv2_train 221 3d_fullres_resenc_192x192x192_b24 4 -num_gpus 8
```
Done!
(We also provide pretrained weights in case you don't want to invest the GPU resources, see below)
## How to make predictions with pretrained weights
Our final model is an ensemble of two configurations:
- ResEnc UNet with batch size 80
- ResEnc UNet with patch size 192x192x192 and batch size 24
To run inference with these models, do the following:
1. Download the pretrained model weights from [Zenodo](https://zenodo.org/record/8362371)
2. Install both .zip files using `nnUNetv2_install_pretrained_model_from_zip`
3. Make sure
4. Now you can run inference on new cases with `nnUNetv2_predict`:
- `nnUNetv2_predict -i INPUT -o OUTPUT1 -d 221 -c 3d_fullres_resenc_bs80 -f 0 1 2 3 4 -step_size 0.6 --save_probabilities`
- `nnUNetv2_predict -i INPUT -o OUTPUT2 -d 221 -c 3d_fullres_resenc_192x192x192_b24 -f 0 1 2 3 4 --save_probabilities`
- `nnUNetv2_ensemble -i OUTPUT1 OUTPUT2 -o OUTPUT_ENSEMBLE`
Note that our inference Docker omitted TTA via mirroring along the axial direction during prediction (only sagittal +
coronal mirroring). This was
done to keep the inference time below 10 minutes per image on a T4 GPU (we actually never tested whether we could
have left this enabled). Just leave it on! You can also leave the step_size at default for the 3d_fullres_resenc_bs80.
================================================
FILE: Finetune/nnUNet/documentation/convert_msd_dataset.md
================================================
Use `nnUNetv2_convert_MSD_dataset`.
Read `nnUNetv2_convert_MSD_dataset -h` for usage instructions.
================================================
FILE: Finetune/nnUNet/documentation/dataset_format.md
================================================
# nnU-Net dataset format
The only way to bring your data into nnU-Net is by storing it in a specific format. Due to nnU-Net's roots in the
[Medical Segmentation Decathlon](http://medicaldecathlon.com/) (MSD), its dataset is heavily inspired but has since
diverged (see also [here](#how-to-use-decathlon-datasets)) from the format used in the MSD.
Datasets consist of three components: raw images, corresponding segmentation maps and a dataset.json file specifying
some metadata.
If you are migrating from nnU-Net v1, read [this](#how-to-use-nnu-net-v1-tasks) to convert your existing Tasks.
## What do training cases look like?
Each training case is associated with an identifier = a unique name for that case. This identifier is used by nnU-Net to
connect images with the correct segmentation.
A training case consists of images and their corresponding segmentation.
**Images** is plural because nnU-Net supports arbitrarily many input channels. In order to be as flexible as possible,
nnU-net requires each input channel to be stored in a separate image (with the sole exception being RGB natural
images). So these images could for example be a T1 and a T2 MRI (or whatever else you want). The different input
channels MUST have the same geometry (same shape, spacing (if applicable) etc.) and
must be co-registered (if applicable). Input channels are identified by nnU-Net by their FILE_ENDING: a four-digit integer at the end
of the filename. Image files must therefore follow the following naming convention: {CASE_IDENTIFIER}_{XXXX}.{FILE_ENDING}.
Hereby, XXXX is the 4-digit modality/channel identifier (should be unique for each modality/channel, e.g., “0000” for T1, “0001” for
T2 MRI, …) and FILE_ENDING is the file extension used by your image format (.png, .nii.gz, ...). See below for concrete examples.
The dataset.json file connects channel names with the channel identifiers in the 'channel_names' key (see below for details).
Side note: Typically, each channel/modality needs to be stored in a separate file and is accessed with the XXXX channel identifier.
Exception are natural images (RGB; .png) where the three color channels can all be stored in one file (see the
[road segmentation](../nnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py) dataset as an example).
**Segmentations** must share the same geometry with their corresponding images (same shape etc.). Segmentations are
integer maps with each value representing a semantic class. The background must be 0. If there is no background, then
do not use the label 0 for something else! Integer values of your semantic classes must be consecutive (0, 1, 2, 3,
...). Of course, not all labels have to be present in each training case. Segmentations are saved as {CASE_IDENTIFER}.{FILE_ENDING} .
Within a training case, all image geometries (input channels, corresponding segmentation) must match. Between training
cases, they can of course differ. nnU-Net takes care of that.
Important: The input channels must be consistent! Concretely, **all images need the same input channels in the same
order and all input channels have to be present every time**. This is also true for inference!
## Supported file formats
nnU-Net expects the same file format for images and segmentations! These will also be used for inference. For now, it
is thus not possible to train .png and then run inference on .jpg.
One big change in nnU-Net V2 is the support of multiple input file types. Gone are the days of converting everything to .nii.gz!
This is implemented by abstracting the input and output of images + segmentations through `BaseReaderWriter`. nnU-Net
comes with a broad collection of Readers+Writers and you can even add your own to support your data format!
See [here](../nnunetv2/imageio/readme.md).
As a nice bonus, nnU-Net now also natively supports 2D input images and you no longer have to mess around with
conversions to pseudo 3D niftis. Yuck. That was disgusting.
Note that internally (for storing and accessing preprocessed images) nnU-Net will use its own file format, irrespective
of what the raw data was provided in! This is for performance reasons.
By default, the following file formats are supported:
- NaturalImage2DIO: .png, .bmp, .tif
- NibabelIO: .nii.gz, .nrrd, .mha
- NibabelIOWithReorient: .nii.gz, .nrrd, .mha. This reader will reorient images to RAS!
- SimpleITKIO: .nii.gz, .nrrd, .mha
- Tiff3DIO: .tif, .tiff. 3D tif images! Since TIF does not have a standardized way of storing spacing information,
nnU-Net expects each TIF file to be accompanied by an identically named .json file that contains this information (see
[here](#datasetjson)).
The file extension lists are not exhaustive and depend on what the backend supports. For example, nibabel and SimpleITK
support more than the three given here. The file endings given here are just the ones we tested!
IMPORTANT: nnU-Net can only be used with file formats that use lossless (or no) compression! Because the file
format is defined for an entire dataset (and not separately for images and segmentations, this could be a todo for
the future), we must ensure that there are no compression artifacts that destroy the segmentation maps. So no .jpg and
the likes!
## Dataset folder structure
Datasets must be located in the `nnUNet_raw` folder (which you either define when installing nnU-Net or export/set every
time you intend to run nnU-Net commands!).
Each segmentation dataset is stored as a separate 'Dataset'. Datasets are associated with a dataset ID, a three digit
integer, and a dataset name (which you can freely choose): For example, Dataset005_Prostate has 'Prostate' as dataset name and
the dataset id is 5. Datasets are stored in the `nnUNet_raw` folder like this:
nnUNet_raw/
├── Dataset001_BrainTumour
├── Dataset002_Heart
├── Dataset003_Liver
├── Dataset004_Hippocampus
├── Dataset005_Prostate
├── ...
Within each dataset folder, the following structure is expected:
Dataset001_BrainTumour/
├── dataset.json
├── imagesTr
├── imagesTs # optional
└── labelsTr
When adding your custom dataset, take a look at the [dataset_conversion](../nnunetv2/dataset_conversion) folder and
pick an id that is not already taken. IDs 001-010 are for the Medical Segmentation Decathlon.
- **imagesTr** contains the images belonging to the training cases. nnU-Net will perform pipeline configuration, training with
cross-validation, as well as finding postprocessing and the best ensemble using this data.
- **imagesTs** (optional) contains the images that belong to the test cases. nnU-Net does not use them! This could just
be a convenient location for you to store these images. Remnant of the Medical Segmentation Decathlon folder structure.
- **labelsTr** contains the images with the ground truth segmentation maps for the training cases.
- **dataset.json** contains metadata of the dataset.
The scheme introduced [above](#what-do-training-cases-look-like) results in the following folder structure. Given
is an example for the first Dataset of the MSD: BrainTumour. This dataset has four input channels: FLAIR (0000),
T1w (0001), T1gd (0002) and T2w (0003). Note that the imagesTs folder is optional and does not have to be present.
nnUNet_raw/Dataset001_BrainTumour/
├── dataset.json
├── imagesTr
│ ├── BRATS_001_0000.nii.gz
│ ├── BRATS_001_0001.nii.gz
│ ├── BRATS_001_0002.nii.gz
│ ├── BRATS_001_0003.nii.gz
│ ├── BRATS_002_0000.nii.gz
│ ├── BRATS_002_0001.nii.gz
│ ├── BRATS_002_0002.nii.gz
│ ├── BRATS_002_0003.nii.gz
│ ├── ...
├── imagesTs
│ ├── BRATS_485_0000.nii.gz
│ ├── BRATS_485_0001.nii.gz
│ ├── BRATS_485_0002.nii.gz
│ ├── BRATS_485_0003.nii.gz
│ ├── BRATS_486_0000.nii.gz
│ ├── BRATS_486_0001.nii.gz
│ ├── BRATS_486_0002.nii.gz
│ ├── BRATS_486_0003.nii.gz
│ ├── ...
└── labelsTr
├── BRATS_001.nii.gz
├── BRATS_002.nii.gz
├── ...
Here is another example of the second dataset of the MSD, which has only one input channel:
nnUNet_raw/Dataset002_Heart/
├── dataset.json
├── imagesTr
│ ├── la_003_0000.nii.gz
│ ├── la_004_0000.nii.gz
│ ├── ...
├── imagesTs
│ ├── la_001_0000.nii.gz
│ ├── la_002_0000.nii.gz
│ ├── ...
└── labelsTr
├── la_003.nii.gz
├── la_004.nii.gz
├── ...
Remember: For each training case, all images must have the same geometry to ensure that their pixel arrays are aligned. Also
make sure that all your data is co-registered!
See also [dataset format inference](dataset_format_inference.md)!!
## dataset.json
The dataset.json contains metadata that nnU-Net needs for training. We have greatly reduced the number of required
fields since version 1!
Here is what the dataset.json should look like at the example of the Dataset005_Prostate from the MSD:
{
"channel_names": { # formerly modalities
"0": "T2",
"1": "ADC"
},
"labels": { # THIS IS DIFFERENT NOW!
"background": 0,
"PZ": 1,
"TZ": 2
},
"numTraining": 32,
 "file_ending": ".nii.gz",
"overwrite_image_reader_writer": "SimpleITKIO" # optional! If not provided nnU-Net will automatically determine the ReaderWriter
}
The channel_names determine the normalization used by nnU-Net. If a channel is marked as 'CT', then a global
normalization based on the intensities in the foreground pixels will be used. If it is something else, per-channel
z-scoring will be used. Refer to the methods section in [our paper](https://www.nature.com/articles/s41592-020-01008-z)
for more details. nnU-Net v2 introduces a few more normalization schemes to
choose from and allows you to define your own, see [here](explanation_normalization.md) for more information.
Important changes relative to nnU-Net v1:
- "modality" is now called "channel_names" to remove strong bias to medical images
- labels are structured differently (name -> int instead of int -> name). This was needed to support [region-based training](region_based_training.md)
- "file_ending" is added to support different input file types
- "overwrite_image_reader_writer" optional! Can be used to specify a certain (custom) ReaderWriter class that should
be used with this dataset. If not provided, nnU-Net will automatically determine the ReaderWriter
- "regions_class_order" only used in [region-based training](region_based_training.md)
There is a utility with which you can generate the dataset.json automatically. You can find it
[here](../nnunetv2/dataset_conversion/generate_dataset_json.py).
See our examples in [dataset_conversion](../nnunetv2/dataset_conversion) for how to use it. And read its documentation!
As described above, a json file that contains spacing information is required for TIFF files.
An example for a 3D TIFF stack with units corresponding to 7.6 in x and y, 80 in z is:
```
{
"spacing": [7.6, 7.6, 80.0]
}
```
Within the dataset folder, this file (named `cell6.json` in this example) would be placed in the following folders:
nnUNet_raw/Dataset123_Foo/
├── dataset.json
├── imagesTr
│ ├── cell6.json
│ └── cell6_0000.tif
└── labelsTr
├── cell6.json
└── cell6.tif
## How to use nnU-Net v1 Tasks
If you are migrating from the old nnU-Net, convert your existing datasets with `nnUNetv2_convert_old_nnUNet_dataset`!
Example for migrating a nnU-Net v1 Task:
```bash
nnUNetv2_convert_old_nnUNet_dataset /media/isensee/raw_data/nnUNet_raw_data_base/nnUNet_raw_data/Task027_ACDC Dataset027_ACDC
```
Use `nnUNetv2_convert_old_nnUNet_dataset -h` for detailed usage instructions.
## How to use decathlon datasets
See [convert_msd_dataset.md](convert_msd_dataset.md)
## How to use 2D data with nnU-Net
2D is now natively supported (yay!). See [here](#supported-file-formats) as well as the example dataset in this
[script](../nnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py).
## How to update an existing dataset
When updating a dataset it is best practice to remove the preprocessed data in `nnUNet_preprocessed/DatasetXXX_NAME`
to ensure a fresh start. Then replace the data in `nnUNet_raw` and rerun `nnUNetv2_plan_and_preprocess`. Optionally,
also remove the results from old trainings.
# Example dataset conversion scripts
In the `dataset_conversion` folder (see [here](../nnunetv2/dataset_conversion)) are multiple example scripts for
converting datasets into nnU-Net format. These scripts cannot be run as they are (you need to open them and change
some paths) but they are excellent examples for you to learn how to convert your own datasets into nnU-Net format.
Just pick the dataset that is closest to yours as a starting point.
The list of dataset conversion scripts is continually updated. If you find that some publicly available dataset is
missing, feel free to open a PR to add it!
================================================
FILE: Finetune/nnUNet/documentation/dataset_format_inference.md
================================================
# Data format for Inference
Read the documentation on the overall [data format](dataset_format.md) first!
The data format for inference must match the one used for the raw data (**specifically, the images must be in exactly
the same format as in the imagesTr folder**). As before, the filenames must start with a
unique identifier, followed by a 4-digit modality identifier. Here is an example for two different datasets:
1) Task005_Prostate:
This task has 2 modalities, so the files in the input folder must look like this:
input_folder
├── prostate_03_0000.nii.gz
├── prostate_03_0001.nii.gz
├── prostate_05_0000.nii.gz
├── prostate_05_0001.nii.gz
├── prostate_08_0000.nii.gz
├── prostate_08_0001.nii.gz
├── ...
_0000 has to be the T2 image and _0001 has to be the ADC image (as specified by 'channel_names' in the
dataset.json), exactly the same as was used for training.
2) Task002_Heart:
imagesTs
├── la_001_0000.nii.gz
├── la_002_0000.nii.gz
├── la_006_0000.nii.gz
├── ...
Task002 only has one modality, so each case only has one _0000.nii.gz file.
The segmentations in the output folder will be named {CASE_IDENTIFIER}.nii.gz (omitting the modality identifier).
Remember that the file format used for inference (.nii.gz in this example) must be the same as was used for training
(and as was specified in 'file_ending' in the dataset.json)!
================================================
FILE: Finetune/nnUNet/documentation/explanation_normalization.md
================================================
# Intensity normalization in nnU-Net
The type of intensity normalization applied in nnU-Net can be controlled via the `channel_names` (former `modalities`)
entry in the dataset.json. Just like the old nnU-Net, per-channel z-scoring as well as dataset-wide z-scoring based on
foreground intensities are supported. However, there have been a few additions as well.
Reminder: The `channel_names` entry typically looks like this:
"channel_names": {
"0": "T2",
"1": "ADC"
},
It has as many entries as there are input channels for the given dataset.
To tell you a secret, nnU-Net does not really care what your channels are called. We just use this to determine what normalization
scheme will be used for the given dataset. nnU-Net requires you to specify a normalization strategy for each of your input channels!
If you enter a channel name that is not in the following list, the default (`zscore`) will be used.
Here is a list of currently available normalization schemes:
- `CT`: Perform CT normalization. Specifically, collect intensity values from the foreground classes (all but the
background and ignore) from all training cases, compute the mean, standard deviation as well as the 0.5 and
99.5 percentile of the values. Then clip to the percentiles, followed by subtraction of the mean and division with the
standard deviation. The normalization that is applied is the same for each training case (for this input channel).
The values used by nnU-Net for normalization are stored in the `foreground_intensity_properties_per_channel` entry in the
corresponding plans file. This normalization is suitable for modalities presenting physical quantities such as CT
images and ADC maps.
- `noNorm` : do not perform any normalization at all
- `rescale_to_0_1`: rescale the intensities to [0, 1]
- `rgb_to_0_1`: assumes uint8 inputs. Divides by 255 to rescale uint8 to [0, 1]
- `zscore`/anything else: perform z-scoring (subtract mean and standard deviation) separately for each train case
**Important:** The nnU-Net default is to perform 'CT' normalization for CT images and 'zscore' for everything else! If
you deviate from that path, make sure to benchmark whether that actually improves results!
# How to implement custom normalization strategies?
- Head over to nnunetv2/preprocessing/normalization
- implement a new image normalization class by deriving from ImageNormalization
- register it in nnunetv2/preprocessing/normalization/map_channel_name_to_normalization.py:channel_name_to_normalization_mapping.
This is where you specify a channel name that should be associated with it
- use it by specifying the correct channel_name
Normalization can only be applied to one channel at a time. There is currently no way of implementing a normalization scheme
that gets multiple channels as input to be used jointly!
================================================
FILE: Finetune/nnUNet/documentation/explanation_plans_files.md
================================================
# Modifying the nnU-Net Configurations
nnU-Net provides unprecedented out-of-the-box segmentation performance for essentially any dataset we have evaluated
it on. That said, there is always room for improvements. A fool-proof strategy for squeezing out the last bit of
performance is to start with the default nnU-Net, and then further tune it manually to a concrete dataset at hand.
**This guide is about changes to the nnU-Net configuration you can make via the plans files. It does not cover code
extensions of nnU-Net. For that, take a look [here](extending_nnunet.md)**
In nnU-Net V2, plans files are SO MUCH MORE powerful than they were in v1. There are a lot more knobs that you can
turn without resorting to hacky solutions or even having to touch the nnU-Net code at all! And as an added bonus:
plans files are now also .json files and no longer require users to fiddle with pickle. Just open them in your text
editor of choice!
If overwhelmed, look at our [Examples](#examples)!
# plans.json structure
Plans have global and local settings. Global settings are applied to all configurations in that plans file while
local settings are attached to a specific configuration.
## Global settings
- `foreground_intensity_properties_by_modality`: Intensity statistics of the foreground regions (all labels except
background and ignore label), computed over all training cases. Used by [CT normalization scheme](explanation_normalization.md).
- `image_reader_writer`: Name of the image reader/writer class that should be used with this dataset. You might want
to change this if, for example, you would like to run inference with files that have a different file format. The
class that is named here must be located in nnunetv2.imageio!
- `label_manager`: The name of the class that does label handling. Take a look at
nnunetv2.utilities.label_handling.LabelManager to see what it does. If you decide to change it, place your version
in nnunetv2.utilities.label_handling!
- `transpose_forward`: nnU-Net transposes the input data so that the axes with the highest resolution (lowest spacing)
come last. This is because the 2D U-Net operates on the trailing dimensions (more efficient slicing due to internal
memory layout of arrays). Future work might move this setting to affect only individual configurations.
- transpose_backward is what numpy.transpose gets as new axis ordering.
- `transpose_backward`: the axis ordering that inverts "transpose_forward"
- \[`original_median_shape_after_transp`\]: just here for your information
- \[`original_median_spacing_after_transp`\]: just here for your information
- \[`plans_name`\]: do not change. Used internally
- \[`experiment_planner_used`\]: just here as metadata so that we know what planner originally generated this file
- \[`dataset_name`\]: do not change. This is the dataset these plans are intended for
## Local settings
Plans also have a `configurations` key in which the actual configurations are stored. `configurations` are again a
dictionary, where the keys are the configuration names and the values are the local settings for each configuration.
To better understand the components describing the network topology in our plans files, please read section 6.2
in the [supplementary information](https://static-content.springer.com/esm/art%3A10.1038%2Fs41592-020-01008-z/MediaObjects/41592_2020_1008_MOESM1_ESM.pdf)
(page 13) of our paper!
Local settings:
- `spacing`: the target spacing used in this configuration
- `patch_size`: the patch size used for training this configuration
- `data_identifier`: the preprocessed data for this configuration will be saved in
nnUNet_preprocessed/DATASET_NAME/_data_identifier_. If you add a new configuration, remember to set a unique
data_identifier in order to not create conflicts with other configurations (unless you plan to reuse the data from
another configuration, for example as is done in the cascade)
- `batch_size`: batch size used for training
- `batch_dice`: whether to use batch dice (pretend all samples in the batch are one image, compute dice loss over that)
or not (each sample in the batch is a separate image, compute dice loss for each sample and average over samples)
- `preprocessor_name`: Name of the preprocessor class used for running preprocessing. Class must be located in
nnunetv2.preprocessing.preprocessors
- `use_mask_for_norm`: whether to use the nonzero mask for normalization or not (relevant for BraTS and the like,
probably False for all other datasets). Interacts with ImageNormalization class
- `normalization_schemes`: mapping of channel identifier to ImageNormalization class name. ImageNormalization
classes must be located in nnunetv2.preprocessing.normalization. Also see [here](explanation_normalization.md)
- `resampling_fn_data`: name of resampling function to be used for resizing image data. resampling function must be
callable(data, current_spacing, new_spacing, **kwargs). It must be located in nnunetv2.preprocessing.resampling
- `resampling_fn_data_kwargs`: kwargs for resampling_fn_data
- `resampling_fn_probabilities`: name of resampling function to be used for resizing predicted class probabilities/logits.
resampling function must be `callable(data: Union[np.ndarray, torch.Tensor], current_spacing, new_spacing, **kwargs)`. It must be located in
nnunetv2.preprocessing.resampling
- `resampling_fn_probabilities_kwargs`: kwargs for resampling_fn_probabilities
- `resampling_fn_seg`: name of resampling function to be used for resizing segmentation maps (integer: 0, 1, 2, 3, etc).
resampling function must be callable(data, current_spacing, new_spacing, **kwargs). It must be located in
nnunetv2.preprocessing.resampling
- `resampling_fn_seg_kwargs`: kwargs for resampling_fn_seg
- `UNet_class_name`: UNet class name, can be used to integrate custom dynamic architectures
- `UNet_base_num_features`: The number of starting features for the UNet architecture. Default is 32. Default: Features
are doubled with each downsampling
- `unet_max_num_features`: Maximum number of features (default: capped at 320 for 3D and 512 for 2D). The purpose is to
prevent parameters from exploding too much.
- `conv_kernel_sizes`: the convolutional kernel sizes used by nnU-Net in each stage of the encoder. The decoder
mirrors the encoder and is therefore not explicitly listed here! The list is as long as `n_conv_per_stage_encoder` has
entries
- `n_conv_per_stage_encoder`: number of convolutions used per stage (=at a feature map resolution in the encoder) in the encoder.
Default is 2. The list has as many entries as the encoder has stages
- `n_conv_per_stage_decoder`: number of convolutions used per stage in the decoder. Also see `n_conv_per_stage_encoder`
- `num_pool_per_axis`: number of times each of the spatial axes is pooled in the network. Needed to know how to pad
image sizes during inference (num_pool = 5 means input must be divisible by 2**5=32)
- `pool_op_kernel_sizes`: the pooling kernel sizes (and at the same time strides) for each stage of the encoder
- \[`median_image_size_in_voxels`\]: the median size of the images of the training set at the current target spacing.
Do not modify this as this is not used. It is just here for your information.
Special local settings:
- `inherits_from`: configurations can inherit from each other. This makes it easy to add new configurations that only
differ in a few local settings from another. If using this, remember to set a new `data_identifier` (if needed)!
- `previous_stage`: if this configuration is part of a cascade, we need to know what the previous stage (for example
the low resolution configuration) was. This needs to be specified here.
- `next_stage`: if this configuration is part of a cascade, we need to know what possible subsequent stages are! This
is because we need to export predictions in the correct spacing when running the validation. `next_stage` can either
be a string or a list of strings
# Examples
## Increasing the batch size for large datasets
If your dataset is large the training can benefit from larger batch_sizes. To do this, simply create a new
configuration in the `configurations` dict
"configurations": {
"3d_fullres_bs40": {
"inherits_from": "3d_fullres",
"batch_size": 40
}
}
No need to change the data_identifier. `3d_fullres_bs40` will just use the preprocessed data from `3d_fullres`.
No need to rerun `nnUNetv2_preprocess` because we can use already existing data (if available) from `3d_fullres`.
## Using custom preprocessors
If you would like to use a different preprocessor class then this can be specified as follows:
"configurations": {
"3d_fullres_my_preprocesor": {
"inherits_from": "3d_fullres",
        "preprocessor_name": "MY_PREPROCESSOR",
"data_identifier": "3d_fullres_my_preprocesor"
}
}
You need to run preprocessing for this new configuration:
`nnUNetv2_preprocess -d DATASET_ID -c 3d_fullres_my_preprocesor` because it changes the preprocessing. Remember to
set a unique `data_identifier` whenever you make modifications to the preprocessed data!
## Change target spacing
"configurations": {
"3d_fullres_my_spacing": {
"inherits_from": "3d_fullres",
"spacing": [X, Y, Z],
"data_identifier": "3d_fullres_my_spacing"
}
}
You need to run preprocessing for this new configuration:
`nnUNetv2_preprocess -d DATASET_ID -c 3d_fullres_my_spacing` because it changes the preprocessing. Remember to
set a unique `data_identifier` whenever you make modifications to the preprocessed data!
## Adding a cascade to a dataset where it does not exist
Hippocampus is small. It doesn't have a cascade. It also doesn't really make sense to add a cascade here but hey for
the sake of demonstration we can do that.
We change the following things here:
- `spacing`: The lowres stage should operate at a lower resolution
- we modify the `median_image_size_in_voxels` entry as a guide for what original image sizes we deal with
- we set some patch size that is inspired by `median_image_size_in_voxels`
- we need to remember that the patch size must be divisible by 2**num_pool in each axis!
- network parameters such as kernel sizes, pooling operations are changed accordingly
- we need to specify the name of the next stage
- we need to add the highres stage
This is what this would look like (comparisons with 3d_fullres given as reference):
"configurations": {
"3d_lowres": {
"inherits_from": "3d_fullres",
        "data_identifier": "3d_lowres",
"spacing": [2.0, 2.0, 2.0], # from [1.0, 1.0, 1.0] in 3d_fullres
"median_image_size_in_voxels": [18, 25, 18], # from [36, 50, 35]
"patch_size": [20, 28, 20], # from [40, 56, 40]
"n_conv_per_stage_encoder": [2, 2, 2], # one less entry than 3d_fullres ([2, 2, 2, 2])
"n_conv_per_stage_decoder": [2, 2], # one less entry than 3d_fullres
"num_pool_per_axis": [2, 2, 2], # one less pooling than 3d_fullres in each dimension (3d_fullres: [3, 3, 3])
"pool_op_kernel_sizes": [[1, 1, 1], [2, 2, 2], [2, 2, 2]], # one less [2, 2, 2]
"conv_kernel_sizes": [[3, 3, 3], [3, 3, 3], [3, 3, 3]], # one less [3, 3, 3]
"next_stage": "3d_cascade_fullres" # name of the next stage in the cascade
},
"3d_cascade_fullres": { # does not need a data_identifier because we can use the data of 3d_fullres
"inherits_from": "3d_fullres",
"previous_stage": "3d_lowres" # name of the previous stage
}
}
To better understand the components describing the network topology in our plans files, please read section 6.2
in the [supplementary information](https://static-content.springer.com/esm/art%3A10.1038%2Fs41592-020-01008-z/MediaObjects/41592_2020_1008_MOESM1_ESM.pdf)
(page 13) of our paper!
================================================
FILE: Finetune/nnUNet/documentation/extending_nnunet.md
================================================
# Extending nnU-Net
We hope that the new structure of nnU-Net v2 makes it much more intuitive on how to modify it! We cannot give an
extensive tutorial on how each and every bit of it can be modified. It is better for you to search for the position
in the repository where the thing you intend to change is implemented and start working your way through the code from
there. Setting breakpoints and debugging into nnU-Net really helps in understanding it and thus will help you make the
necessary modifications!
Here are some things you might want to read before you start:
- Editing nnU-Net configurations through plans files is really powerful now and allows you to change a lot of things regarding
preprocessing, resampling, network topology etc. Read [this](explanation_plans_files.md)!
- [Image normalization](explanation_normalization.md) and [i/o formats](dataset_format.md#supported-file-formats) are easy to extend!
- Manual data splits can be defined as described [here](manual_data_splits.md)
- You can chain arbitrary configurations together into cascades, see [this again](explanation_plans_files.md)
- Read about our support for [region-based training](region_based_training.md)
- If you intend to modify the training procedure (loss, sampling, data augmentation, lr scheduler, etc) then you need
to implement your own trainer class. Best practice is to create a class that inherits from nnUNetTrainer and
implements the necessary changes. Head over to our [trainer classes folder](../nnunetv2/training/nnUNetTrainer) for
inspiration! There will be similar trainers for what you intend to change and you can take them as a guide. nnUNetTrainer
are structured similarly to PyTorch lightning trainers, this should also make things easier!
- Integrating new network architectures can be done in two ways:
- Quick and dirty: implement a new nnUNetTrainer class and overwrite its `build_network_architecture` function.
Make sure your architecture is compatible with deep supervision (if not, use `nnUNetTrainerNoDeepSupervision`
as basis!) and that it can handle the patch sizes that are thrown at it! Your architecture should NOT apply any
nonlinearities at the end (softmax, sigmoid etc). nnU-Net does that!
- The 'proper' (but difficult) way: Build a dynamically configurable architecture such as the `PlainConvUNet` class
used by default. It needs to have some sort of GPU memory estimation method that can be used to evaluate whether
certain patch sizes and
topologies fit into a specified GPU memory target. Build a new `ExperimentPlanner` that can configure your new
class and communicate with its memory budget estimation. Run `nnUNetv2_plan_and_preprocess` while specifying your
custom `ExperimentPlanner` and a custom `plans_name`. Implement a nnUNetTrainer that can use the plans generated by
your `ExperimentPlanner` to instantiate the network architecture. Specify your plans and trainer when running `nnUNetv2_train`.
It always pays off to first read and understand the corresponding nnU-Net code and use it as a template for your implementation!
- Remember that multi-GPU training, region-based training, ignore label and cascaded training are now simply integrated
into one unified nnUNetTrainer class. No separate classes needed (remember that when implementing your own trainer
classes and ensure support for all of these features! Or raise `NotImplementedError`)
[//]: # (- Read about our support for [ignore label](ignore_label.md) and [region-based training](region_based_training.md))
================================================
FILE: Finetune/nnUNet/documentation/how_to_use_nnunet.md
================================================
## How to run nnU-Net on a new dataset
Given some dataset, nnU-Net fully automatically configures an entire segmentation pipeline that matches its properties.
nnU-Net covers the entire pipeline, from preprocessing to model configuration, model training, postprocessing
all the way to ensembling. After running nnU-Net, the trained model(s) can be applied to the test cases for inference.
### Dataset Format
nnU-Net expects datasets in a structured format. This format is inspired by the data structure of
the [Medical Segmentation Decathlon](http://medicaldecathlon.com/). Please read
[this](dataset_format.md) for information on how to set up datasets to be compatible with nnU-Net.
**Since version 2 we support multiple image file formats (.nii.gz, .png, .tif, ...)! Read the dataset_format
documentation to learn more!**
**Datasets from nnU-Net v1 can be converted to V2 by running `nnUNetv2_convert_old_nnUNet_dataset INPUT_FOLDER
OUTPUT_DATASET_NAME`.** Remember that v2 calls datasets DatasetXXX_Name (not Task) where XXX is a 3-digit number.
Please provide the **path** to the old task, not just the Task name. nnU-Net V2 doesn't know where v1 tasks were!
### Experiment planning and preprocessing
Given a new dataset, nnU-Net will extract a dataset fingerprint (a set of dataset-specific properties such as
image sizes, voxel spacings, intensity information etc). This information is used to design three U-Net configurations.
Each of these pipelines operates on its own preprocessed version of the dataset.
The easiest way to run fingerprint extraction, experiment planning and preprocessing is to use:
```bash
nnUNetv2_plan_and_preprocess -d DATASET_ID --verify_dataset_integrity
```
Where `DATASET_ID` is the dataset id (duh). We recommend `--verify_dataset_integrity` whenever it's the first time
you run this command. This will check for some of the most common error sources!
You can also process several datasets at once by giving `-d 1 2 3 [...]`. If you already know what U-Net configuration
you need you can also specify that with `-c 3d_fullres` (make sure to adapt -np in this case!). For more information
about all the options available to you please run `nnUNetv2_plan_and_preprocess -h`.
nnUNetv2_plan_and_preprocess will create a new subfolder in your nnUNet_preprocessed folder named after the dataset.
Once the command is completed there will be a dataset_fingerprint.json file as well as a nnUNetPlans.json file for you to look at
(in case you are interested!). There will also be subfolders containing the preprocessed data for your UNet configurations.
[Optional]
If you prefer to keep things separate, you can also use `nnUNetv2_extract_fingerprint`, `nnUNetv2_plan_experiment`
and `nnUNetv2_preprocess` (in that order).
### Model training
#### Overview
You pick which configurations (2d, 3d_fullres, 3d_lowres, 3d_cascade_fullres) should be trained! If you have no idea
what performs best on your data, just run all of them and let nnU-Net identify the best one. It's up to you!
nnU-Net trains all configurations in a 5-fold cross-validation over the training cases. This is 1) needed so that
nnU-Net can estimate the performance of each configuration and tell you which one should be used for your
segmentation problem and 2) a natural way of obtaining a good model ensemble (average the output of these 5 models
for prediction) to boost performance.
You can influence the splits nnU-Net uses for 5-fold cross-validation (see [here](manual_data_splits.md)). If you
prefer to train a single model on all training cases, this is also possible (see below).
**Note that not all U-Net configurations are created for all datasets. In datasets with small image sizes, the U-Net
cascade (and with it the 3d_lowres configuration) is omitted because the patch size of the full resolution U-Net
already covers a large part of the input images.**
Training models is done with the `nnUNetv2_train` command. The general structure of the command is:
```bash
nnUNetv2_train DATASET_NAME_OR_ID UNET_CONFIGURATION FOLD [additional options, see -h]
```
UNET_CONFIGURATION is a string that identifies the requested U-Net configuration (defaults: 2d, 3d_fullres, 3d_lowres,
3d_cascade_fullres). DATASET_NAME_OR_ID specifies what dataset should be trained on and FOLD specifies which fold of
the 5-fold-cross-validation is trained.
nnU-Net stores a checkpoint every 50 epochs. If you need to continue a previous training, just add a `--c` to the
training command.
IMPORTANT: If you plan to use `nnUNetv2_find_best_configuration` (see below) add the `--npz` flag. This makes
nnU-Net save the softmax outputs during the final validation. They are needed for that. Exported softmax
predictions are very large and therefore can take up a lot of disk space, which is why this is not enabled by default.
If you ran initially without the `--npz` flag but now require the softmax predictions, simply rerun the validation with:
```bash
nnUNetv2_train DATASET_NAME_OR_ID UNET_CONFIGURATION FOLD --val --npz
```
You can specify the device nnU-net should use by using `-device DEVICE`. DEVICE can only be cpu, cuda or mps. If
you have multiple GPUs, please select the gpu id using `CUDA_VISIBLE_DEVICES=X nnUNetv2_train [...]` (requires device to be cuda).
See `nnUNetv2_train -h` for additional options.
### 2D U-Net
For FOLD in [0, 1, 2, 3, 4], run:
```bash
nnUNetv2_train DATASET_NAME_OR_ID 2d FOLD [--npz]
```
### 3D full resolution U-Net
For FOLD in [0, 1, 2, 3, 4], run:
```bash
nnUNetv2_train DATASET_NAME_OR_ID 3d_fullres FOLD [--npz]
```
### 3D U-Net cascade
#### 3D low resolution U-Net
For FOLD in [0, 1, 2, 3, 4], run:
```bash
nnUNetv2_train DATASET_NAME_OR_ID 3d_lowres FOLD [--npz]
```
#### 3D full resolution U-Net
For FOLD in [0, 1, 2, 3, 4], run:
```bash
nnUNetv2_train DATASET_NAME_OR_ID 3d_cascade_fullres FOLD [--npz]
```
**Note that the 3D full resolution U-Net of the cascade requires the five folds of the low resolution U-Net to be
completed!**
The trained models will be written to the nnUNet_results folder. Each training obtains an automatically generated
output folder name:
nnUNet_results/DatasetXXX_MYNAME/TRAINER_CLASS_NAME__PLANS_NAME__CONFIGURATION/FOLD
For Dataset002_Heart (from the MSD), for example, this looks like this:
nnUNet_results/
├── Dataset002_Heart
│── nnUNetTrainer__nnUNetPlans__2d
│ ├── fold_0
│ ├── fold_1
│ ├── fold_2
│ ├── fold_3
│ ├── fold_4
│ ├── dataset.json
│ ├── dataset_fingerprint.json
│ └── plans.json
└── nnUNetTrainer__nnUNetPlans__3d_fullres
├── fold_0
├── fold_1
├── fold_2
├── fold_3
├── fold_4
├── dataset.json
├── dataset_fingerprint.json
└── plans.json
Note that 3d_lowres and 3d_cascade_fullres do not exist here because this dataset did not trigger the cascade. In each
model training output folder (each of the fold_x folder), the following files will be created:
- debug.json: Contains a summary of blueprint and inferred parameters used for training this model as well as a
bunch of additional stuff. Not easy to read, but very useful for debugging ;-)
- checkpoint_best.pth: checkpoint files of the best model identified during training. Not used right now unless you
explicitly tell nnU-Net to use it.
- checkpoint_final.pth: checkpoint file of the final model (after training has ended). This is what is used for both
validation and inference.
- network_architecture.pdf (only if hiddenlayer is installed!): a pdf document with a figure of the network architecture in it.
- progress.png: Shows losses, pseudo dice, learning rate and epoch times over the course of the training. At the top is
a plot of the training (blue) and validation (red) loss during training. Also shows an approximation of
the dice (green) as well as a moving average of it (dotted green line). This approximation is the average Dice score
of the foreground classes. **It needs to be taken with a big (!)
grain of salt** because it is computed on randomly drawn patches from the validation
data at the end of each epoch, and the aggregation of TP, FP and FN for the Dice computation treats the patches as if
they all originate from the same volume ('global Dice'; we do not compute a Dice for each validation case and then
average over all cases but pretend that there is only one validation case from which we sample patches). The reason for
this is that the 'global Dice' is easy to compute during training and is still quite useful to evaluate whether a model
is training at all or not. A proper validation takes way too long to be done each epoch. It is run at the end of the training.
- validation_raw: in this folder are the predicted validation cases after the training has finished. The summary.json file in here
contains the validation metrics (a mean over all cases is provided at the start of the file). If `--npz` was set then
the compressed softmax outputs (saved as .npz files) are in here as well.
During training it is often useful to watch the progress. We therefore recommend that you have a look at the generated
progress.png when running the first training. It will be updated after each epoch.
Training times largely depend on the GPU. The smallest GPU we recommend for training is the Nvidia RTX 2080ti. With
that all network trainings take less than 2 days. Refer to our [benchmarks](benchmarking.md) to see if your system is
performing as expected.
### Using multiple GPUs for training
If multiple GPUs are at your disposal, the best way of using them is to train multiple nnU-Net trainings at once, one
on each GPU. This is because data parallelism never scales perfectly linearly, especially not with small networks such
as the ones used by nnU-Net.
Example:
```bash
CUDA_VISIBLE_DEVICES=0 nnUNetv2_train DATASET_NAME_OR_ID 2d 0 [--npz] & # train on GPU 0
CUDA_VISIBLE_DEVICES=1 nnUNetv2_train DATASET_NAME_OR_ID 2d 1 [--npz] & # train on GPU 1
CUDA_VISIBLE_DEVICES=2 nnUNetv2_train DATASET_NAME_OR_ID 2d 2 [--npz] & # train on GPU 2
CUDA_VISIBLE_DEVICES=3 nnUNetv2_train DATASET_NAME_OR_ID 2d 3 [--npz] & # train on GPU 3
CUDA_VISIBLE_DEVICES=4 nnUNetv2_train DATASET_NAME_OR_ID 2d 4 [--npz] & # train on GPU 4
...
wait
```
**Important: The first time a training is run nnU-Net will extract the preprocessed data into uncompressed numpy
arrays for speed reasons! This operation must be completed before starting more than one training of the same
configuration! Wait with starting subsequent folds until the first training is using the GPU! Depending on the
dataset size and your System this should only take a couple of minutes at most.**
If you insist on running DDP multi-GPU training, we got you covered:
`nnUNetv2_train DATASET_NAME_OR_ID 2d 0 [--npz] -num_gpus X`
Again, note that this will be slower than running separate training on separate GPUs. DDP only makes sense if you have
manually interfered with the nnU-Net configuration and are training larger models with larger patch and/or batch sizes!
Important when using `-num_gpus`:
1) If you train using, say, 2 GPUs but have more GPUs in the system you need to specify which GPUs should be used via
CUDA_VISIBLE_DEVICES=0,1 (or whatever your ids are).
2) You cannot specify more GPUs than you have samples in your minibatches. If the batch size is 2, 2 GPUs is the maximum!
3) Make sure your batch size is divisible by the numbers of GPUs you use or you will not make good use of your hardware.
In contrast to the old nnU-Net, DDP is now completely hassle free. Enjoy!
### Automatically determine the best configuration
Once the desired configurations were trained (full cross-validation) you can tell nnU-Net to automatically identify
the best combination for you:
```commandline
nnUNetv2_find_best_configuration DATASET_NAME_OR_ID -c CONFIGURATIONS
```
`CONFIGURATIONS` hereby is the list of configurations you would like to explore. Per default, ensembling is enabled
meaning that nnU-Net will generate all possible combinations of ensembles (2 configurations per ensemble). This requires
the .npz files containing the predicted probabilities of the validation set to be present (use `nnUNetv2_train` with
`--npz` flag, see above). You can disable ensembling by setting the `--disable_ensembling` flag.
See `nnUNetv2_find_best_configuration -h` for more options.
nnUNetv2_find_best_configuration will also automatically determine the postprocessing that should be used.
Postprocessing in nnU-Net only considers the removal of all but the largest component in the prediction (once for
foreground vs background and once for each label/region).
Once completed, the command will print to your console exactly what commands you need to run to make predictions. It
will also create two files in the `nnUNet_results/DATASET_NAME` folder for you to inspect:
- `inference_instructions.txt` again contains the exact commands you need to use for predictions
- `inference_information.json` can be inspected to see the performance of all configurations and ensembles, as well
as the effect of the postprocessing plus some debug information.
### Run inference
Remember that the data located in the input folder must have the file endings as the dataset you trained the model on
and must adhere to the nnU-Net naming scheme for image files (see [dataset format](dataset_format.md) and
[inference data format](dataset_format_inference.md)!)
`nnUNetv2_find_best_configuration` (see above) will print a string to the terminal with the inference commands you need to use.
The easiest way to run inference is to simply use these commands.
If you wish to manually specify the configuration(s) used for inference, use the following commands:
#### Run prediction
For each of the desired configurations, run:
```
nnUNetv2_predict -i INPUT_FOLDER -o OUTPUT_FOLDER -d DATASET_NAME_OR_ID -c CONFIGURATION --save_probabilities
```
Only specify `--save_probabilities` if you intend to use ensembling. `--save_probabilities` will make the command save the predicted
probabilities alongside of the predicted segmentation masks requiring a lot of disk space.
Please select a separate `OUTPUT_FOLDER` for each configuration!
Note that per default, inference will be done with all 5 folds from the cross-validation as an ensemble. We very
strongly recommend you use all 5 folds. Thus, all 5 folds must have been trained prior to running inference.
If you wish to make predictions with a single model, train the `all` fold and specify it in `nnUNetv2_predict`
with `-f all`
#### Ensembling multiple configurations
If you wish to ensemble multiple predictions (typically from different configurations), you can do so with the following command:
```bash
nnUNetv2_ensemble -i FOLDER1 FOLDER2 ... -o OUTPUT_FOLDER -np NUM_PROCESSES
```
You can specify an arbitrary number of folders, but remember that each folder needs to contain npz files that were
generated by `nnUNetv2_predict`. Again, `nnUNetv2_ensemble -h` will tell you more about additional options.
#### Apply postprocessing
Finally, apply the previously determined postprocessing to the (ensembled) predictions:
```commandline
nnUNetv2_apply_postprocessing -i FOLDER_WITH_PREDICTIONS -o OUTPUT_FOLDER --pp_pkl_file POSTPROCESSING_FILE -plans_json PLANS_FILE -dataset_json DATASET_JSON_FILE
```
`nnUNetv2_find_best_configuration` (or its generated `inference_instructions.txt` file) will tell you where to find
the postprocessing file. If not you can just look for it in your results folder (it's creatively named
`postprocessing.pkl`). If your source folder is from an ensemble, you also need to specify a `-plans_json` file and
a `-dataset_json` file that should be used (for single configuration predictions these are automatically copied
from the respective training). You can pick these files from any of the ensemble members.
## How to run inference with pretrained models
See [here](run_inference_with_pretrained_models.md)
[//]: # (## Examples)
[//]: # ()
[//]: # (To get you started we compiled two simple to follow examples:)
[//]: # (- run a training with the 3d full resolution U-Net on the Hippocampus dataset. See [here](documentation/training_example_Hippocampus.md).)
[//]: # (- run inference with nnU-Net's pretrained models on the Prostate dataset. See [here](documentation/inference_example_Prostate.md).)
[//]: # ()
[//]: # (Usability not good enough? Let us know!)
================================================
FILE: Finetune/nnUNet/documentation/installation_instructions.md
================================================
# System requirements
## Operating System
nnU-Net has been tested on Linux (Ubuntu 18.04, 20.04, 22.04; centOS, RHEL), Windows and MacOS! It should work out of the box!
## Hardware requirements
We support GPU (recommended), CPU and Apple M1/M2 as devices (currently Apple mps does not implement 3D
convolutions, so you might have to use the CPU on those devices).
### Hardware requirements for Training
We recommend you use a GPU for training as this will take a really long time on CPU or MPS (Apple M1/M2).
For training a GPU with at least 10 GB (popular non-datacenter options are the RTX 2080ti, RTX 3080/3090 or RTX 4080/4090) is
required. We also recommend a strong CPU to go along with the GPU. 6 cores (12 threads)
are the bare minimum! CPU requirements are mostly related to data augmentation and scale with the number of
input channels and target structures. Plus, the faster the GPU, the better the CPU should be!
### Hardware Requirements for inference
Again we recommend a GPU to make predictions as this will be substantially faster than the other options. However,
inference times are typically still manageable on CPU and MPS (Apple M1/M2). If using a GPU, it should have at least
4 GB of available (unused) VRAM.
### Example hardware configurations
Example workstation configurations for training:
- CPU: Ryzen 5800X - 5900X or 7900X would be even better! We have not yet tested Intel Alder/Raptor lake but they will likely work as well.
- GPU: RTX 3090 or RTX 4090
- RAM: 64GB
- Storage: SSD (M.2 PCIe Gen 3 or better!)
Example Server configuration for training:
- CPU: 2x AMD EPYC7763 for a total of 128C/256T. 16C/GPU are highly recommended for fast GPUs such as the A100!
- GPU: 8xA100 PCIe (price/performance superior to SXM variant + they use less power)
- RAM: 1 TB
- Storage: local SSD storage (PCIe Gen 3 or better) or ultra fast network storage
(nnU-net by default uses one GPU per training. The server configuration can run up to 8 model trainings simultaneously)
### Setting the correct number of Workers for data augmentation (training only)
Note that you will need to manually set the number of processes nnU-Net uses for data augmentation according to your
CPU/GPU ratio. For the server above (256 threads for 8 GPUs), a good value would be 24-30. You can do this by
setting the `nnUNet_n_proc_DA` environment variable (`export nnUNet_n_proc_DA=XX`).
Recommended values (assuming a recent CPU with good IPC) are 10-12 for RTX 2080 ti, 12 for a RTX 3090, 16-18 for
RTX 4090, 28-32 for A100. Optimal values may vary depending on the number of input channels/modalities and number of classes.
# Installation instructions
We strongly recommend that you install nnU-Net in a virtual environment! Pip or anaconda are both fine. If you choose to
compile PyTorch from source (see below), you will need to use conda instead of pip.
Use a recent version of Python! 3.9 or newer is guaranteed to work!
**nnU-Net v2 can coexist with nnU-Net v1! Both can be installed at the same time.**
1) Install [PyTorch](https://pytorch.org/get-started/locally/) as described on their website (conda/pip). Please
install the latest version with support for your hardware (cuda, mps, cpu).
**DO NOT JUST `pip install nnunetv2` WITHOUT PROPERLY INSTALLING PYTORCH FIRST**. For maximum speed, consider
[compiling pytorch yourself](https://github.com/pytorch/pytorch#from-source) (experienced users only!).
2) Install nnU-Net depending on your use case:
1) For use as **standardized baseline**, **out-of-the-box segmentation algorithm** or for running
**inference with pretrained models**:
```pip install nnunetv2```
2) For use as integrative **framework** (this will create a copy of the nnU-Net code on your computer so that you
can modify it as needed):
```bash
git clone https://github.com/MIC-DKFZ/nnUNet.git
cd nnUNet
pip install -e .
```
3) nnU-Net needs to know where you intend to save raw data, preprocessed data and trained models. For this you need to
set a few environment variables. Please follow the instructions [here](setting_up_paths.md).
4) (OPTIONAL) Install [hiddenlayer](https://github.com/waleedka/hiddenlayer). hiddenlayer enables nnU-net to generate
plots of the network topologies it generates (see [Model training](how_to_use_nnunet.md#model-training)).
To install hiddenlayer,
run the following command:
```bash
pip install --upgrade git+https://github.com/FabianIsensee/hiddenlayer.git
```
Installing nnU-Net will add several new commands to your terminal. These commands are used to run the entire nnU-Net
pipeline. You can execute them from any location on your system. All nnU-Net commands have the prefix `nnUNetv2_` for
easy identification.
Note that these commands simply execute python scripts. If you installed nnU-Net in a virtual environment, this
environment must be activated when executing the commands. You can see what scripts/functions are executed by
checking the project.scripts in the [pyproject.toml](../pyproject.toml) file.
All nnU-Net commands have a `-h` option which gives information on how to use them.
================================================
FILE: Finetune/nnUNet/documentation/manual_data_splits.md
================================================
# How to generate custom splits in nnU-Net
Sometimes, the default 5-fold cross-validation split by nnU-Net does not fit a project. Maybe you want to run 3-fold
cross-validation instead? Or maybe your training cases cannot be split randomly and require careful stratification.
Fear not, for nnU-Net has got you covered (it really can do anything <3).
The splits nnU-Net uses are generated in the `do_split` function of nnUNetTrainer. This function will first look for
existing splits, stored as a file, and if no split exists it will create one. So if you wish to influence the split,
manually creating a split file that will then be recognized and used is the way to go!
The split file is located in the `nnUNet_preprocessed/DATASETXXX_NAME` folder. So it is best practice to first
populate this folder by running `nnUNetv2_plan_and_preprocess`.
Splits are stored as a .json file. They are a simple python list. The length of that list is the number of splits it
contains (so it's 5 in the default nnU-Net). Each list entry is a dictionary with keys 'train' and 'val'. Values are
again simply lists with the train identifiers in each set. To illustrate this, I am just messing with the Dataset002
file as an example:
```commandline
In [1]: from batchgenerators.utilities.file_and_folder_operations import load_json
In [2]: splits = load_json('splits_final.json')
In [3]: len(splits)
Out[3]: 5
In [4]: splits[0].keys()
Out[4]: dict_keys(['train', 'val'])
In [5]: len(splits[0]['train'])
Out[5]: 16
In [6]: len(splits[0]['val'])
Out[6]: 4
In [7]: print(splits[0])
{'train': ['la_003', 'la_004', 'la_005', 'la_009', 'la_010', 'la_011', 'la_014', 'la_017', 'la_018', 'la_019', 'la_020', 'la_022', 'la_023', 'la_026', 'la_029', 'la_030'],
'val': ['la_007', 'la_016', 'la_021', 'la_024']}
```
If you are still not sure what splits are supposed to look like, simply download some reference dataset from the
[Medical Decathlon](http://medicaldecathlon.com/), start some training (to generate the splits) and manually inspect
the .json file with your text editor of choice!
In order to generate your custom splits, all you need to do is reproduce the data structure explained above and save it as
`splits_final.json` in the `nnUNet_preprocessed/DATASETXXX_NAME` folder. Then use `nnUNetv2_train` etc. as usual.
================================================
FILE: Finetune/nnUNet/documentation/pretraining_and_finetuning.md
================================================
# Pretraining with nnU-Net
## Intro
So far nnU-Net only supports supervised pre-training, meaning that you train a regular nnU-Net on some source dataset
and then use the final network weights as initialization for your target dataset.
As a reminder, many training hyperparameters such as patch size and network topology differ between datasets as a
result of the automated dataset analysis and experiment planning nnU-Net is known for. So, out of the box, it is not
possible to simply take the network weights from some dataset and then reuse them for another.
Consequently, the plans need to be aligned between the two tasks. In this README we show how this can be achieved and
how the resulting weights can then be used for initialization.
### Terminology
Throughout this README we use the following terminology:
- `source dataset` is the dataset you intend to run the pretraining on
- `target dataset` is the dataset you are interested in; the one you wish to fine tune on
## Pretraining on the source dataset
In order to obtain matching network topologies we need to transfer the plans from one dataset to another. Since we are
only interested in the target dataset, we first need to run experiment planning (and preprocessing) for it:
```bash
nnUNetv2_plan_and_preprocess -d TARGET_DATASET
```
Then we need to extract the dataset fingerprint of the source dataset, if not yet available:
```bash
nnUNetv2_extract_fingerprint -d SOURCE_DATASET
```
Now we can take the plans from the target dataset and transfer it to the source:
```bash
nnUNetv2_move_plans_between_datasets -s TARGET_DATASET -t SOURCE_DATASET -sp TARGET_PLANS_IDENTIFIER -tp SOURCE_PLANS_IDENTIFIER
```
`SOURCE_PLANS_IDENTIFIER` is hereby probably nnUNetPlans unless you changed the experiment planner in
nnUNetv2_plan_and_preprocess. For `TARGET_PLANS_IDENTIFIER` we recommend you set something custom in order to not
overwrite default plans.
Note that EVERYTHING is transferred between the datasets. Not just the network topology, batch size and patch size but
also the normalization scheme! Therefore, a transfer between datasets that use different normalization schemes may not
work well (but it could, depending on the schemes!).
Note on CT normalization: Yes, also the clip values, mean and std are transferred!
Now you can run the preprocessing on the source task:
```bash
nnUNetv2_preprocess -d SOURCE_DATASET -plans_name TARGET_PLANS_IDENTIFIER
```
And run the training as usual:
```bash
nnUNetv2_train SOURCE_DATASET CONFIG all -p TARGET_PLANS_IDENTIFIER
```
Note how we use the 'all' fold to train on all available data. For pretraining it does not make sense to split the data.
## Using pretrained weights
Once pretraining is completed (or you obtain compatible weights by other means) you can use them to initialize your model:
```bash
nnUNetv2_train TARGET_DATASET CONFIG FOLD -pretrained_weights PATH_TO_CHECKPOINT
```
Specify the checkpoint in PATH_TO_CHECKPOINT.
When loading pretrained weights, all layers except the segmentation layers will be used!
So far there are no specific nnUNet trainers for fine tuning, so the current recommendation is to just use
nnUNetTrainer. You can however easily write your own trainers with learning rate ramp up, fine-tuning of segmentation
heads or shorter training time.
================================================
FILE: Finetune/nnUNet/documentation/region_based_training.md
================================================
# Region-based training
## What is this about?
In some segmentation tasks, most prominently the
[Brain Tumor Segmentation Challenge](http://braintumorsegmentation.org/), the target areas (based on which the metric
will be computed) are different from the labels provided in the training data. This is the case because for some
clinical applications, it is more relevant to detect the whole tumor, tumor core and enhancing tumor instead of the
individual labels (edema, necrosis and non-enhancing tumor, enhancing tumor).
The figure shows an example BraTS case along with label-based representation of the task (top) and region-based
representation (bottom). The challenge evaluation is done on the regions. As we have shown in our
[BraTS 2018 contribution](https://arxiv.org/abs/1809.10483), directly optimizing those
overlapping areas over the individual labels yields better scoring models!
## What can nnU-Net do?
nnU-Net's region-based training allows you to learn areas that are constructed by merging individual labels. For
some segmentation tasks this provides a benefit, as this shifts the importance allocated to different labels during training.
Most prominently, this feature can be used to represent **hierarchical classes**, for example when organs +
substructures are to be segmented. Imagine a liver segmentation problem, where vessels and tumors are also to be
segmented. The first target region could thus be the entire liver (including the substructures), while the remaining
targets are the individual substructures.
Important: nnU-Net still requires integer label maps as input and will produce integer label maps as output!
Region-based training can be used to learn overlapping labels, but there must be a way to model these overlaps
for nnU-Net to work (see below how this is done).
## How do you use it?
When declaring the labels in the `dataset.json` file, BraTS would typically look like this:
```python
...
"labels": {
"background": 0,
"edema": 1,
"non_enhancing_and_necrosis": 2,
"enhancing_tumor": 3
},
...
```
(we use different int values than the challenge because nnU-Net needs consecutive integers!)
This representation corresponds to the upper row in the figure above.
For region-based training, the labels need to be changed to the following:
```python
...
"labels": {
"background": 0,
"whole_tumor": [1, 2, 3],
"tumor_core": [2, 3],
"enhancing_tumor": 3 # or [3]
},
"regions_class_order": [1, 2, 3],
...
```
This corresponds to the bottom row in the figure above. Note how an additional entry in the dataset.json is
required: `regions_class_order`. This tells nnU-Net how to convert the region representations back to an integer map.
It essentially just tells nnU-Net what labels to place for which region in what order. The length of the
list here needs to be the same as the number of regions (excl background). Each element in the list corresponds
to the label that is placed instead of the region into the final segmentation. Later entries will overwrite earlier ones!
Concretely, for the example given here, nnU-Net
will firstly place the label 1 (edema) where the 'whole_tumor' region was predicted, then place the label 2
(non-enhancing tumor and necrosis) where the "tumor_core" was predicted and finally place the label 3 in the
predicted 'enhancing_tumor' area. With each step, part of the previously set pixels
will be overwritten with the new label! So when setting your `regions_class_order`, place encompassing regions
(like whole tumor etc) first, followed by substructures.
**IMPORTANT** Because the conversion back to a segmentation map is sensitive to the order in which the regions are
declared ("place label X in the first region") you need to make sure that this order is not perturbed! When
automatically generating the dataset.json, make sure the dictionary keys do not get sorted alphabetically! Set
`sort_keys=False` in `json.dump()`!!!
nnU-Net will perform the evaluation + model selection also on the regions, not the individual labels!
That's all. Easy, huh?
================================================
FILE: Finetune/nnUNet/documentation/run_inference_with_pretrained_models.md
================================================
# How to run inference with pretrained models
**Important:** Pretrained weights from nnU-Net v1 are NOT compatible with V2. You will need to retrain with the new
version. But honestly, you already have a fully trained model with which you can run inference (in v1), so
just continue using that!
Not yet available for V2 :-(
If you wish to run inference with pretrained models, check out the old nnU-Net for now. We are working on this full steam!
================================================
FILE: Finetune/nnUNet/documentation/set_environment_variables.md
================================================
# How to set environment variables
nnU-Net requires some environment variables so that it always knows where the raw data, preprocessed data and trained
models are. Depending on the operating system, these environment variables need to be set in different ways.
Variables can either be set permanently (recommended!) or you can decide to set them every time you call nnU-Net.
# Linux & MacOS
## Permanent
Locate the `.bashrc` file in your home folder and add the following lines to the bottom:
```bash
export nnUNet_raw="/media/fabian/nnUNet_raw"
export nnUNet_preprocessed="/media/fabian/nnUNet_preprocessed"
export nnUNet_results="/media/fabian/nnUNet_results"
```
(Of course you need to adapt the paths to the actual folders you intend to use).
If you are using a different shell, such as zsh, you will need to find the correct script for it. For zsh this is `.zshrc`.
## Temporary
Just execute the following lines whenever you run nnU-Net:
```bash
export nnUNet_raw="/media/fabian/nnUNet_raw"
export nnUNet_preprocessed="/media/fabian/nnUNet_preprocessed"
export nnUNet_results="/media/fabian/nnUNet_results"
```
(Of course you need to adapt the paths to the actual folders you intend to use).
Important: These variables will be deleted if you close your terminal! They will also only apply to the current
terminal window and DO NOT transfer to other terminals!
Alternatively you can also just prefix them to your nnU-Net commands:
`nnUNet_results="/media/fabian/nnUNet_results" nnUNet_preprocessed="/media/fabian/nnUNet_preprocessed" nnUNetv2_train[...]`
## Verify that environment parameters are set
You can always execute `echo ${nnUNet_raw}` etc to print the environment variables. This will return an empty string if
they were not set.
# Windows
Useful links:
- [https://www3.ntu.edu.sg](https://www3.ntu.edu.sg/home/ehchua/programming/howto/Environment_Variables.html#:~:text=To%20set%20(or%20change)%20a,it%20to%20an%20empty%20string.)
- [https://phoenixnap.com](https://phoenixnap.com/kb/windows-set-environment-variable)
## Permanent
See `Set Environment Variable in Windows via GUI` [here](https://phoenixnap.com/kb/windows-set-environment-variable).
Or read about setx (command prompt).
## Temporary
Just execute the following before you run nnU-Net:
(PowerShell)
```PowerShell
$Env:nnUNet_raw = "C:/Users/fabian/nnUNet_raw"
$Env:nnUNet_preprocessed = "C:/Users/fabian/nnUNet_preprocessed"
$Env:nnUNet_results = "C:/Users/fabian/nnUNet_results"
```
(Command Prompt)
```Command Prompt
set nnUNet_raw=C:/Users/fabian/nnUNet_raw
set nnUNet_preprocessed=C:/Users/fabian/nnUNet_preprocessed
set nnUNet_results=C:/Users/fabian/nnUNet_results
```
(Of course you need to adapt the paths to the actual folders you intend to use).
Important: These variables will be deleted if you close your session! They will also only apply to the current
window and DO NOT transfer to other sessions!
## Verify that environment parameters are set
Printing in Windows works differently depending on the environment you are in:
PowerShell: `echo $Env:[variable_name]`
Command Prompt: `echo %[variable_name]%`
================================================
FILE: Finetune/nnUNet/documentation/setting_up_paths.md
================================================
# Setting up Paths
nnU-Net relies on environment variables to know where raw data, preprocessed data and trained model weights are stored.
To use the full functionality of nnU-Net, the following three environment variables must be set:
1) `nnUNet_raw`: This is where you place the raw datasets. This folder will have one subfolder for each dataset named
DatasetXXX_YYY where XXX is a 3-digit identifier (such as 001, 002, 043, 999, ...) and YYY is the (unique)
dataset name. The datasets must be in nnU-Net format, see [here](dataset_format.md).
Example tree structure:
```
nnUNet_raw/Dataset001_NAME1
├── dataset.json
├── imagesTr
│ ├── ...
├── imagesTs
│ ├── ...
└── labelsTr
├── ...
nnUNet_raw/Dataset002_NAME2
├── dataset.json
├── imagesTr
│ ├── ...
├── imagesTs
│ ├── ...
└── labelsTr
├── ...
```
2) `nnUNet_preprocessed`: This is the folder where the preprocessed data will be saved. The data will also be read from
this folder during training. It is important that this folder is located on a drive with low access latency and high
throughput (such as a nvme SSD (PCIe gen 3 is sufficient)).
3) `nnUNet_results`: This specifies where nnU-Net will save the model weights. If pretrained models are downloaded, this
is where it will save them.
### How to set environment variables
See [here](set_environment_variables.md).
================================================
FILE: Finetune/nnUNet/documentation/tldr_migration_guide_from_v1.md
================================================
# TLDR Migration Guide from nnU-Net V1
- nnU-Net V2 can be installed simultaneously with V1. They won't get in each other's way
- The environment variables needed for V2 have slightly different names. Read [this](setting_up_paths.md).
- nnU-Net V2 datasets are called DatasetXXX_NAME. Not Task.
- Datasets have the same structure (imagesTr, labelsTr, dataset.json) but we now support more
[file types](dataset_format.md#supported-file-formats). The dataset.json is simplified. Use `generate_dataset_json`
from nnunetv2.dataset_conversion.generate_dataset_json.py.
- Careful: labels are now no longer declared as value:name but name:value. This has to do with [hierarchical labels](region_based_training.md).
- nnU-Net v2 commands start with `nnUNetv2...`. They work mostly (but not entirely) the same. Just use the `-h` option.
- You can transfer your V1 raw datasets to V2 with `nnUNetv2_convert_old_nnUNet_dataset`. You cannot transfer trained
models. Continue to use the old nnU-Net Version for making inference with those.
- These are the commands you are most likely to be using (in that order)
- `nnUNetv2_plan_and_preprocess`. Example: `nnUNetv2_plan_and_preprocess -d 2`
- `nnUNetv2_train`. Example: `nnUNetv2_train 2 3d_fullres 0`
- `nnUNetv2_find_best_configuration`. Example: `nnUNetv2_find_best_configuration 2 -c 2d 3d_fullres`. This command
will now create an `inference_instructions.txt` file in your `nnUNet_preprocessed/DatasetXXX_NAME/` folder which
tells you exactly how to do inference.
- `nnUNetv2_predict`. Example: `nnUNetv2_predict -i INPUT_FOLDER -o OUTPUT_FOLDER -c 3d_fullres -d 2`
- `nnUNetv2_apply_postprocessing` (see inference_instructions.txt)
================================================
FILE: Finetune/nnUNet/msd.txt
================================================
A. convert
python Dataset220_KiTS2023.py /data/linshan/CTs/kits23/dataset/
python Dataset218_Amos2022_task1.py /data/linshan/CTs/Amos2022/
nnUNetv2_convert_old_nnUNet_dataset /data/linshan/CTs/Amos2022/ /data/linshan/nnunet_data/nnUNet_raw/Dataset218_Amos2022
B. pre-process
nnUNetv2_plan_and_preprocess -d 003 -c 3d_fullres --verbose --verify_dataset_integrity
nnUNetv2_plan_and_preprocess -d 218 -c 3d_fullres --verbose --verify_dataset_integrity
C. training
CUDA_VISIBLE_DEVICES=0 nnUNetv2_train 008 3d_fullres 0 -tr nnUNetTrainer_250epochs --val
CUDA_VISIBLE_DEVICES=1 nnUNetv2_train 002 3d_fullres 0 -tr nnUNetTrainer_swin_pre
CUDA_VISIBLE_DEVICES=5 nnUNetv2_train 009 3d_fullres 0 -tr nnUNetTrainer_250epochs
CUDA_VISIBLE_DEVICES=5 nnUNetv2_train 010 3d_fullres 0 -tr nnUNetTrainer_250epochs
CUDA_VISIBLE_DEVICES=5 nnUNetv2_train 218 3d_fullres 0 -tr nnUNetTrainer_250epochs
D. inference
CUDA_VISIBLE_DEVICES=4 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset017_BTCV/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset017_BTCV/imagesTs_pred -d 017 -f 0 -c 3d_fullres --verbose
CUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset003_Liver/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset003_Liver/imagesTs_pred -d 003 -f 0 -c 3d_fullres --verbose
CUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset006_Lung/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset006_Lung/imagesTs_pred -d 006 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs
CUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset001_BrainTumour/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset001_BrainTumour/imagesTs_pred -d 001 -f 0 -c 3d_fullres --verbose
CUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset002_Heart/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset002_Heart/imagesTs_pred -d 002 -f 0 -c 3d_fullres --verbose
CUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset004_Hippocampus/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset004_Hippocampus/imagesTs_pred -d 004 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs
CUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset005_Prostate/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset005_Prostate/imagesTs_pred -d 005 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs
CUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset007_Pancreas/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset007_Pancreas/imagesTs_pred -d 007 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs
CUDA_VISIBLE_DEVICES=2 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset008_HepaticVessel/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset008_HepaticVessel/imagesTs_pred -d 008 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs
CUDA_VISIBLE_DEVICES=5 nnUNetv2_predict -i /data/linshan/nnunet_data/nnUNet_raw/Dataset218_AMOS2022_postChallenge_task1/imagesTs/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset218_AMOS2022_postChallenge_task1/imagesTs_pred -d 218 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs --verbose -npp 1 -nps 1
--disable_tta
### predict colon cancer for flare23
CUDA_VISIBLE_DEVICES=3 nnUNetv2_predict -i /data/linshan/CTs/Flare23/Flare23_test/ -o /data/linshan/nnunet_data/nnUNet_pred/Dataset010_Colon_flare23/flare23_imagesTs_pred -d 010 -f 0 -c 3d_fullres --verbose -tr nnUNetTrainer_250epochs
================================================
FILE: Finetune/nnUNet/nnunetv2/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/benchmarking/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/benchmarking/generate_benchmarking_commands.py
================================================
# Generates one LSF `bsub` benchmarking command per (GPU model, trainer, plans,
# dataset, config, fold) combination and writes them to output_file, one per line.
if __name__ == '__main__':
    """
    This code probably only works within the DKFZ infrastructure (using LSF). You will need to adapt it to your scheduler!
    """
    gpu_models = [#'NVIDIAA100_PCIE_40GB', 'NVIDIAGeForceRTX2080Ti', 'NVIDIATITANRTX', 'TeslaV100_SXM2_32GB',
                  'NVIDIAA100_SXM4_40GB']#, 'TeslaV100_PCIE_32GB']
    datasets = [2, 3, 4, 5]
    trainers = ['nnUNetTrainerBenchmark_5epochs', 'nnUNetTrainerBenchmark_5epochs_noDataLoading']
    plans = ['nnUNetPlans']
    configs = ['2d', '2d_bs3x', '2d_bs6x', '3d_fullres', '3d_fullres_bs3x', '3d_fullres_bs6x']
    num_gpus = 1

    # every dataset is benchmarked with every configuration
    benchmark_configurations = {d: configs for d in datasets}

    # BUGFIX: the original string contained a stray apostrophe before the closing
    # escaped quote ("...]'\"") which would have produced a malformed -R resource
    # requirement in every generated bsub command. The corrected form matches the
    # equivalent string in generate_lsf_runs_customDecathlon.py.
    exclude_hosts = "-R \"select[hname!='e230-dgxa100-1']\""
    resources = "-R \"tensorcore\""
    queue = "-q gpu"
    # the double quote opened after "-L /bin/bash" is closed when the command is written below
    preamble = "-L /bin/bash \"source ~/load_env_torch210.sh && "
    train_command = 'nnUNet_compile=False nnUNet_results=/dkfz/cluster/gpu/checkpoints/OE0441/isensee/nnUNet_results_remake_benchmark nnUNetv2_train'

    folds = (0, )

    use_these_modules = {
        tr: plans for tr in trainers
    }

    additional_arguments = f' -num_gpus {num_gpus}'  # ''

    output_file = "/home/isensee/deleteme.txt"
    with open(output_file, 'w') as f:
        for g in gpu_models:
            # the GPU model is pinned via the LSF gmodel resource
            gpu_requirements = f"-gpu num={num_gpus}:j_exclusive=yes:gmodel={g}"
            for tr in use_these_modules.keys():
                for p in use_these_modules[tr]:
                    for dataset in benchmark_configurations.keys():
                        for config in benchmark_configurations[dataset]:
                            for fl in folds:
                                command = f'bsub {exclude_hosts} {resources} {queue} {gpu_requirements} {preamble} {train_command} {dataset} {config} {fl} -tr {tr} -p {p}'
                                if additional_arguments is not None and len(additional_arguments) > 0:
                                    command += f' {additional_arguments}'
                                # closing quote matches the one opened in preamble
                                f.write(f'{command}\"\n')
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/benchmarking/summarize_benchmark_results.py
================================================
from batchgenerators.utilities.file_and_folder_operations import join, load_json, isfile
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.paths import nnUNet_results
from nnunetv2.utilities.file_path_utilities import get_output_folder
# Summarizes benchmark runs (nnUNetTrainerBenchmark_5epochs*) into one CSV:
# one row per (dataset, trainer, plans, config), one column per GPU model seen.
if __name__ == '__main__':
    trainers = ['nnUNetTrainerBenchmark_5epochs', 'nnUNetTrainerBenchmark_5epochs_noDataLoading']
    datasets = [2, 3, 4, 5]
    plans = ['nnUNetPlans']
    configs = ['2d', '2d_bs3x', '2d_bs6x', '3d_fullres', '3d_fullres_bs3x', '3d_fullres_bs6x']
    output_file = join(nnUNet_results, 'benchmark_results.csv')

    # only benchmark entries recorded with exactly this software stack are kept
    torch_version = '2.1.0.dev20230330'#"2.0.0"#"2.1.0.dev20230328" #"1.11.0a0+gitbc2c6ed" #
    cudnn_version = 8700  # 8302 #
    num_gpus = 1

    unique_gpus = set()

    # collect results in the most janky way possible. Amazing coding skills!
    # all_results[trainer][plans][config][dataset][gpu_name] -> benchmark record (dict)
    all_results = {}
    for tr in trainers:
        all_results[tr] = {}
        for p in plans:
            all_results[tr][p] = {}
            for c in configs:
                all_results[tr][p][c] = {}
                for d in datasets:
                    dataset_name = maybe_convert_to_dataset_name(d)
                    # benchmarks are always run as fold 0
                    output_folder = get_output_folder(dataset_name, tr, p, c, fold=0)
                    expected_benchmark_file = join(output_folder, 'benchmark_result.json')
                    all_results[tr][p][c][d] = {}
                    if isfile(expected_benchmark_file):
                        # filter results for what we want
                        results = [i for i in load_json(expected_benchmark_file).values()
                                   if i['num_gpus'] == num_gpus and i['cudnn_version'] == cudnn_version and
                                   i['torch_version'] == torch_version]
                        for r in results:
                            all_results[tr][p][c][d][r['gpu_name']] = r
                            unique_gpus.add(r['gpu_name'])

    # haha. Fuck this. Collect GPUs in the code above.
    # unique_gpus = np.unique([i["gpu_name"] for tr in trainers for p in plans for c in configs for d in datasets for i in all_results[tr][p][c][d]])
    unique_gpus = list(unique_gpus)
    unique_gpus.sort()  # deterministic column order

    with open(output_file, 'w') as f:
        # header row: fixed columns followed by one column per GPU model
        f.write('Dataset,Trainer,Plans,Config')
        for g in unique_gpus:
            f.write(f",{g}")
        f.write("\n")
        for d in datasets:
            for tr in trainers:
                for p in plans:
                    for c in configs:
                        gpu_results = []
                        for g in unique_gpus:
                            if g in all_results[tr][p][c][d].keys():
                                # fastest epoch time, rounded for readability
                                gpu_results.append(round(all_results[tr][p][c][d][g]["fastest_epoch"], ndigits=2))
                            else:
                                gpu_results.append("MISSING")
                        # skip if all are missing
                        if all([i == 'MISSING' for i in gpu_results]):
                            continue
                        f.write(f"{d},{tr},{p},{c}")
                        for g in gpu_results:
                            f.write(f",{g}")
                        f.write("\n")
            # blank line between datasets for readability
            f.write("\n")
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/collect_results_custom_Decathlon.py
================================================
from typing import Tuple
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.evaluation.evaluate_predictions import load_summary_json
from nnunetv2.paths import nnUNet_results
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name, convert_dataset_name_to_id
from nnunetv2.utilities.file_path_utilities import get_output_folder
def collect_results(trainers: dict, datasets: List, output_file: str,
                    configurations=("2d", "3d_fullres", "3d_lowres", "3d_cascade_fullres"),
                    folds=tuple(np.arange(5))):
    """
    Collect per-fold validation results (foreground-mean Dice) and write them to a CSV.

    One row is written per (dataset, configuration, trainer, plans, results_dir)
    whose output folder exists on disk. Each row holds one Dice value per fold
    (an empty cell if that fold's validation/summary.json is missing) followed
    by the nan-mean over all folds.

    Args:
        trainers: maps trainer name -> iterable of plans identifiers.
        datasets: dataset ids (or names) to collect.
        output_file: path of the CSV file to (over)write.
        configurations: nnU-Net configurations to look for.
        folds: folds to include (default: 0-4).
    """
    # currently only the single nnUNet_results directory is scanned
    results_dirs = (nnUNet_results,)
    datasets_names = [maybe_convert_to_dataset_name(i) for i in datasets]
    with open(output_file, 'w') as f:
        for i, d in zip(datasets, datasets_names):
            for c in configurations:
                for module in trainers.keys():
                    for plans in trainers[module]:
                        for r in results_dirs:
                            expected_output_folder = get_output_folder(d, module, plans, c)
                            if isdir(expected_output_folder):
                                results_folds = []
                                f.write(f"{d},{c},{module},{plans},{r}")
                                for fl in folds:
                                    expected_output_folder_fold = get_output_folder(d, module, plans, c, fl)
                                    expected_summary_file = join(expected_output_folder_fold, "validation",
                                                                 "summary.json")
                                    if not isfile(expected_summary_file):
                                        # missing fold: leave the cell empty so summarize() can detect it
                                        print('expected output file not found:', expected_summary_file)
                                        f.write(",")
                                        results_folds.append(np.nan)
                                    else:
                                        foreground_mean = load_summary_json(expected_summary_file)['foreground_mean'][
                                            'Dice']
                                        results_folds.append(foreground_mean)
                                        f.write(f",{foreground_mean:02.4f}")
                                # nanmean ignores folds recorded as missing
                                f.write(f",{np.nanmean(results_folds):02.4f}\n")
def summarize(input_file, output_file, folds: Tuple[int, ...], configs: Tuple[str, ...], datasets, trainers):
    """
    Summarize the CSV written by collect_results into one row per trainer__plans
    combination, with one column per (dataset, configuration) and a final mean column.

    Expected input_file column layout (as written by collect_results):
    dataset, configuration, trainer, plans, results_dir, <one Dice per fold>, mean

    Args:
        input_file: CSV produced by collect_results.
        output_file: path of the summary CSV to (over)write.
        folds: fold indices to average over (must all exist in input_file).
        configs: configurations to include in the summary.
        datasets: dataset ids/names to include.
        trainers: maps trainer name -> iterable of plans identifiers.
    """
    txt = np.loadtxt(input_file, dtype=str, delimiter=',')
    # 5 leading metadata columns + 1 trailing mean column -> the rest are folds
    num_folds = txt.shape[1] - 6
    valid_configs = {}
    for d in datasets:
        if isinstance(d, int):
            d = maybe_convert_to_dataset_name(d)
        # only keep configurations that actually appear for this dataset
        configs_in_txt = np.unique(txt[:, 1][txt[:, 0] == d])
        valid_configs[d] = [i for i in configs_in_txt if i in configs]
    # NOTE(review): assert is stripped under python -O; a ValueError would be safer
    assert max(folds) < num_folds
    with open(output_file, 'w') as f:
        f.write("name")
        for d in valid_configs.keys():
            for c in valid_configs[d]:
                # column label: <dataset id>_<first 4 chars of config>
                f.write(",%d_%s" % (convert_dataset_name_to_id(d), c[:4]))
        f.write(',mean\n')
        valid_entries = txt[:, 4] == nnUNet_results
        for t in trainers.keys():
            trainer_locs = valid_entries & (txt[:, 2] == t)
            for pl in trainers[t]:
                f.write(f"{t}__{pl}")
                trainer_plan_locs = trainer_locs & (txt[:, 3] == pl)
                r = []
                for d in valid_configs.keys():
                    trainer_plan_d_locs = trainer_plan_locs & (txt[:, 0] == d)
                    for v in valid_configs[d]:
                        trainer_plan_d_config_locs = trainer_plan_d_locs & (txt[:, 1] == v)
                        if np.any(trainer_plan_d_config_locs):
                            # we cannot have more than one row
                            assert np.sum(trainer_plan_d_config_locs) == 1
                            # now check that we have all folds
                            selected_row = txt[np.argwhere(trainer_plan_d_config_locs)[0,0]]
                            # fold columns start at index 5
                            fold_results = selected_row[[i + 5 for i in folds]]
                            if '' in fold_results:
                                # collect_results leaves empty cells for missing folds
                                print('missing fold in', t, pl, d, v)
                                f.write(",nan")
                                r.append(np.nan)
                            else:
                                mean_dice = np.mean([float(i) for i in fold_results])
                                f.write(f",{mean_dice:02.4f}")
                                r.append(mean_dice)
                        else:
                            print('missing:', t, pl, d, v)
                            f.write(",nan")
                            r.append(np.nan)
                f.write(f",{np.mean(r):02.4f}\n")
if __name__ == '__main__':
    # Collect and summarize results for the custom Decathlon-like evaluation datasets.
    use_these_trainers = {
        'nnUNetTrainer': ('nnUNetPlans',),
        'nnUNetTrainerDiceCELoss_noSmooth': ('nnUNetPlans',),
        'nnUNetTrainer_DASegOrd0': ('nnUNetPlans',),
    }
    all_results_file= join(nnUNet_results, 'customDecResults.csv')
    datasets = [2, 3, 4, 17, 20, 24, 27, 38, 55, 64, 82]
    collect_results(use_these_trainers, datasets, all_results_file)

    # summary over all five folds ...
    folds = (0, 1, 2, 3, 4)
    configs = ("3d_fullres", "3d_lowres")
    output_file = join(nnUNet_results, 'customDecResults_summary5fold.csv')
    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)

    # ... and over fold 0 only
    folds = (0, )
    configs = ("3d_fullres", "3d_lowres")
    output_file = join(nnUNet_results, 'customDecResults_summaryfold0.csv')
    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/collect_results_custom_Decathlon_2d.py
================================================
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.batch_running.collect_results_custom_Decathlon import collect_results, summarize
from nnunetv2.paths import nnUNet_results
if __name__ == '__main__':
    # Collect and summarize 2d results (hrnet_results.csv), fold 0 only.
    use_these_trainers = {
        'nnUNetTrainer': ('nnUNetPlans', ),
    }
    all_results_file = join(nnUNet_results, 'hrnet_results.csv')
    datasets = [2, 3, 4, 17, 20, 24, 27, 38, 55, 64, 82]
    collect_results(use_these_trainers, datasets, all_results_file)

    folds = (0, )
    configs = ('2d', )
    output_file = join(nnUNet_results, 'hrnet_results_summary_fold0.csv')
    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/generate_lsf_runs_customDecathlon.py
================================================
from copy import deepcopy
import numpy as np
def merge(dict1, dict2):
    """
    Merge two {dataset: configurations} mappings.

    Returns a dict whose keys are the union of both inputs' keys and whose
    values are sorted, de-duplicated tuples of all configurations found for
    that key in either input. Keys mapping to None are treated as absent;
    keys whose merged configuration list would be empty are dropped.
    """
    # np.unique already returns sorted, de-duplicated values - the original code
    # redundantly applied it a second time (keys = np.unique(keys)).
    keys = np.unique(list(dict1.keys()) + list(dict2.keys()))
    res = {}
    for k in keys:
        all_configs = []
        if dict1.get(k) is not None:
            all_configs += list(dict1[k])
        if dict2.get(k) is not None:
            all_configs += list(dict2[k])
        if len(all_configs) > 0:
            res[k] = tuple(np.unique(all_configs))
    return res
# Generates one LSF `bsub` training command per (trainer, plans, dataset, config,
# fold) combination and writes them to output_file, one per line.
if __name__ == "__main__":
    # after the Nature Methods paper we switch our evaluation to a different (more stable/high quality) set of
    # datasets for evaluation and future development
    configurations_all = {
        2: ("3d_fullres", "2d"),
        3: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        4: ("2d", "3d_fullres"),
        17: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        20: ("2d", "3d_fullres"),
        24: ("2d", "3d_fullres"),
        27: ("2d", "3d_fullres"),
        38: ("2d", "3d_fullres"),
        55: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        64: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        82: ("2d", "3d_fullres"),
        # 83: ("2d", "3d_fullres"),
    }

    # per-configuration subsets of configurations_all, combinable via merge()
    configurations_3d_fr_only = {
        i: ("3d_fullres", ) for i in configurations_all if "3d_fullres" in configurations_all[i]
    }
    configurations_3d_c_only = {
        i: ("3d_cascade_fullres", ) for i in configurations_all if "3d_cascade_fullres" in configurations_all[i]
    }
    configurations_3d_lr_only = {
        i: ("3d_lowres", ) for i in configurations_all if "3d_lowres" in configurations_all[i]
    }
    configurations_2d_only = {
        i: ("2d", ) for i in configurations_all if "2d" in configurations_all[i]
    }

    num_gpus = 1

    # LSF resource requirement / queue options (DKFZ cluster specific)
    exclude_hosts = "-R \"select[hname!='e230-dgx2-2']\" -R \"select[hname!='e230-dgx2-1']\" -R \"select[hname!='e230-dgx1-1']\" -R \"select[hname!='e230-dgxa100-1']\" -R \"select[hname!='e230-dgxa100-2']\" -R \"select[hname!='e230-dgxa100-3']\" -R \"select[hname!='e230-dgxa100-4']\""
    resources = "-R \"tensorcore\""
    gpu_requirements = f"-gpu num={num_gpus}:j_exclusive=yes:gmem=33G"
    queue = "-q gpu-lowprio"
    # the double quote opened after "-L /bin/bash" is closed when the command is written below
    preamble = "-L /bin/bash \"source ~/load_env_cluster4.sh && "
    train_command = 'nnUNet_results=/dkfz/cluster/gpu/checkpoints/OE0441/isensee/nnUNet_results_remake_release nnUNetv2_train'

    folds = (0, )

    # use_this = configurations_2d_only
    use_this = merge(configurations_3d_fr_only, configurations_3d_lr_only)
    # use_this = merge(use_this, configurations_3d_c_only)

    use_these_modules = {
        'nnUNetTrainer': ('nnUNetPlans',),
        'nnUNetTrainerDiceCELoss_noSmooth': ('nnUNetPlans',),
        # 'nnUNetTrainer_DASegOrd0': ('nnUNetPlans',),
    }

    additional_arguments = f'--disable_checkpointing -num_gpus {num_gpus}'  # ''

    output_file = "/home/isensee/deleteme.txt"
    with open(output_file, 'w') as f:
        for tr in use_these_modules.keys():
            for p in use_these_modules[tr]:
                for dataset in use_this.keys():
                    for config in use_this[dataset]:
                        for fl in folds:
                            command = f'bsub {exclude_hosts} {resources} {queue} {gpu_requirements} {preamble} {train_command} {dataset} {config} {fl} -tr {tr} -p {p}'
                            if additional_arguments is not None and len(additional_arguments) > 0:
                                command += f' {additional_arguments}'
                            # closing quote matches the one opened in preamble
                            f.write(f'{command}\"\n')
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/release_trainings/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/release_trainings/nnunetv2_v1/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/release_trainings/nnunetv2_v1/collect_results.py
================================================
from typing import Tuple
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.evaluation.evaluate_predictions import load_summary_json
from nnunetv2.paths import nnUNet_results
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name, convert_dataset_name_to_id
from nnunetv2.utilities.file_path_utilities import get_output_folder
def collect_results(trainers: dict, datasets: List, output_file: str,
                    configurations=("2d", "3d_fullres", "3d_lowres", "3d_cascade_fullres"),
                    folds=tuple(np.arange(5))):
    """
    Write per-fold validation results (foreground-mean Dice) to a CSV file.

    A row is emitted per (dataset, configuration, trainer, plans, results_dir)
    whose output folder exists. Folds lacking a validation/summary.json get an
    empty cell; the last column is the nan-mean over the folds.

    Args:
        trainers: maps trainer name -> iterable of plans identifiers.
        datasets: dataset ids (or names) to collect.
        output_file: path of the CSV file to (over)write.
        configurations: nnU-Net configurations to look for.
        folds: folds to include (default: 0-4).
    """
    # only the single nnUNet_results directory is scanned at the moment
    results_dirs = (nnUNet_results,)
    datasets_names = [maybe_convert_to_dataset_name(i) for i in datasets]
    with open(output_file, 'w') as f:
        for i, d in zip(datasets, datasets_names):
            for c in configurations:
                for module in trainers.keys():
                    for plans in trainers[module]:
                        for r in results_dirs:
                            expected_output_folder = get_output_folder(d, module, plans, c)
                            if isdir(expected_output_folder):
                                results_folds = []
                                f.write(f"{d},{c},{module},{plans},{r}")
                                for fl in folds:
                                    expected_output_folder_fold = get_output_folder(d, module, plans, c, fl)
                                    expected_summary_file = join(expected_output_folder_fold, "validation",
                                                                 "summary.json")
                                    if not isfile(expected_summary_file):
                                        # leave the cell empty so summarize() can detect the missing fold
                                        print('expected output file not found:', expected_summary_file)
                                        f.write(",")
                                        results_folds.append(np.nan)
                                    else:
                                        foreground_mean = load_summary_json(expected_summary_file)['foreground_mean'][
                                            'Dice']
                                        results_folds.append(foreground_mean)
                                        f.write(f",{foreground_mean:02.4f}")
                                # nanmean skips the folds recorded as missing
                                f.write(f",{np.nanmean(results_folds):02.4f}\n")
def summarize(input_file, output_file, folds: Tuple[int, ...], configs: Tuple[str, ...], datasets, trainers):
    """
    Condense the CSV written by collect_results into one row per trainer__plans
    combination: one column per (dataset, configuration), plus a final mean column.

    Expected input_file column layout (as written by collect_results):
    dataset, configuration, trainer, plans, results_dir, <one Dice per fold>, mean

    Args:
        input_file: CSV produced by collect_results.
        output_file: path of the summary CSV to (over)write.
        folds: fold indices to average over (must all exist in input_file).
        configs: configurations to include in the summary.
        datasets: dataset ids/names to include.
        trainers: maps trainer name -> iterable of plans identifiers.
    """
    txt = np.loadtxt(input_file, dtype=str, delimiter=',')
    # 5 leading metadata columns + 1 trailing mean column -> the rest are folds
    num_folds = txt.shape[1] - 6
    valid_configs = {}
    for d in datasets:
        if isinstance(d, int):
            d = maybe_convert_to_dataset_name(d)
        # keep only configurations that actually appear for this dataset
        configs_in_txt = np.unique(txt[:, 1][txt[:, 0] == d])
        valid_configs[d] = [i for i in configs_in_txt if i in configs]
    # NOTE(review): assert is stripped under python -O; a ValueError would be safer
    assert max(folds) < num_folds
    with open(output_file, 'w') as f:
        f.write("name")
        for d in valid_configs.keys():
            for c in valid_configs[d]:
                # column label: <dataset id>_<first 4 chars of config>
                f.write(",%d_%s" % (convert_dataset_name_to_id(d), c[:4]))
        f.write(',mean\n')
        valid_entries = txt[:, 4] == nnUNet_results
        for t in trainers.keys():
            trainer_locs = valid_entries & (txt[:, 2] == t)
            for pl in trainers[t]:
                f.write(f"{t}__{pl}")
                trainer_plan_locs = trainer_locs & (txt[:, 3] == pl)
                r = []
                for d in valid_configs.keys():
                    trainer_plan_d_locs = trainer_plan_locs & (txt[:, 0] == d)
                    for v in valid_configs[d]:
                        trainer_plan_d_config_locs = trainer_plan_d_locs & (txt[:, 1] == v)
                        if np.any(trainer_plan_d_config_locs):
                            # we cannot have more than one row
                            assert np.sum(trainer_plan_d_config_locs) == 1
                            # now check that we have all folds
                            selected_row = txt[np.argwhere(trainer_plan_d_config_locs)[0,0]]
                            # fold columns start at index 5
                            fold_results = selected_row[[i + 5 for i in folds]]
                            if '' in fold_results:
                                # collect_results leaves empty cells for missing folds
                                print('missing fold in', t, pl, d, v)
                                f.write(",nan")
                                r.append(np.nan)
                            else:
                                mean_dice = np.mean([float(i) for i in fold_results])
                                f.write(f",{mean_dice:02.4f}")
                                r.append(mean_dice)
                        else:
                            print('missing:', t, pl, d, v)
                            f.write(",nan")
                            r.append(np.nan)
                f.write(f",{np.mean(r):02.4f}\n")
if __name__ == '__main__':
    # Collect and summarize results for the v1-loss comparison trainings.
    use_these_trainers = {
        'nnUNetTrainer': ('nnUNetPlans',),
        'nnUNetTrainer_v1loss': ('nnUNetPlans',),
    }
    all_results_file = join(nnUNet_results, 'customDecResults.csv')
    datasets = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 17, 20, 24, 27, 35, 38, 48, 55, 64, 82]
    collect_results(use_these_trainers, datasets, all_results_file)

    # summary over all five folds ...
    folds = (0, 1, 2, 3, 4)
    configs = ("3d_fullres", "3d_lowres")
    output_file = join(nnUNet_results, 'customDecResults_summary5fold.csv')
    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)

    # ... and over fold 0 only
    folds = (0, )
    configs = ("3d_fullres", "3d_lowres")
    output_file = join(nnUNet_results, 'customDecResults_summaryfold0.csv')
    summarize(all_results_file, output_file, folds, configs, datasets, use_these_trainers)
================================================
FILE: Finetune/nnUNet/nnunetv2/batch_running/release_trainings/nnunetv2_v1/generate_lsf_commands.py
================================================
from copy import deepcopy
import numpy as np
def merge(dict1, dict2):
    """
    Union-merge two {dataset: configurations} mappings.

    The result contains every key present in either input; each value is a
    sorted, de-duplicated tuple of the configurations from both inputs.
    None values are treated as missing, and keys that end up with no
    configurations are omitted from the result.
    """
    # Fix: the original called np.unique on keys twice in a row; a single call
    # already yields sorted, de-duplicated keys.
    keys = np.unique(list(dict1.keys()) + list(dict2.keys()))
    res = {}
    for k in keys:
        all_configs = []
        if dict1.get(k) is not None:
            all_configs += list(dict1[k])
        if dict2.get(k) is not None:
            all_configs += list(dict2[k])
        if len(all_configs) > 0:
            res[k] = tuple(np.unique(all_configs))
    return res
# Generates one LSF `bsub` training command per (trainer, plans, dataset, config,
# fold) combination and writes them to output_file, one per line.
if __name__ == "__main__":
    # after the Nature Methods paper we switch our evaluation to a different (more stable/high quality) set of
    # datasets for evaluation and future development
    configurations_all = {
        # 1: ("3d_fullres", "2d"),
        2: ("3d_fullres", "2d"),
        # 3: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 4: ("2d", "3d_fullres"),
        5: ("2d", "3d_fullres"),
        # 6: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 7: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 8: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 9: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 10: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 17: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        20: ("2d", "3d_fullres"),
        24: ("2d", "3d_fullres"),
        27: ("2d", "3d_fullres"),
        35: ("2d", "3d_fullres"),
        38: ("2d", "3d_fullres"),
        # 55: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 64: ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
        # 82: ("2d", "3d_fullres"),
        # 83: ("2d", "3d_fullres"),
    }

    # per-configuration subsets of configurations_all, combinable via merge()
    configurations_3d_fr_only = {
        i: ("3d_fullres", ) for i in configurations_all if "3d_fullres" in configurations_all[i]
    }
    configurations_3d_c_only = {
        i: ("3d_cascade_fullres", ) for i in configurations_all if "3d_cascade_fullres" in configurations_all[i]
    }
    configurations_3d_lr_only = {
        i: ("3d_lowres", ) for i in configurations_all if "3d_lowres" in configurations_all[i]
    }
    configurations_2d_only = {
        i: ("2d", ) for i in configurations_all if "2d" in configurations_all[i]
    }

    num_gpus = 1

    # LSF resource requirement / queue options (DKFZ cluster specific)
    exclude_hosts = "-R \"select[hname!='e230-dgx2-2']\" -R \"select[hname!='e230-dgx2-1']\""
    resources = "-R \"tensorcore\""
    gpu_requirements = f"-gpu num={num_gpus}:j_exclusive=yes:gmem=1G"
    queue = "-q gpu-lowprio"
    # the double quote opened after "-L /bin/bash" is closed when the command is written below
    preamble = "-L /bin/bash \"source ~/load_env_cluster4.sh && "
    train_command = 'nnUNet_keep_files_open=True nnUNet_results=/dkfz/cluster/gpu/data/OE0441/isensee/nnUNet_results_remake_release_normfix nnUNetv2_train'

    folds = (0, 1, 2, 3, 4)

    # use_this = configurations_2d_only
    # use_this = merge(configurations_3d_fr_only, configurations_3d_lr_only)
    # use_this = merge(use_this, configurations_3d_c_only)
    use_this = configurations_all

    use_these_modules = {
        'nnUNetTrainer': ('nnUNetPlans',),
    }

    additional_arguments = f'--disable_checkpointing -num_gpus {num_gpus}'  # ''

    output_file = "/home/isensee/deleteme.txt"
    with open(output_file, 'w') as f:
        for tr in use_these_modules.keys():
            for p in use_these_modules[tr]:
                for dataset in use_this.keys():
                    for config in use_this[dataset]:
                        for fl in folds:
                            command = f'bsub {exclude_hosts} {resources} {queue} {gpu_requirements} {preamble} {train_command} {dataset} {config} {fl} -tr {tr} -p {p}'
                            if additional_arguments is not None and len(additional_arguments) > 0:
                                command += f' {additional_arguments}'
                            # closing quote matches the one opened in preamble
                            f.write(f'{command}\"\n')
================================================
FILE: Finetune/nnUNet/nnunetv2/configuration.py
================================================
import os
from nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA
# Default number of worker processes; can be overridden through the
# nnUNet_def_n_proc environment variable.
default_num_processes = int(os.environ.get('nnUNet_def_n_proc', 8))

# determines when a sample is considered anisotropic (3 means that the spacing in the low
# resolution axis must be 3x as large as the next largest spacing)
ANISO_THRESHOLD = 3

# number of data augmentation workers, resolved once at import time
default_n_proc_DA = get_allowed_n_proc_DA()
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset017_BTCV.py
================================================
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import multiprocessing
import shutil
from multiprocessing import Pool
from collections import OrderedDict
import SimpleITK as sitk
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_raw
if __name__ == "__main__":
    # Convert the BTCV abdominal CT dataset into nnU-Net v2 raw format
    # (Dataset017_BTCV): copy/rename images and labels, then write dataset.json.
    base = "/data/linshan/CTs/BTCV/"  # source folder with imagesTr/labelsTr/imagesTs

    task_id = 17
    task_name = "BTCV"
    prefix = 'BTCV'

    foldername = "Dataset%03.0d_%s" % (task_id, task_name)

    # target nnU-Net raw folder layout
    out_base = join(nnUNet_raw, foldername)
    imagestr = join(out_base, "imagesTr")
    imagests = join(out_base, "imagesTs")
    labelstr = join(out_base, "labelsTr")
    maybe_mkdir_p(imagestr)
    maybe_mkdir_p(imagests)
    maybe_mkdir_p(labelstr)

    train_folder = join(base, "imagesTr")
    label_folder = join(base, "labelsTr")
    test_folder = join(base, "imagesTs")
    train_patient_names = []
    test_patient_names = []

    train_patients = subfiles(train_folder, join=False, suffix = 'nii.gz')
    for p in train_patients:
        # assumes source names like 'img0001.nii.gz' — chars 3:7 are the case
        # number; the matching label is 'label0001.nii.gz' — TODO confirm
        serial_number = int(p[3:7])
        train_patient_name = f'{prefix}_{serial_number:03d}.nii.gz'
        label_file = join(label_folder, f'label{p[3:]}')
        image_file = join(train_folder, p)
        # [:8] keeps 'BTCV_XXX'; images get the '_0000' channel suffix, labels don't
        shutil.copy(image_file, join(imagestr, f'{train_patient_name[:8]}_0000.nii.gz'))
        shutil.copy(label_file, join(labelstr, train_patient_name))
        train_patient_names.append(train_patient_name)

    test_patients = subfiles(test_folder, join=False, suffix=".nii.gz")
    for p in test_patients:
        p = p[:-7]  # strip '.nii.gz'
        image_file = join(test_folder, p + ".nii.gz")
        serial_number = int(p[3:7])
        test_patient_name = f'{prefix}_{serial_number:03d}.nii.gz'
        shutil.copy(image_file, join(imagests, f'{test_patient_name[:8]}_0000.nii.gz'))
        test_patient_names.append(test_patient_name)

    # 13 abdominal organs + background, single CT channel
    generate_dataset_json(out_base,
                          channel_names={0: 'CT'},
                          labels={
                              "background": 0,
                              "spleen": 1,
                              "right kidney": 2,
                              "left kidney": 3,
                              "gallbladder": 4,
                              "esophagus": 5,
                              "liver": 6,
                              "stomach": 7,
                              "aorta": 8,
                              "inferior vena cava": 9,
                              "portal vein and splenic vein": 10,
                              "pancreas": 11,
                              "right adrenal gland": 12,
                              "left adrenal gland": 13
                          },
                          num_training_cases=len(train_patient_names),
                          file_ending='.nii.gz',
                          license='see challenge website',
                          reference='see https://www.synapse.org/#!Synapse:syn3193805/wiki/217789',
                          dataset_release='0.0')

    # Legacy nnU-Net v1 dataset.json generation, kept for reference:
    # json_dict = OrderedDict()
    # json_dict['name'] = "AbdominalOrganSegmentation"
    # json_dict['description'] = "Multi-Atlas Labeling Beyond the Cranial Vault Abdominal Organ Segmentation"
    # json_dict['tensorImageSize'] = "3D"
    # json_dict['reference'] = "https://www.synapse.org/#!Synapse:syn3193805/wiki/217789"
    # json_dict['licence'] = "see challenge website"
    # json_dict['release'] = "0.0"
    # json_dict['modality'] = {
    #     "0": "CT",
    # }
    # json_dict['labels'] = OrderedDict({
    #     "00": "background",
    #     "01": "spleen",
    #     "02": "right kidney",
    #     "03": "left kidney",
    #     "04": "gallbladder",
    #     "05": "esophagus",
    #     "06": "liver",
    #     "07": "stomach",
    #     "08": "aorta",
    #     "09": "inferior vena cava",
    #     "10": "portal vein and splenic vein",
    #     "11": "pancreas",
    #     "12": "right adrenal gland",
    #     "13": "left adrenal gland"}
    # )
    # json_dict['numTraining'] = len(train_patient_names)
    # json_dict['numTest'] = len(test_patient_names)
    # json_dict['training'] = [{'image': "./imagesTr/%s" % train_patient_name, "label": "./labelsTr/%s" % train_patient_name} for i, train_patient_name in enumerate(train_patient_names)]
    # json_dict['test'] = ["./imagesTs/%s" % test_patient_name for test_patient_name in test_patient_names]
    #
    # save_json(json_dict, os.path.join(out_base, "dataset.json"))
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset027_ACDC.py
================================================
import os
import shutil
from pathlib import Path
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_raw
def make_out_dirs(dataset_id: int, task_name="ACDC"):
    """Create the nnU-Net raw folder layout for a dataset.

    Returns (dataset_dir, imagesTr_dir, labelsTr_dir, imagesTs_dir); all
    folders are created if missing.
    """
    dataset_name = f"Dataset{dataset_id:03d}_{task_name}"

    # nnUNet_raw may come quoted from the environment; strip any quotes.
    base_dir = Path(nnUNet_raw.replace('"', "")) / dataset_name
    train_dir = base_dir / "imagesTr"
    labels_dir = base_dir / "labelsTr"
    test_dir = base_dir / "imagesTs"

    for folder in (base_dir, train_dir, labels_dir, test_dir):
        os.makedirs(folder, exist_ok=True)

    return base_dir, train_dir, labels_dir, test_dir
def copy_files(src_data_folder: Path, train_dir: Path, labels_dir: Path, test_dir: Path):
    """Copy files from the ACDC dataset to the nnUNet dataset folder. Returns the number of training cases."""

    def _case_id(nifti: Path) -> str:
        # 'patientXXX_frameYY.nii.gz' -> stem 'patientXXX_frameYY.nii' -> case id
        return nifti.stem.split(".")[0]

    train_patients = sorted(d for d in (src_data_folder / "training").iterdir() if d.is_dir())
    test_patients = sorted(d for d in (src_data_folder / "testing").iterdir() if d.is_dir())

    num_training_cases = 0

    # Training: '_gt' files are labels (suffix dropped), everything else except
    # the 4D volumes becomes a training image with the '_0000' channel suffix.
    for patient_dir in train_patients:
        for item in patient_dir.iterdir():
            if item.suffix != ".gz":
                continue
            if "_gt" in item.name:
                shutil.copy(item, labels_dir / item.name.replace("_gt", ""))
            elif "_4d" not in item.name:
                shutil.copy(item, train_dir / f"{_case_id(item)}_0000.nii.gz")
                num_training_cases += 1

    # Test: same filtering, but no labels are available.
    for patient_dir in test_patients:
        for item in patient_dir.iterdir():
            if item.suffix == ".gz" and "_gt" not in item.name and "_4d" not in item.name:
                shutil.copy(item, test_dir / f"{_case_id(item)}_0000.nii.gz")

    return num_training_cases
def convert_acdc(src_data_folder: str, dataset_id=27):
    """Convert the extracted ACDC download into nnU-Net raw format and write dataset.json."""
    out_dir, train_dir, labels_dir, test_dir = make_out_dirs(dataset_id=dataset_id)
    n_train = copy_files(Path(src_data_folder), train_dir, labels_dir, test_dir)

    # Single cine-MRI channel; cardiac structures: right ventricle,
    # left-ventricular myocardium, left-ventricular cavity.
    generate_dataset_json(
        str(out_dir),
        channel_names={0: "cineMRI"},
        labels={"background": 0, "RV": 1, "MLV": 2, "LVC": 3},
        file_ending=".nii.gz",
        num_training_cases=n_train,
    )
if __name__ == "__main__":
    # CLI entry point: convert the downloaded ACDC dataset into nnU-Net format.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-i",
        "--input_folder",
        type=str,
        help="The downloaded ACDC dataset dir. Should contain extracted 'training' and 'testing' folders.",
    )
    parser.add_argument(
        "-d", "--dataset_id", required=False, type=int, default=27, help="nnU-Net Dataset ID, default: 27"
    )
    args = parser.parse_args()
    print("Converting...")
    convert_acdc(args.input_folder, args.dataset_id)
    print("Done!")
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset073_Fluo_C3DH_A549_SIM.py
================================================
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed
import tifffile
from batchgenerators.utilities.file_and_folder_operations import *
import shutil
if __name__ == '__main__':
    """
    This is going to be my test dataset for working with tif as input and output images.
    All we do here is copy the files and rename them. No file conversions take place.
    """
    dataset_name = 'Dataset073_Fluo_C3DH_A549_SIM'

    imagestr = join(nnUNet_raw, dataset_name, 'imagesTr')
    imagests = join(nnUNet_raw, dataset_name, 'imagesTs')
    labelstr = join(nnUNet_raw, dataset_name, 'labelsTr')
    maybe_mkdir_p(imagestr)
    maybe_mkdir_p(imagests)
    maybe_mkdir_p(labelstr)

    # we extract the downloaded train and test datasets to two separate folders and name them Fluo-C3DH-A549-SIM_train
    # and Fluo-C3DH-A549-SIM_test
    train_source = '/home/fabian/Downloads/Fluo-C3DH-A549-SIM_train'
    test_source = '/home/fabian/Downloads/Fluo-C3DH-A549-SIM_test'

    # with the old nnU-Net we had to convert all the files to nifti. This is no longer required. We can just copy the
    # tif files.
    # tif is broken when it comes to spacing. No standards. Grr. So when we use tif nnU-Net expects a separate file
    # that specifies the spacing. This file needs to exist for EVERY training/test case to allow for different spacings
    # between files. Important! The spacing must align with the axes.
    # Here when we do print(tifffile.imread('IMAGE').shape) we get (29, 300, 350). The low resolution axis is the first.
    # The spacing on the website is given in the wrong axis order. Great.
    spacing = (1, 0.126, 0.126)

    # train set: sequences '01' and '02', ground truth lives in '<seq>_GT/SEG'
    for seq in ['01', '02']:
        images_dir = join(train_source, seq)
        seg_dir = join(train_source, seq + '_GT', 'SEG')
        # if we were to be super clean we would go by IDs but here we just trust the files are sorted the correct way.
        # Simpler filenames in the cell tracking challenge would be soooo nice.
        images = subfiles(images_dir, suffix='.tif', sort=True, join=False)
        segs = subfiles(seg_dir, suffix='.tif', sort=True, join=False)
        for i, (im, se) in enumerate(zip(images, segs)):
            target_name = f'{seq}_image_{i:03d}'
            # we still need the '_0000' suffix for images! Otherwise we would not be able to support multiple input
            # channels distributed over separate files
            shutil.copy(join(images_dir, im), join(imagestr, target_name + '_0000.tif'))
            # spacing file!
            save_json({'spacing': spacing}, join(imagestr, target_name + '.json'))
            shutil.copy(join(seg_dir, se), join(labelstr, target_name + '.tif'))
            # spacing file!
            save_json({'spacing': spacing}, join(labelstr, target_name + '.json'))

    # test set, same as train just without the segmentations
    for seq in ['01', '02']:
        images_dir = join(test_source, seq)
        images = subfiles(images_dir, suffix='.tif', sort=True, join=False)
        for i, im in enumerate(images):
            target_name = f'{seq}_image_{i:03d}'
            shutil.copy(join(images_dir, im), join(imagests, target_name + '_0000.tif'))
            # spacing file!
            save_json({'spacing': spacing}, join(imagests, target_name + '.json'))

    # now we generate the dataset json
    # NOTE(review): num_training_cases is hard-coded to 60 — presumably 2
    # sequences x 30 frames; verify it matches the copied files.
    generate_dataset_json(
        join(nnUNet_raw, dataset_name),
        {0: 'fluorescence_microscopy'},
        {'background': 0, 'cell': 1},
        60,
        '.tif'
    )

    # custom split to ensure we are stratifying properly. This dataset only has 2 folds
    caseids = [i[:-4] for i in subfiles(labelstr, suffix='.tif', join=False)]
    splits = []
    splits.append(
        {'train': [i for i in caseids if i.startswith('01_')], 'val': [i for i in caseids if i.startswith('02_')]}
    )
    splits.append(
        {'train': [i for i in caseids if i.startswith('02_')], 'val': [i for i in caseids if i.startswith('01_')]}
    )
    save_json(splits, join(nnUNet_preprocessed, dataset_name, 'splits_final.json'))
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset114_MNMs.py
================================================
import csv
import os
import random
from pathlib import Path
import nibabel as nib
from batchgenerators.utilities.file_and_folder_operations import load_json, save_json
from nnunetv2.dataset_conversion.Dataset027_ACDC import make_out_dirs
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_preprocessed
def read_csv(csv_file: str):
    """Parse the M&Ms metadata csv.

    Returns {external_code: {"ed": int, "es": int, "vendor": str}} where
    'ed'/'es' are the end-diastole/end-systole frame indices.
    """
    patient_info = {}
    with open(csv_file) as handle:
        # DictReader uses the header row, so column order does not matter.
        for record in csv.DictReader(handle):
            patient_info[record["External code"]] = {
                "ed": int(record["ED"]),
                "es": int(record["ES"]),
                "vendor": record["Vendor"],
            }
    return patient_info
# ------------------------------------------------------------------------------
# Conversion to nnUNet format
# ------------------------------------------------------------------------------
def convert_mnms(src_data_folder: Path, csv_file_name: str, dataset_id: int):
    """Convert the labeled M&Ms data into nnU-Net raw format and write dataset.json."""
    out_dir, out_train_dir, out_labels_dir, out_test_dir = make_out_dirs(dataset_id, task_name="MNMs")

    patients_train = [d for d in (src_data_folder / "Training" / "Labeled").iterdir() if d.is_dir()]
    patients_test = [d for d in (src_data_folder / "Testing").iterdir() if d.is_dir()]

    patient_info = read_csv(str(src_data_folder / csv_file_name))

    save_cardiac_phases(patients_train, patient_info, out_train_dir, out_labels_dir)
    save_cardiac_phases(patients_test, patient_info, out_test_dir)

    # There are non-orthonormal direction cosines in the test and validation data.
    # Not sure if the data should be fixed, or we should skip the problematic data.
    # patients_val = [f for f in (src_data_folder / "Validation").iterdir() if f.is_dir()]
    # save_cardiac_phases(patients_val, patient_info, out_train_dir, out_labels_dir)

    generate_dataset_json(
        str(out_dir),
        channel_names={0: "cineMRI"},
        labels={"background": 0, "LVBP": 1, "LVM": 2, "RV": 3},
        file_ending=".nii.gz",
        num_training_cases=len(patients_train) * 2,  # 2 since we have ED and ES for each patient
    )
def save_cardiac_phases(
    patients: list[Path], patient_info: dict[str, dict[str, int]], out_dir: Path, labels_dir: Path = None
):
    """Extract the ED/ES frames of every patient's short-axis series.

    Labels are processed too when `labels_dir` is given (training data only).
    """
    for patient in patients:
        print(f"Processing patient: {patient.name}")

        image = nib.load(patient / f"{patient.name}_sa.nii.gz")
        frames = patient_info[patient.name]
        ed, es = frames["ed"], frames["es"]

        save_extracted_nifti_slice(image, ed_frame=ed, es_frame=es, out_dir=out_dir, patient=patient)

        if labels_dir:
            label = nib.load(patient / f"{patient.name}_sa_gt.nii.gz")
            save_extracted_nifti_slice(label, ed_frame=ed, es_frame=es, out_dir=labels_dir, patient=patient)
def save_extracted_nifti_slice(image, ed_frame: int, es_frame: int, out_dir: Path, patient: Path):
    """Save only the ED and ES time points of a 4D (H x W x D x time) volume as 3D niftis."""
    # Labels do not have modality identifiers; label files always end with '_gt'.
    suffix = ".nii.gz" if image.get_filename().endswith("_gt.nii.gz") else "_0000.nii.gz"

    for frame in (ed_frame, es_frame):
        extracted = nib.Nifti1Image(image.dataobj[..., frame], image.affine)
        nib.save(extracted, str(out_dir / f"{patient.name}_frame{frame:02d}{suffix}"))
# ------------------------------------------------------------------------------
# Create custom splits
# ------------------------------------------------------------------------------
def create_custom_splits(src_data_folder: Path, csv_file: str, dataset_id: int, num_val_patients: int = 25):
    """Append vendor-stratified train/val splits to an existing splits_final.json.

    Requires that nnU-Net has already created the default splits (see the CLI
    help: run training once so splits_final.json exists). For each of four
    train sets (vendor A, vendor B, and two half-A/half-B mixes) three splits
    are appended, validated on A, B and A+B respectively.
    """
    # NOTE(review): no zero-padding here (f"Dataset{dataset_id}") while
    # make_out_dirs uses {dataset_id:03d} — identical for id 114, but would
    # diverge for ids < 100; confirm if other ids are ever used.
    existing_splits = os.path.join(nnUNet_preprocessed, f"Dataset{dataset_id}_MNMs", "splits_final.json")
    splits = load_json(existing_splits)
    patients_train = [f.name for f in (src_data_folder / "Training" / "Labeled").iterdir() if f.is_dir()]

    # Filter out any patients not in the training set
    patient_info = {
        patient: data
        for patient, data in read_csv(str(src_data_folder / csv_file)).items()
        if patient in patients_train
    }

    # Get train and validation patients for both vendors
    patients_a = [patient for patient, patient_data in patient_info.items() if patient_data["vendor"] == "A"]
    patients_b = [patient for patient, patient_data in patient_info.items() if patient_data["vendor"] == "B"]
    train_a, val_a = get_vendor_split(patients_a, num_val_patients)
    train_b, val_b = get_vendor_split(patients_b, num_val_patients)

    # Build filenames from corresponding patient frames (one ES + one ED case per patient)
    train_a = [f"{patient}_frame{patient_info[patient][frame]:02d}" for patient in train_a for frame in ["es", "ed"]]
    train_b = [f"{patient}_frame{patient_info[patient][frame]:02d}" for patient in train_b for frame in ["es", "ed"]]
    # Half/half vendor mixes for the combined-training experiments
    train_a_mix_1, train_a_mix_2 = train_a[: len(train_a) // 2], train_a[len(train_a) // 2 :]
    train_b_mix_1, train_b_mix_2 = train_b[: len(train_b) // 2], train_b[len(train_b) // 2 :]
    val_a = [f"{patient}_frame{patient_info[patient][frame]:02d}" for patient in val_a for frame in ["es", "ed"]]
    val_b = [f"{patient}_frame{patient_info[patient][frame]:02d}" for patient in val_b for frame in ["es", "ed"]]

    for train_set in [train_a, train_b, train_a_mix_1 + train_b_mix_1, train_a_mix_2 + train_b_mix_2]:
        # For each train set, we evaluate on A, B and (A + B) respectively
        # See table 3 from the original paper for more details.
        splits.append({"train": train_set, "val": val_a})
        splits.append({"train": train_set, "val": val_b})
        splits.append({"train": train_set, "val": val_a + val_b})

    save_json(splits, existing_splits)
def get_vendor_split(patients: list[str], num_val_patients: int):
    """Randomly split `patients` into (train, validation) lists.

    The last `num_val_patients` entries of a shuffled copy become the
    validation set. Unlike the previous implementation, the caller's list is
    NOT mutated (random.shuffle used to shuffle it in place).

    Note: no seed is set here, so the split differs between runs unless the
    caller seeds the `random` module.
    """
    shuffled = list(patients)  # shuffle a copy, keep the input intact
    random.shuffle(shuffled)
    num_training_patients = len(shuffled) - num_val_patients
    return shuffled[:num_training_patients], shuffled[num_training_patients:]
if __name__ == "__main__":
    import argparse

    class RawTextArgumentDefaultsHelpFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawTextHelpFormatter):
        """Show argument defaults while keeping the newlines in the help text."""
        pass

    def _str2bool(value: str) -> bool:
        # Fix for the argparse `type=bool` pitfall: bool("False") is True, so
        # `-s False` used to enable the custom splits. Accept the usual
        # spellings explicitly instead.
        return value.lower() in ("true", "1", "yes", "y")

    parser = argparse.ArgumentParser(add_help=False, formatter_class=RawTextArgumentDefaultsHelpFormatter)
    parser.add_argument(
        "-h",
        "--help",
        action="help",
        default=argparse.SUPPRESS,
        help="MNMs conversion utility helper. This script can be used to convert MNMs data into the expected nnUNet "
             "format. It can also be used to create additional custom splits, for explicitly training on combinations "
             "of vendors A and B (see `--custom-splits`).\n"
             "If you wish to generate the custom splits, run the following pipeline:\n\n"
             "(1) Run `Dataset114_MNMs -i \n"
             "(2) Run `nnUNetv2_plan_and_preprocess -d 114 --verify_dataset_integrity`\n"
             "(3) Start training, but stop after initial splits are created: `nnUNetv2_train 114 2d 0`\n"
             "(4) Re-run `Dataset114_MNMs`, with `-s True`.\n"
             "(5) Re-run training.\n",
    )
    parser.add_argument(
        "-i",
        "--input_folder",
        type=str,
        default="./data/M&Ms/OpenDataset/",
        help="The downloaded MNMs dataset dir. Should contain a csv file, as well as Training, Validation and Testing "
             "folders.",
    )
    parser.add_argument(
        "-c",
        "--csv_file_name",
        type=str,
        default="211230_M&Ms_Dataset_information_diagnosis_opendataset.csv",
        help="The csv file containing the dataset information.",
    )
    parser.add_argument("-d", "--dataset_id", type=int, default=114, help="nnUNet Dataset ID.")
    parser.add_argument(
        "-s",
        "--custom_splits",
        type=_str2bool,
        default=False,
        help="Whether to append custom splits for training and testing on different vendors. If True, will create "
             "splits for training on patients from vendors A, B or a mix of A and B. Splits are tested on a hold-out "
             "validation sets of patients from A, B or A and B combined. See section 2.4 and table 3 from "
             "https://arxiv.org/abs/2011.07592 for more info.",
    )
    args = parser.parse_args()
    args.input_folder = Path(args.input_folder)

    if args.custom_splits:
        print("Appending custom splits...")
        create_custom_splits(args.input_folder, args.csv_file_name, args.dataset_id)
    else:
        print("Converting...")
        convert_mnms(args.input_folder, args.csv_file_name, args.dataset_id)

    print("Done!")
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset115_EMIDEC.py
================================================
import shutil
from pathlib import Path
from nnunetv2.dataset_conversion.Dataset027_ACDC import make_out_dirs
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
def copy_files(src_data_dir: Path, src_test_dir: Path, train_dir: Path, labels_dir: Path, test_dir: Path):
    """Copy files from the EMIDEC dataset to the nnUNet dataset folder. Returns the number of training cases."""

    def _case_name(nifti: Path) -> str:
        # 'Case_P001.nii.gz' -> stem 'Case_P001.nii' -> 'Case_P001'
        return nifti.stem.split(".")[0]

    training_cases = sorted(d for d in src_data_dir.iterdir() if d.is_dir())
    test_cases = sorted(d for d in src_test_dir.iterdir() if d.is_dir())

    # Training images get the '_0000' channel suffix; labels keep the plain
    # case name (copied into labels_dir unchanged).
    for case in training_cases:
        image = case / "Images" / f"{case.name}.nii.gz"
        contours = case / "Contours" / f"{case.name}.nii.gz"
        shutil.copy(image, train_dir / f"{_case_name(image)}_0000.nii.gz")
        shutil.copy(contours, labels_dir)

    for case in test_cases:
        image = case / "Images" / f"{case.name}.nii.gz"
        shutil.copy(image, test_dir / f"{_case_name(image)}_0000.nii.gz")

    return len(training_cases)
def convert_emidec(src_data_dir: str, src_test_dir: str, dataset_id=115):
    """Convert the EMIDEC dataset into nnU-Net raw format and write dataset.json.

    Fix: the default dataset_id used to be 27 — a copy-paste leftover from the
    ACDC converter that would have written into Dataset027. 115 matches the
    CLI default, the help text and the EMIDEC task name. The __main__ block
    always passes dataset_id explicitly, so callers are unaffected.
    """
    out_dir, train_dir, labels_dir, test_dir = make_out_dirs(dataset_id=dataset_id, task_name="EMIDEC")
    num_training_cases = copy_files(Path(src_data_dir), Path(src_test_dir), train_dir, labels_dir, test_dir)

    # Single cine-MRI channel; cardiac tissue classes of the EMIDEC challenge.
    generate_dataset_json(
        str(out_dir),
        channel_names={
            0: "cineMRI",
        },
        labels={
            "background": 0,
            "cavity": 1,
            "normal_myocardium": 2,
            "myocardial_infarction": 3,
            "no_reflow": 4,
        },
        file_ending=".nii.gz",
        num_training_cases=num_training_cases,
    )
if __name__ == "__main__":
    # CLI entry point: convert the EMIDEC train and test sets into nnU-Net format.
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input_dir", type=str, help="The EMIDEC dataset directory.")
    parser.add_argument("-t", "--test_dir", type=str, help="The EMIDEC test set directory.")
    parser.add_argument(
        "-d", "--dataset_id", required=False, type=int, default=115, help="nnU-Net Dataset ID, default: 115"
    )
    args = parser.parse_args()
    print("Converting...")
    convert_emidec(args.input_dir, args.test_dir, args.dataset_id)
    print("Done!")
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py
================================================
import multiprocessing
import shutil
from multiprocessing import Pool
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_raw
from skimage import io
from acvl_utils.morphology.morphology_helper import generic_filter_components
from scipy.ndimage import binary_fill_holes
def load_and_covnert_case(input_image: str, input_seg: str, output_image: str, output_seg: str,
                          min_component_size: int = 50):
    """Binarize one road segmentation and blank out labels inside the white
    no-data regions of the aerial image; the image itself is copied unchanged.

    (The name keeps its historical typo because the call sites use it.)
    """
    seg = io.imread(input_seg)
    seg[seg == 255] = 1

    rgb = io.imread(input_image)
    # A pixel is "white" when R+G+B == 3*255. The dataset has large white
    # areas in which road segmentations can exist but no image information is
    # available — remove the road label there.
    white = rgb.sum(2) == (3 * 255)
    # keep only white components larger than min_component_size pixels
    white = generic_filter_components(
        white,
        filter_fn=lambda ids, sizes: [i for j, i in enumerate(ids) if sizes[j] > min_component_size])
    white = binary_fill_holes(white)
    seg[white] = 0

    io.imsave(output_seg, seg, check_contrast=False)
    shutil.copy(input_image, output_image)
if __name__ == "__main__":
    # extracted archive from https://www.kaggle.com/datasets/insaff/massachusetts-roads-dataset?resource=download
    source = '/media/fabian/data/raw_datasets/Massachussetts_road_seg/road_segmentation_ideal'

    dataset_name = 'Dataset120_RoadSegmentation'

    imagestr = join(nnUNet_raw, dataset_name, 'imagesTr')
    imagests = join(nnUNet_raw, dataset_name, 'imagesTs')
    labelstr = join(nnUNet_raw, dataset_name, 'labelsTr')
    labelsts = join(nnUNet_raw, dataset_name, 'labelsTs')
    maybe_mkdir_p(imagestr)
    maybe_mkdir_p(imagests)
    maybe_mkdir_p(labelstr)
    maybe_mkdir_p(labelsts)

    train_source = join(source, 'training')
    test_source = join(source, 'testing')

    # Conversion is dispatched asynchronously to a worker pool; results are
    # collected with .get() before the pool is torn down by the `with` block.
    with multiprocessing.get_context("spawn").Pool(8) as p:
        # not all training images have a segmentation
        valid_ids = subfiles(join(train_source, 'output'), join=False, suffix='png')
        num_train = len(valid_ids)
        r = []
        for v in valid_ids:
            r.append(
                p.starmap_async(
                    load_and_covnert_case,
                    ((
                        join(train_source, 'input', v),
                        join(train_source, 'output', v),
                        join(imagestr, v[:-4] + '_0000.png'),
                        join(labelstr, v),
                        50
                    ),)
                )
            )

        # test set
        valid_ids = subfiles(join(test_source, 'output'), join=False, suffix='png')
        for v in valid_ids:
            r.append(
                p.starmap_async(
                    load_and_covnert_case,
                    ((
                        join(test_source, 'input', v),
                        join(test_source, 'output', v),
                        join(imagests, v[:-4] + '_0000.png'),
                        join(labelsts, v),
                        50
                    ),)
                )
            )
        # block until all conversions finished (must happen inside the `with`)
        _ = [i.get() for i in r]

    generate_dataset_json(join(nnUNet_raw, dataset_name), {0: 'R', 1: 'G', 2: 'B'}, {'background': 0, 'road': 1},
                          num_train, '.png', dataset_name=dataset_name)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset137_BraTS21.py
================================================
import multiprocessing
import shutil
from multiprocessing import Pool
import SimpleITK as sitk
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_raw
def copy_BraTS_segmentation_and_convert_labels_to_nnUNet(in_file: str, out_file: str) -> None:
    """Remap a BraTS segmentation to nnU-Net's continuous labels and save it.

    Use this for segmentations only! BraTS uses labels 0, 1, 2, 4; nnU-Net
    wants them continuous, so the mapping is 4->3, 2->1, 1->2.
    """
    source_img = sitk.ReadImage(in_file)
    seg = sitk.GetArrayFromImage(source_img)

    # refuse anything outside the expected BraTS label set
    for value in np.unique(seg):
        if value not in [0, 1, 2, 4]:
            raise RuntimeError('unexpected label')

    remapped = np.zeros_like(seg)
    remapped[seg == 4] = 3
    remapped[seg == 2] = 1
    remapped[seg == 1] = 2

    out_img = sitk.GetImageFromArray(remapped)
    out_img.CopyInformation(source_img)  # keep spacing/origin/direction
    sitk.WriteImage(out_img, out_file)
def convert_labels_back_to_BraTS(seg: np.ndarray):
    """Invert the nnU-Net label remapping: 1->2, 3->4, 2->1 (BraTS convention)."""
    new_seg = np.zeros_like(seg)
    for nnunet_label, brats_label in ((1, 2), (3, 4), (2, 1)):
        new_seg[seg == nnunet_label] = brats_label
    return new_seg
def load_convert_labels_back_to_BraTS(filename, input_folder, output_folder):
    """Read one predicted segmentation, remap it to BraTS labels and write it out."""
    source = sitk.ReadImage(join(input_folder, filename))
    remapped = convert_labels_back_to_BraTS(sitk.GetArrayFromImage(source))
    result = sitk.GetImageFromArray(remapped)
    result.CopyInformation(source)  # preserve spacing/origin/direction
    sitk.WriteImage(result, join(output_folder, filename))
def convert_folder_with_preds_back_to_BraTS_labeling_convention(input_folder: str, output_folder: str, num_processes: int = 12):
    """
    reads all prediction files (nifti) in the input folder, converts the labels back to BraTS convention and saves the
    result (same filenames) in output_folder
    """
    maybe_mkdir_p(output_folder)
    filenames = subfiles(input_folder, suffix='.nii.gz', join=False)
    work_items = [(name, input_folder, output_folder) for name in filenames]
    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        pool.starmap(load_convert_labels_back_to_BraTS, work_items)
if __name__ == '__main__':
    # Convert the BraTS 2021 training data into nnU-Net raw format
    # (Dataset137): copy the four MR modalities per case and remap the labels.
    # brats_data_dir = '/home/isensee/drives/E132-Rohdaten/BraTS_2021/training'
    brats_data_dir = "/data/nnUNet_raw_data/original/"

    task_id = 137
    task_name = "BraTS2021"

    foldername = "Dataset%03.0d_%s" % (task_id, task_name)

    # setting up nnU-Net folders
    out_base = join(nnUNet_raw, foldername)
    imagestr = join(out_base, "imagesTr")
    labelstr = join(out_base, "labelsTr")
    maybe_mkdir_p(imagestr)
    maybe_mkdir_p(labelstr)

    # each case folder 'BraTS*' holds the four modalities plus '<case>_seg.nii.gz'
    case_ids = subdirs(brats_data_dir, prefix='BraTS', join=False)

    for c in case_ids:
        # channel order must match channel_names below: T1, T1ce, T2, Flair
        shutil.copy(join(brats_data_dir, c, c + "_t1.nii.gz"), join(imagestr, c + '_0000.nii.gz'))
        shutil.copy(join(brats_data_dir, c, c + "_t1ce.nii.gz"), join(imagestr, c + '_0001.nii.gz'))
        shutil.copy(join(brats_data_dir, c, c + "_t2.nii.gz"), join(imagestr, c + '_0002.nii.gz'))
        shutil.copy(join(brats_data_dir, c, c + "_flair.nii.gz"), join(imagestr, c + '_0003.nii.gz'))

        copy_BraTS_segmentation_and_convert_labels_to_nnUNet(join(brats_data_dir, c, c + "_seg.nii.gz"),
                                                             join(labelstr, c + '.nii.gz'))

    # region-based training: labels are overlapping regions (WT/TC/ET) rather
    # than mutually exclusive classes
    generate_dataset_json(out_base,
                          channel_names={0: 'T1', 1: 'T1ce', 2: 'T2', 3: 'Flair'},
                          labels={
                              'background': 0,
                              'whole tumor': (1, 2, 3),
                              'tumor core': (2, 3),
                              'enhancing tumor': (3, )
                          },
                          num_training_cases=len(case_ids),
                          file_ending='.nii.gz',
                          regions_class_order=(1, 2, 3),
                          license='see https://www.synapse.org/#!Synapse:syn25829067/wiki/610863',
                          reference='see https://www.synapse.org/#!Synapse:syn25829067/wiki/610863',
                          dataset_release='1.0')
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset218_Amos2022_task1.py
================================================
from batchgenerators.utilities.file_and_folder_operations import *
import shutil
# NOTE(review): plain module import (not nnunetv2.dataset_conversion.generate_dataset_json),
# so this script only resolves when run from inside its own directory — confirm intended.
from generate_dataset_json import generate_dataset_json
# from nnunetv2.paths import nnUNet_raw
# Hard-coded override of the usual nnunetv2.paths.nnUNet_raw location.
nnUNet_raw = '/data/linshan/nnunet_data/nnUNet_raw'
def convert_amos_task1(amos_base_dir: str, nnunet_dataset_id: int = 218):
    """
    AMOS doesn't say anything about how the validation set is supposed to be used. So we just incorporate that into
    the train set. Having a 5-fold cross-validation is superior to a single train:val split
    """
    task_name = "AMOS2022_postChallenge_task1"
    foldername = "Dataset%03.0d_%s" % (nnunet_dataset_id, task_name)

    # setting up nnU-Net folders
    out_base = join(nnUNet_raw, foldername)
    imagestr = join(out_base, "imagesTr")
    imagests = join(out_base, "imagesTs")
    labelstr = join(out_base, "labelsTr")
    maybe_mkdir_p(imagestr)
    maybe_mkdir_p(imagests)
    maybe_mkdir_p(labelstr)

    dataset_json_source = load_json(join(amos_base_dir, 'dataset.json'))

    # identifiers look like 'amos_XXXX'; [:-7] strips the '.nii.gz' ending
    training_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['training']]
    tr_ctr = 0  # counts cases copied into imagesTr (train + validation CTs)
    for tr in training_identifiers:
        if int(tr.split("_")[-1]) <= 410:  # these are the CT images
            tr_ctr += 1
            shutil.copy(join(amos_base_dir, 'imagesTr', tr + '.nii.gz'), join(imagestr, f'{tr}_0000.nii.gz'))
            shutil.copy(join(amos_base_dir, 'labelsTr', tr + '.nii.gz'), join(labelstr, f'{tr}.nii.gz'))

    test_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['test']]
    for ts in test_identifiers:
        if int(ts.split("_")[-1]) <= 500:  # these are the CT images
            shutil.copy(join(amos_base_dir, 'imagesTs', ts + '.nii.gz'), join(imagests, f'{ts}_0000.nii.gz'))

    # NOTE(review): the CT cut-off differs per split (<=410 train, <=500 test,
    # <=409 validation). Presumably this reflects the AMOS case-numbering
    # scheme per split — verify against the AMOS release notes.
    val_identifiers = [i['image'].split('/')[-1][:-7] for i in dataset_json_source['validation']]
    for vl in val_identifiers:
        if int(vl.split("_")[-1]) <= 409:  # these are the CT images
            tr_ctr += 1
            shutil.copy(join(amos_base_dir, 'imagesVa', vl + '.nii.gz'), join(imagestr, f'{vl}_0000.nii.gz'))
            shutil.copy(join(amos_base_dir, 'labelsVa', vl + '.nii.gz'), join(labelstr, f'{vl}.nii.gz'))

    # label names/ids are taken over from the source dataset.json (inverted mapping)
    generate_dataset_json(out_base, {0: "CT"}, labels={v: int(k) for k,v in dataset_json_source['labels'].items()},
                          num_training_cases=tr_ctr, file_ending='.nii.gz',
                          dataset_name=task_name, reference='https://amos22.grand-challenge.org/',
                          release='https://zenodo.org/record/7262581',
                          overwrite_image_reader_writer='NibabelIOWithReorient',
                          description="This is the dataset as released AFTER the challenge event. It has the "
                                      "validation set gt in it! We just use the validation images as additional "
                                      "training cases because AMOS doesn't specify how they should be used. nnU-Net's"
                                      " 5-fold CV is better than some random train:val split.")
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    # Fix: a positional argument with a `default` is still required unless
    # nargs='?' is set — previously the default path was silently unusable and
    # running the script without an argument errored out.
    parser.add_argument('input_folder', type=str, nargs='?', default='/data/linshan/CTs/Amos2022/',
                        help="The downloaded and extracted AMOS2022 (https://amos22.grand-challenge.org/) data. "
                             "Use this link: https://zenodo.org/record/7262581."
                             "You need to specify the folder with the imagesTr, imagesVal, labelsTr etc subfolders here!")
    parser.add_argument('-d', required=False, type=int, default=218, help='nnU-Net Dataset ID, default: 218')
    args = parser.parse_args()
    amos_base = args.input_folder
    convert_amos_task1(amos_base, args.d)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset219_Amos2022_task2.py
================================================
from batchgenerators.utilities.file_and_folder_operations import *
import shutil
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_raw
def convert_amos_task2(amos_base_dir: str, nnunet_dataset_id: int = 219):
    """
    AMOS doesn't say anything about how the validation set is supposed to be used. So we just incorporate that into
    the train set. Having a 5-fold cross-validation is superior to a single train:val split
    """
    task_name = "AMOS2022_postChallenge_task2"
    out_base = join(nnUNet_raw, "Dataset%03.0d_%s" % (nnunet_dataset_id, task_name))

    # setting up nnU-Net folders
    imagestr = join(out_base, "imagesTr")
    imagests = join(out_base, "imagesTs")
    labelstr = join(out_base, "labelsTr")
    for folder in (imagestr, imagests, labelstr):
        maybe_mkdir_p(folder)

    source_json = load_json(join(amos_base_dir, 'dataset.json'))

    def _identifiers(entries):
        # 'imagesTr/amos_0001.nii.gz' -> 'amos_0001' ([:-7] strips '.nii.gz')
        return [entry['image'].split('/')[-1][:-7] for entry in entries]

    training_identifiers = _identifiers(source_json['training'])
    for case in training_identifiers:
        shutil.copy(join(amos_base_dir, 'imagesTr', case + '.nii.gz'), join(imagestr, f'{case}_0000.nii.gz'))
        shutil.copy(join(amos_base_dir, 'labelsTr', case + '.nii.gz'), join(labelstr, f'{case}.nii.gz'))

    for case in _identifiers(source_json['test']):
        shutil.copy(join(amos_base_dir, 'imagesTs', case + '.nii.gz'), join(imagests, f'{case}_0000.nii.gz'))

    # validation cases (with gt) are folded into the training set, see docstring
    val_identifiers = _identifiers(source_json['validation'])
    for case in val_identifiers:
        shutil.copy(join(amos_base_dir, 'imagesVa', case + '.nii.gz'), join(imagestr, f'{case}_0000.nii.gz'))
        shutil.copy(join(amos_base_dir, 'labelsVa', case + '.nii.gz'), join(labelstr, f'{case}.nii.gz'))

    generate_dataset_json(out_base, {0: "either_CT_or_MR"},
                          labels={v: int(k) for k, v in source_json['labels'].items()},
                          num_training_cases=len(training_identifiers) + len(val_identifiers),
                          file_ending='.nii.gz',
                          dataset_name=task_name, reference='https://amos22.grand-challenge.org/',
                          release='https://zenodo.org/record/7262581',
                          overwrite_image_reader_writer='NibabelIOWithReorient',
                          description="This is the dataset as released AFTER the challenge event. It has the "
                                      "validation set gt in it! We just use the validation images as additional "
                                      "training cases because AMOS doesn't specify how they should be used. nnU-Net's"
                                      " 5-fold CV is better than some random train:val split.")
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    # NOTE(review): positional argument without a default — the folder must be
    # given on the command line (unlike the task-1 converter in this repo).
    parser.add_argument('input_folder', type=str,
                        help="The downloaded and extracted AMOS2022 (https://amos22.grand-challenge.org/) data. "
                             "Use this link: https://zenodo.org/record/7262581."
                             "You need to specify the folder with the imagesTr, imagesVal, labelsTr etc subfolders here!")
    parser.add_argument('-d', required=False, type=int, default=219, help='nnU-Net Dataset ID, default: 219')
    args = parser.parse_args()
    amos_base = args.input_folder
    convert_amos_task2(amos_base, args.d)

# example input folder:
# /home/isensee/Downloads/amos22/amos22/
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset220_KiTS2023.py
================================================
from batchgenerators.utilities.file_and_folder_operations import *
import shutil
from generate_dataset_json import generate_dataset_json
#from nnunetv2.paths import nnUNet_raw
nnUNet_raw = '/data/linshan/nnunet_data/nnUNet_raw'
def convert_kits2023(kits_base_dir: str, nnunet_dataset_id: int = 220):
    """Convert the KiTS2023 release into nnU-Net v2 raw format.

    Every case_XXXXX subfolder contributes one training image (imaging.nii.gz)
    and one label (segmentation.nii.gz). A region-based dataset.json is written
    (kidney / masses / tumor regions).
    """
    task_name = "KiTS2023"
    dataset_folder_name = "Dataset%03.0d_%s" % (nnunet_dataset_id, task_name)

    # set up the nnU-Net raw folder layout
    target_base = join(nnUNet_raw, dataset_folder_name)
    target_images = join(target_base, "imagesTr")
    target_labels = join(target_base, "labelsTr")
    for directory in (target_images, target_labels):
        maybe_mkdir_p(directory)

    case_ids = subdirs(kits_base_dir, prefix='case_', join=False)
    for case_id in case_ids:
        shutil.copy(join(kits_base_dir, case_id, 'imaging.nii.gz'),
                    join(target_images, f'{case_id}_0000.nii.gz'))
        shutil.copy(join(kits_base_dir, case_id, 'segmentation.nii.gz'),
                    join(target_labels, f'{case_id}.nii.gz'))

    generate_dataset_json(target_base, {0: "CT"},
                          labels={
                              "background": 0,
                              "kidney": (1, 2, 3),
                              "masses": (2, 3),
                              "tumor": 2
                          },
                          regions_class_order=(1, 3, 2),
                          num_training_cases=len(case_ids), file_ending='.nii.gz',
                          dataset_name=task_name, reference='none',
                          release='prerelease',
                          overwrite_image_reader_writer='NibabelIOWithReorient',
                          description="KiTS2023")
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('input_folder', type=str,
                        help="The downloaded and extracted KiTS2023 dataset (must have case_XXXXX subfolders)")
    parser.add_argument('-d', required=False, type=int, default=220, help='nnU-Net Dataset ID, default: 220')
    args = parser.parse_args()
    # 'amos_base' was a copy-paste leftover from the AMOS converter in this repo;
    # renamed so the variable matches the dataset actually being converted.
    kits_base = args.input_folder
    convert_kits2023(kits_base, args.d)

    # /media/isensee/raw_data/raw_datasets/kits23/dataset
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset221_AutoPETII_2023.py
================================================
from batchgenerators.utilities.file_and_folder_operations import *
import shutil
from nnunetv2.dataset_conversion.generate_dataset_json import generate_dataset_json
from nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed
def convert_autopet(autopet_base_dir: str = '/media/isensee/My Book1/AutoPET/nifti/FDG-PET-CT-Lesions',
                    nnunet_dataset_id: int = 221):
    """Convert the AutoPET II dataset into nnU-Net v2 raw format and write a 5-fold split.

    Each PETCT_* patient folder may hold several acquisitions; each acquisition becomes
    one training case with two channels (CTres -> _0000, SUV -> _0001) and one label (SEG).
    The split is built at patient level so acquisitions of one patient never appear in
    both train and val of the same fold; it is saved to nnUNet_preprocessed.
    """
    task_name = "AutoPETII_2023"
    foldername = "Dataset%03.0d_%s" % (nnunet_dataset_id, task_name)

    # setting up nnU-Net folders
    out_base = join(nnUNet_raw, foldername)
    imagestr = join(out_base, "imagesTr")
    labelstr = join(out_base, "labelsTr")
    maybe_mkdir_p(imagestr)
    maybe_mkdir_p(labelstr)

    patients = subdirs(autopet_base_dir, prefix='PETCT', join=False)

    n = 0
    identifiers = []
    for pat in patients:
        patient_acquisitions = subdirs(join(autopet_base_dir, pat), join=False)
        for pa in patient_acquisitions:
            n += 1
            identifier = f"{pat}_{pa}"
            identifiers.append(identifier)
            if not isfile(join(imagestr, f'{identifier}_0000.nii.gz')):
                shutil.copy(join(autopet_base_dir, pat, pa, 'CTres.nii.gz'),
                            join(imagestr, f'{identifier}_0000.nii.gz'))
            if not isfile(join(imagestr, f'{identifier}_0001.nii.gz')):
                shutil.copy(join(autopet_base_dir, pat, pa, 'SUV.nii.gz'),
                            join(imagestr, f'{identifier}_0001.nii.gz'))
            # BUGFIX: the skip-if-present check previously looked for the label in
            # imagestr, but the segmentation is copied into labelstr, so the check
            # could never match and the copy was always repeated.
            if not isfile(join(labelstr, f'{identifier}.nii.gz')):
                shutil.copy(join(autopet_base_dir, pat, pa, 'SEG.nii.gz'),
                            join(labelstr, f'{identifier}.nii.gz'))

    # NOTE(review): channel 1 is the SUV PET image but is declared as "CT" — presumably
    # deliberate to pick a particular normalization scheme; confirm before changing.
    generate_dataset_json(out_base, {0: "CT", 1: "CT"},
                          labels={
                              "background": 0,
                              "tumor": 1
                          },
                          num_training_cases=n, file_ending='.nii.gz',
                          dataset_name=task_name, reference='https://autopet-ii.grand-challenge.org/',
                          release='release',
                          # overwrite_image_reader_writer='NibabelIOWithReorient',
                          description=task_name)

    # manual split: fold membership is decided per patient, then expanded to acquisitions
    splits = []
    for fold in range(5):
        val_patients = patients[fold:: 5]
        splits.append(
            {
                'train': [i for i in identifiers if not any([i.startswith(v) for v in val_patients])],
                'val': [i for i in identifiers if any([i.startswith(v) for v in val_patients])],
            }
        )

    pp_out_dir = join(nnUNet_preprocessed, foldername)
    maybe_mkdir_p(pp_out_dir)
    save_json(splits, join(pp_out_dir, 'splits_final.json'), sort_keys=False)
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('input_folder', type=str,
                        help="The downloaded and extracted autopet dataset (must have PETCT_XXX subfolders)")
    parser.add_argument('-d', required=False, type=int, default=221, help='nnU-Net Dataset ID, default: 221')
    args = parser.parse_args()
    # 'amos_base' was a copy-paste leftover from the AMOS converter in this repo;
    # renamed so the variable matches the dataset actually being converted.
    autopet_base = args.input_folder
    convert_autopet(autopet_base, args.d)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/Dataset988_dummyDataset4.py
================================================
import os
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.paths import nnUNet_raw
from nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets
if __name__ == '__main__':
    # creates a dummy dataset where there are no files in imagestr and labelstr
    source_dataset = 'Dataset004_Hippocampus'
    # NOTE(review): this file is named Dataset988_dummyDataset4 but the target id used
    # here is 987 — confirm which id is intended before relying on either.
    target_dataset = 'Dataset987_dummyDataset4'
    target_dataset_dir = join(nnUNet_raw, target_dataset)
    maybe_mkdir_p(target_dataset_dir)

    dataset = get_filenames_of_train_images_and_targets(join(nnUNet_raw, source_dataset))

    # the returned dataset will have absolute paths. We should use relative paths so that you can freely copy
    # datasets around between systems. As long as the source dataset is there it will continue working even if
    # nnUNet_raw is in different locations

    # paths must be relative to target_dataset_dir!!!
    for k in dataset.keys():
        dataset[k]['label'] = os.path.relpath(dataset[k]['label'], target_dataset_dir)
        dataset[k]['images'] = [os.path.relpath(i, target_dataset_dir) for i in dataset[k]['images']]

    # load old dataset.json, attach the explicit file listing, and save to the new dataset
    dataset_json = load_json(join(nnUNet_raw, source_dataset, 'dataset.json'))
    dataset_json['dataset'] = dataset

    # save
    save_json(dataset_json, join(target_dataset_dir, 'dataset.json'), sort_keys=False)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/convert_MSD_dataset.py
================================================
import argparse
import multiprocessing
import shutil
from multiprocessing import Pool
from typing import Optional
import SimpleITK as sitk
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.paths import nnUNet_raw
from nnunetv2.utilities.dataset_name_id_conversion import find_candidate_datasets
from nnunetv2.configuration import default_num_processes
import numpy as np
def split_4d_nifti(filename, output_folder):
    """Split a 4D NIfTI volume into one 3D file per entry along the first axis.

    3D inputs are simply copied into output_folder with the '_0000' channel suffix.
    4D inputs are split along their first axis and written as '_0000', '_0001', ...
    with the 4th dimension stripped from spacing/origin/direction metadata.

    Raises RuntimeError for any other dimensionality.
    """
    img_itk = sitk.ReadImage(filename)
    dim = img_itk.GetDimension()
    file_base = os.path.basename(filename)
    if dim == 3:
        # already single-channel 3D: copy with the channel suffix nnU-Net expects
        shutil.copy(filename, join(output_folder, file_base[:-7] + "_0000.nii.gz"))
        return
    elif dim != 4:
        raise RuntimeError("Unexpected dimensionality: %d of file %s, cannot split" % (dim, filename))

    img_npy = sitk.GetArrayFromImage(img_itk)
    spacing = img_itk.GetSpacing()
    origin = img_itk.GetOrigin()
    direction = np.array(img_itk.GetDirection()).reshape(4, 4)
    # now modify these to remove the fourth dimension
    spacing = tuple(spacing[:-1])
    origin = tuple(origin[:-1])
    direction = tuple(direction[:-1, :-1].reshape(-1))
    # the original used `for i, t in enumerate(range(...))` where i and t were always
    # identical — a single loop variable expresses the same thing
    for t in range(img_npy.shape[0]):
        img_itk_new = sitk.GetImageFromArray(img_npy[t])
        img_itk_new.SetSpacing(spacing)
        img_itk_new.SetOrigin(origin)
        img_itk_new.SetDirection(direction)
        sitk.WriteImage(img_itk_new, join(output_folder, file_base[:-7] + "_%04.0d.nii.gz" % t))
def convert_msd_dataset(source_folder: str, overwrite_target_id: Optional[int] = None,
                        num_processes: int = default_num_processes) -> None:
    """Convert a Medical Segmentation Decathlon (MSD) task folder (TaskXX_Name) into
    nnU-Net v2 raw format: split 4D images into per-channel 3D files, copy labels,
    and rewrite dataset.json to the v2 schema.

    The target dataset id defaults to the MSD task id unless overwrite_target_id is given.
    Raises AssertionError if mandatory subfolders are missing or the target id is taken.
    """
    # strip a trailing path separator so the basename parsing below yields 'TaskXX_Name'
    if source_folder.endswith('/') or source_folder.endswith('\\'):
        source_folder = source_folder[:-1]

    labelsTr = join(source_folder, 'labelsTr')
    imagesTs = join(source_folder, 'imagesTs')
    imagesTr = join(source_folder, 'imagesTr')
    assert isdir(labelsTr), f"labelsTr subfolder missing in source folder"
    assert isdir(imagesTs), f"imagesTs subfolder missing in source folder"
    assert isdir(imagesTr), f"imagesTr subfolder missing in source folder"
    dataset_json = join(source_folder, 'dataset.json')
    assert isfile(dataset_json), f"dataset.json missing in source_folder"

    # infer source dataset id and name
    task, dataset_name = os.path.basename(source_folder).split('_')
    task_id = int(task[4:])

    # check if target dataset id is taken
    target_id = task_id if overwrite_target_id is None else overwrite_target_id
    existing_datasets = find_candidate_datasets(target_id)
    assert len(existing_datasets) == 0, f"Target dataset id {target_id} is already taken, please consider changing " \
                                        f"it using overwrite_target_id. Conflicting dataset: {existing_datasets} (check nnUNet_results, nnUNet_preprocessed and nnUNet_raw!)"

    target_dataset_name = f"Dataset{target_id:03d}_{dataset_name}"
    target_folder = join(nnUNet_raw, target_dataset_name)
    target_imagesTr = join(target_folder, 'imagesTr')
    target_imagesTs = join(target_folder, 'imagesTs')
    target_labelsTr = join(target_folder, 'labelsTr')
    maybe_mkdir_p(target_imagesTr)
    maybe_mkdir_p(target_imagesTs)
    maybe_mkdir_p(target_labelsTr)

    # 'spawn' context; image splitting runs in parallel while labels are copied in-process
    with multiprocessing.get_context("spawn").Pool(num_processes) as p:
        results = []

        # convert 4d train images (hidden files and MSD's '_'-prefixed artifacts are skipped)
        source_images = [i for i in subfiles(imagesTr, suffix='.nii.gz', join=False) if
                         not i.startswith('.') and not i.startswith('_')]
        source_images = [join(imagesTr, i) for i in source_images]

        results.append(
            p.starmap_async(
                split_4d_nifti, zip(source_images, [target_imagesTr] * len(source_images))
            )
        )

        # convert 4d test images
        source_images = [i for i in subfiles(imagesTs, suffix='.nii.gz', join=False) if
                         not i.startswith('.') and not i.startswith('_')]
        source_images = [join(imagesTs, i) for i in source_images]

        results.append(
            p.starmap_async(
                split_4d_nifti, zip(source_images, [target_imagesTs] * len(source_images))
            )
        )

        # copy segmentations (labels are 3D already, no splitting needed)
        source_images = [i for i in subfiles(labelsTr, suffix='.nii.gz', join=False) if
                         not i.startswith('.') and not i.startswith('_')]
        for s in source_images:
            shutil.copy(join(labelsTr, s), join(target_labelsTr, s))

        # wait for the async splitting jobs; .get() re-raises worker exceptions
        [i.get() for i in results]

    # rewrite dataset.json to v2: labels become {name: id}, 'modality' -> 'channel_names',
    # and the explicit training/test file lists are dropped
    dataset_json = load_json(dataset_json)
    dataset_json['labels'] = {j: int(i) for i, j in dataset_json['labels'].items()}
    dataset_json['file_ending'] = ".nii.gz"
    dataset_json["channel_names"] = dataset_json["modality"]
    del dataset_json["modality"]
    del dataset_json["training"]
    del dataset_json["test"]
    save_json(dataset_json, join(nnUNet_raw, target_dataset_name, 'dataset.json'), sort_keys=False)
def entry_point():
    """Command-line interface for convert_msd_dataset."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', type=str, required=True,
                        help='Downloaded and extracted MSD dataset folder. CANNOT be nnUNetv1 dataset! Example: '
                             '/home/fabian/Downloads/Task05_Prostate')
    parser.add_argument('-overwrite_id', type=int, required=False, default=None,
                        help='Overwrite the dataset id. If not set we use the id of the MSD task (inferred from '
                             'folder name). Only use this if you already have an equivalently numbered dataset!')
    parser.add_argument('-np', type=int, required=False, default=default_num_processes,
                        help=f'Number of processes used. Default: {default_num_processes}')
    parsed = parser.parse_args()
    convert_msd_dataset(parsed.i, parsed.overwrite_id, parsed.np)
if __name__ == '__main__':
    # NOTE(review): runs a hard-coded conversion instead of entry_point() — presumably a
    # local convenience edit in this fork; confirm before reusing as a CLI script.
    convert_msd_dataset('/data/jiaxin/data/10_Decathlon/Task01_BrainTumour', overwrite_target_id=201)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/convert_raw_dataset_from_old_nnunet_format.py
================================================
import shutil
from copy import deepcopy
from batchgenerators.utilities.file_and_folder_operations import join, maybe_mkdir_p, isdir, load_json, save_json
from nnunetv2.paths import nnUNet_raw
def convert(source_folder, target_dataset_name):
    """Copy a raw nnU-Net v1 task into nnUNet_raw under a v2 name and rewrite its
    dataset.json to the v2 schema.

    Remember that old tasks were called TaskXXX_YYY and new ones are called
    DatasetXXX_YYY. Raises RuntimeError if the target dataset already exists.
    """
    target_base = join(nnUNet_raw, target_dataset_name)
    if isdir(target_base):
        raise RuntimeError(f'Target dataset name {target_dataset_name} already exists. Aborting... '
                           f'(we might break something). If you are sure you want to proceed, please manually '
                           f'delete {join(nnUNet_raw, target_dataset_name)}')
    maybe_mkdir_p(target_base)

    # imagesTr/labelsTr are mandatory; the remaining splits are copied only when present
    for split in ('imagesTr', 'labelsTr'):
        shutil.copytree(join(source_folder, split), join(target_base, split))
    for split in ('imagesTs', 'labelsTs', 'imagesVal', 'labelsVal'):
        if isdir(join(source_folder, split)):
            shutil.copytree(join(source_folder, split), join(target_base, split))

    shutil.copy(join(source_folder, 'dataset.json'), join(nnUNet_raw, target_dataset_name))

    # v2 schema: drop the v1-only fields, rename 'modality' -> 'channel_names',
    # invert labels from {id: name} to {name: id}, and record the file ending
    dataset_json = load_json(join(nnUNet_raw, target_dataset_name, 'dataset.json'))
    for obsolete_key in ('tensorImageSize', 'numTest', 'training', 'test'):
        del dataset_json[obsolete_key]
    dataset_json['channel_names'] = deepcopy(dataset_json['modality'])
    del dataset_json['modality']
    dataset_json['labels'] = {name: int(idx) for idx, name in dataset_json['labels'].items()}
    dataset_json['file_ending'] = ".nii.gz"
    save_json(dataset_json, join(nnUNet_raw, target_dataset_name, 'dataset.json'), sort_keys=False)
def convert_entry_point():
    """Command-line interface for convert()."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("input_folder", type=str,
                        help='Raw old nnUNet dataset. This must be the folder with imagesTr,labelsTr etc subfolders! '
                             'Please provide the PATH to the old Task, not just the task name. nnU-Net V2 does not '
                             'know where v1 tasks are.')
    parser.add_argument("output_dataset_name", type=str,
                        help='New dataset NAME (not path!). Must follow the DatasetXXX_NAME convention!')
    parsed = parser.parse_args()
    convert(parsed.input_folder, parsed.output_dataset_name)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset996_IntegrationTest_Hippocampus_regions_ignore.py
================================================
import SimpleITK as sitk
import shutil
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import isdir, join, load_json, save_json, nifti_files
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.paths import nnUNet_raw
from nnunetv2.utilities.label_handling.label_handling import LabelManager
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager
def sparsify_segmentation(seg: np.ndarray, label_manager: "LabelManager", percent_of_slices: float) -> np.ndarray:
    """Return a copy of `seg` where all but a random subset of slices are set to the ignore label.

    Along each axis, max(1, round(axis_len * percent_of_slices)) slices are sampled without
    replacement and copied from `seg`; everything else is filled with
    label_manager.ignore_label. Uses np.random, so seed beforehand for reproducibility.

    Generalized from the original hard-coded 3D version to arrays of any rank
    (behavior for 3D inputs is unchanged). The annotation is a forward-reference
    string so defining this function does not require LabelManager at def time.

    Raises AssertionError if the label manager has no ignore label.
    """
    assert label_manager.has_ignore_label, "This preprocessor only works with datasets that have an ignore label!"
    seg_new = np.ones_like(seg) * label_manager.ignore_label
    for axis, axis_len in enumerate(seg.shape):
        num_slices = max(1, round(axis_len * percent_of_slices))
        selected_slices = np.random.choice(axis_len, num_slices, replace=False)
        # build an index that selects the sampled slices along exactly this axis
        slicer = [slice(None)] * seg.ndim
        slicer[axis] = selected_slices
        seg_new[tuple(slicer)] = seg[tuple(slicer)]
    return seg_new
if __name__ == '__main__':
    # Integration-test dataset 996: a copy of Dataset004_Hippocampus converted to
    # region-based training ('hippocampus' = labels 1+2) with a sparsified ignore label.
    dataset_name = 'IntegrationTest_Hippocampus_regions_ignore'
    dataset_id = 996
    dataset_name = f"Dataset{dataset_id:03d}_{dataset_name}"

    try:
        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)
        if existing_dataset_name != dataset_name:
            raise FileExistsError(f"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. If "
                                  f"you intent to delete it, remember to also remove it in nnUNet_preprocessed and "
                                  f"nnUNet_results!")
    except RuntimeError:
        # no dataset with this id exists yet -> nothing to check
        pass

    # start from a clean slate: remove any previous copy of this dataset
    if isdir(join(nnUNet_raw, dataset_name)):
        shutil.rmtree(join(nnUNet_raw, dataset_name))

    source_dataset = maybe_convert_to_dataset_name(4)
    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))

    # additionally optimize entire hippocampus region, remove Posterior
    dj = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))
    dj['labels'] = {
        'background': 0,
        'hippocampus': (1, 2),
        'anterior': 1,
        'ignore': 3
    }
    dj['regions_class_order'] = (2, 1)
    save_json(dj, join(nnUNet_raw, dataset_name, 'dataset.json'), sort_keys=False)

    # now add ignore label to segmentation images
    np.random.seed(1234)  # fixed seed so the sparsified labels are reproducible
    lm = LabelManager(label_dict=dj['labels'], regions_class_order=dj.get('regions_class_order'))
    segs = nifti_files(join(nnUNet_raw, dataset_name, 'labelsTr'))
    for s in segs:
        seg_itk = sitk.ReadImage(s)
        seg_npy = sitk.GetArrayFromImage(seg_itk)
        # keep roughly 10% of slices in total (split over the three axes); rest -> ignore
        seg_npy = sparsify_segmentation(seg_npy, lm, 0.1 / 3)
        seg_itk_new = sitk.GetImageFromArray(seg_npy)
        seg_itk_new.CopyInformation(seg_itk)
        sitk.WriteImage(seg_itk_new, s)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset997_IntegrationTest_Hippocampus_regions.py
================================================
import shutil
from batchgenerators.utilities.file_and_folder_operations import isdir, join, load_json, save_json
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.paths import nnUNet_raw
if __name__ == '__main__':
    # Integration-test dataset 997: a copy of Dataset004_Hippocampus converted to
    # region-based training (no ignore label).
    dataset_name = 'IntegrationTest_Hippocampus_regions'
    dataset_id = 997
    dataset_name = f"Dataset{dataset_id:03d}_{dataset_name}"

    try:
        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)
        if existing_dataset_name != dataset_name:
            raise FileExistsError(
                f"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. If "
                f"you intent to delete it, remember to also remove it in nnUNet_preprocessed and "
                f"nnUNet_results!")
    except RuntimeError:
        # no dataset with this id exists yet -> nothing to check
        pass

    # start from a clean slate: remove any previous copy of this dataset
    if isdir(join(nnUNet_raw, dataset_name)):
        shutil.rmtree(join(nnUNet_raw, dataset_name))

    source_dataset = maybe_convert_to_dataset_name(4)
    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))

    # additionally optimize entire hippocampus region, remove Posterior
    dj = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))
    dj['labels'] = {
        'background': 0,
        'hippocampus': (1, 2),
        'anterior': 1
    }
    dj['regions_class_order'] = (2, 1)
    save_json(dj, join(nnUNet_raw, dataset_name, 'dataset.json'), sort_keys=False)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset998_IntegrationTest_Hippocampus_ignore.py
================================================
import shutil
from batchgenerators.utilities.file_and_folder_operations import isdir, join, load_json, save_json
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.paths import nnUNet_raw
if __name__ == '__main__':
    # Integration-test dataset 998: a copy of Dataset004_Hippocampus where label 2
    # (Posterior) is repurposed as the ignore label.
    dataset_name = 'IntegrationTest_Hippocampus_ignore'
    dataset_id = 998
    dataset_name = f"Dataset{dataset_id:03d}_{dataset_name}"

    try:
        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)
        if existing_dataset_name != dataset_name:
            raise FileExistsError(f"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. If "
                                  f"you intent to delete it, remember to also remove it in nnUNet_preprocessed and "
                                  f"nnUNet_results!")
    except RuntimeError:
        # no dataset with this id exists yet -> nothing to check
        pass

    # start from a clean slate: remove any previous copy of this dataset
    if isdir(join(nnUNet_raw, dataset_name)):
        shutil.rmtree(join(nnUNet_raw, dataset_name))

    source_dataset = maybe_convert_to_dataset_name(4)
    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))

    # set class 2 to ignore label
    dj = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))
    dj['labels']['ignore'] = 2
    del dj['labels']['Posterior']
    save_json(dj, join(nnUNet_raw, dataset_name, 'dataset.json'), sort_keys=False)
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset999_IntegrationTest_Hippocampus.py
================================================
import shutil
from batchgenerators.utilities.file_and_folder_operations import isdir, join
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.paths import nnUNet_raw
if __name__ == '__main__':
    # Integration-test dataset 999: an unmodified copy of Dataset004_Hippocampus
    # under a test-reserved id.
    dataset_name = 'IntegrationTest_Hippocampus'
    dataset_id = 999
    dataset_name = f"Dataset{dataset_id:03d}_{dataset_name}"

    try:
        existing_dataset_name = maybe_convert_to_dataset_name(dataset_id)
        if existing_dataset_name != dataset_name:
            raise FileExistsError(f"A different dataset with id {dataset_id} already exists :-(: {existing_dataset_name}. If "
                                  f"you intent to delete it, remember to also remove it in nnUNet_preprocessed and "
                                  f"nnUNet_results!")
    except RuntimeError:
        # no dataset with this id exists yet -> nothing to check
        pass

    # start from a clean slate: remove any previous copy of this dataset
    if isdir(join(nnUNet_raw, dataset_name)):
        shutil.rmtree(join(nnUNet_raw, dataset_name))

    source_dataset = maybe_convert_to_dataset_name(4)
    shutil.copytree(join(nnUNet_raw, source_dataset), join(nnUNet_raw, dataset_name))
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/datasets_for_integration_tests/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/dataset_conversion/generate_dataset_json.py
================================================
from typing import Tuple
from batchgenerators.utilities.file_and_folder_operations import save_json, join
def generate_dataset_json(output_folder: str,
                          channel_names: dict,
                          labels: dict,
                          num_training_cases: int,
                          file_ending: str,
                          regions_class_order: Tuple[int, ...] = None,
                          dataset_name: str = None, reference: str = None, release: str = None, license: str = None,
                          description: str = None,
                          overwrite_image_reader_writer: str = None, **kwargs):
    """
    Generates a dataset.json file in the output folder

    channel_names:
        Channel names must map the index to the name of the channel, example:
        {
            0: 'T1',
            1: 'CT'
        }
        Note that the channel names may influence the normalization scheme!! Learn more in the documentation.

    labels:
        This will tell nnU-Net what labels to expect. Important: This will also determine whether you use region-based training or not.
        Example regular labels:
        {
            'background': 0,
            'left atrium': 1,
            'some other label': 2
        }
        Example region-based training:
        {
            'background': 0,
            'whole tumor': (1, 2, 3),
            'tumor core': (2, 3),
            'enhancing tumor': 3
        }

        Remember that nnU-Net expects consecutive values for labels! nnU-Net also expects 0 to be background!

    num_training_cases: is used to double check all cases are there!

    file_ending: needed for finding the files correctly. IMPORTANT! File endings must match between images and
    segmentations!

    dataset_name, reference, release, license, description: self-explanatory and not used by nnU-Net. Just for
    completeness and as a reminder that these would be great!

    overwrite_image_reader_writer: If you need a special IO class for your dataset you can derive it from
    BaseReaderWriter, place it into nnunet.imageio and reference it here by name

    kwargs: whatever you put here will be placed in the dataset.json as well

    The input dicts (channel_names, labels) are left untouched; normalized copies are written.
    """
    # any label mapped to a tuple/list of more than one value triggers region-based training
    has_regions: bool = any([isinstance(i, (tuple, list)) and len(i) > 1 for i in labels.values()])
    if has_regions:
        assert regions_class_order is not None, f"You have defined regions but regions_class_order is not set. " \
                                                f"You need that."
    # channel names need strings as keys.
    # BUGFIX: normalize into a new dict instead of mutating the caller's dict in place
    # (the original deleted and re-added keys on the argument itself, which surprised
    # callers that reuse the dict after this call).
    channel_names = {str(k): v for k, v in channel_names.items()}

    # labels need ints as values (tuples of ints for region-based labels); also on a copy
    labels = {
        name: tuple(int(i) for i in value) if isinstance(value, (tuple, list)) else int(value)
        for name, value in labels.items()
    }

    dataset_json = {
        'channel_names': channel_names,  # previously this was called 'modality'. I didn't like this so this is
        # channel_names now. Live with it.
        'labels': labels,
        'numTraining': num_training_cases,
        'file_ending': file_ending,
    }

    # optional metadata fields are only written when provided
    if dataset_name is not None:
        dataset_json['name'] = dataset_name
    if reference is not None:
        dataset_json['reference'] = reference
    if release is not None:
        dataset_json['release'] = release
    if license is not None:
        dataset_json['licence'] = license  # note: key is deliberately spelled 'licence'
    if description is not None:
        dataset_json['description'] = description
    if overwrite_image_reader_writer is not None:
        dataset_json['overwrite_image_reader_writer'] = overwrite_image_reader_writer
    if regions_class_order is not None:
        dataset_json['regions_class_order'] = regions_class_order

    dataset_json.update(kwargs)
    save_json(dataset_json, join(output_folder, 'dataset.json'), sort_keys=False)
================================================
FILE: Finetune/nnUNet/nnunetv2/ensembling/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/ensembling/ensemble.py
================================================
import argparse
import multiprocessing
import shutil
from copy import deepcopy
from multiprocessing import Pool
from typing import List, Union, Tuple
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import load_json, join, subfiles, \
maybe_mkdir_p, isdir, save_pickle, load_pickle, isfile
from nnunetv2.configuration import default_num_processes
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from nnunetv2.utilities.label_handling.label_handling import LabelManager
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager
def average_probabilities(list_of_files: List[str]) -> np.ndarray:
    """Average the 'probabilities' arrays stored in the given .npz files.

    The first array is promoted to float32 (if it isn't already) before the
    remaining arrays are accumulated, so the average is computed in at least
    float32 precision. Raises AssertionError if the list is empty.
    """
    assert len(list_of_files), 'At least one file must be given in list_of_files'
    first, *rest = list_of_files
    acc = np.load(first)['probabilities']
    # maybe increase precision to prevent rounding errors
    if acc.dtype != np.float32:
        acc = acc.astype(np.float32)
    for fname in rest:
        acc += np.load(fname)['probabilities']
    acc /= len(list_of_files)
    return acc
def merge_files(list_of_files,
                output_filename_truncated: str,
                output_file_ending: str,
                image_reader_writer: BaseReaderWriter,
                label_manager: LabelManager,
                save_probabilities: bool = False):
    """Ensemble a single case: average the given .npz predictions, convert to a
    segmentation and export it; optionally also export the averaged probabilities.
    """
    # load the pkl file associated with the first file in list_of_files
    properties = load_pickle(list_of_files[0][:-4] + '.pkl')
    # load and average predictions
    probabilities = average_probabilities(list_of_files)
    # NOTE(review): the inputs are already (averaged) probabilities; confirm that this
    # fork's convert_logits_to_segmentation is the intended conversion here.
    segmentation = label_manager.convert_logits_to_segmentation(probabilities)
    image_reader_writer.write_seg(segmentation, output_filename_truncated + output_file_ending, properties)
    if save_probabilities:
        np.savez_compressed(output_filename_truncated + '.npz', probabilities=probabilities)
        # BUGFIX: store the properties dict (spatial metadata), not the probability array,
        # in the companion .pkl — this function itself reads the .pkl as 'properties'
        # (see above), so writing probabilities there broke re-ensembling of the output.
        save_pickle(properties, output_filename_truncated + '.pkl')
def ensemble_folders(list_of_input_folders: List[str],
                     output_folder: str,
                     save_merged_probabilities: bool = False,
                     num_processes: int = default_num_processes,
                     dataset_json_file_or_dict: str = None,
                     plans_json_file_or_dict: str = None):
    """we need too much shit for this function. Problem is that we now have to support region-based training plus
    multiple input/output formats so there isn't really a way around this.

    If plans and dataset json are not specified, we assume each of the folders has a corresponding plans.json
    and/or dataset.json in it. These are usually copied into those folders by nnU-Net during prediction.
    We just pick the dataset.json and plans.json from the first of the folders and we DONT check whether the 5
    folders contain the same plans etc! This can be a feature if results from different datasets are to be merged (only
    works if label dict in dataset.json is the same between these datasets!!!)"""
    # resolve dataset.json: explicit argument (path or already-loaded dict) wins,
    # otherwise fall back to the copy in the first input folder
    if dataset_json_file_or_dict is not None:
        if isinstance(dataset_json_file_or_dict, str):
            dataset_json = load_json(dataset_json_file_or_dict)
        else:
            dataset_json = dataset_json_file_or_dict
    else:
        dataset_json = load_json(join(list_of_input_folders[0], 'dataset.json'))

    # same resolution logic for plans.json
    if plans_json_file_or_dict is not None:
        if isinstance(plans_json_file_or_dict, str):
            plans = load_json(plans_json_file_or_dict)
        else:
            plans = plans_json_file_or_dict
    else:
        plans = load_json(join(list_of_input_folders[0], 'plans.json'))
    plans_manager = PlansManager(plans)

    # now collect the files in each of the folders and enforce that all files are present in all folders
    files_per_folder = [set(subfiles(i, suffix='.npz', join=False)) for i in list_of_input_folders]
    # first build a set with all files
    s = deepcopy(files_per_folder[0])
    for f in files_per_folder[1:]:
        s.update(f)
    for f in files_per_folder:
        assert len(s.difference(f)) == 0, "Not all folders contain the same files for ensembling. Please only " \
                                          "provide folders that contain the predictions"
    # one entry per case: the list of that case's .npz across all input folders,
    # plus the matching output path with the '.npz' suffix stripped
    lists_of_lists_of_files = [[join(fl, fi) for fl in list_of_input_folders] for fi in s]
    output_files_truncated = [join(output_folder, fi[:-4]) for fi in s]

    image_reader_writer = plans_manager.image_reader_writer_class()
    label_manager = plans_manager.get_label_manager(dataset_json)
    maybe_mkdir_p(output_folder)
    # carry the dataset.json along so the output folder is self-describing
    shutil.copy(join(list_of_input_folders[0], 'dataset.json'), output_folder)

    # ensemble each case in its own worker; 'spawn' avoids fork-related issues
    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        num_preds = len(s)
        _ = pool.starmap(
            merge_files,
            zip(
                lists_of_lists_of_files,
                output_files_truncated,
                [dataset_json['file_ending']] * num_preds,
                [image_reader_writer] * num_preds,
                [label_manager] * num_preds,
                [save_merged_probabilities] * num_preds
            )
        )
def entry_point_ensemble_folders():
    """Command-line interface for ensemble_folders."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', nargs='+', type=str, required=True,
                        help='list of input folders')
    parser.add_argument('-o', type=str, required=True, help='output folder')
    parser.add_argument('-np', type=int, required=False, default=default_num_processes,
                        help=f"Numbers of processes used for ensembling. Default: {default_num_processes}")
    parser.add_argument('--save_npz', action='store_true', required=False, help='Set this flag to store output '
                                                                                'probabilities in separate .npz files')
    cli = parser.parse_args()
    ensemble_folders(cli.i, cli.o, cli.save_npz, cli.np)
def ensemble_crossvalidations(list_of_trained_model_folders: List[str],
                              output_folder: str,
                              folds: Union[Tuple[int, ...], List[int]] = (0, 1, 2, 3, 4),
                              num_processes: int = default_num_processes,
                              overwrite: bool = True) -> None:
    """
    Ensembles the cross-validation (per-fold validation) predictions of several trained models.

    Feature: different configurations can now have different splits

    :param list_of_trained_model_folders: trained model output folders; each must contain fold_X/validation
           subfolders holding .npz prediction files (run validation with --npz)
    :param output_folder: destination for the merged predictions; plans.json and dataset.json are copied here too
    :param folds: folds that must be present in every model folder
    :param num_processes: number of worker processes used for merging
    :param overwrite: if False, cases whose ensembled output file already exists are skipped
    :raises RuntimeError: if a fold's validation folder is missing, contains no .npz files, or if the models do
            not all cover the same set of cases
    """
    dataset_json = load_json(join(list_of_trained_model_folders[0], 'dataset.json'))
    plans_manager = PlansManager(join(list_of_trained_model_folders[0], 'plans.json'))

    # first collect all unique filenames
    files_per_folder = {}
    unique_filenames = set()
    for tr in list_of_trained_model_folders:
        files_per_folder[tr] = {}
        for f in folds:
            if not isdir(join(tr, f'fold_{f}', 'validation')):
                raise RuntimeError(f'Expected model output directory does not exist. You must train all requested '
                                   f'folds of the specified model.\nModel: {tr}\nFold: {f}')
            files_here = subfiles(join(tr, f'fold_{f}', 'validation'), suffix='.npz', join=False)
            if len(files_here) == 0:
                raise RuntimeError(f"No .npz files found in folder {join(tr, f'fold_{f}', 'validation')}. Rerun your "
                                   f"validation with the --npz flag. Use nnUNetv2_train [...] --val --npz.")
            # fix: reuse the directory listing obtained above instead of scanning the filesystem a second time
            files_per_folder[tr][f] = files_here
            unique_filenames.update(files_here)

    # verify that all trained_model_folders have all predictions
    ok = True
    for tr, fi in files_per_folder.items():
        all_files_here = set()
        for f in folds:
            all_files_here.update(fi[f])
        diff = unique_filenames.difference(all_files_here)
        if len(diff) > 0:
            ok = False
            print(f'model {tr} does not seem to contain all predictions. Missing: {diff}')
    if not ok:
        raise RuntimeError('There were missing files, see print statements above this one')

    # now we need to collect where these files are
    file_mapping = []
    for tr in list_of_trained_model_folders:
        file_mapping.append({})
        for f in folds:
            for fi in files_per_folder[tr][f]:
                # check for duplicates
                assert fi not in file_mapping[-1].keys(), f"Duplicate detected. Case {fi} is present in more than " \
                                                          f"one fold of model {tr}."
                file_mapping[-1][fi] = join(tr, f'fold_{f}', 'validation', fi)

    lists_of_lists_of_files = [[fm[i] for fm in file_mapping] for i in unique_filenames]
    output_files_truncated = [join(output_folder, fi[:-4]) for fi in unique_filenames]

    image_reader_writer = plans_manager.image_reader_writer_class()
    maybe_mkdir_p(output_folder)
    label_manager = plans_manager.get_label_manager(dataset_json)

    if not overwrite:
        # drop cases whose ensembled output already exists
        tmp = [isfile(i + dataset_json['file_ending']) for i in output_files_truncated]
        lists_of_lists_of_files = [lists_of_lists_of_files[i] for i in range(len(tmp)) if not tmp[i]]
        output_files_truncated = [output_files_truncated[i] for i in range(len(tmp)) if not tmp[i]]

    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        num_preds = len(lists_of_lists_of_files)
        _ = pool.starmap(
            merge_files,
            zip(
                lists_of_lists_of_files,
                output_files_truncated,
                [dataset_json['file_ending']] * num_preds,
                [image_reader_writer] * num_preds,
                [label_manager] * num_preds,
                [False] * num_preds
            )
        )

    shutil.copy(join(list_of_trained_model_folders[0], 'plans.json'), join(output_folder, 'plans.json'))
    shutil.copy(join(list_of_trained_model_folders[0], 'dataset.json'), join(output_folder, 'dataset.json'))
================================================
FILE: Finetune/nnUNet/nnunetv2/evaluation/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/evaluation/accumulate_cv_results.py
================================================
import shutil
from typing import Union, List, Tuple
from batchgenerators.utilities.file_and_folder_operations import load_json, join, isdir, maybe_mkdir_p, subfiles, isfile
from nnunetv2.configuration import default_num_processes
from nnunetv2.evaluation.evaluate_predictions import compute_metrics_on_folder
from nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager
def accumulate_cv_results(trained_model_folder,
                          merged_output_folder: str,
                          folds: Union[List[int], Tuple[int, ...]],
                          num_processes: int = default_num_processes,
                          overwrite: bool = True):
    """
    There are a lot of things that can get fucked up, so the simplest way to deal with potential problems is to
    collect the cv results into a separate folder and then evaluate them again. No messing with summary_json files!
    """
    # start from a clean output folder when overwriting
    if overwrite and isdir(merged_output_folder):
        shutil.rmtree(merged_output_folder)
    maybe_mkdir_p(merged_output_folder)

    dataset_json = load_json(join(trained_model_folder, 'dataset.json'))
    plans_manager = PlansManager(join(trained_model_folder, 'plans.json'))
    rw = plans_manager.image_reader_writer_class()
    shutil.copy(join(trained_model_folder, 'dataset.json'), join(merged_output_folder, 'dataset.json'))
    shutil.copy(join(trained_model_folder, 'plans.json'), join(merged_output_folder, 'plans.json'))

    copied_anything = False
    for fold in folds:
        validation_dir = join(trained_model_folder, f'fold_{fold}', 'validation')
        if not isdir(validation_dir):
            raise RuntimeError(f"fold {fold} of model {trained_model_folder} is missing. Please train it!")
        for case_file in subfiles(validation_dir, suffix=dataset_json['file_ending'], join=False):
            target = join(merged_output_folder, case_file)
            # after a fresh rmtree an existing target can only mean two folds predicted the same case
            if overwrite and isfile(target):
                raise RuntimeError(f'More than one of your folds has a prediction for case {case_file}')
            if overwrite or not isfile(target):
                shutil.copy(join(validation_dir, case_file), target)
                copied_anything = True

    # (re-)evaluate whenever something changed or no summary exists yet
    if copied_anything or not isfile(join(merged_output_folder, 'summary.json')):
        label_manager = plans_manager.get_label_manager(dataset_json)
        gt_folder = join(nnUNet_raw, plans_manager.dataset_name, 'labelsTr')
        if not isdir(gt_folder):
            gt_folder = join(nnUNet_preprocessed, plans_manager.dataset_name, 'gt_segmentations')
        compute_metrics_on_folder(gt_folder,
                                  merged_output_folder,
                                  join(merged_output_folder, 'summary.json'),
                                  rw,
                                  dataset_json['file_ending'],
                                  label_manager.foreground_regions if label_manager.has_regions else
                                  label_manager.foreground_labels,
                                  label_manager.ignore_label,
                                  num_processes)
================================================
FILE: Finetune/nnUNet/nnunetv2/evaluation/evaluate_predictions.py
================================================
import multiprocessing
import os
from copy import deepcopy
from multiprocessing import Pool
from typing import Tuple, List, Union, Optional
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import subfiles, join, save_json, load_json, \
isfile
from nnunetv2.configuration import default_num_processes
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json, \
determine_reader_writer_from_file_ending
from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO
# the Evaluator class of the previous nnU-Net was great and all but man was it overengineered. Keep it simple
from nnunetv2.utilities.json_export import recursive_fix_for_json_export
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager
def label_or_region_to_key(label_or_region: Union[int, Tuple[int]]):
    """Turn a label (int) or region (tuple of ints) into a string key usable in json."""
    return f'{label_or_region}'
def key_to_label_or_region(key: str):
    """Inverse of label_or_region_to_key: '3' -> 3, '(1, 2)' -> (1, 2)."""
    try:
        return int(key)
    except ValueError:
        # region keys look like '(1, 2)': strip the parentheses and parse each component
        stripped = key.replace('(', '').replace(')', '')
        return tuple(int(part) for part in stripped.split(',') if len(part) > 0)
def save_summary_json(results: dict, output_file: str):
    """
    stupid json does not support tuples as keys (why does it have to be so shitty) so we need to convert that shit
    ourselves
    """
    converted = deepcopy(results)
    # stringify the label/region keys of the mean metrics
    converted['mean'] = {label_or_region_to_key(k): v for k, v in converted['mean'].items()}
    # ... and of every per-case metrics dict
    for case_entry in converted["metric_per_case"]:
        case_entry['metrics'] = {label_or_region_to_key(k): v for k, v in case_entry['metrics'].items()}
    # sort_keys=True will make foreground_mean the first entry and thus easy to spot
    save_json(converted, output_file, sort_keys=True)
def load_summary_json(filename: str):
    """Load a summary json and convert the stringified label/region keys back (inverse of save_summary_json)."""
    results = load_json(filename)
    # restore int/tuple keys in the mean metrics
    results['mean'] = {key_to_label_or_region(k): v for k, v in results['mean'].items()}
    # ... and in every per-case metrics dict
    for case_entry in results["metric_per_case"]:
        case_entry['metrics'] = {key_to_label_or_region(k): v for k, v in case_entry['metrics'].items()}
    return results
def labels_to_list_of_regions(labels: List[int]):
    """Wrap each label into a one-element tuple so it can be treated like a region."""
    return [tuple([lbl]) for lbl in labels]
def region_or_label_to_mask(segmentation: np.ndarray, region_or_label: Union[int, Tuple[int, ...]]) -> np.ndarray:
    """Return a boolean mask that is True where segmentation holds the label (or any label of the region)."""
    if np.isscalar(region_or_label):
        return segmentation == region_or_label
    # region case: mask is the union over all member labels
    return np.isin(segmentation, list(region_or_label))
def compute_tp_fp_fn_tn(mask_ref: np.ndarray, mask_pred: np.ndarray, ignore_mask: np.ndarray = None):
    """Count true/false positives/negatives of mask_pred vs mask_ref, excluding voxels in ignore_mask."""
    # voxels to include in the counts: everything unless an ignore mask is given
    use_mask = np.ones_like(mask_ref, dtype=bool) if ignore_mask is None else ~ignore_mask
    tp = np.sum(mask_ref & mask_pred & use_mask)
    fp = np.sum(~mask_ref & mask_pred & use_mask)
    fn = np.sum(mask_ref & ~mask_pred & use_mask)
    tn = np.sum(~mask_ref & ~mask_pred & use_mask)
    return tp, fp, fn, tn
def compute_metrics(reference_file: str, prediction_file: str, image_reader_writer: BaseReaderWriter,
                    labels_or_regions: Union[List[int], List[Union[int, Tuple[int, ...]]]],
                    ignore_label: int = None) -> dict:
    """Compute Dice, IoU and confusion counts for one reference/prediction segmentation pair."""
    # load images (the accompanying properties dicts are not needed here)
    seg_ref, _ = image_reader_writer.read_seg(reference_file)
    seg_pred, _ = image_reader_writer.read_seg(prediction_file)

    ignore_mask = None if ignore_label is None else seg_ref == ignore_label

    results = {'reference_file': reference_file,
               'prediction_file': prediction_file,
               'metrics': {}}
    for region in labels_or_regions:
        mask_ref = region_or_label_to_mask(seg_ref, region)
        mask_pred = region_or_label_to_mask(seg_pred, region)
        tp, fp, fn, tn = compute_tp_fp_fn_tn(mask_ref, mask_pred, ignore_mask)
        union = tp + fp + fn
        # Dice/IoU are undefined when the label is absent from both reference and prediction
        metrics = {
            'Dice': np.nan if union == 0 else 2 * tp / (2 * tp + fp + fn),
            'IoU': np.nan if union == 0 else tp / union,
            'FP': fp,
            'TP': tp,
            'FN': fn,
            'TN': tn,
            'n_pred': fp + tp,
            'n_ref': fn + tp,
        }
        results['metrics'][region] = metrics
    return results
def compute_metrics_on_folder(folder_ref: str, folder_pred: str, output_file: str,
                              image_reader_writer: BaseReaderWriter,
                              file_ending: str,
                              regions_or_labels: Union[List[int], List[Union[int, Tuple[int, ...]]]],
                              ignore_label: int = None,
                              num_processes: int = default_num_processes,
                              chill: bool = True) -> dict:
    """
    Evaluates all predictions in folder_pred against the references in folder_ref and aggregates per-case
    metrics into per-label means and a foreground mean.

    output_file must end with .json; can be None

    :param chill: if False, require that every reference case has a prediction in folder_pred
    :return: dict with keys 'metric_per_case', 'mean' and 'foreground_mean'
    """
    if output_file is not None:
        assert output_file.endswith('.json'), 'output_file should end with .json'
    files_pred = subfiles(folder_pred, suffix=file_ending, join=False)
    files_ref = subfiles(folder_ref, suffix=file_ending, join=False)
    if not chill:
        present = [isfile(join(folder_pred, i)) for i in files_ref]
        # fix: the original message was inverted. 'present' checks that every reference case has a prediction,
        # so a failure means folder_pred is missing files that exist in folder_ref.
        assert all(present), "Not all files in folder_ref exist in folder_pred"
    # only cases that were actually predicted are evaluated; the reference paths are paired to the prediction list
    files_ref = [join(folder_ref, i) for i in files_pred]
    files_pred = [join(folder_pred, i) for i in files_pred]
    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        results = pool.starmap(
            compute_metrics,
            list(zip(files_ref, files_pred, [image_reader_writer] * len(files_pred), [regions_or_labels] * len(files_pred),
                     [ignore_label] * len(files_pred)))
        )

    # mean metric per class (nanmean skips cases where the label was absent from both ref and pred)
    metric_list = list(results[0]['metrics'][regions_or_labels[0]].keys())
    means = {}
    for r in regions_or_labels:
        means[r] = {}
        for m in metric_list:
            means[r][m] = np.nanmean([i['metrics'][r][m] for i in results])

    # foreground mean: average over all labels/regions except background (key 0 / '0')
    foreground_mean = {}
    for m in metric_list:
        values = []
        for k in means.keys():
            if k == 0 or k == '0':
                continue
            values.append(means[k][m])
        foreground_mean[m] = np.mean(values)

    [recursive_fix_for_json_export(i) for i in results]
    recursive_fix_for_json_export(means)
    recursive_fix_for_json_export(foreground_mean)
    result = {'metric_per_case': results, 'mean': means, 'foreground_mean': foreground_mean}
    if output_file is not None:
        save_summary_json(result, output_file)
    return result
def compute_metrics_on_folder2(folder_ref: str, folder_pred: str, dataset_json_file: str, plans_file: str,
                               output_file: str = None,
                               num_processes: int = default_num_processes,
                               chill: bool = False):
    """Evaluate folder_pred against folder_ref, deriving file ending, reader/writer and labels from
    dataset.json and plans.json."""
    dataset_json = load_json(dataset_json_file)
    file_ending = dataset_json['file_ending']
    # any reference file will do to determine the reader/writer class
    example_file = subfiles(folder_ref, suffix=file_ending, join=True)[0]
    reader_writer = determine_reader_writer_from_dataset_json(dataset_json, example_file)()
    # default the summary location to the prediction folder
    output_file = join(folder_pred, 'summary.json') if output_file is None else output_file
    label_manager = PlansManager(plans_file).get_label_manager(dataset_json)
    compute_metrics_on_folder(folder_ref, folder_pred, output_file, reader_writer, file_ending,
                              label_manager.foreground_regions if label_manager.has_regions
                              else label_manager.foreground_labels,
                              label_manager.ignore_label,
                              num_processes, chill=chill)
def compute_metrics_on_folder_simple(folder_ref: str, folder_pred: str, labels: Union[Tuple[int, ...], List[int]],
                                     output_file: str = None,
                                     num_processes: int = default_num_processes,
                                     ignore_label: int = None,
                                     chill: bool = False):
    """Evaluate folder_pred against folder_ref when only a plain label list is known (no dataset/plans json).
    File ending and reader/writer are inferred from the first reference file."""
    example_file = subfiles(folder_ref, join=True)[0]
    file_ending = os.path.splitext(example_file)[-1]
    reader_writer = determine_reader_writer_from_file_ending(file_ending, example_file,
                                                             allow_nonmatching_filename=True,
                                                             verbose=False)()
    # default the summary location to the prediction folder
    output_file = join(folder_pred, 'summary.json') if output_file is None else output_file
    compute_metrics_on_folder(folder_ref, folder_pred, output_file, reader_writer, file_ending,
                              labels, ignore_label=ignore_label, num_processes=num_processes, chill=chill)
def evaluate_folder_entry_point():
    """CLI: evaluate predicted segmentations against ground truth using dataset.json and plans.json."""
    import argparse
    cli = argparse.ArgumentParser()
    cli.add_argument('gt_folder', type=str, help='folder with gt segmentations')
    cli.add_argument('pred_folder', type=str, help='folder with predicted segmentations')
    cli.add_argument('-djfile', type=str, required=True,
                     help='dataset.json file')
    cli.add_argument('-pfile', type=str, required=True,
                     help='plans.json file')
    cli.add_argument('-o', type=str, required=False, default=None,
                     help='Output file. Optional. Default: pred_folder/summary.json')
    cli.add_argument('-np', type=int, required=False, default=default_num_processes,
                     help=f'number of processes used. Optional. Default: {default_num_processes}')
    cli.add_argument('--chill', action='store_true',
                     help='dont crash if folder_pred does not have all files that are present in folder_gt')
    parsed = cli.parse_args()
    compute_metrics_on_folder2(parsed.gt_folder, parsed.pred_folder, parsed.djfile, parsed.pfile, parsed.o,
                               parsed.np, chill=parsed.chill)
def evaluate_simple_entry_point():
    """CLI: evaluate predicted segmentations against ground truth given an explicit label list."""
    import argparse
    cli = argparse.ArgumentParser()
    cli.add_argument('gt_folder', type=str, help='folder with gt segmentations')
    cli.add_argument('pred_folder', type=str, help='folder with predicted segmentations')
    cli.add_argument('-l', type=int, nargs='+', required=True,
                     help='list of labels')
    cli.add_argument('-il', type=int, required=False, default=None,
                     help='ignore label')
    cli.add_argument('-o', type=str, required=False, default=None,
                     help='Output file. Optional. Default: pred_folder/summary.json')
    cli.add_argument('-np', type=int, required=False, default=default_num_processes,
                     help=f'number of processes used. Optional. Default: {default_num_processes}')
    cli.add_argument('--chill', action='store_true',
                     help='dont crash if folder_pred does not have all files that are present in folder_gt')
    parsed = cli.parse_args()
    compute_metrics_on_folder_simple(parsed.gt_folder, parsed.pred_folder, parsed.l, parsed.o, parsed.np,
                                     parsed.il, chill=parsed.chill)
if __name__ == '__main__':
    # ad-hoc debugging entry point with hard-coded local paths; the supported interfaces are the
    # evaluate_folder_entry_point / evaluate_simple_entry_point CLI functions defined above
    folder_ref = '/media/fabian/data/nnUNet_raw/Dataset004_Hippocampus/labelsTr'
    folder_pred = '/home/fabian/results/nnUNet_remake/Dataset004_Hippocampus/nnUNetModule__nnUNetPlans__3d_fullres/fold_0/validation'
    output_file = '/home/fabian/results/nnUNet_remake/Dataset004_Hippocampus/nnUNetModule__nnUNetPlans__3d_fullres/fold_0/validation/summary.json'
    image_reader_writer = SimpleITKIO()
    file_ending = '.nii.gz'
    regions = labels_to_list_of_regions([1, 2])
    ignore_label = None
    num_processes = 12
    compute_metrics_on_folder(folder_ref, folder_pred, output_file, image_reader_writer, file_ending, regions, ignore_label,
                              num_processes)
================================================
FILE: Finetune/nnUNet/nnunetv2/evaluation/find_best_configuration.py
================================================
import argparse
import os.path
from copy import deepcopy
from typing import Union, List, Tuple
from batchgenerators.utilities.file_and_folder_operations import load_json, join, isdir, save_json
from nnunetv2.configuration import default_num_processes
from nnunetv2.ensembling.ensemble import ensemble_crossvalidations
from nnunetv2.evaluation.accumulate_cv_results import accumulate_cv_results
from nnunetv2.evaluation.evaluate_predictions import compute_metrics_on_folder, load_summary_json
from nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw, nnUNet_results
from nnunetv2.postprocessing.remove_connected_components import determine_postprocessing
from nnunetv2.utilities.file_path_utilities import maybe_convert_to_dataset_name, get_output_folder, \
convert_identifier_to_trainer_plans_config, get_ensemble_name, folds_tuple_to_string
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager
# default candidate set for find_best_configuration: the four standard nnU-Net configurations,
# all with the default plans identifier and trainer
default_trained_models = tuple([
    {'plans': 'nnUNetPlans', 'configuration': '2d', 'trainer': 'nnUNetTrainer'},
    {'plans': 'nnUNetPlans', 'configuration': '3d_fullres', 'trainer': 'nnUNetTrainer'},
    {'plans': 'nnUNetPlans', 'configuration': '3d_lowres', 'trainer': 'nnUNetTrainer'},
    {'plans': 'nnUNetPlans', 'configuration': '3d_cascade_fullres', 'trainer': 'nnUNetTrainer'},
])
def filter_available_models(model_dict: Union[List[dict], Tuple[dict, ...]], dataset_name_or_id: Union[str, int]):
    """Keep only candidate models whose configuration exists in their plans file. A missing trained
    output folder is a hard error."""
    usable = []
    for model in model_dict:
        plans_path = join(nnUNet_preprocessed, maybe_convert_to_dataset_name(dataset_name_or_id),
                          model['plans'] + '.json')
        plans_manager = PlansManager(plans_path)

        # check if configuration exists
        # 3d_cascade_fullres and 3d_lowres do not exist for each dataset so we allow them to be absent IF they are not
        # specified in the plans file
        if model['configuration'] not in plans_manager.available_configurations:
            print(f"Configuration {model['configuration']} not found in plans {model['plans']}.\n"
                  f"Inferred plans file: {plans_path}.")
            continue

        # check if trained model output folder exists. This is a requirement. No mercy here.
        expected_output_folder = get_output_folder(dataset_name_or_id, model['trainer'], model['plans'],
                                                   model['configuration'], fold=None)
        if not isdir(expected_output_folder):
            raise RuntimeError(f"Trained model {model} does not have an output folder. "
                               f"Expected: {expected_output_folder}. Please run the training for this model! (don't forget "
                               f"the --npz flag if you want to ensemble multiple configurations)")

        usable.append(model)
    return usable
def generate_inference_command(dataset_name_or_id: Union[int, str], configuration_name: str,
                               plans_identifier: str = 'nnUNetPlans', trainer_name: str = 'nnUNetTrainer',
                               folds: Union[List[int], Tuple[int, ...]] = (0, 1, 2, 3, 4),
                               folder_with_segs_from_prev_stage: str = None,
                               input_folder: str = 'INPUT_FOLDER',
                               output_folder: str = 'OUTPUT_FOLDER',
                               save_npz: bool = False):
    """
    Builds the nnUNetv2_predict command line for the given model. For cascade configurations the command
    for the required previous stage is prepended and its output folder is passed via -prev_stage_predictions.

    :return: the command string (possibly multiple newline-separated commands for cascades)
    """
    fold_str = ''
    for f in folds:
        fold_str += f' {f}'

    predict_command = ''
    trained_model_folder = get_output_folder(dataset_name_or_id, trainer_name, plans_identifier, configuration_name, fold=None)
    plans_manager = PlansManager(join(trained_model_folder, 'plans.json'))
    configuration_manager = plans_manager.get_configuration(configuration_name)
    # fix: the original checked "'previous_stage' in plans_manager.available_configurations", which compares
    # against configuration *names* and could never be true, so the previous-stage command was never emitted.
    # The correct condition is whether this configuration declares a previous stage.
    # NOTE(review): relies on ConfigurationManager.previous_stage_name returning None when absent — confirm
    # against nnunetv2/utilities/plans_handling/plans_handler.py
    if configuration_manager.previous_stage_name is not None:
        prev_stage = configuration_manager.previous_stage_name
        predict_command += generate_inference_command(dataset_name_or_id, prev_stage, plans_identifier, trainer_name,
                                                      folds, None, output_folder='OUTPUT_FOLDER_PREV_STAGE') + '\n'
        folder_with_segs_from_prev_stage = 'OUTPUT_FOLDER_PREV_STAGE'

    # fix: use += here; a plain assignment would discard the previous-stage command built above
    predict_command += f'nnUNetv2_predict -d {dataset_name_or_id} -i {input_folder} -o {output_folder} -f {fold_str} ' \
                       f'-tr {trainer_name} -c {configuration_name} -p {plans_identifier}'
    if folder_with_segs_from_prev_stage is not None:
        predict_command += f' -prev_stage_predictions {folder_with_segs_from_prev_stage}'
    if save_npz:
        predict_command += ' --save_probabilities'
    return predict_command
def find_best_configuration(dataset_name_or_id,
                            allowed_trained_models: Union[List[dict], Tuple[dict, ...]] = default_trained_models,
                            allow_ensembling: bool = True,
                            num_processes: int = default_num_processes,
                            overwrite: bool = True,
                            folds: Union[List[int], Tuple[int, ...]] = (0, 1, 2, 3, 4),
                            strict: bool = False):
    """
    Evaluates all allowed trained models (and, optionally, all pairwise ensembles of them) on their accumulated
    cross-validation predictions, picks the best performer by foreground mean Dice, determines its postprocessing
    and writes inference instructions plus an inference_information.json.

    :param dataset_name_or_id: dataset name or integer id
    :param allowed_trained_models: candidates as dicts with 'plans', 'configuration' and 'trainer' keys
    :param allow_ensembling: if True, every unordered pair of candidates is additionally evaluated as an ensemble
    :param num_processes: processes used for ensembling, evaluation and postprocessing
    :param overwrite: if True, redo previously accumulated/ensembled results instead of reusing them
    :param folds: folds that must be trained for every candidate
    :param strict: if True, abort with RuntimeError when a candidate's output folder is missing
    :return: dict with all results and the chosen model/ensemble (incl. postprocessing file and plans file)
    """
    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)
    all_results = {}
    allowed_trained_models = filter_available_models(deepcopy(allowed_trained_models), dataset_name_or_id)
    # evaluate each individual candidate on its merged cross-validation predictions
    for m in allowed_trained_models:
        output_folder = get_output_folder(dataset_name_or_id, m['trainer'], m['plans'], m['configuration'], fold=None)
        if not isdir(output_folder) and strict:
            raise RuntimeError(f'{dataset_name}: The output folder of plans {m["plans"]} configuration '
                               f'{m["configuration"]} is missing. Please train the model (all requested folds!) first!')
        identifier = os.path.basename(output_folder)
        merged_output_folder = join(output_folder, f'crossval_results_folds_{folds_tuple_to_string(folds)}')
        accumulate_cv_results(output_folder, merged_output_folder, folds, num_processes, overwrite)
        all_results[identifier] = {
            'source': merged_output_folder,
            'result': load_summary_json(join(merged_output_folder, 'summary.json'))['foreground_mean']['Dice']
        }

    if allow_ensembling:
        # additionally evaluate every unordered pair of candidates as an ensemble
        for i in range(len(allowed_trained_models)):
            for j in range(i + 1, len(allowed_trained_models)):
                m1, m2 = allowed_trained_models[i], allowed_trained_models[j]

                output_folder_1 = get_output_folder(dataset_name_or_id, m1['trainer'], m1['plans'], m1['configuration'], fold=None)
                output_folder_2 = get_output_folder(dataset_name_or_id, m2['trainer'], m2['plans'], m2['configuration'], fold=None)

                identifier = get_ensemble_name(output_folder_1, output_folder_2, folds)
                output_folder_ensemble = join(nnUNet_results, dataset_name, 'ensembles', identifier)

                ensemble_crossvalidations([output_folder_1, output_folder_2], output_folder_ensemble, folds,
                                          num_processes, overwrite=overwrite)

                # evaluate ensembled predictions
                plans_manager = PlansManager(join(output_folder_1, 'plans.json'))
                dataset_json = load_json(join(output_folder_1, 'dataset.json'))
                label_manager = plans_manager.get_label_manager(dataset_json)

                rw = plans_manager.image_reader_writer_class()
                compute_metrics_on_folder(join(nnUNet_preprocessed, dataset_name, 'gt_segmentations'),
                                          output_folder_ensemble,
                                          join(output_folder_ensemble, 'summary.json'),
                                          rw,
                                          dataset_json['file_ending'],
                                          label_manager.foreground_regions if label_manager.has_regions else
                                          label_manager.foreground_labels,
                                          label_manager.ignore_label,
                                          num_processes)
                all_results[identifier] = \
                    {
                        'source': output_folder_ensemble,
                        'result': load_summary_json(join(output_folder_ensemble, 'summary.json'))['foreground_mean']['Dice']
                    }

    # pick best and report inference command
    best_score = max([i['result'] for i in all_results.values()])
    best_keys = [k for k in all_results.keys() if all_results[k]['result'] == best_score]  # may never happen but theoretically
    # there can be a tie. Let's pick the first model in this case because it's going to be the simpler one (ensembles
    # come after single configs)
    best_key = best_keys[0]

    print()
    print('***All results:***')
    for k, v in all_results.items():
        print(f'{k}: {v["result"]}')
    print(f'\n*Best*: {best_key}: {all_results[best_key]["result"]}')
    print()
    print('***Determining postprocessing for best model/ensemble***')
    determine_postprocessing(all_results[best_key]['source'], join(nnUNet_preprocessed, dataset_name, 'gt_segmentations'),
                             plans_file_or_dict=join(all_results[best_key]['source'], 'plans.json'),
                             dataset_json_file_or_dict=join(all_results[best_key]['source'], 'dataset.json'),
                             num_processes=num_processes, keep_postprocessed_files=True)

    # in addition to just reading the console output (how it was previously) we should return the information
    # needed to run the full inference via API
    return_dict = {
        'folds': folds,
        'dataset_name_or_id': dataset_name_or_id,
        'considered_models': allowed_trained_models,
        'ensembling_allowed': allow_ensembling,
        'all_results': {i: j['result'] for i, j in all_results.items()},
        'best_model_or_ensemble': {
            'result_on_crossval_pre_pp': all_results[best_key]["result"],
            'result_on_crossval_post_pp': load_json(join(all_results[best_key]['source'], 'postprocessed', 'summary.json'))['foreground_mean']['Dice'],
            'postprocessing_file': join(all_results[best_key]['source'], 'postprocessing.pkl'),
            'some_plans_file': join(all_results[best_key]['source'], 'plans.json'),
            # just needed for label handling, can
            # come from any of the ensemble members (if any)
            'selected_model_or_models': []
        }
    }

    # convert best key to inference command:
    if best_key.startswith('ensemble___'):
        # ensemble identifiers encode both member models; recover each one's trainer/plans/config
        prefix, m1, m2, folds_string = best_key.split('___')
        tr1, pl1, c1 = convert_identifier_to_trainer_plans_config(m1)
        tr2, pl2, c2 = convert_identifier_to_trainer_plans_config(m2)
        return_dict['best_model_or_ensemble']['selected_model_or_models'].append(
            {
                'configuration': c1,
                'trainer': tr1,
                'plans_identifier': pl1,
            })
        return_dict['best_model_or_ensemble']['selected_model_or_models'].append(
            {
                'configuration': c2,
                'trainer': tr2,
                'plans_identifier': pl2,
            })
    else:
        tr, pl, c = convert_identifier_to_trainer_plans_config(best_key)
        return_dict['best_model_or_ensemble']['selected_model_or_models'].append(
            {
                'configuration': c,
                'trainer': tr,
                'plans_identifier': pl,
            })

    save_json(return_dict, join(nnUNet_results, dataset_name, 'inference_information.json'))  # save this so that we don't have to run this
    # everything someone wants to be reminded of the inference commands. They can just load this and give it to
    # print_inference_instructions

    # print it
    print_inference_instructions(return_dict, instructions_file=join(nnUNet_results, dataset_name, 'inference_instructions.txt'))
    return return_dict
def print_inference_instructions(inference_info_dict: dict, instructions_file: str = None):
    """
    Prints the commands needed to run inference with the best model/ensemble (as determined by
    find_best_configuration) and optionally mirrors the output into instructions_file.
    """
    def _print_and_maybe_write_to_file(string):
        # echo to stdout and, if requested, also to the instructions file
        print(string)
        if f_handle is not None:
            f_handle.write(f'{string}\n')

    f_handle = open(instructions_file, 'w') if instructions_file is not None else None
    try:
        print()
        _print_and_maybe_write_to_file('***Run inference like this:***\n')
        output_folders = []

        dataset_name_or_id = inference_info_dict['dataset_name_or_id']
        if len(inference_info_dict['best_model_or_ensemble']['selected_model_or_models']) > 1:
            is_ensemble = True
            _print_and_maybe_write_to_file('An ensemble won! What a surprise! Run the following commands to run predictions with the ensemble members:\n')
        else:
            is_ensemble = False

        for j, i in enumerate(inference_info_dict['best_model_or_ensemble']['selected_model_or_models']):
            tr, c, pl = i['trainer'], i['configuration'], i['plans_identifier']
            # ensemble members each get their own output folder so the predictions can be merged afterwards
            if is_ensemble:
                output_folder_name = f"OUTPUT_FOLDER_MODEL_{j+1}"
            else:
                output_folder_name = f"OUTPUT_FOLDER"
            output_folders.append(output_folder_name)

            _print_and_maybe_write_to_file(generate_inference_command(dataset_name_or_id, c, pl, tr, inference_info_dict['folds'],
                                                                      save_npz=is_ensemble, output_folder=output_folder_name))

        if is_ensemble:
            output_folder_str = output_folders[0]
            for o in output_folders[1:]:
                output_folder_str += f' {o}'
            output_ensemble = f"OUTPUT_FOLDER"
            _print_and_maybe_write_to_file('\nThe run ensembling with:\n')
            _print_and_maybe_write_to_file(f"nnUNetv2_ensemble -i {output_folder_str} -o {output_ensemble} -np {default_num_processes}")

        _print_and_maybe_write_to_file("\n***Once inference is completed, run postprocessing like this:***\n")
        _print_and_maybe_write_to_file(f"nnUNetv2_apply_postprocessing -i OUTPUT_FOLDER -o OUTPUT_FOLDER_PP "
                                       f"-pp_pkl_file {inference_info_dict['best_model_or_ensemble']['postprocessing_file']} -np {default_num_processes} "
                                       f"-plans_json {inference_info_dict['best_model_or_ensemble']['some_plans_file']}")
    finally:
        # fix: the file handle was previously never closed (leaked descriptor, possibly unflushed output)
        if f_handle is not None:
            f_handle.close()
def dumb_trainer_config_plans_to_trained_models_dict(trainers: List[str], configs: List[str], plans: List[str]):
    """
    function is called dumb because it's dumb
    """
    # cartesian product of all trainer/config/plans combinations, in trainer-major order
    return tuple({'plans': p, 'configuration': c, 'trainer': t}
                 for t in trainers
                 for c in configs
                 for p in plans)
def find_best_configuration_entry_point():
    """CLI wrapper for find_best_configuration."""
    cli = argparse.ArgumentParser()
    cli.add_argument('dataset_name_or_id', type=str, help='Dataset Name or id')
    cli.add_argument('-p', nargs='+', required=False, default=['nnUNetPlans'],
                     help='List of plan identifiers. Default: nnUNetPlans')
    cli.add_argument('-c', nargs='+', required=False, default=['2d', '3d_fullres', '3d_lowres', '3d_cascade_fullres'],
                     help="List of configurations. Default: ['2d', '3d_fullres', '3d_lowres', '3d_cascade_fullres']")
    cli.add_argument('-tr', nargs='+', required=False, default=['nnUNetTrainer'],
                     help='List of trainers. Default: nnUNetTrainer')
    cli.add_argument('-np', required=False, default=default_num_processes, type=int,
                     help='Number of processes to use for ensembling, postprocessing etc')
    cli.add_argument('-f', nargs='+', type=int, default=(0, 1, 2, 3, 4),
                     help='Folds to use. Default: 0 1 2 3 4')
    cli.add_argument('--disable_ensembling', action='store_true', required=False,
                     help='Set this flag to disable ensembling')
    cli.add_argument('--no_overwrite', action='store_true',
                     help='If set we will not overwrite already ensembled files etc. May speed up concecutive '
                          'runs of this command (why would you want to do that?) at the risk of not updating '
                          'outdated results.')
    parsed = cli.parse_args()

    model_dict = dumb_trainer_config_plans_to_trained_models_dict(parsed.tr, parsed.c, parsed.p)
    dataset_name = maybe_convert_to_dataset_name(parsed.dataset_name_or_id)
    find_best_configuration(dataset_name, model_dict, allow_ensembling=not parsed.disable_ensembling,
                            num_processes=parsed.np, overwrite=not parsed.no_overwrite, folds=parsed.f,
                            strict=False)
def accumulate_crossval_results_entry_point():
    """CLI entry point: merge the per-fold predicted segmentations into one joint folder and evaluate them.

    If no output folder (-o) is given, results are merged into the trained model directory under
    crossval_results_folds_XXX.

    Fix: '-c' used to be declared required=True while also carrying default='3d_fullres'. argparse
    ignores defaults on required arguments, so the default was dead code. The argument is now
    optional so the declared default actually takes effect; existing invocations that pass -c are
    unaffected.
    """
    parser = argparse.ArgumentParser('Copies all predicted segmentations from the individual folds into one joint '
                                     'folder and evaluates them')
    parser.add_argument('dataset_name_or_id', type=str, help='Dataset Name or id')
    parser.add_argument('-c', type=str, required=False,
                        default='3d_fullres',
                        help="Configuration. Default: 3d_fullres")
    parser.add_argument('-o', type=str, required=False, default=None,
                        help="Output folder. If not specified, the output folder will be located in the trained " \
                             "model directory (named crossval_results_folds_XXX).")
    parser.add_argument('-f', nargs='+', type=int, default=(0, 1, 2, 3, 4),
                        help='Folds to use. Default: 0 1 2 3 4')
    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',
                        help='Plan identifier in which to search for the specified configuration. Default: nnUNetPlans')
    parser.add_argument('-tr', type=str, required=False, default='nnUNetTrainer',
                        help='Trainer class. Default: nnUNetTrainer')
    args = parser.parse_args()

    trained_model_folder = get_output_folder(args.dataset_name_or_id, args.tr, args.p, args.c)

    if args.o is None:
        merged_output_folder = join(trained_model_folder, f'crossval_results_folds_{folds_tuple_to_string(args.f)}')
    else:
        merged_output_folder = args.o

    accumulate_cv_results(trained_model_folder, merged_output_folder, args.f)
if __name__ == '__main__':
    # ad-hoc manual run: dataset 4, all default trained models, 8 processes, no overwrite
    # (keyword names taken from the call in find_best_configuration_entry_point above)
    find_best_configuration(4,
                            default_trained_models,
                            allow_ensembling=True,
                            num_processes=8,
                            overwrite=False,
                            folds=(0, 1, 2, 3, 4))
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/dataset_fingerprint/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/dataset_fingerprint/fingerprint_extractor.py
================================================
import multiprocessing
import os
from time import sleep
from typing import List, Type, Union
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import load_json, join, save_json, isfile, maybe_mkdir_p
from tqdm import tqdm
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json
from nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed
from nnunetv2.preprocessing.cropping.cropping import crop_to_nonzero
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets
class DatasetFingerprintExtractor(object):
    def __init__(self, dataset_name_or_id: Union[str, int], num_processes: int = 8, verbose: bool = False):
        """
        extracts the dataset fingerprint used for experiment planning. The dataset fingerprint will be saved as a
        json file in the input_folder

        Philosophy here is to do only what we really need. Don't store stuff that we can easily read from somewhere
        else. Don't compute stuff we don't need (except for intensity_statistics_per_channel)

        :param dataset_name_or_id: dataset name or integer id; resolved via maybe_convert_to_dataset_name
        :param num_processes: number of worker processes used in run()
        :param verbose: when True, the tqdm progress bar in run() is disabled (see tqdm(disable=...) below)
        """
        dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)
        self.verbose = verbose

        self.dataset_name = dataset_name
        self.input_folder = join(nnUNet_raw, dataset_name)
        self.num_processes = num_processes
        self.dataset_json = load_json(join(self.input_folder, 'dataset.json'))
        # mapping: case identifier -> {'images': [image file(s)], 'label': label file} (see usage in run())
        self.dataset = get_filenames_of_train_images_and_targets(self.input_folder, self.dataset_json)

        # We don't want to use all foreground voxels because that can accumulate a lot of data (out of memory). It is
        # also not critically important to get all pixels as long as there are enough. Let's use 10e7 voxels in total
        # (for the entire dataset)
        self.num_foreground_voxels_for_intensitystats = 10e7

    @staticmethod
    def collect_foreground_intensities(segmentation: np.ndarray, images: np.ndarray, seed: int = 1234,
                                       num_samples: int = 10000):
        """
        images=image with multiple channels = shape (c, x, y(, z))

        Samples num_samples foreground intensity values per channel (foreground = segmentation > 0) and
        computes summary statistics per channel. Returns two lists indexed by channel:
        (sampled intensities, stats dict with mean/median/min/max/percentiles).
        """
        assert images.ndim == 4
        assert segmentation.ndim == 4

        assert not np.any(np.isnan(segmentation)), "Segmentation contains NaN values. grrrr.... :-("
        assert not np.any(np.isnan(images)), "Images contains NaN values. grrrr.... :-("

        # seeded RNG so that fingerprint extraction is reproducible
        rs = np.random.RandomState(seed)

        intensities_per_channel = []
        # we don't use the intensity_statistics_per_channel at all, it's just something that might be nice to have
        intensity_statistics_per_channel = []

        # segmentation is 4d: 1,x,y,z. We need to remove the empty dimension for the following code to work
        foreground_mask = segmentation[0] > 0

        for i in range(len(images)):
            foreground_pixels = images[i][foreground_mask]
            num_fg = len(foreground_pixels)
            # sample with replacement so that we don't get issues with cases that have less than num_samples
            # foreground_pixels. We could also just sample less in those cases but that would than cause these
            # training cases to be underrepresented
            intensities_per_channel.append(
                rs.choice(foreground_pixels, num_samples, replace=True) if num_fg > 0 else [])
            # all stats fall back to NaN for cases without any foreground voxels
            intensity_statistics_per_channel.append({
                'mean': np.mean(foreground_pixels) if num_fg > 0 else np.nan,
                'median': np.median(foreground_pixels) if num_fg > 0 else np.nan,
                'min': np.min(foreground_pixels) if num_fg > 0 else np.nan,
                'max': np.max(foreground_pixels) if num_fg > 0 else np.nan,
                'percentile_99_5': np.percentile(foreground_pixels, 99.5) if num_fg > 0 else np.nan,
                'percentile_00_5': np.percentile(foreground_pixels, 0.5) if num_fg > 0 else np.nan,
            })

        return intensities_per_channel, intensity_statistics_per_channel

    @staticmethod
    def analyze_case(image_files: List[str], segmentation_file: str, reader_writer_class: Type[BaseReaderWriter],
                     num_samples: int = 10000):
        """
        Reads one training case (all image channels + segmentation), crops it to the nonzero region and
        collects foreground intensity samples/statistics. Returns
        (shape_after_crop, spacing, intensities_per_channel, intensity_stats_per_channel,
        relative_size_after_cropping). Static so it can be dispatched to worker processes (see run()).
        """
        rw = reader_writer_class()
        images, properties_images = rw.read_images(image_files)
        segmentation, properties_seg = rw.read_seg(segmentation_file)

        # we no longer crop and save the cropped images before this is run. Instead we run the cropping on the fly.
        # Downside is that we need to do this twice (once here and once during preprocessing). Upside is that we don't
        # need to save the cropped data anymore. Given that cropping is not too expensive it makes sense to do it this
        # way. This is only possible because we are now using our new input/output interface.
        data_cropped, seg_cropped, bbox = crop_to_nonzero(images, segmentation)

        foreground_intensities_per_channel, foreground_intensity_stats_per_channel = \
            DatasetFingerprintExtractor.collect_foreground_intensities(seg_cropped, data_cropped,
                                                                       num_samples=num_samples)

        spacing = properties_images['spacing']

        # shape[0] is the channel dimension; only spatial dims are of interest here
        shape_before_crop = images.shape[1:]
        shape_after_crop = data_cropped.shape[1:]
        relative_size_after_cropping = np.prod(shape_after_crop) / np.prod(shape_before_crop)

        return shape_after_crop, spacing, foreground_intensities_per_channel, foreground_intensity_stats_per_channel, \
               relative_size_after_cropping

    def run(self, overwrite_existing: bool = False) -> dict:
        """
        Computes (or loads, if it already exists and overwrite_existing is False) the dataset fingerprint and
        saves it to nnUNet_preprocessed/<dataset>/dataset_fingerprint.json. Returns the fingerprint dict.
        """
        # we do not save the properties file in self.input_folder because that folder might be read-only. We can only
        # reliably write in nnUNet_preprocessed and nnUNet_results, so nnUNet_preprocessed it is
        preprocessed_output_folder = join(nnUNet_preprocessed, self.dataset_name)
        maybe_mkdir_p(preprocessed_output_folder)
        properties_file = join(preprocessed_output_folder, 'dataset_fingerprint.json')

        if not isfile(properties_file) or overwrite_existing:
            reader_writer_class = determine_reader_writer_from_dataset_json(self.dataset_json,
                                                                            # yikes. Rip the following line
                                                                            # (grabs the first image file of the first case)
                                                                            self.dataset[self.dataset.keys().__iter__().__next__()]['images'][0])

            # determine how many foreground voxels we need to sample per training case
            num_foreground_samples_per_case = int(self.num_foreground_voxels_for_intensitystats //
                                                  len(self.dataset))

            # one AsyncResult per training case; each starmap_async gets a single argument tuple
            r = []
            with multiprocessing.get_context("spawn").Pool(self.num_processes) as p:
                for k in self.dataset.keys():
                    r.append(p.starmap_async(DatasetFingerprintExtractor.analyze_case,
                                             ((self.dataset[k]['images'], self.dataset[k]['label'], reader_writer_class,
                                               num_foreground_samples_per_case),)))

                remaining = list(range(len(self.dataset)))
                # p is pretty nifti. If we kill workers they just respawn but don't do any work.
                # So we need to store the original pool of workers.
                workers = [j for j in p._pool]

                # poll for completed jobs while watching for dead workers (e.g. killed by the OOM killer)
                with tqdm(desc=None, total=len(self.dataset), disable=self.verbose) as pbar:
                    while len(remaining) > 0:
                        all_alive = all([j.is_alive() for j in workers])
                        if not all_alive:
                            raise RuntimeError('Some background worker is 6 feet under. Yuck. \n'
                                               'OK jokes aside.\n'
                                               'One of your background processes is missing. This could be because of '
                                               'an error (look for an error message) or because it was killed '
                                               'by your OS due to running out of RAM. If you don\'t see '
                                               'an error message, out of RAM is likely the problem. In that case '
                                               'reducing the number of workers might help')
                        done = [i for i in remaining if r[i].ready()]
                        for _ in done:
                            pbar.update()
                        remaining = [i for i in remaining if i not in done]
                        sleep(0.1)

            # results = ptqdm(DatasetFingerprintExtractor.analyze_case,
            #                 (training_images_per_case, training_labels_per_case),
            #                 processes=self.num_processes, zipped=True, reader_writer_class=reader_writer_class,
            #                 num_samples=num_foreground_samples_per_case, disable=self.verbose)
            # .get() returns a one-element list because each starmap_async received exactly one argument tuple
            results = [i.get()[0] for i in r]

            # note: the comprehensions below reuse the name r as loop variable, shadowing the AsyncResult list
            # (which is no longer needed at this point)
            shapes_after_crop = [r[0] for r in results]
            spacings = [r[1] for r in results]
            # concatenate the per-case samples across the whole dataset, separately for each channel
            foreground_intensities_per_channel = [np.concatenate([r[2][i] for r in results]) for i in
                                                  range(len(results[0][2]))]

            # we drop this so that the json file is somewhat human readable
            # foreground_intensity_stats_by_case_and_modality = [r[3] for r in results]
            median_relative_size_after_cropping = np.median([r[4] for r in results], 0)

            # 'modality' is the legacy (nnU-Net v1 style) key; 'channel_names' is the current one
            num_channels = len(self.dataset_json['channel_names'].keys()
                               if 'channel_names' in self.dataset_json.keys()
                               else self.dataset_json['modality'].keys())
            intensity_statistics_per_channel = {}
            for i in range(num_channels):
                intensity_statistics_per_channel[i] = {
                    'mean': float(np.mean(foreground_intensities_per_channel[i])),
                    'median': float(np.median(foreground_intensities_per_channel[i])),
                    'std': float(np.std(foreground_intensities_per_channel[i])),
                    'min': float(np.min(foreground_intensities_per_channel[i])),
                    'max': float(np.max(foreground_intensities_per_channel[i])),
                    'percentile_99_5': float(np.percentile(foreground_intensities_per_channel[i], 99.5)),
                    'percentile_00_5': float(np.percentile(foreground_intensities_per_channel[i], 0.5)),
                }

            fingerprint = {
                "spacings": spacings,
                "shapes_after_crop": shapes_after_crop,
                'foreground_intensity_properties_per_channel': intensity_statistics_per_channel,
                "median_relative_size_after_cropping": median_relative_size_after_cropping
            }

            try:
                save_json(fingerprint, properties_file)
            except Exception as e:
                # don't leave a half-written fingerprint behind; it would be picked up as valid on the next run
                if isfile(properties_file):
                    os.remove(properties_file)
                raise e
        else:
            fingerprint = load_json(properties_file)
        return fingerprint
if __name__ == '__main__':
    # ad-hoc manual run on dataset 2 with 8 worker processes
    extractor = DatasetFingerprintExtractor(2, num_processes=8)
    extractor.run(overwrite_existing=False)
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py
================================================
import os.path
import shutil
from copy import deepcopy
from functools import lru_cache
from typing import List, Union, Tuple, Type
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import load_json, join, save_json, isfile, maybe_mkdir_p
from dynamic_network_architectures.architectures.unet import PlainConvUNet, ResidualEncoderUNet
from dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_instancenorm
from nnunetv2.configuration import ANISO_THRESHOLD
from nnunetv2.experiment_planning.experiment_planners.network_topology import get_pool_and_conv_props
from nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json
from nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed
from nnunetv2.preprocessing.normalization.map_channel_name_to_normalization import get_normalization_scheme
from nnunetv2.preprocessing.resampling.default_resampling import resample_data_or_seg_to_shape, compute_new_shape
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.utilities.json_export import recursive_fix_for_json_export
from nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \
get_filenames_of_train_images_and_targets
class ExperimentPlanner(object):
def __init__(self, dataset_name_or_id: Union[str, int],
gpu_memory_target_in_gb: float = 8,
preprocessor_name: str = 'DefaultPreprocessor', plans_name: str = 'nnUNetPlans',
overwrite_target_spacing: Union[List[float], Tuple[float, ...]] = None,
suppress_transpose: bool = False):
"""
overwrite_target_spacing only affects 3d_fullres! (but by extension 3d_lowres which starts with fullres may
also be affected
"""
self.dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)
self.suppress_transpose = suppress_transpose
self.raw_dataset_folder = join(nnUNet_raw, self.dataset_name)
preprocessed_folder = join(nnUNet_preprocessed, self.dataset_name)
self.dataset_json = load_json(join(self.raw_dataset_folder, 'dataset.json'))
self.dataset = get_filenames_of_train_images_and_targets(self.raw_dataset_folder, self.dataset_json)
# load dataset fingerprint
if not isfile(join(preprocessed_folder, 'dataset_fingerprint.json')):
raise RuntimeError('Fingerprint missing for this dataset. Please run nnUNet_extract_dataset_fingerprint')
self.dataset_fingerprint = load_json(join(preprocessed_folder, 'dataset_fingerprint.json'))
self.anisotropy_threshold = ANISO_THRESHOLD
self.UNet_base_num_features = 32
self.UNet_class = PlainConvUNet
# the following two numbers are really arbitrary and were set to reproduce nnU-Net v1's configurations as
# much as possible
self.UNet_reference_val_3d = 560000000 # 455600128 550000000
self.UNet_reference_val_2d = 85000000 # 83252480
self.UNet_reference_com_nfeatures = 32
self.UNet_reference_val_corresp_GB = 8
self.UNet_reference_val_corresp_bs_2d = 12
self.UNet_reference_val_corresp_bs_3d = 2
self.UNet_vram_target_GB = gpu_memory_target_in_gb
self.UNet_featuremap_min_edge_length = 4
self.UNet_blocks_per_stage_encoder = (2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2)
self.UNet_blocks_per_stage_decoder = (2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2)
self.UNet_min_batch_size = 2
self.UNet_max_features_2d = 512
self.UNet_max_features_3d = 320
self.lowres_creation_threshold = 0.25 # if the patch size of fullres is less than 25% of the voxels in the
# median shape then we need a lowres config as well
self.preprocessor_name = preprocessor_name
self.plans_identifier = plans_name
self.overwrite_target_spacing = overwrite_target_spacing
assert overwrite_target_spacing is None or len(overwrite_target_spacing), 'if overwrite_target_spacing is ' \
'used then three floats must be ' \
'given (as list or tuple)'
assert overwrite_target_spacing is None or all([isinstance(i, float) for i in overwrite_target_spacing]), \
'if overwrite_target_spacing is used then three floats must be given (as list or tuple)'
self.plans = None
def determine_reader_writer(self):
example_image = self.dataset[self.dataset.keys().__iter__().__next__()]['images'][0]
return determine_reader_writer_from_dataset_json(self.dataset_json, example_image)
@staticmethod
@lru_cache(maxsize=None)
def static_estimate_VRAM_usage(patch_size: Tuple[int],
n_stages: int,
strides: Union[int, List[int], Tuple[int, ...]],
UNet_class: Union[Type[PlainConvUNet], Type[ResidualEncoderUNet]],
num_input_channels: int,
features_per_stage: Tuple[int],
blocks_per_stage_encoder: Union[int, Tuple[int]],
blocks_per_stage_decoder: Union[int, Tuple[int]],
num_labels: int):
"""
Works for PlainConvUNet, ResidualEncoderUNet
"""
dim = len(patch_size)
conv_op = convert_dim_to_conv_op(dim)
norm_op = get_matching_instancenorm(conv_op)
net = UNet_class(num_input_channels, n_stages,
features_per_stage,
conv_op,
3,
strides,
blocks_per_stage_encoder,
num_labels,
blocks_per_stage_decoder,
norm_op=norm_op)
return net.compute_conv_feature_map_size(patch_size)
def determine_resampling(self, *args, **kwargs):
"""
returns what functions to use for resampling data and seg, respectively. Also returns kwargs
resampling function must be callable(data, current_spacing, new_spacing, **kwargs)
determine_resampling is called within get_plans_for_configuration to allow for different functions for each
configuration
"""
resampling_data = resample_data_or_seg_to_shape
resampling_data_kwargs = {
"is_seg": False,
"order": 3,
"order_z": 0,
"force_separate_z": None,
}
resampling_seg = resample_data_or_seg_to_shape
resampling_seg_kwargs = {
"is_seg": True,
"order": 1,
"order_z": 0,
"force_separate_z": None,
}
return resampling_data, resampling_data_kwargs, resampling_seg, resampling_seg_kwargs
def determine_segmentation_softmax_export_fn(self, *args, **kwargs):
"""
function must be callable(data, new_shape, current_spacing, new_spacing, **kwargs). The new_shape should be
used as target. current_spacing and new_spacing are merely there in case we want to use it somehow
determine_segmentation_softmax_export_fn is called within get_plans_for_configuration to allow for different
functions for each configuration
"""
resampling_fn = resample_data_or_seg_to_shape
resampling_fn_kwargs = {
"is_seg": False,
"order": 1,
"order_z": 0,
"force_separate_z": None,
}
return resampling_fn, resampling_fn_kwargs
def determine_fullres_target_spacing(self) -> np.ndarray:
"""
per default we use the 50th percentile=median for the target spacing. Higher spacing results in smaller data
and thus faster and easier training. Smaller spacing results in larger data and thus longer and harder training
For some datasets the median is not a good choice. Those are the datasets where the spacing is very anisotropic
(for example ACDC with (10, 1.5, 1.5)). These datasets still have examples with a spacing of 5 or 6 mm in the low
resolution axis. Choosing the median here will result in bad interpolation artifacts that can substantially
impact performance (due to the low number of slices).
"""
if self.overwrite_target_spacing is not None:
return np.array(self.overwrite_target_spacing)
spacings = self.dataset_fingerprint['spacings']
sizes = self.dataset_fingerprint['shapes_after_crop']
target = np.percentile(np.vstack(spacings), 50, 0)
# todo sizes_after_resampling = [compute_new_shape(j, i, target) for i, j in zip(spacings, sizes)]
target_size = np.percentile(np.vstack(sizes), 50, 0)
# we need to identify datasets for which a different target spacing could be beneficial. These datasets have
# the following properties:
# - one axis which much lower resolution than the others
# - the lowres axis has much less voxels than the others
# - (the size in mm of the lowres axis is also reduced)
worst_spacing_axis = np.argmax(target)
other_axes = [i for i in range(len(target)) if i != worst_spacing_axis]
other_spacings = [target[i] for i in other_axes]
other_sizes = [target_size[i] for i in other_axes]
has_aniso_spacing = target[worst_spacing_axis] > (self.anisotropy_threshold * max(other_spacings))
has_aniso_voxels = target_size[worst_spacing_axis] * self.anisotropy_threshold < min(other_sizes)
if has_aniso_spacing and has_aniso_voxels:
spacings_of_that_axis = np.vstack(spacings)[:, worst_spacing_axis]
target_spacing_of_that_axis = np.percentile(spacings_of_that_axis, 10)
# don't let the spacing of that axis get higher than the other axes
if target_spacing_of_that_axis < max(other_spacings):
target_spacing_of_that_axis = max(max(other_spacings), target_spacing_of_that_axis) + 1e-5
target[worst_spacing_axis] = target_spacing_of_that_axis
return target
def determine_normalization_scheme_and_whether_mask_is_used_for_norm(self) -> Tuple[List[str], List[bool]]:
if 'channel_names' not in self.dataset_json.keys():
print('WARNING: "modalities" should be renamed to "channel_names" in dataset.json. This will be '
'enforced soon!')
modalities = self.dataset_json['channel_names'] if 'channel_names' in self.dataset_json.keys() else \
self.dataset_json['modality']
normalization_schemes = [get_normalization_scheme(m) for m in modalities.values()]
if self.dataset_fingerprint['median_relative_size_after_cropping'] < (3 / 4.):
use_nonzero_mask_for_norm = [i.leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true for i in
normalization_schemes]
else:
use_nonzero_mask_for_norm = [False] * len(normalization_schemes)
assert all([i in (True, False) for i in use_nonzero_mask_for_norm]), 'use_nonzero_mask_for_norm must be ' \
'True or False and cannot be None'
normalization_schemes = [i.__name__ for i in normalization_schemes]
return normalization_schemes, use_nonzero_mask_for_norm
def determine_transpose(self):
if self.suppress_transpose:
return [0, 1, 2], [0, 1, 2]
# todo we should use shapes for that as well. Not quite sure how yet
target_spacing = self.determine_fullres_target_spacing()
max_spacing_axis = np.argmax(target_spacing)
remaining_axes = [i for i in list(range(3)) if i != max_spacing_axis]
transpose_forward = [max_spacing_axis] + remaining_axes
transpose_backward = [np.argwhere(np.array(transpose_forward) == i)[0][0] for i in range(3)]
return transpose_forward, transpose_backward
def get_plans_for_configuration(self,
spacing: Union[np.ndarray, Tuple[float, ...], List[float]],
median_shape: Union[np.ndarray, Tuple[int, ...], List[int]],
data_identifier: str,
approximate_n_voxels_dataset: float) -> dict:
assert all([i > 0 for i in spacing]), f"Spacing must be > 0! Spacing: {spacing}"
# print(spacing, median_shape, approximate_n_voxels_dataset)
# find an initial patch size
# we first use the spacing to get an aspect ratio
tmp = 1 / np.array(spacing)
# we then upscale it so that it initially is certainly larger than what we need (rescale to have the same
# volume as a patch of size 256 ** 3)
# this may need to be adapted when using absurdly large GPU memory targets. Increasing this now would not be
# ideal because large initial patch sizes increase computation time because more iterations in the while loop
# further down may be required.
if len(spacing) == 3:
initial_patch_size = [round(i) for i in tmp * (256 ** 3 / np.prod(tmp)) ** (1 / 3)]
elif len(spacing) == 2:
initial_patch_size = [round(i) for i in tmp * (2048 ** 2 / np.prod(tmp)) ** (1 / 2)]
else:
raise RuntimeError()
# clip initial patch size to median_shape. It makes little sense to have it be larger than that. Note that
# this is different from how nnU-Net v1 does it!
# todo patch size can still get too large because we pad the patch size to a multiple of 2**n
initial_patch_size = np.array([min(i, j) for i, j in zip(initial_patch_size, median_shape[:len(spacing)])])
# use that to get the network topology. Note that this changes the patch_size depending on the number of
# pooling operations (must be divisible by 2**num_pool in each axis)
network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, patch_size, \
shape_must_be_divisible_by = get_pool_and_conv_props(spacing, initial_patch_size,
self.UNet_featuremap_min_edge_length,
999999)
# now estimate vram consumption
num_stages = len(pool_op_kernel_sizes)
estimate = self.static_estimate_VRAM_usage(tuple(patch_size),
num_stages,
tuple([tuple(i) for i in pool_op_kernel_sizes]),
self.UNet_class,
len(self.dataset_json['channel_names'].keys()
if 'channel_names' in self.dataset_json.keys()
else self.dataset_json['modality'].keys()),
tuple([min(self.UNet_max_features_2d if len(patch_size) == 2 else
self.UNet_max_features_3d,
self.UNet_reference_com_nfeatures * 2 ** i) for
i in range(len(pool_op_kernel_sizes))]),
self.UNet_blocks_per_stage_encoder[:num_stages],
self.UNet_blocks_per_stage_decoder[:num_stages - 1],
len(self.dataset_json['labels'].keys()))
# how large is the reference for us here (batch size etc)?
# adapt for our vram target
reference = (self.UNet_reference_val_2d if len(spacing) == 2 else self.UNet_reference_val_3d) * \
(self.UNet_vram_target_GB / self.UNet_reference_val_corresp_GB)
while estimate > reference:
# print(patch_size)
# patch size seems to be too large, so we need to reduce it. Reduce the axis that currently violates the
# aspect ratio the most (that is the largest relative to median shape)
axis_to_be_reduced = np.argsort(patch_size / median_shape[:len(spacing)])[-1]
# we cannot simply reduce that axis by shape_must_be_divisible_by[axis_to_be_reduced] because this
# may cause us to skip some valid sizes, for example shape_must_be_divisible_by is 64 for a shape of 256.
# If we subtracted that we would end up with 192, skipping 224 which is also a valid patch size
# (224 / 2**5 = 7; 7 < 2 * self.UNet_featuremap_min_edge_length(4) so it's valid). So we need to first
# subtract shape_must_be_divisible_by, then recompute it and then subtract the
# recomputed shape_must_be_divisible_by. Annoying.
tmp = deepcopy(patch_size)
tmp[axis_to_be_reduced] -= shape_must_be_divisible_by[axis_to_be_reduced]
_, _, _, _, shape_must_be_divisible_by = \
get_pool_and_conv_props(spacing, tmp,
self.UNet_featuremap_min_edge_length,
999999)
patch_size[axis_to_be_reduced] -= shape_must_be_divisible_by[axis_to_be_reduced]
# now recompute topology
network_num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, patch_size, \
shape_must_be_divisible_by = get_pool_and_conv_props(spacing, patch_size,
self.UNet_featuremap_min_edge_length,
999999)
num_stages = len(pool_op_kernel_sizes)
estimate = self.static_estimate_VRAM_usage(tuple(patch_size),
num_stages,
tuple([tuple(i) for i in pool_op_kernel_sizes]),
self.UNet_class,
len(self.dataset_json['channel_names'].keys()
if 'channel_names' in self.dataset_json.keys()
else self.dataset_json['modality'].keys()),
tuple([min(self.UNet_max_features_2d if len(patch_size) == 2 else
self.UNet_max_features_3d,
self.UNet_reference_com_nfeatures * 2 ** i) for
i in range(len(pool_op_kernel_sizes))]),
self.UNet_blocks_per_stage_encoder[:num_stages],
self.UNet_blocks_per_stage_decoder[:num_stages - 1],
len(self.dataset_json['labels'].keys()))
# alright now let's determine the batch size. This will give self.UNet_min_batch_size if the while loop was
# executed. If not, additional vram headroom is used to increase batch size
ref_bs = self.UNet_reference_val_corresp_bs_2d if len(spacing) == 2 else self.UNet_reference_val_corresp_bs_3d
batch_size = round((reference / estimate) * ref_bs)
# we need to cap the batch size to cover at most 5% of the entire dataset. Overfitting precaution. We cannot
# go smaller than self.UNet_min_batch_size though
bs_corresponding_to_5_percent = round(
approximate_n_voxels_dataset * 0.05 / np.prod(patch_size, dtype=np.float64))
batch_size = max(min(batch_size, bs_corresponding_to_5_percent), self.UNet_min_batch_size)
resampling_data, resampling_data_kwargs, resampling_seg, resampling_seg_kwargs = self.determine_resampling()
resampling_softmax, resampling_softmax_kwargs = self.determine_segmentation_softmax_export_fn()
normalization_schemes, mask_is_used_for_norm = \
self.determine_normalization_scheme_and_whether_mask_is_used_for_norm()
num_stages = len(pool_op_kernel_sizes)
plan = {
'data_identifier': data_identifier,
'preprocessor_name': self.preprocessor_name,
'batch_size': batch_size,
'patch_size': patch_size,
'median_image_size_in_voxels': median_shape,
'spacing': spacing,
'normalization_schemes': normalization_schemes,
'use_mask_for_norm': mask_is_used_for_norm,
'UNet_class_name': self.UNet_class.__name__,
'UNet_base_num_features': self.UNet_base_num_features,
'n_conv_per_stage_encoder': self.UNet_blocks_per_stage_encoder[:num_stages],
'n_conv_per_stage_decoder': self.UNet_blocks_per_stage_decoder[:num_stages - 1],
'num_pool_per_axis': network_num_pool_per_axis,
'pool_op_kernel_sizes': pool_op_kernel_sizes,
'conv_kernel_sizes': conv_kernel_sizes,
'unet_max_num_features': self.UNet_max_features_3d if len(spacing) == 3 else self.UNet_max_features_2d,
'resampling_fn_data': resampling_data.__name__,
'resampling_fn_seg': resampling_seg.__name__,
'resampling_fn_data_kwargs': resampling_data_kwargs,
'resampling_fn_seg_kwargs': resampling_seg_kwargs,
'resampling_fn_probabilities': resampling_softmax.__name__,
'resampling_fn_probabilities_kwargs': resampling_softmax_kwargs,
}
return plan
def plan_experiment(self):
"""
MOVE EVERYTHING INTO THE PLANS. MAXIMUM FLEXIBILITY
Ideally I would like to move transpose_forward/backward into the configurations so that this can also be done
differently for each configuration but this would cause problems with identifying the correct axes for 2d. There
surely is a way around that but eh. I'm feeling lazy and featuritis must also not be pushed to the extremes.
So for now if you want a different transpose_forward/backward you need to create a new planner. Also not too
hard.
"""
# first get transpose
transpose_forward, transpose_backward = self.determine_transpose()
# get fullres spacing and transpose it
fullres_spacing = self.determine_fullres_target_spacing()
fullres_spacing_transposed = fullres_spacing[transpose_forward]
# get transposed new median shape (what we would have after resampling)
new_shapes = [compute_new_shape(j, i, fullres_spacing) for i, j in
zip(self.dataset_fingerprint['spacings'], self.dataset_fingerprint['shapes_after_crop'])]
new_median_shape = np.median(new_shapes, 0)
new_median_shape_transposed = new_median_shape[transpose_forward]
approximate_n_voxels_dataset = float(np.prod(new_median_shape_transposed, dtype=np.float64) *
self.dataset_json['numTraining'])
# only run 3d if this is a 3d dataset
if new_median_shape_transposed[0] != 1:
plan_3d_fullres = self.get_plans_for_configuration(fullres_spacing_transposed,
new_median_shape_transposed,
self.generate_data_identifier('3d_fullres'),
approximate_n_voxels_dataset)
# maybe add 3d_lowres as well
patch_size_fullres = plan_3d_fullres['patch_size']
median_num_voxels = np.prod(new_median_shape_transposed, dtype=np.float64)
num_voxels_in_patch = np.prod(patch_size_fullres, dtype=np.float64)
plan_3d_lowres = None
lowres_spacing = deepcopy(plan_3d_fullres['spacing'])
spacing_increase_factor = 1.03 # used to be 1.01 but that is slow with new GPU memory estimation!
while num_voxels_in_patch / median_num_voxels < self.lowres_creation_threshold:
# we incrementally increase the target spacing. We start with the anisotropic axis/axes until it/they
# is/are similar (factor 2) to the other ax(i/e)s.
max_spacing = max(lowres_spacing)
if np.any((max_spacing / lowres_spacing) > 2):
lowres_spacing[(max_spacing / lowres_spacing) > 2] *= spacing_increase_factor
else:
lowres_spacing *= spacing_increase_factor
median_num_voxels = np.prod(plan_3d_fullres['spacing'] / lowres_spacing * new_median_shape_transposed,
dtype=np.float64)
# print(lowres_spacing)
plan_3d_lowres = self.get_plans_for_configuration(lowres_spacing,
[round(i) for i in plan_3d_fullres['spacing'] /
lowres_spacing * new_median_shape_transposed],
self.generate_data_identifier('3d_lowres'),
float(np.prod(median_num_voxels) *
self.dataset_json['numTraining']))
num_voxels_in_patch = np.prod(plan_3d_lowres['patch_size'], dtype=np.int64)
print(f'Attempting to find 3d_lowres config. '
f'\nCurrent spacing: {lowres_spacing}. '
f'\nCurrent patch size: {plan_3d_lowres["patch_size"]}. '
f'\nCurrent median shape: {plan_3d_fullres["spacing"] / lowres_spacing * new_median_shape_transposed}')
if plan_3d_lowres is not None:
plan_3d_lowres['batch_dice'] = False
plan_3d_fullres['batch_dice'] = True
else:
plan_3d_fullres['batch_dice'] = False
else:
plan_3d_fullres = None
plan_3d_lowres = None
# 2D configuration
plan_2d = self.get_plans_for_configuration(fullres_spacing_transposed[1:],
new_median_shape_transposed[1:],
self.generate_data_identifier('2d'), approximate_n_voxels_dataset)
plan_2d['batch_dice'] = True
print('2D U-Net configuration:')
print(plan_2d)
print()
# median spacing and shape, just for reference when printing the plans
median_spacing = np.median(self.dataset_fingerprint['spacings'], 0)[transpose_forward]
median_shape = np.median(self.dataset_fingerprint['shapes_after_crop'], 0)[transpose_forward]
# instead of writing all that into the plans we just copy the original file. More files, but less crowded
# per file.
shutil.copy(join(self.raw_dataset_folder, 'dataset.json'),
join(nnUNet_preprocessed, self.dataset_name, 'dataset.json'))
# json is stupid and I hate it... "Object of type int64 is not JSON serializable" -> my ass
plans = {
'dataset_name': self.dataset_name,
'plans_name': self.plans_identifier,
'original_median_spacing_after_transp': [float(i) for i in median_spacing],
'original_median_shape_after_transp': [int(round(i)) for i in median_shape],
'image_reader_writer': self.determine_reader_writer().__name__,
'transpose_forward': [int(i) for i in transpose_forward],
'transpose_backward': [int(i) for i in transpose_backward],
'configurations': {'2d': plan_2d},
'experiment_planner_used': self.__class__.__name__,
'label_manager': 'LabelManager',
'foreground_intensity_properties_per_channel': self.dataset_fingerprint[
'foreground_intensity_properties_per_channel']
}
if plan_3d_lowres is not None:
plans['configurations']['3d_lowres'] = plan_3d_lowres
if plan_3d_fullres is not None:
plans['configurations']['3d_lowres']['next_stage'] = '3d_cascade_fullres'
print('3D lowres U-Net configuration:')
print(plan_3d_lowres)
print()
if plan_3d_fullres is not None:
plans['configurations']['3d_fullres'] = plan_3d_fullres
print('3D fullres U-Net configuration:')
print(plan_3d_fullres)
print()
if plan_3d_lowres is not None:
plans['configurations']['3d_cascade_fullres'] = {
'inherits_from': '3d_fullres',
'previous_stage': '3d_lowres'
}
self.plans = plans
self.save_plans(plans)
return plans
def save_plans(self, plans):
    """
    Write `plans` to <nnUNet_preprocessed>/<dataset_name>/<plans_identifier>.json.

    If a plans file of the same name already exists on disk, configurations it contains that are
    NOT part of `plans` (i.e. custom, user-added configurations) are carried over into the new
    file so that rerunning experiment planning does not wipe them out.
    """
    recursive_fix_for_json_export(plans)

    output_dir = join(nnUNet_preprocessed, self.dataset_name)
    plans_file = join(output_dir, self.plans_identifier + '.json')

    # preserve non-default configurations from a pre-existing plans file
    if isfile(plans_file):
        surviving_configs = load_json(plans_file)['configurations']
        for name in list(plans['configurations'].keys()):
            surviving_configs.pop(name, None)
        plans['configurations'].update(surviving_configs)

    maybe_mkdir_p(output_dir)
    save_json(plans, plans_file, sort_keys=False)
    print(f"Plans were saved to {plans_file}")
def generate_data_identifier(self, configuration_name: str) -> str:
    """
    Configurations are unique within each plans file, but different plans files may contain
    configurations that share a name. The returned identifier therefore combines the plans
    identifier with the configuration name so that preprocessed data folders never collide.
    """
    return f'{self.plans_identifier}_{configuration_name}'
def load_plans(self, fname: str):
    """Load a plans file from disk and store it in self.plans (replaces any plans held so far)."""
    self.plans = load_json(fname)
if __name__ == '__main__':
    # manual smoke test: plan dataset 2 with the second positional argument set to 8
    # (per the planner signature this is gpu_memory_target_in_gb)
    ExperimentPlanner(2, 8).plan_experiment()
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/network_topology.py
================================================
from copy import deepcopy
import numpy as np
def get_shape_must_be_divisible_by(net_numpool_per_axis):
    """Per axis, return 2 ** (number of poolings) — the divisor a patch shape must satisfy."""
    return np.power(2, np.asarray(net_numpool_per_axis))
def pad_shape(shape, must_be_divisible_by):
    """
    Pad `shape` up to the next multiple of `must_be_divisible_by`, independently per axis.
    Axes that are already divisible are left unchanged.

    :param shape: sequence of axis lengths
    :param must_be_divisible_by: scalar (applied to all axes) or per-axis sequence of divisors
    :return: np.ndarray of int with the padded shape
    """
    if isinstance(must_be_divisible_by, (tuple, list, np.ndarray)):
        assert len(must_be_divisible_by) == len(shape)
    else:
        # broadcast a scalar divisor to every axis
        must_be_divisible_by = [must_be_divisible_by] * len(shape)

    padded = []
    for axis_len, divisor in zip(shape, must_be_divisible_by):
        remainder = axis_len % divisor
        padded.append(axis_len if remainder == 0 else axis_len + divisor - remainder)
    return np.array(padded).astype(int)
def get_pool_and_conv_props(spacing, patch_size, min_feature_map_size, max_numpool):
    """
    Derive the pooling/conv topology of a U-Net from voxel spacing and patch size.
    Identical in behavior to get_pool_and_conv_props_v2 of the old nnU-Net.

    :param spacing: voxel spacing per axis
    :param patch_size: initial patch size per axis (may be padded by this function)
    :param min_feature_map_size: minimum edge length of the bottleneck feature maps
    :param max_numpool: maximum number of pooling operations per axis
    :return: (num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes,
              padded patch_size, must_be_divisible_by)
    """
    dim = len(spacing)
    remaining_spacing = deepcopy(list(spacing))
    remaining_size = deepcopy(list(patch_size))

    pool_op_kernel_sizes = [[1] * dim]  # first "stage" has no pooling
    conv_kernel_sizes = []
    num_pool_per_axis = [0] * dim
    kernel_size = [1] * dim

    while True:
        # an axis can only be pooled again while its feature map stays >= min_feature_map_size
        poolable = [a for a in range(dim) if remaining_size[a] >= 2 * min_feature_map_size]
        if not poolable:
            break

        # restrict to axes whose (current) spacing is within a factor of 2 of the finest one
        finest_spacing = min(remaining_spacing[a] for a in poolable)
        poolable = [a for a in poolable if remaining_spacing[a] / finest_spacing < 2]

        # respect the per-axis pooling budget
        poolable = [a for a in poolable if num_pool_per_axis[a] < max_numpool]

        # a lone remaining axis needs extra headroom (3x instead of 2x) before pooling again
        if len(poolable) == 1 and remaining_size[poolable[0]] < 3 * min_feature_map_size:
            break
        if not poolable:
            break

        # kernel sizes start at 1 and are promoted to 3 once their axis spacing comes within
        # factor 2 of the current minimum spacing; once promoted they stay at 3
        current_min_spacing = min(remaining_spacing)
        for a in range(dim):
            if kernel_size[a] != 3 and remaining_spacing[a] / current_min_spacing < 2:
                kernel_size[a] = 3

        stride = [1] * dim
        for a in poolable:
            stride[a] = 2
            num_pool_per_axis[a] += 1
            remaining_spacing[a] *= 2
            remaining_size[a] = np.ceil(remaining_size[a] / 2)

        pool_op_kernel_sizes.append(stride)
        conv_kernel_sizes.append(deepcopy(kernel_size))

    must_be_divisible_by = get_shape_must_be_divisible_by(num_pool_per_axis)
    patch_size = pad_shape(patch_size, must_be_divisible_by)

    # one extra 3x3(x3) conv for the bottleneck
    conv_kernel_sizes.append([3] * dim)
    return num_pool_per_axis, pool_op_kernel_sizes, conv_kernel_sizes, patch_size, must_be_divisible_by
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/readme.md
================================================
What do experiment planners need to do? (These are notes I made for myself while rewriting nnU-Net. They are provided as is,
without further explanation. These notes also include new features.)
- (done) preprocessor name should be configurable via cli
- (done) gpu memory target should be configurable via cli
- (done) plans name should be configurable via cli
- (done) data name should be specified in plans (plans specify the data they want to use, this will allow us to manually
edit plans files without having to copy the data folders)
- plans must contain:
- (done) transpose forward/backward
- (done) preprocessor name (can differ for each config)
- (done) spacing
- (done) normalization scheme
- (done) target spacing
- (done) conv and pool op kernel sizes
- (done) base num features for architecture
- (done) data identifier
- num conv per stage?
- (done) use mask for norm
- [NO. Handled by LabelManager & dataset.json] num segmentation outputs
- [NO. Handled by LabelManager & dataset.json] ignore class
- [NO. Handled by LabelManager & dataset.json] list of regions or classes
- [NO. Handled by LabelManager & dataset.json] regions class order, if applicable
- (done) resampling function to be used
- (done) the image reader writer class that should be used
dataset.json
mandatory:
- numTraining
- labels (value 'ignore' has special meaning. Cannot have more than one ignore_label)
- modalities
- file_ending
optional
- overwrite_image_reader_writer (if absent, auto)
- regions
- region_class_order
-
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/experiment_planners/resencUNet_planner.py
================================================
from typing import Union, List, Tuple
from torch import nn
from nnunetv2.experiment_planning.experiment_planners.default_experiment_planner import ExperimentPlanner
from dynamic_network_architectures.architectures.unet import ResidualEncoderUNet
class ResEncUNetPlanner(ExperimentPlanner):
    """
    Experiment planner that plans ResidualEncoderUNet architectures instead of the default
    plain-convolution U-Net. All planning logic is inherited from ExperimentPlanner; this class
    only overrides the architecture class and the memory-budget/topology constants.
    """

    def __init__(self, dataset_name_or_id: Union[str, int],
                 gpu_memory_target_in_gb: float = 8,
                 preprocessor_name: str = 'DefaultPreprocessor', plans_name: str = 'nnUNetResEncUNetPlans',
                 overwrite_target_spacing: Union[List[float], Tuple[float, ...]] = None,
                 suppress_transpose: bool = False):
        super().__init__(dataset_name_or_id, gpu_memory_target_in_gb, preprocessor_name, plans_name,
                         overwrite_target_spacing, suppress_transpose)

        # architecture to plan for
        self.UNet_class = ResidualEncoderUNet
        self.UNet_base_num_features = 32
        self.UNet_reference_com_nfeatures = 32

        # reference feature-map budgets. These two values are really arbitrary and were tuned to
        # reproduce default nnU-Net's configurations as closely as possible
        self.UNet_reference_val_3d = 680000000
        self.UNet_reference_val_2d = 135000000
        self.UNet_reference_val_corresp_GB = 8
        self.UNet_reference_val_corresp_bs_2d = 12
        self.UNet_reference_val_corresp_bs_3d = 2

        # topology constraints
        self.UNet_featuremap_min_edge_length = 4
        self.UNet_min_batch_size = 2
        self.UNet_max_features_2d = 512
        self.UNet_max_features_3d = 320

        # residual blocks per resolution stage of the encoder / decoder
        self.UNet_blocks_per_stage_encoder = (1, 3, 4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6)
        self.UNet_blocks_per_stage_decoder = (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)
if __name__ == '__main__':
    # we know both of these networks run with batch size 2 and 12 on ~8-10GB, respectively
    # 3D reference network (6 stages, 128^3 input)
    net = ResidualEncoderUNet(input_channels=1, n_stages=6, features_per_stage=(32, 64, 128, 256, 320, 320),
                              conv_op=nn.Conv3d, kernel_sizes=3, strides=(1, 2, 2, 2, 2, 2),
                              n_blocks_per_stage=(1, 3, 4, 6, 6, 6), num_classes=3,
                              n_conv_per_stage_decoder=(1, 1, 1, 1, 1),
                              conv_bias=True, norm_op=nn.InstanceNorm3d, norm_op_kwargs={}, dropout_op=None,
                              nonlin=nn.LeakyReLU, nonlin_kwargs={'inplace': True}, deep_supervision=True)
    print(net.compute_conv_feature_map_size((128, 128, 128)))  # -> 558319104. The value you see above was finetuned
    # from this one to match the regular nnunetplans more closely

    # 2D reference network (7 stages, 512^2 input)
    net = ResidualEncoderUNet(input_channels=1, n_stages=7, features_per_stage=(32, 64, 128, 256, 512, 512, 512),
                              conv_op=nn.Conv2d, kernel_sizes=3, strides=(1, 2, 2, 2, 2, 2, 2),
                              n_blocks_per_stage=(1, 3, 4, 6, 6, 6, 6), num_classes=3,
                              n_conv_per_stage_decoder=(1, 1, 1, 1, 1, 1),
                              conv_bias=True, norm_op=nn.InstanceNorm2d, norm_op_kwargs={}, dropout_op=None,
                              nonlin=nn.LeakyReLU, nonlin_kwargs={'inplace': True}, deep_supervision=True)
    print(net.compute_conv_feature_map_size((512, 512)))  # -> 129793792
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/plan_and_preprocess_api.py
================================================
import shutil
from typing import List, Type, Optional, Tuple, Union
import nnunetv2
from batchgenerators.utilities.file_and_folder_operations import join, maybe_mkdir_p, subfiles, load_json
from nnunetv2.experiment_planning.dataset_fingerprint.fingerprint_extractor import DatasetFingerprintExtractor
from nnunetv2.experiment_planning.experiment_planners.default_experiment_planner import ExperimentPlanner
from nnunetv2.experiment_planning.verify_dataset_integrity import verify_dataset_integrity
from nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed
from nnunetv2.utilities.dataset_name_id_conversion import convert_id_to_dataset_name, maybe_convert_to_dataset_name
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager
from nnunetv2.configuration import default_num_processes
from nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets
def extract_fingerprint_dataset(dataset_id: int,
                                fingerprint_extractor_class: Type[
                                    DatasetFingerprintExtractor] = DatasetFingerprintExtractor,
                                num_processes: int = default_num_processes, check_dataset_integrity: bool = False,
                                clean: bool = True, verbose: bool = True):
    """
    Extract the dataset fingerprint of a single dataset.

    Returns the fingerprint as a dictionary (additionally to saving it).
    Setting check_dataset_integrity runs verify_dataset_integrity on the raw data first.
    """
    dataset_name = convert_id_to_dataset_name(dataset_id)
    print(dataset_name)

    if check_dataset_integrity:
        verify_dataset_integrity(join(nnUNet_raw, dataset_name), num_processes)

    extractor = fingerprint_extractor_class(dataset_id, num_processes, verbose=verbose)
    return extractor.run(overwrite_existing=clean)
def extract_fingerprints(dataset_ids: List[int], fingerprint_extractor_class_name: str = 'DatasetFingerprintExtractor',
                         num_processes: int = default_num_processes, check_dataset_integrity: bool = False,
                         clean: bool = True, verbose: bool = True):
    """
    Extract fingerprints for several datasets with a shared extractor class.

    clean = False will not actually run this. This is just a switch for use with
    nnUNetv2_plan_and_preprocess where we don't want to rerun fingerprint extraction every time.
    """
    # resolve the extractor class by name from nnunetv2.experiment_planning
    extractor_cls = recursive_find_python_class(join(nnunetv2.__path__[0], "experiment_planning"),
                                                fingerprint_extractor_class_name,
                                                current_module="nnunetv2.experiment_planning")
    for dataset_id in dataset_ids:
        extract_fingerprint_dataset(dataset_id, extractor_cls, num_processes, check_dataset_integrity,
                                    clean, verbose)
def plan_experiment_dataset(dataset_id: int,
                            experiment_planner_class: Type[ExperimentPlanner] = ExperimentPlanner,
                            gpu_memory_target_in_gb: float = 8, preprocess_class_name: str = 'DefaultPreprocessor',
                            overwrite_target_spacing: Optional[Tuple[float, ...]] = None,
                            overwrite_plans_name: Optional[str] = None) -> dict:
    """
    Run experiment planning for one dataset and return the resulting plans dict.

    overwrite_target_spacing ONLY applies to 3d_fullres and 3d_cascade fullres!
    """
    planner_kwargs = {}
    if overwrite_plans_name is not None:
        planner_kwargs['plans_name'] = overwrite_plans_name

    # the planner expects floats; convert a user-provided spacing, pass None through unchanged
    if overwrite_target_spacing is not None:
        target_spacing = [float(i) for i in overwrite_target_spacing]
    else:
        target_spacing = overwrite_target_spacing

    planner = experiment_planner_class(dataset_id,
                                       gpu_memory_target_in_gb=gpu_memory_target_in_gb,
                                       preprocessor_name=preprocess_class_name,
                                       overwrite_target_spacing=target_spacing,
                                       suppress_transpose=False,  # might expose this later
                                       **planner_kwargs)
    return planner.plan_experiment()
def plan_experiments(dataset_ids: List[int], experiment_planner_class_name: str = 'ExperimentPlanner',
                     gpu_memory_target_in_gb: float = 8, preprocess_class_name: str = 'DefaultPreprocessor',
                     overwrite_target_spacing: Optional[Tuple[float, ...]] = None,
                     overwrite_plans_name: Optional[str] = None):
    """
    Run experiment planning for several datasets with the same planner settings.

    overwrite_target_spacing ONLY applies to 3d_fullres and 3d_cascade fullres!
    """
    # resolve the planner class by name from nnunetv2.experiment_planning
    planner_cls = recursive_find_python_class(join(nnunetv2.__path__[0], "experiment_planning"),
                                              experiment_planner_class_name,
                                              current_module="nnunetv2.experiment_planning")
    for dataset_id in dataset_ids:
        plan_experiment_dataset(dataset_id, planner_cls, gpu_memory_target_in_gb, preprocess_class_name,
                                overwrite_target_spacing, overwrite_plans_name)
def preprocess_dataset(dataset_id: int,
                       plans_identifier: str = 'nnUNetPlans',
                       configurations: Union[Tuple[str], List[str]] = ('2d', '3d_fullres', '3d_lowres'),
                       num_processes: Union[int, Tuple[int, ...], List[int]] = (8, 4, 8),
                       verbose: bool = False) -> None:
    """
    Run the preprocessor of each requested configuration for one dataset.

    :param dataset_id: numeric dataset id (converted to a dataset name internally)
    :param plans_identifier: name of the plans file (without '.json') in nnUNet_preprocessed
    :param configurations: configurations to preprocess; ones missing from the plans are skipped
    :param num_processes: int or sequence; a single value is broadcast to all configurations,
        otherwise there must be one value per configuration
    :param verbose: forwarded to the preprocessor
    :raises RuntimeError: if len(num_processes) matches neither 1 nor len(configurations)
    """
    import os

    # normalize num_processes to a list. BUGFIX: the signature allows a plain int, but
    # list(int) raises TypeError, so ints need their own branch
    if isinstance(num_processes, int):
        num_processes = [num_processes]
    elif not isinstance(num_processes, list):
        num_processes = list(num_processes)
    if len(num_processes) == 1:
        num_processes = num_processes * len(configurations)
    if len(num_processes) != len(configurations):
        raise RuntimeError(
            f'The list provided with num_processes must either have len 1 or as many elements as there are '
            f'configurations (see --help). Number of configurations: {len(configurations)}, length '
            f'of num_processes: '
            f'{len(num_processes)}')

    dataset_name = convert_id_to_dataset_name(dataset_id)
    print(f'Preprocessing dataset {dataset_name}')
    plans_file = join(nnUNet_preprocessed, dataset_name, plans_identifier + '.json')
    plans_manager = PlansManager(plans_file)
    for n, c in zip(num_processes, configurations):
        print(f'Configuration: {c}...')
        if c not in plans_manager.available_configurations:
            print(
                f"INFO: Configuration {c} not found in plans file {plans_identifier + '.json'} of "
                f"dataset {dataset_name}. Skipping.")
            continue
        configuration_manager = plans_manager.get_configuration(c)
        preprocessor = configuration_manager.preprocessor_class(verbose=verbose)
        preprocessor.run(dataset_id, c, plans_identifier, num_processes=n)

    # copy the gt to a folder in the nnUNet_preprocessed so that we can do validation even if the raw data is no
    # longer there (useful for compute cluster where only the preprocessed data is available)
    maybe_mkdir_p(join(nnUNet_preprocessed, dataset_name, 'gt_segmentations'))
    dataset_json = load_json(join(nnUNet_raw, dataset_name, 'dataset.json'))
    dataset = get_filenames_of_train_images_and_targets(join(nnUNet_raw, dataset_name), dataset_json)
    # only copy labels that are newer than the ones already present. This replaces
    # distutils.file_util.copy_file(..., update=True): distutils was deprecated (PEP 632) and
    # removed in Python 3.12, which made the old import fail there.
    for k in dataset:
        src = dataset[k]['label']
        dst = join(nnUNet_preprocessed, dataset_name, 'gt_segmentations', k + dataset_json['file_ending'])
        if not os.path.isfile(dst) or os.path.getmtime(src) > os.path.getmtime(dst):
            shutil.copy2(src, dst)  # copy2 preserves mode/times, like distutils' copy_file did
def preprocess(dataset_ids: List[int],
               plans_identifier: str = 'nnUNetPlans',
               configurations: Union[Tuple[str], List[str]] = ('2d', '3d_fullres', '3d_lowres'),
               num_processes: Union[int, Tuple[int, ...], List[int]] = (8, 4, 8),
               verbose: bool = False):
    """Preprocess several datasets, forwarding all settings to preprocess_dataset per dataset."""
    for dataset_id in dataset_ids:
        preprocess_dataset(dataset_id, plans_identifier, configurations, num_processes, verbose)
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/plan_and_preprocess_entrypoints.py
================================================
from nnunetv2.configuration import default_num_processes
from nnunetv2.experiment_planning.plan_and_preprocess_api import extract_fingerprints, plan_experiments, preprocess
def extract_fingerprint_entry() -> None:
    """CLI entry point for fingerprint extraction: parses arguments and runs
    extract_fingerprints for the requested dataset IDs."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', nargs='+', type=int,
                        help="[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment "
                             "planning and preprocessing for these datasets. Can of course also be just one dataset")
    parser.add_argument('-fpe', type=str, required=False, default='DatasetFingerprintExtractor',
                        help='[OPTIONAL] Name of the Dataset Fingerprint Extractor class that should be used. Default is '
                             '\'DatasetFingerprintExtractor\'.')
    parser.add_argument('-np', type=int, default=default_num_processes, required=False,
                        help=f'[OPTIONAL] Number of processes used for fingerprint extraction. '
                             f'Default: {default_num_processes}')
    parser.add_argument("--verify_dataset_integrity", required=False, default=False, action="store_true",
                        help="[RECOMMENDED] set this flag to check the dataset integrity. This is useful and should be done once for "
                             "each dataset!")
    parser.add_argument("--clean", required=False, default=False, action="store_true",
                        help='[OPTIONAL] Set this flag to overwrite existing fingerprints. If this flag is not set and a '
                             'fingerprint already exists, the fingerprint extractor will not run.')
    parser.add_argument('--verbose', required=False, action='store_true',
                        help='Set this to print a lot of stuff. Useful for debugging. Will disable progress bar! '
                             'Recommended for cluster environments')
    # parse_known_args tolerates extra CLI arguments instead of erroring out
    args, unrecognized_args = parser.parse_known_args()
    extract_fingerprints(args.d, args.fpe, args.np, args.verify_dataset_integrity, args.clean, args.verbose)
def plan_experiment_entry() -> None:
    """CLI entry point for experiment planning: parses arguments and runs plan_experiments
    for the requested dataset IDs."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', nargs='+', type=int,
                        help="[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment "
                             "planning and preprocessing for these datasets. Can of course also be just one dataset")
    parser.add_argument('-pl', type=str, default='ExperimentPlanner', required=False,
                        help='[OPTIONAL] Name of the Experiment Planner class that should be used. Default is '
                             '\'ExperimentPlanner\'. Note: There is no longer a distinction between 2d and 3d planner. '
                             'It\'s an all in one solution now. Wuch. Such amazing.')
    parser.add_argument('-gpu_memory_target', default=8, type=float, required=False,
                        help='[OPTIONAL] DANGER ZONE! Sets a custom GPU memory target. Default: 8 [GB]. Changing this will '
                             'affect patch and batch size and will '
                             'definitely affect your models performance! Only use this if you really know what you '
                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')
    parser.add_argument('-preprocessor_name', default='DefaultPreprocessor', type=str, required=False,
                        help='[OPTIONAL] DANGER ZONE! Sets a custom preprocessor class. This class must be located in '
                             'nnunetv2.preprocessing. Default: \'DefaultPreprocessor\'. Changing this may affect your '
                             'models performance! Only use this if you really know what you '
                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')
    parser.add_argument('-overwrite_target_spacing', default=None, nargs='+', required=False,
                        help='[OPTIONAL] DANGER ZONE! Sets a custom target spacing for the 3d_fullres and 3d_cascade_fullres '
                             'configurations. Default: None [no changes]. Changing this will affect image size and '
                             'potentially patch and batch '
                             'size. This will definitely affect your models performance! Only use this if you really '
                             'know what you are doing and NEVER use this without running the default nnU-Net first '
                             '(as a baseline). Changing the target spacing for the other configurations is currently '
                             'not implemented. New target spacing must be a list of three numbers!')
    parser.add_argument('-overwrite_plans_name', default=None, required=False,
                        help='[OPTIONAL] DANGER ZONE! If you used -gpu_memory_target, -preprocessor_name or '
                             '-overwrite_target_spacing it is best practice to use -overwrite_plans_name to generate a '
                             'differently named plans file such that the nnunet default plans are not '
                             'overwritten. You will then need to specify your custom plans file with -p whenever '
                             'running other nnunet commands (training, inference etc)')
    # parse_known_args tolerates extra CLI arguments instead of erroring out
    args, unrecognized_args = parser.parse_known_args()
    plan_experiments(args.d, args.pl, args.gpu_memory_target, args.preprocessor_name, args.overwrite_target_spacing,
                     args.overwrite_plans_name)
def preprocess_entry() -> None:
    """CLI entry point for preprocessing: parses arguments and runs preprocess for the
    requested dataset IDs and configurations."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', nargs='+', type=int,
                        help="[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment "
                             "planning and preprocessing for these datasets. Can of course also be just one dataset")
    parser.add_argument('-plans_name', default='nnUNetPlans', required=False,
                        help='[OPTIONAL] You can use this to specify a custom plans file that you may have generated')
    parser.add_argument('-c', required=False, default=['2d', '3d_fullres', '3d_lowres'], nargs='+',
                        help='[OPTIONAL] Configurations for which the preprocessing should be run. Default: 2d 3d_fullres '
                             '3d_lowres. 3d_cascade_fullres does not need to be specified because it uses the data '
                             'from 3d_fullres. Configurations that do not exist for some dataset will be skipped.')
    parser.add_argument('-np', type=int, nargs='+', default=[8, 4, 8], required=False,
                        help="[OPTIONAL] Use this to define how many processes are to be used. If this is just one number then "
                             "this number of processes is used for all configurations specified with -c. If it's a "
                             "list of numbers this list must have as many elements as there are configurations. We "
                             "then iterate over zip(configs, num_processes) to determine then umber of processes "
                             "used for each configuration. More processes is always faster (up to the number of "
                             "threads your PC can support, so 8 for a 4 core CPU with hyperthreading. If you don't "
                             "know what that is then dont touch it, or at least don't increase it!). DANGER: More "
                             "often than not the number of processes that can be used is limited by the amount of "
                             "RAM available. Image resampling takes up a lot of RAM. MONITOR RAM USAGE AND "
                             "DECREASE -np IF YOUR RAM FILLS UP TOO MUCH!. Default: 8 processes for 2d, 4 "
                             "for 3d_fullres, 8 for 3d_lowres and 4 for everything else")
    parser.add_argument('--verbose', required=False, action='store_true',
                        help='Set this to print a lot of stuff. Useful for debugging. Will disable progress bar! '
                             'Recommended for cluster environments')
    args, unrecognized_args = parser.parse_known_args()
    # note: -np defaults to [8, 4, 8], so this branch is only reached if a caller passes None
    # explicitly; it is kept as a robust fallback
    if args.np is None:
        # defaults consistent with the help text above and with plan_and_preprocess_entry
        default_np = {
            '2d': 8,
            '3d_lowres': 8,
            '3d_fullres': 4
        }
        # BUGFIX: this was a set comprehension ({...}), which discarded the per-configuration
        # order and collapsed duplicates; preprocess() zips num_processes with configurations,
        # so an ordered list is required
        np = [default_np[c] if c in default_np.keys() else 4 for c in args.c]
    else:
        np = args.np
    preprocess(args.d, args.plans_name, configurations=args.c, num_processes=np, verbose=args.verbose)
def plan_and_preprocess_entry() -> None:
    """CLI entry point that chains fingerprint extraction, experiment planning and (optionally)
    preprocessing for the requested dataset IDs."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', nargs='+', type=int,
                        help="[REQUIRED] List of dataset IDs. Example: 2 4 5. This will run fingerprint extraction, experiment "
                             "planning and preprocessing for these datasets. Can of course also be just one dataset")
    parser.add_argument('-fpe', type=str, required=False, default='DatasetFingerprintExtractor',
                        help='[OPTIONAL] Name of the Dataset Fingerprint Extractor class that should be used. Default is '
                             '\'DatasetFingerprintExtractor\'.')
    parser.add_argument('-npfp', type=int, default=8, required=False,
                        help='[OPTIONAL] Number of processes used for fingerprint extraction. Default: 8')
    parser.add_argument("--verify_dataset_integrity", required=False, default=False, action="store_true",
                        help="[RECOMMENDED] set this flag to check the dataset integrity. This is useful and should be done once for "
                             "each dataset!")
    parser.add_argument('--no_pp', default=False, action='store_true', required=False,
                        help='[OPTIONAL] Set this to only run fingerprint extraction and experiment planning (no '
                             'preprocesing). Useful for debugging.')
    parser.add_argument("--clean", required=False, default=False, action="store_true",
                        help='[OPTIONAL] Set this flag to overwrite existing fingerprints. If this flag is not set and a '
                             'fingerprint already exists, the fingerprint extractor will not run. REQUIRED IF YOU '
                             'CHANGE THE DATASET FINGERPRINT EXTRACTOR OR MAKE CHANGES TO THE DATASET!')
    parser.add_argument('-pl', type=str, default='ExperimentPlanner', required=False,
                        help='[OPTIONAL] Name of the Experiment Planner class that should be used. Default is '
                             '\'ExperimentPlanner\'. Note: There is no longer a distinction between 2d and 3d planner. '
                             'It\'s an all in one solution now. Wuch. Such amazing.')
    # NOTE(review): type=int here but type=float in plan_experiment_entry — confirm which is intended
    parser.add_argument('-gpu_memory_target', default=8, type=int, required=False,
                        help='[OPTIONAL] DANGER ZONE! Sets a custom GPU memory target. Default: 8 [GB]. Changing this will '
                             'affect patch and batch size and will '
                             'definitely affect your models performance! Only use this if you really know what you '
                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')
    parser.add_argument('-preprocessor_name', default='DefaultPreprocessor', type=str, required=False,
                        help='[OPTIONAL] DANGER ZONE! Sets a custom preprocessor class. This class must be located in '
                             'nnunetv2.preprocessing. Default: \'DefaultPreprocessor\'. Changing this may affect your '
                             'models performance! Only use this if you really know what you '
                             'are doing and NEVER use this without running the default nnU-Net first (as a baseline).')
    parser.add_argument('-overwrite_target_spacing', default=None, nargs='+', required=False,
                        help='[OPTIONAL] DANGER ZONE! Sets a custom target spacing for the 3d_fullres and 3d_cascade_fullres '
                             'configurations. Default: None [no changes]. Changing this will affect image size and '
                             'potentially patch and batch '
                             'size. This will definitely affect your models performance! Only use this if you really '
                             'know what you are doing and NEVER use this without running the default nnU-Net first '
                             '(as a baseline). Changing the target spacing for the other configurations is currently '
                             'not implemented. New target spacing must be a list of three numbers!')
    parser.add_argument('-overwrite_plans_name', default='nnUNetPlans', required=False,
                        help='[OPTIONAL] uSE A CUSTOM PLANS IDENTIFIER. If you used -gpu_memory_target, '
                             '-preprocessor_name or '
                             '-overwrite_target_spacing it is best practice to use -overwrite_plans_name to generate a '
                             'differently named plans file such that the nnunet default plans are not '
                             'overwritten. You will then need to specify your custom plans file with -p whenever '
                             'running other nnunet commands (training, inference etc)')
    parser.add_argument('-c', required=False, default=['2d', '3d_fullres', '3d_lowres'], nargs='+',
                        help='[OPTIONAL] Configurations for which the preprocessing should be run. Default: 2d 3d_fullres '
                             '3d_lowres. 3d_cascade_fullres does not need to be specified because it uses the data '
                             'from 3d_fullres. Configurations that do not exist for some dataset will be skipped.')
    parser.add_argument('-np', type=int, nargs='+', default=None, required=False,
                        help="[OPTIONAL] Use this to define how many processes are to be used. If this is just one number then "
                             "this number of processes is used for all configurations specified with -c. If it's a "
                             "list of numbers this list must have as many elements as there are configurations. We "
                             "then iterate over zip(configs, num_processes) to determine then umber of processes "
                             "used for each configuration. More processes is always faster (up to the number of "
                             "threads your PC can support, so 8 for a 4 core CPU with hyperthreading. If you don't "
                             "know what that is then dont touch it, or at least don't increase it!). DANGER: More "
                             "often than not the number of processes that can be used is limited by the amount of "
                             "RAM available. Image resampling takes up a lot of RAM. MONITOR RAM USAGE AND "
                             "DECREASE -np IF YOUR RAM FILLS UP TOO MUCH!. Default: 8 processes for 2d, 4 "
                             "for 3d_fullres, 8 for 3d_lowres and 4 for everything else")
    parser.add_argument('--verbose', required=False, action='store_true',
                        help='Set this to print a lot of stuff. Useful for debugging. Will disable progress bar! '
                             'Recommended for cluster environments')
    args = parser.parse_args()

    # fingerprint extraction
    print("Fingerprint extraction...")
    extract_fingerprints(args.d, args.fpe, args.npfp, args.verify_dataset_integrity, args.clean, args.verbose)

    # experiment planning
    print('Experiment planning...')
    plan_experiments(args.d, args.pl, args.gpu_memory_target, args.preprocessor_name, args.overwrite_target_spacing, args.overwrite_plans_name)

    # manage default np: per-configuration process counts when -np was not given
    if args.np is None:
        default_np = {"2d": 8, "3d_fullres": 4, "3d_lowres": 8}
        np = [default_np[c] if c in default_np.keys() else 4 for c in args.c]
    else:
        np = args.np

    # preprocessing (skipped when --no_pp is set)
    if not args.no_pp:
        print('Preprocessing...')
        preprocess(args.d, args.overwrite_plans_name, args.c, np, args.verbose)
if __name__ == '__main__':
    # allow running this module directly (e.g. for debugging) instead of via the console script
    plan_and_preprocess_entry()
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/plans_for_pretraining/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/plans_for_pretraining/move_plans_between_datasets.py
================================================
import argparse
from typing import Union
from batchgenerators.utilities.file_and_folder_operations import join, isdir, isfile, load_json, subfiles, save_json
from nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json
from nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw
from nnunetv2.utilities.file_path_utilities import maybe_convert_to_dataset_name
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager
from nnunetv2.utilities.utils import get_filenames_of_train_images_and_targets
def move_plans_between_datasets(
        source_dataset_name_or_id: Union[int, str],
        target_dataset_name_or_id: Union[int, str],
        source_plans_identifier: str,
        target_plans_identifier: str = None):
    """
    Copy a plans file from the source dataset's preprocessed folder to the target dataset's,
    adapting dataset name, data identifiers and the image reader/writer class on the way
    (useful e.g. for reusing pretraining plans on another dataset).

    :param source_dataset_name_or_id: dataset name or numeric id to copy plans from
    :param target_dataset_name_or_id: dataset name or numeric id to copy plans to
    :param source_plans_identifier: plans file name (without '.json') in the source folder
    :param target_plans_identifier: name for the copied plans file; defaults to the source
        identifier when None
    :raises AssertionError: if the source preprocessed folder or plans file is missing
    """
    source_dataset_name = maybe_convert_to_dataset_name(source_dataset_name_or_id)
    target_dataset_name = maybe_convert_to_dataset_name(target_dataset_name_or_id)

    if target_plans_identifier is None:
        target_plans_identifier = source_plans_identifier

    source_folder = join(nnUNet_preprocessed, source_dataset_name)
    assert isdir(source_folder), f"Cannot move plans because preprocessed directory of source dataset is missing. " \
                                 f"Run nnUNetv2_plan_and_preprocess for source dataset first!"

    source_plans_file = join(source_folder, source_plans_identifier + '.json')
    assert isfile(source_plans_file), f"Source plans are missing. Run the corresponding experiment planning first! " \
                                      f"Expected file: {source_plans_file}"

    source_plans = load_json(source_plans_file)
    source_plans['dataset_name'] = target_dataset_name

    # we need to change data_identifier to use target_plans_identifier so preprocessed data of
    # the copied plans does not clash with data of the source plans
    if target_plans_identifier != source_plans_identifier:
        for c in source_plans['configurations'].keys():
            if 'data_identifier' in source_plans['configurations'][c].keys():
                old_identifier = source_plans['configurations'][c]["data_identifier"]
                if old_identifier.startswith(source_plans_identifier):
                    new_identifier = target_plans_identifier + old_identifier[len(source_plans_identifier):]
                else:
                    new_identifier = target_plans_identifier + '_' + old_identifier
                source_plans['configurations'][c]["data_identifier"] = new_identifier

    # we may need to change the reader/writer class: determine it from the target dataset
    target_raw_data_dir = join(nnUNet_raw, target_dataset_name)
    target_dataset_json = load_json(join(target_raw_data_dir, 'dataset.json'))
    dataset = get_filenames_of_train_images_and_targets(target_raw_data_dir, target_dataset_json)

    # pick any training image of the target dataset as an example file
    # (was dataset.keys().__iter__().__next__() — next(iter(...)) is the idiomatic form)
    example_image = dataset[next(iter(dataset.keys()))]['images'][0]
    rw = determine_reader_writer_from_dataset_json(target_dataset_json, example_image, allow_nonmatching_filename=True,
                                                   verbose=False)
    source_plans["image_reader_writer"] = rw.__name__

    # target_plans_identifier was normalized to a non-None value above, so no guard is needed
    source_plans["plans_name"] = target_plans_identifier

    save_json(source_plans, join(nnUNet_preprocessed, target_dataset_name, target_plans_identifier + '.json'),
              sort_keys=False)
def entry_point_move_plans_between_datasets():
    """Command line entry point: parses -s/-t/-sp/-tp and forwards them to move_plans_between_datasets."""
    cli = argparse.ArgumentParser()
    cli.add_argument('-s', type=str, required=True,
                     help='Source dataset name or id')
    cli.add_argument('-t', type=str, required=True,
                     help='Target dataset name or id')
    cli.add_argument('-sp', type=str, required=True,
                     help='Source plans identifier. If your plans are named "nnUNetPlans.json" then the '
                          'identifier would be nnUNetPlans')
    cli.add_argument('-tp', type=str, required=False, default=None,
                     help='Target plans identifier. Default is None meaning the source plans identifier will '
                          'be kept. Not recommended if the source plans identifier is a default nnU-Net identifier '
                          'such as nnUNetPlans!!!')
    parsed = cli.parse_args()
    move_plans_between_datasets(parsed.s, parsed.t, parsed.sp, parsed.tp)
if __name__ == '__main__':
    # Ad-hoc developer invocation with hard-coded dataset ids and plans identifiers;
    # the regular CLI path is entry_point_move_plans_between_datasets above.
    move_plans_between_datasets(2, 4, 'nnUNetPlans', 'nnUNetPlansFrom2')
================================================
FILE: Finetune/nnUNet/nnunetv2/experiment_planning/verify_dataset_integrity.py
================================================
# Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center
# (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import multiprocessing
import re
from multiprocessing import Pool
from typing import Type
import numpy as np
import pandas as pd
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json
from nnunetv2.paths import nnUNet_raw
from nnunetv2.utilities.label_handling.label_handling import LabelManager
from nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \
get_filenames_of_train_images_and_targets
def verify_labels(label_file: str, readerclass: Type[BaseReaderWriter], expected_labels: List[int]) -> bool:
    """
    Checks that the segmentation stored in label_file only contains values from expected_labels.

    :param label_file: path to the segmentation file
    :param readerclass: BaseReaderWriter subclass used to load the file
    :param expected_labels: all label values that are allowed to appear
    :return: True if only expected labels were found, False otherwise (problems are printed, not raised)
    """
    rw = readerclass()
    seg, properties = rw.read_seg(label_file)
    found_labels = np.sort(pd.unique(seg.ravel()))  # pd.unique used instead of np.unique for speed
    unexpected_labels = [i for i in found_labels if i not in expected_labels]
    # bugfix: this condition used to be 'len(found_labels) == 0', which can never be true for a loaded
    # image (there is always at least one value) and short-circuited before the index access, making the
    # warning unreachable. The intent is to warn when ONLY the background label is present.
    if len(found_labels) == 1 and found_labels[0] == 0:
        print('WARNING: File %s only has label 0 (which should be background). This may be intentional or not, '
              'up to you.' % label_file)
    if len(unexpected_labels) > 0:
        print("Error: Unexpected labels found in file %s.\nExpected: %s\nFound: %s" % (label_file, expected_labels,
                                                                                       found_labels))
        return False
    return True
def check_cases(image_files: List[str], label_file: str, expected_num_channels: int,
                readerclass: Type[BaseReaderWriter]) -> bool:
    """
    Verifies that one training case (all image channels plus its segmentation) is internally consistent.

    Hard checks (flip the return value to False): NaN pixels, shape mismatch between image and seg,
    spacing mismatch, unexpected number of channels. Soft checks (printed warnings only): affine,
    origin and direction mismatches, depending on which reader was used.

    :param image_files: all channel files of one case
    :param label_file: segmentation file of that case
    :param expected_num_channels: number of channels the dataset.json declares
    :param readerclass: BaseReaderWriter subclass used to load the files
    :return: True if all hard checks passed, False otherwise
    """
    rw = readerclass()
    ret = True

    images, properties_image = rw.read_images(image_files)
    segmentation, properties_seg = rw.read_seg(label_file)

    # check for nans
    if np.any(np.isnan(images)):
        print(f'Images contain NaN pixel values. You need to fix that by '
              f'replacing NaN values with something that makes sense for your images!\nImages:\n{image_files}')
        ret = False
    if np.any(np.isnan(segmentation)):
        print(f'Segmentation contains NaN pixel values. You need to fix that.\nSegmentation:\n{label_file}')
        ret = False

    # check shapes (spatial dims only; axis 0 is the channel axis)
    shape_image = images.shape[1:]
    shape_seg = segmentation.shape[1:]
    if shape_image != shape_seg:
        print('Error: Shape mismatch between segmentation and corresponding images. \nShape images: %s. '
              '\nShape seg: %s. \nImage files: %s. \nSeg file: %s\n' %
              (shape_image, shape_seg, image_files, label_file))
        ret = False

    # check spacings
    spacing_images = properties_image['spacing']
    spacing_seg = properties_seg['spacing']
    if not np.allclose(spacing_seg, spacing_images):
        # bugfix: this message used to print the shapes instead of the mismatching spacings
        print('Error: Spacing mismatch between segmentation and corresponding images. \nSpacing images: %s. '
              '\nSpacing seg: %s. \nImage files: %s. \nSeg file: %s\n' %
              (spacing_images, spacing_seg, image_files, label_file))
        ret = False

    # check modalities
    if len(images) != expected_num_channels:
        print('Error: Unexpected number of modalities. \nExpected: %d. \nGot: %d. \nImages: %s\n'
              % (expected_num_channels, len(images), image_files))
        ret = False

    # nibabel checks (only available if the case was read with NibabelIO)
    if 'nibabel_stuff' in properties_image.keys():
        # this image was read with NibabelIO
        affine_image = properties_image['nibabel_stuff']['original_affine']
        affine_seg = properties_seg['nibabel_stuff']['original_affine']
        if not np.allclose(affine_image, affine_seg):
            print('WARNING: Affine is not the same for image and seg! \nAffine image: %s \nAffine seg: %s\n'
                  'Image files: %s. \nSeg file: %s.\nThis can be a problem but doesn\'t have to be. Please run '
                  'nnUNet_plot_dataset_pngs to verify if everything is OK!\n'
                  % (affine_image, affine_seg, image_files, label_file))

    # sitk checks (only available if the case was read with SimpleITKIO)
    if 'sitk_stuff' in properties_image.keys():
        # this image was read with SimpleITKIO
        # spacing has already been checked, only check direction and origin
        origin_image = properties_image['sitk_stuff']['origin']
        origin_seg = properties_seg['sitk_stuff']['origin']
        if not np.allclose(origin_image, origin_seg):
            print('Warning: Origin mismatch between segmentation and corresponding images. \nOrigin images: %s. '
                  '\nOrigin seg: %s. \nImage files: %s. \nSeg file: %s\n' %
                  (origin_image, origin_seg, image_files, label_file))
        direction_image = properties_image['sitk_stuff']['direction']
        direction_seg = properties_seg['sitk_stuff']['direction']
        if not np.allclose(direction_image, direction_seg):
            print('Warning: Direction mismatch between segmentation and corresponding images. \nDirection images: %s. '
                  '\nDirection seg: %s. \nImage files: %s. \nSeg file: %s\n' %
                  (direction_image, direction_seg, image_files, label_file))

    return ret
def verify_dataset_integrity(folder: str, num_processes: int = 8) -> None:
    """
    folder needs the imagesTr, imagesTs and labelsTr subfolders. There also needs to be a dataset.json
    checks if the expected number of training cases and labels are present
    for each case, if possible, checks whether the pixel grids are aligned
    checks whether the labels really only contain values they should

    :param folder: path to the raw dataset folder (the one containing dataset.json)
    :param num_processes: number of worker processes used for the per-case checks
    :return: None. Hard problems raise/assert; soft problems are printed as warnings.
    """
    assert isfile(join(folder, "dataset.json")), f"There needs to be a dataset.json file in folder, folder={folder}"
    dataset_json = load_json(join(folder, "dataset.json"))
    # imagesTr/labelsTr are only mandatory when dataset.json does not list files explicitly
    # via its 'dataset' key (see the branch further down)
    if not 'dataset' in dataset_json.keys():
        assert isdir(join(folder, "imagesTr")), f"There needs to be a imagesTr subfolder in folder, folder={folder}"
        assert isdir(join(folder, "labelsTr")), f"There needs to be a labelsTr subfolder in folder, folder={folder}"

    # make sure all required keys are there
    dataset_keys = list(dataset_json.keys())
    required_keys = ['labels', "channel_names", "numTraining", "file_ending"]
    assert all([i in dataset_keys for i in required_keys]), 'not all required keys are present in dataset.json.' \
                                                           '\n\nRequired: \n%s\n\nPresent: \n%s\n\nMissing: ' \
                                                           '\n%s\n\nUnused by nnU-Net:\n%s' % \
                                                           (str(required_keys),
                                                            str(dataset_keys),
                                                            str([i for i in required_keys if i not in dataset_keys]),
                                                            str([i for i in dataset_keys if i not in required_keys]))

    expected_num_training = dataset_json['numTraining']
    # 'modality' is the legacy key name; 'channel_names' is preferred if present
    num_modalities = len(dataset_json['channel_names'].keys()
                         if 'channel_names' in dataset_json.keys()
                         else dataset_json['modality'].keys())
    file_ending = dataset_json['file_ending']

    dataset = get_filenames_of_train_images_and_targets(folder, dataset_json)

    # check if the right number of training cases is present
    assert len(dataset) == expected_num_training, 'Did not find the expected number of training cases ' \
                                                  '(%d). Found %d instead.\nExamples: %s' % \
                                                  (expected_num_training, len(dataset),
                                                   list(dataset.keys())[:5])

    # check if corresponding labels are present
    if 'dataset' in dataset_json.keys():
        # just check if everything is there
        ok = True
        missing_images = []
        missing_labels = []
        for k in dataset:
            for i in dataset[k]['images']:
                if not isfile(i):
                    missing_images.append(i)
                    ok = False
            if not isfile(dataset[k]['label']):
                missing_labels.append(dataset[k]['label'])
                ok = False
        if not ok:
            raise FileNotFoundError(f"Some expected files were missing. Make sure you are properly referencing them "
                                    f"in the dataset.json. Or use imagesTr & labelsTr folders!\nMissing images:"
                                    f"\n{missing_images}\n\nMissing labels:\n{missing_labels}")
    else:
        # old code that uses imagestr and labelstr folders
        labelfiles = subfiles(join(folder, 'labelsTr'), suffix=file_ending, join=False)
        label_identifiers = [i[:-len(file_ending)] for i in labelfiles]
        labels_present = [i in label_identifiers for i in dataset.keys()]
        missing = [i for j, i in enumerate(dataset.keys()) if not labels_present[j]]
        assert all(labels_present), f'not all training cases have a label file in labelsTr. Fix that. Missing: {missing}'

    labelfiles = [v['label'] for v in dataset.values()]
    image_files = [v['images'] for v in dataset.values()]

    # no plans exist yet, so we can't use PlansManager and gotta roll with the default. It's unlikely to cause
    # problems anyway
    label_manager = LabelManager(dataset_json['labels'], regions_class_order=dataset_json.get('regions_class_order'))
    expected_labels = label_manager.all_labels
    if label_manager.has_ignore_label:
        expected_labels.append(label_manager.ignore_label)
    labels_valid_consecutive = np.ediff1d(expected_labels) == 1
    assert all(
        labels_valid_consecutive), f'Labels must be in consecutive order (0, 1, 2, ...). The labels {np.array(expected_labels)[1:][~labels_valid_consecutive]} do not satisfy this restriction'

    # determine reader/writer class from an arbitrary first case of the dataset
    reader_writer_class = determine_reader_writer_from_dataset_json(dataset_json, dataset[dataset.keys().__iter__().__next__()]['images'][0])

    # check whether only the desired labels are present
    with multiprocessing.get_context("spawn").Pool(num_processes) as p:
        result = p.starmap(
            verify_labels,
            # NOTE(review): labelfiles contains full paths here (assigned from dataset values above), so
            # join(folder, 'labelsTr', i) appears to rely on join() discarding its prefix when given an
            # absolute path — confirm this also behaves as intended for the path styles used on Windows
            zip([join(folder, 'labelsTr', i) for i in labelfiles], [reader_writer_class] * len(labelfiles),
                [expected_labels] * len(labelfiles))
        )
        if not all(result):
            raise RuntimeError(
                'Some segmentation images contained unexpected labels. Please check text output above to see which one(s).')

        # check whether shapes and spacings match between images and labels
        result = p.starmap(
            check_cases,
            zip(image_files, labelfiles, [num_modalities] * expected_num_training,
                [reader_writer_class] * expected_num_training)
        )
        if not all(result):
            raise RuntimeError(
                'Some images have errors. Please check text output above to see which one(s) and what\'s going on.')

    # check for nans
    # check all same orientation nibabel
    print('\n####################')
    print('verify_dataset_integrity Done. \nIf you didn\'t see any error messages then your dataset is most likely OK!')
    print('####################\n')
if __name__ == "__main__":
    # investigate geometry issues
    # Developer-local smoke test; the dataset name below is specific to the author's machine.
    example_folder = join(nnUNet_raw, 'Dataset250_COMPUTING_it0')
    num_processes = 6
    verify_dataset_integrity(example_folder, num_processes)
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/base_reader_writer.py
================================================
# Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center
# (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from typing import Tuple, Union, List
import numpy as np
class BaseReaderWriter(ABC):
    """
    Abstract base for all nnU-Net image/segmentation I/O adapters. Subclasses implement reading of
    images and segmentations into numpy arrays (plus a metadata dict) and exporting of predicted
    segmentations back to disk.
    """

    @staticmethod
    def _check_all_same(input_list):
        # True when every entry equals the first one; empty and single-element lists trivially pass.
        if not input_list:
            return True
        reference = input_list[0]
        return all(entry == reference for entry in input_list[1:])

    @staticmethod
    def _check_all_same_array(input_list):
        # Array variant: entries must match the first one in both shape and (allclose) values.
        if not input_list:
            return True
        reference = input_list[0]
        return all(entry.shape == reference.shape and np.allclose(entry, reference)
                   for entry in input_list[1:])

    @abstractmethod
    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:
        """
        Read a sequence of image files and return a 4d array plus a metadata dict.

        The array must be shaped (c, x, y, z): channel/modality axis first, then the three spatial
        axes. For 2d images use x=1, e.g. an RGB image becomes (3, 1, H, W).

        The metadata dict is handed back to write_seg when exporting predictions, so store everything
        needed to restore the original geometry (spacing, orientation, direction, ...). It MUST
        contain a 'spacing' entry: a tuple/list of length 3 whose order matches the spatial axes of
        the returned array (spacing[0] belongs to x, [1] to y, [2] to z). Images without a physical
        spacing use 1. For 2d data the first spacing value must be a dummy that is LARGER than the
        other two, conventionally 999, e.g. shape (3, 1, 224, 224) with spacing (999, 1, 1).

        :param image_fnames: filenames of all channels of one case
        :return: (array of shape (c, x, y, z), metadata dict with at least a 'spacing' key)
        """
        pass

    @abstractmethod
    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:
        """
        Read a single segmentation file. Same contract as read_images, but the returned array must be
        shaped (1, x, y, z) — exactly one segmentation per case. If segmentations can be read like
        images, implementations may simply `return self.read_images((seg_fname,))`.

        :param seg_fname: filename of the segmentation
        :return: (array of shape (1, x, y, z), metadata dict with at least a 'spacing' key)
        """
        pass

    @abstractmethod
    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:
        """
        Export a predicted segmentation. `seg` has the same shape and orientation as the image data
        it was predicted from, so no resampling is needed — just restore the metadata from
        `properties` (the dict produced by read_images/read_seg) and save.

        Note that segmentations are always handed over as 3d arrays; for originally-2d data the
        shape is (1, y, z) and implementations must convert accordingly (e.g. seg = seg[0]).

        :param seg: integer segmentation of shape (x, y, z) ((1, y, z) for 2d data)
        :param output_fname: target filename
        :param properties: metadata dict created when the corresponding image was read
        """
        pass
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/natural_image_reader_writer.py
================================================
# Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center
# (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple, Union, List
import numpy as np
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from skimage import io
class NaturalImage2DIO(BaseReaderWriter):
    """
    Reader/writer for 2d natural images (grayscale or RGB/RGBA) via skimage.io.

    ONLY SUPPORTS 2D IMAGES!!!
    """
    # there are surely more we could add here. Everything that can be read by skimage.io should be supported
    supported_file_endings = [
        '.png',
        # '.jpg',
        # '.jpeg', # jpg not supported because we cannot allow lossy compression! segmentation maps!
        '.bmp',
        '.tif'
    ]

    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:
        """
        Reads the given 2d files and stacks them into a (c, 1, x, y) float32 array. The returned
        spacing is the dummy (999, 1, 1) required by nnU-Net for 2d data.
        """
        images = []
        for f in image_fnames:
            npy_img = io.imread(f)
            if npy_img.ndim == 3:
                # rgb image, last dimension should be the color channel and the size of that channel should be 3
                # (or 4 if we have alpha)
                assert npy_img.shape[-1] == 3 or npy_img.shape[-1] == 4, "If image has three dimensions then the last " \
                                                                         "dimension must have shape 3 or 4 " \
                                                                         f"(RGB or RGBA). Image shape here is {npy_img.shape}"
                # move RGB(A) to front, add additional dim so that we have shape (1, c, X, Y), where c is either 3 or 4
                images.append(npy_img.transpose((2, 0, 1))[:, None])
            elif npy_img.ndim == 2:
                # grayscale image
                images.append(npy_img[None, None])
            else:
                # robustness fix: files with any other dimensionality used to be silently skipped,
                # which only surfaced later as confusing shape/stacking errors. Fail loudly instead.
                raise RuntimeError(f"Unsupported image dimensionality {npy_img.ndim} in file {f}. "
                                   f"Only 2d (grayscale) and 3d (RGB/RGBA) arrays are supported.")

        if not self._check_all_same([i.shape for i in images]):
            print('ERROR! Not all input images have the same shape!')
            print('Shapes:')
            print([i.shape for i in images])
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()
        return np.vstack(images).astype(np.float32), {'spacing': (999, 1, 1)}

    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:
        """Segmentations are read exactly like images (single file)."""
        return self.read_images((seg_fname, ))

    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:
        """Saves seg (shape (1, y, z)) as a 2d uint8 image; contrast check disabled on purpose."""
        io.imsave(output_fname, seg[0].astype(np.uint8), check_contrast=False)
if __name__ == '__main__':
    # Manual smoke test with developer-local paths; not part of the public API.
    images = ('/media/fabian/data/nnUNet_raw/Dataset120_RoadSegmentation/imagesTr/img-11_0000.png',)
    segmentation = '/media/fabian/data/nnUNet_raw/Dataset120_RoadSegmentation/labelsTr/img-11.png'
    imgio = NaturalImage2DIO()
    img, props = imgio.read_images(images)
    seg, segprops = imgio.read_seg(segmentation)
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/nibabel_reader_writer.py
================================================
# Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center
# (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple, Union, List
import numpy as np
from nibabel import io_orientation
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
import nibabel
class NibabelIO(BaseReaderWriter):
    """
    Nibabel loads the images in a different order than sitk. We convert the axes to the sitk order to be
    consistent. This is of course considered properly in segmentation export as well.

    IMPORTANT: Run nnUNet_plot_dataset_pngs to verify that this did not destroy the alignment of data and seg!
    """
    supported_file_endings = [
        '.nii.gz',
        '.nrrd',
        '.mha'
    ]

    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:
        """
        Loads the given 3d files, transposes them into SimpleITK axis order and stacks them into a
        (c, x, y, z) float32 array. The metadata dict carries the original affine (needed by
        write_seg) and the nnU-Net spacing.
        """
        images = []
        original_affines = []
        spacings_for_nnunet = []
        for f in image_fnames:
            nib_image = nibabel.load(f)
            assert nib_image.ndim == 3, 'only 3d images are supported by NibabelIO'
            original_affine = nib_image.affine
            original_affines.append(original_affine)

            # spacing is taken in reverse order to be consistent with SimpleITK axis ordering (confusing, I know...)
            spacings_for_nnunet.append(
                [float(i) for i in nib_image.header.get_zooms()[::-1]]
            )

            # transpose image to be consistent with the way SimpleITk reads images. Yeah. Annoying.
            images.append(nib_image.get_fdata().transpose((2, 1, 0))[None])

        if not self._check_all_same([i.shape for i in images]):
            print('ERROR! Not all input images have the same shape!')
            print('Shapes:')
            print([i.shape for i in images])
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()
        if not self._check_all_same_array(original_affines):
            print('WARNING! Not all input images have the same original_affines!')
            print('Affines:')
            print(original_affines)
            print('Image files:')
            print(image_fnames)
            print('It is up to you to decide whether that\'s a problem. You should run nnUNet_plot_dataset_pngs to verify '
                  'that segmentations and data overlap.')
        if not self._check_all_same(spacings_for_nnunet):
            print('ERROR! Not all input images have the same spacing_for_nnunet! This might be caused by them not '
                  'having the same affine')
            print('spacings_for_nnunet:')
            print(spacings_for_nnunet)
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()

        stacked_images = np.vstack(images)
        # idiom fix: this local used to be named 'dict', shadowing the builtin
        meta = {
            'nibabel_stuff': {
                'original_affine': original_affines[0],
            },
            'spacing': spacings_for_nnunet[0]
        }
        return stacked_images.astype(np.float32), meta

    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:
        """Segmentations are read exactly like images (single file)."""
        return self.read_images((seg_fname, ))

    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:
        """Undoes the read-time transpose and saves seg with the original affine."""
        # revert transpose
        seg = seg.transpose((2, 1, 0)).astype(np.uint8)
        seg_nib = nibabel.Nifti1Image(seg, affine=properties['nibabel_stuff']['original_affine'])
        nibabel.save(seg_nib, output_fname)
class NibabelIOWithReorient(BaseReaderWriter):
    """
    Reorients images to RAS

    Nibabel loads the images in a different order than sitk. We convert the axes to the sitk order to be
    consistent. This is of course considered properly in segmentation export as well.

    IMPORTANT: Run nnUNet_plot_dataset_pngs to verify that this did not destroy the alignment of data and seg!
    """
    supported_file_endings = [
        '.nii.gz',
        '.nrrd',
        '.mha'
    ]

    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:
        """
        Like NibabelIO.read_images, but each image is first reoriented via io_orientation of its
        affine. Both the original and the reoriented affine are stored so write_seg can restore the
        original orientation.
        """
        images = []
        original_affines = []
        reoriented_affines = []
        spacings_for_nnunet = []
        for f in image_fnames:
            nib_image = nibabel.load(f)
            assert nib_image.ndim == 3, 'only 3d images are supported by NibabelIO'
            original_affine = nib_image.affine
            reoriented_image = nib_image.as_reoriented(io_orientation(original_affine))
            reoriented_affine = reoriented_image.affine
            original_affines.append(original_affine)
            reoriented_affines.append(reoriented_affine)

            # spacing is taken in reverse order to be consistent with SimpleITK axis ordering (confusing, I know...)
            spacings_for_nnunet.append(
                [float(i) for i in reoriented_image.header.get_zooms()[::-1]]
            )

            # transpose image to be consistent with the way SimpleITk reads images. Yeah. Annoying.
            images.append(reoriented_image.get_fdata().transpose((2, 1, 0))[None])

        if not self._check_all_same([i.shape for i in images]):
            print('ERROR! Not all input images have the same shape!')
            print('Shapes:')
            print([i.shape for i in images])
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()
        if not self._check_all_same_array(reoriented_affines):
            print('WARNING! Not all input images have the same reoriented_affines!')
            print('Affines:')
            print(reoriented_affines)
            print('Image files:')
            print(image_fnames)
            print('It is up to you to decide whether that\'s a problem. You should run nnUNet_plot_dataset_pngs to verify '
                  'that segmentations and data overlap.')
        if not self._check_all_same(spacings_for_nnunet):
            print('ERROR! Not all input images have the same spacing_for_nnunet! This might be caused by them not '
                  'having the same affine')
            print('spacings_for_nnunet:')
            print(spacings_for_nnunet)
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()

        stacked_images = np.vstack(images)
        # idiom fix: this local used to be named 'dict', shadowing the builtin
        meta = {
            'nibabel_stuff': {
                'original_affine': original_affines[0],
                'reoriented_affine': reoriented_affines[0],
            },
            'spacing': spacings_for_nnunet[0]
        }
        return stacked_images.astype(np.float32), meta

    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:
        """Segmentations are read exactly like images (single file)."""
        return self.read_images((seg_fname, ))

    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:
        """
        Undoes the read-time transpose, re-attaches the reoriented affine and reorients back to the
        original affine before saving. Asserts that the round trip restored the original affine.
        """
        # revert transpose
        seg = seg.transpose((2, 1, 0)).astype(np.uint8)
        seg_nib = nibabel.Nifti1Image(seg, affine=properties['nibabel_stuff']['reoriented_affine'])
        seg_nib_reoriented = seg_nib.as_reoriented(io_orientation(properties['nibabel_stuff']['original_affine']))
        assert np.allclose(properties['nibabel_stuff']['original_affine'], seg_nib_reoriented.affine), \
            'restored affine does not match original affine'
        nibabel.save(seg_nib_reoriented, output_fname)
if __name__ == '__main__':
    # Manual round-trip comparison of NibabelIO vs NibabelIOWithReorient on developer-local files;
    # the loaded s_* arrays are meant for interactive inspection, nothing is asserted here.
    img_file = 'patient028_frame01_0000.nii.gz'
    seg_file = 'patient028_frame01.nii.gz'

    nibio = NibabelIO()
    images, dct = nibio.read_images([img_file])
    seg, dctseg = nibio.read_seg(seg_file)

    nibio_r = NibabelIOWithReorient()
    images_r, dct_r = nibio_r.read_images([img_file])
    seg_r, dctseg_r = nibio_r.read_seg(seg_file)

    nibio.write_seg(seg[0], '/home/isensee/seg_nibio.nii.gz', dctseg)
    nibio_r.write_seg(seg_r[0], '/home/isensee/seg_nibio_r.nii.gz', dctseg_r)

    s_orig = nibabel.load(seg_file).get_fdata()
    s_nibio = nibabel.load('/home/isensee/seg_nibio.nii.gz').get_fdata()
    s_nibio_r = nibabel.load('/home/isensee/seg_nibio_r.nii.gz').get_fdata()
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/reader_writer_registry.py
================================================
import traceback
from typing import Type
from batchgenerators.utilities.file_and_folder_operations import join
import nnunetv2
from nnunetv2.imageio.natural_image_reader_writer import NaturalImage2DIO
from nnunetv2.imageio.nibabel_reader_writer import NibabelIO, NibabelIOWithReorient
from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO
from nnunetv2.imageio.tif_reader_writer import Tiff3DIO
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
# Order matters: determine_reader_writer_from_file_ending iterates this list and returns the first
# class whose supported_file_endings match (and which can read the example file, if one is given).
LIST_OF_IO_CLASSES = [
    NaturalImage2DIO,
    SimpleITKIO,
    Tiff3DIO,
    NibabelIO,
    NibabelIOWithReorient
]
def determine_reader_writer_from_dataset_json(dataset_json_content: dict, example_file: str = None,
                                              allow_nonmatching_filename: bool = False, verbose: bool = True
                                              ) -> Type[BaseReaderWriter]:
    """
    Picks the reader/writer class for a dataset. An 'overwrite_image_reader_writer' entry in
    dataset.json takes precedence if the named class can be found in nnunetv2.imageio; otherwise
    (or when lookup fails) the decision falls back to the dataset's file ending.
    """
    has_override = 'overwrite_image_reader_writer' in dataset_json_content.keys() and \
                   dataset_json_content['overwrite_image_reader_writer'] != 'None'
    if has_override:
        ioclass_name = dataset_json_content['overwrite_image_reader_writer']
        # trying to find that class in the nnunetv2.imageio module
        try:
            found_class = recursive_find_reader_writer_by_name(ioclass_name)
            if verbose:
                print(f'Using {found_class} reader/writer')
            return found_class
        except RuntimeError:
            if verbose:
                print(f'Warning: Unable to find ioclass specified in dataset.json: {ioclass_name}')
                print('Trying to automatically determine desired class')
    return determine_reader_writer_from_file_ending(dataset_json_content['file_ending'], example_file,
                                                    allow_nonmatching_filename, verbose)
def determine_reader_writer_from_file_ending(file_ending: str, example_file: str = None, allow_nonmatching_filename: bool = False,
                                             verbose: bool = True):
    """
    Returns the first class in LIST_OF_IO_CLASSES that supports file_ending. If example_file is given,
    a candidate is only accepted if it can actually read that file. With allow_nonmatching_filename=True,
    readers whose supported endings do NOT match are still tried on example_file as a fallback.

    :raises RuntimeError: if no reader/writer can handle the given file ending / example file
    """
    for rw in LIST_OF_IO_CLASSES:
        if file_ending.lower() in rw.supported_file_endings:
            if example_file is not None:
                # if an example file is provided, try if we can actually read it. If not move on to the next reader
                try:
                    tmp = rw()
                    _ = tmp.read_images((example_file,))
                    if verbose: print(f'Using {rw} as reader/writer')
                    return rw
                # bugfix: was a bare 'except:' which also swallows KeyboardInterrupt/SystemExit
                except Exception:
                    if verbose: print(f'Failed to open file {example_file} with reader {rw}:')
                    # consistency fix: traceback output is now gated on verbose, like in the branch below
                    if verbose: traceback.print_exc()
            else:
                if verbose: print(f'Using {rw} as reader/writer')
                return rw
        else:
            if allow_nonmatching_filename and example_file is not None:
                try:
                    tmp = rw()
                    _ = tmp.read_images((example_file,))
                    if verbose: print(f'Using {rw} as reader/writer')
                    return rw
                except Exception:
                    if verbose: print(f'Failed to open file {example_file} with reader {rw}:')
                    if verbose: traceback.print_exc()
    raise RuntimeError(f"Unable to determine a reader for file ending {file_ending} and file {example_file} (file None means no file provided).")
def recursive_find_reader_writer_by_name(rw_class_name: str) -> Type[BaseReaderWriter]:
    """Looks up a reader/writer class by name inside the nnunetv2.imageio package; raises RuntimeError if absent."""
    found = recursive_find_python_class(join(nnunetv2.__path__[0], "imageio"), rw_class_name, 'nnunetv2.imageio')
    if found is None:
        raise RuntimeError("Unable to find reader writer class '%s'. Please make sure this class is located in the "
                           "nnunetv2.imageio module." % rw_class_name)
    return found
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/readme.md
================================================
- Derive your adapter from `BaseReaderWriter`.
- Reimplement all abstractmethods.
- Make sure to support 2D and 3D input images (or raise a clear error).
- Place it in this folder or nnU-Net won't find it!
- Add it to `LIST_OF_IO_CLASSES` in `reader_writer_registry.py`.
Bam, you're done!
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/simpleitk_reader_writer.py
================================================
# Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center
# (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Tuple, Union, List
import numpy as np
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
import SimpleITK as sitk
class SimpleITKIO(BaseReaderWriter):
    """Image reader/writer backed by SimpleITK.

    ``read_images`` returns a stacked (c, z, y, x) float32 array plus a
    properties dict that stores both the raw sitk geometry (re-used by
    ``write_seg``) and the spacing in nnU-Net's (z, y, x) order.
    """
    supported_file_endings = [
        '.nii.gz',
        '.nrrd',
        '.mha'
    ]

    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:
        """Read all modality files of one case and verify geometric consistency.

        Raises RuntimeError if shapes or spacings differ between files; origin or
        direction mismatches only produce a warning.
        """
        images = []
        spacings = []
        origins = []
        directions = []
        spacings_for_nnunet = []
        for f in image_fnames:
            itk_image = sitk.ReadImage(f)
            spacings.append(itk_image.GetSpacing())
            origins.append(itk_image.GetOrigin())
            directions.append(itk_image.GetDirection())
            npy_image = sitk.GetArrayFromImage(itk_image)
            if npy_image.ndim == 2:
                # 2d: add channel and a fake z axis. The fake z spacing is chosen
                # much larger than the in-plane spacing.
                npy_image = npy_image[None, None]
                max_spacing = max(spacings[-1])
                spacings_for_nnunet.append((max_spacing * 999, *list(spacings[-1])[::-1]))
            elif npy_image.ndim == 3:
                # 3d, as in original nnunet
                npy_image = npy_image[None]
                spacings_for_nnunet.append(list(spacings[-1])[::-1])
            elif npy_image.ndim == 4:
                # 4d, multiple modalities in one file. The channel axis is already
                # present; only drop the channel entry from the (reversed) spacing.
                spacings_for_nnunet.append(list(spacings[-1])[::-1][1:])
            else:
                raise RuntimeError(f"Unexpected number of dimensions: {npy_image.ndim} in file {f}")
            images.append(npy_image)
            # spacing entries can come out negative depending on direction; nnU-Net
            # needs them positive
            spacings_for_nnunet[-1] = list(np.abs(spacings_for_nnunet[-1]))
        if not self._check_all_same([i.shape for i in images]):
            print('ERROR! Not all input images have the same shape!')
            print('Shapes:')
            print([i.shape for i in images])
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()
        if not self._check_all_same(spacings):
            print('ERROR! Not all input images have the same spacing!')
            print('Spacings:')
            print(spacings)
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()
        if not self._check_all_same(origins):
            print('WARNING! Not all input images have the same origin!')
            print('Origins:')
            print(origins)
            print('Image files:')
            print(image_fnames)
            print('It is up to you to decide whether that\'s a problem. You should run nnUNet_plot_dataset_pngs to verify '
                  'that segmentations and data overlap.')
        if not self._check_all_same(directions):
            print('WARNING! Not all input images have the same direction!')
            print('Directions:')
            print(directions)
            print('Image files:')
            print(image_fnames)
            print('It is up to you to decide whether that\'s a problem. You should run nnUNet_plot_dataset_pngs to verify '
                  'that segmentations and data overlap.')
        if not self._check_all_same(spacings_for_nnunet):
            print('ERROR! Not all input images have the same spacing_for_nnunet! (This should not happen and must be a '
                  'bug. Please report!')
            print('spacings_for_nnunet:')
            print(spacings_for_nnunet)
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()
        stacked_images = np.vstack(images)
        # fix: do not shadow the builtin 'dict' (the original used 'dict' as a
        # variable name here)
        properties = {
            'sitk_stuff': {
                # this saves the sitk geometry information. This part is NOT used by nnU-Net!
                'spacing': spacings[0],
                'origin': origins[0],
                'direction': directions[0]
            },
            # the spacing is inverted with [::-1] because sitk returns the spacing in the wrong order lol. Image arrays
            # are returned x,y,z but spacing is returned z,y,x. Duh.
            'spacing': spacings_for_nnunet[0]
        }
        return stacked_images.astype(np.float32), properties

    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:
        """Segmentations are read exactly like a single-modality image."""
        return self.read_images((seg_fname, ))

    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:
        """Write a 3d segmentation using the geometry stored by read_images.

        NOTE(review): the cast to uint8 silently truncates label values > 255,
        even though the read path allows uint16 label maps — confirm acceptable.
        """
        assert seg.ndim == 3, 'segmentation must be 3d. If you are exporting a 2d segmentation, please provide it as shape 1,x,y'
        output_dimension = len(properties['sitk_stuff']['spacing'])
        assert 1 < output_dimension < 4
        if output_dimension == 2:
            seg = seg[0]
        itk_image = sitk.GetImageFromArray(seg.astype(np.uint8))
        itk_image.SetSpacing(properties['sitk_stuff']['spacing'])
        itk_image.SetOrigin(properties['sitk_stuff']['origin'])
        itk_image.SetDirection(properties['sitk_stuff']['direction'])
        sitk.WriteImage(itk_image, output_fname, True)  # True -> compressed output
================================================
FILE: Finetune/nnUNet/nnunetv2/imageio/tif_reader_writer.py
================================================
# Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center
# (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
from typing import Tuple, Union, List
import numpy as np
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
import tifffile
from batchgenerators.utilities.file_and_folder_operations import isfile, load_json, save_json, split_path, join
class Tiff3DIO(BaseReaderWriter):
    """
    reads and writes 3D tif(f) images. Uses tifffile package. Ignores metadata (for now)!
    If you have 2D tiffs, use NaturalImage2DIO

    Supports the use of auxiliary files for spacing information. If used, the auxiliary files are expected to end
    with .json and omit the channel identifier. So, for example, the corresponding of image image1_0000.tif is
    expected to be image1.json)!
    """
    supported_file_endings = [
        '.tif',
        '.tiff',
    ]

    def read_images(self, image_fnames: Union[List[str], Tuple[str, ...]]) -> Tuple[np.ndarray, dict]:
        """Read 3d tiff modality files and stack them into a (c, z, y, x) float32 array.

        Spacing is taken from the optional sidecar json (image1.json next to
        image1_0000.tif); it defaults to (1, 1, 1) if that file is absent.
        """
        # figure out file ending used here
        ending = '.' + image_fnames[0].split('.')[-1]
        assert ending.lower() in self.supported_file_endings, f'Ending {ending} not supported by {self.__class__.__name__}'
        ending_length = len(ending)
        truncate_length = ending_length + 5  # 5 comes from len(_0000)
        images = []
        for f in image_fnames:
            image = tifffile.imread(f)
            if image.ndim != 3:
                raise RuntimeError(f"Only 3D images are supported! File: {f}")
            images.append(image[None])
        # see if aux file can be found (channel identifier _0000 is stripped)
        expected_aux_file = image_fnames[0][:-truncate_length] + '.json'
        if isfile(expected_aux_file):
            spacing = load_json(expected_aux_file)['spacing']
            assert len(spacing) == 3, f'spacing must have 3 entries, one for each dimension of the image. File: {expected_aux_file}'
            # fix: validate spacing here too, for consistency with read_seg below
            assert all([i > 0 for i in spacing]), f"Spacing must be > 0, spacing: {spacing}"
        else:
            print(f'WARNING no spacing file found for images {image_fnames}\nAssuming spacing (1, 1, 1).')
            spacing = (1, 1, 1)
        if not self._check_all_same([i.shape for i in images]):
            print('ERROR! Not all input images have the same shape!')
            print('Shapes:')
            print([i.shape for i in images])
            print('Image files:')
            print(image_fnames)
            raise RuntimeError()
        return np.vstack(images).astype(np.float32), {'spacing': spacing}

    def write_seg(self, seg: np.ndarray, output_fname: str, properties: dict) -> None:
        """Write the segmentation as a compressed tiff plus a sidecar json holding
        the spacing.

        Not ideal, but there is no reliable standard way of storing
        spacing/resolution information inside tif files.
        """
        tifffile.imwrite(output_fname, data=seg.astype(np.uint8), compression='zlib')
        file = os.path.basename(output_fname)
        out_dir = os.path.dirname(output_fname)
        ending = file.split('.')[-1]
        save_json({'spacing': properties['spacing']}, join(out_dir, file[:-(len(ending) + 1)] + '.json'))

    def read_seg(self, seg_fname: str) -> Tuple[np.ndarray, dict]:
        """Read a 3d tiff segmentation as a (1, z, y, x) float32 array with its spacing."""
        # figure out file ending used here
        ending = '.' + seg_fname.split('.')[-1]
        assert ending.lower() in self.supported_file_endings, f'Ending {ending} not supported by {self.__class__.__name__}'
        ending_length = len(ending)
        seg = tifffile.imread(seg_fname)
        if seg.ndim != 3:
            raise RuntimeError(f"Only 3D images are supported! File: {seg_fname}")
        seg = seg[None]
        # see if aux file can be found (segmentations carry no _0000 channel suffix)
        expected_aux_file = seg_fname[:-ending_length] + '.json'
        if isfile(expected_aux_file):
            spacing = load_json(expected_aux_file)['spacing']
            assert len(spacing) == 3, f'spacing must have 3 entries, one for each dimension of the image. File: {expected_aux_file}'
            assert all([i > 0 for i in spacing]), f"Spacing must be > 0, spacing: {spacing}"
        else:
            print(f'WARNING no spacing file found for segmentation {seg_fname}\nAssuming spacing (1, 1, 1).')
            spacing = (1, 1, 1)
        return seg.astype(np.float32), {'spacing': spacing}
================================================
FILE: Finetune/nnUNet/nnunetv2/inference/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/inference/data_iterators.py
================================================
import multiprocessing
import queue
from torch.multiprocessing import Event, Process, Queue, Manager
from time import sleep
from typing import Union, List
import numpy as np
import torch
from batchgenerators.dataloading.data_loader import DataLoader
from nnunetv2.preprocessing.preprocessors.default_preprocessor import DefaultPreprocessor
from nnunetv2.utilities.label_handling.label_handling import convert_labelmap_to_one_hot
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager
def preprocess_fromfiles_save_to_queue(list_of_lists: List[List[str]],
                                       list_of_segs_from_prev_stage_files: Union[None, List[str]],
                                       output_filenames_truncated: Union[None, List[str]],
                                       plans_manager: PlansManager,
                                       dataset_json: dict,
                                       configuration_manager: ConfigurationManager,
                                       target_queue: Queue,
                                       done_event: Event,
                                       abort_event: Event,
                                       verbose: bool = False):
    """Worker-process entry point: preprocess the assigned cases one by one and
    push each result into target_queue.

    Protocol: done_event is set only after ALL items were queued successfully.
    On any exception abort_event is set (so the consumer stops waiting) and the
    exception is re-raised inside the worker so its traceback gets printed.
    The bounded queue put uses a short timeout so the worker can notice
    abort_event even while the queue is full.
    """
    try:
        label_manager = plans_manager.get_label_manager(dataset_json)
        preprocessor = configuration_manager.preprocessor_class(verbose=verbose)
        for idx in range(len(list_of_lists)):
            data, seg, data_properties = preprocessor.run_case(list_of_lists[idx],
                                                               list_of_segs_from_prev_stage_files[
                                                                   idx] if list_of_segs_from_prev_stage_files is not None else None,
                                                               plans_manager,
                                                               configuration_manager,
                                                               dataset_json)
            # cascade: append the previous stage's segmentation as one-hot channels
            if list_of_segs_from_prev_stage_files is not None and list_of_segs_from_prev_stage_files[idx] is not None:
                seg_onehot = convert_labelmap_to_one_hot(seg[0], label_manager.foreground_labels, data.dtype)
                data = np.vstack((data, seg_onehot))
            data = torch.from_numpy(data).contiguous().float()
            item = {'data': data, 'data_properties': data_properties,
                    'ofile': output_filenames_truncated[idx] if output_filenames_truncated is not None else None}
            success = False
            while not success:
                try:
                    # bail out quietly if the consumer signalled an abort
                    if abort_event.is_set():
                        return
                    target_queue.put(item, timeout=0.01)
                    success = True
                except queue.Full:
                    pass
        done_event.set()
    except Exception as e:
        # tell the consumer this worker died, then re-raise so the error surfaces
        abort_event.set()
        raise e
def preprocessing_iterator_fromfiles(list_of_lists: List[List[str]],
                                     list_of_segs_from_prev_stage_files: Union[None, List[str]],
                                     output_filenames_truncated: Union[None, List[str]],
                                     plans_manager: PlansManager,
                                     dataset_json: dict,
                                     configuration_manager: ConfigurationManager,
                                     num_processes: int,
                                     pin_memory: bool = False,
                                     verbose: bool = False):
    """Spawn background workers that preprocess cases from files and yield the
    results in submission order (round-robin over the workers).

    Yields dicts with 'data' (torch.Tensor), 'data_properties' and 'ofile'.
    Raises RuntimeError if a worker dies (e.g. killed by the OS due to OOM).
    """
    context = multiprocessing.get_context('spawn')
    manager = Manager()
    num_processes = min(len(list_of_lists), num_processes)
    assert num_processes >= 1
    processes = []
    done_events = []
    target_queues = []
    abort_event = manager.Event()
    for i in range(num_processes):
        event = manager.Event()
        # fix: reuse the shared manager instead of spawning a new Manager() (and
        # thus an extra server process) per worker queue, matching
        # preprocessing_iterator_fromnpy. Also don't shadow the imported
        # 'queue' module with a local name.
        target_queue = manager.Queue(maxsize=1)
        pr = context.Process(target=preprocess_fromfiles_save_to_queue,
                             args=(
                                 list_of_lists[i::num_processes],
                                 list_of_segs_from_prev_stage_files[
                                     i::num_processes] if list_of_segs_from_prev_stage_files is not None else None,
                                 output_filenames_truncated[
                                     i::num_processes] if output_filenames_truncated is not None else None,
                                 plans_manager,
                                 dataset_json,
                                 configuration_manager,
                                 target_queue,
                                 event,
                                 abort_event,
                                 verbose
                             ), daemon=True)
        pr.start()
        target_queues.append(target_queue)
        done_events.append(event)
        processes.append(pr)
    # consume round-robin so output order matches input order
    worker_ctr = 0
    while (not done_events[worker_ctr].is_set()) or (not target_queues[worker_ctr].empty()):
        if not target_queues[worker_ctr].empty():
            item = target_queues[worker_ctr].get()
            worker_ctr = (worker_ctr + 1) % num_processes
        else:
            # queue empty but worker not done: check everyone is still healthy
            all_ok = all(
                [i.is_alive() or j.is_set() for i, j in zip(processes, done_events)]) and not abort_event.is_set()
            if not all_ok:
                raise RuntimeError('Background workers died. Look for the error message further up! If there is '
                                   'none then your RAM was full and the worker was killed by the OS. Use fewer '
                                   'workers or get more RAM in that case!')
            sleep(0.01)
            continue
        if pin_memory:
            [i.pin_memory() for i in item.values() if isinstance(i, torch.Tensor)]
        yield item
    [p.join() for p in processes]
class PreprocessAdapter(DataLoader):
    """Adapter that exposes case preprocessing as a batchgenerators DataLoader.

    Each generated 'batch' is one fully preprocessed case: a dict with the
    image tensor ('data'), the case properties ('data_properties') and the
    truncated output filename ('ofile', may be None).
    """

    def __init__(self, list_of_lists: List[List[str]],
                 list_of_segs_from_prev_stage_files: Union[None, List[str]],
                 preprocessor: DefaultPreprocessor,
                 output_filenames_truncated: Union[None, List[str]],
                 plans_manager: PlansManager,
                 dataset_json: dict,
                 configuration_manager: ConfigurationManager,
                 num_threads_in_multithreaded: int = 1):
        self.preprocessor = preprocessor
        self.plans_manager = plans_manager
        self.configuration_manager = configuration_manager
        self.dataset_json = dataset_json
        self.label_manager = plans_manager.get_label_manager(dataset_json)

        num_cases = len(list_of_lists)
        # substitute per-case None placeholders when prev-stage segs / ofiles are absent
        segs = list_of_segs_from_prev_stage_files if list_of_segs_from_prev_stage_files is not None \
            else [None] * num_cases
        ofiles = output_filenames_truncated if output_filenames_truncated is not None else [None] * num_cases

        super().__init__(list(zip(list_of_lists, segs, ofiles)),
                         1, num_threads_in_multithreaded,
                         seed_for_shuffle=1, return_incomplete=True,
                         shuffle=False, infinite=False, sampling_probabilities=None)
        self.indices = list(range(num_cases))

    def generate_train_batch(self):
        """Preprocess and return the next case."""
        case_idx = self.get_indices()[0]
        files, seg_prev_stage, ofile = self._data[case_idx]
        # a previous-stage segmentation has to be preprocessed together with the
        # images so it can be cropped identically; resizing it afterwards could
        # introduce misalignments
        data, seg, data_properties = self.preprocessor.run_case(files, seg_prev_stage, self.plans_manager,
                                                                self.configuration_manager,
                                                                self.dataset_json)
        if seg_prev_stage is not None:
            onehot = convert_labelmap_to_one_hot(seg[0], self.label_manager.foreground_labels, data.dtype)
            data = np.vstack((data, onehot))
        return {'data': torch.from_numpy(data), 'data_properties': data_properties, 'ofile': ofile}
class PreprocessAdapterFromNpy(DataLoader):
    """Like PreprocessAdapter, but works on images that are already in memory as
    numpy arrays (with their properties dicts) instead of filenames."""

    def __init__(self, list_of_images: List[np.ndarray],
                 list_of_segs_from_prev_stage: Union[List[np.ndarray], None],
                 list_of_image_properties: List[dict],
                 truncated_ofnames: Union[List[str], None],
                 plans_manager: PlansManager, dataset_json: dict, configuration_manager: ConfigurationManager,
                 num_threads_in_multithreaded: int = 1, verbose: bool = False):
        self.preprocessor = configuration_manager.preprocessor_class(verbose=verbose)
        self.plans_manager = plans_manager
        self.configuration_manager = configuration_manager
        self.dataset_json = dataset_json
        self.truncated_ofnames = truncated_ofnames
        self.label_manager = plans_manager.get_label_manager(dataset_json)

        num_cases = len(list_of_images)
        # substitute per-case None placeholders when prev-stage segs / ofnames are absent
        segs = list_of_segs_from_prev_stage if list_of_segs_from_prev_stage is not None else [None] * num_cases
        ofnames = truncated_ofnames if truncated_ofnames is not None else [None] * num_cases

        super().__init__(
            list(zip(list_of_images, segs, list_of_image_properties, ofnames)),
            1, num_threads_in_multithreaded,
            seed_for_shuffle=1, return_incomplete=True,
            shuffle=False, infinite=False, sampling_probabilities=None)
        self.indices = list(range(num_cases))

    def generate_train_batch(self):
        """Preprocess and return the next in-memory case."""
        case_idx = self.get_indices()[0]
        image, seg_prev_stage, props, ofname = self._data[case_idx]
        # a previous-stage segmentation must be preprocessed together with the image
        # so it gets cropped identically (avoids misalignments)
        data, seg = self.preprocessor.run_case_npy(image, seg_prev_stage, props,
                                                   self.plans_manager,
                                                   self.configuration_manager,
                                                   self.dataset_json)
        if seg_prev_stage is not None:
            onehot = convert_labelmap_to_one_hot(seg[0], self.label_manager.foreground_labels, data.dtype)
            data = np.vstack((data, onehot))
        return {'data': torch.from_numpy(data), 'data_properties': props, 'ofile': ofname}
def preprocess_fromnpy_save_to_queue(list_of_images: List[np.ndarray],
                                     list_of_segs_from_prev_stage: Union[List[np.ndarray], None],
                                     list_of_image_properties: List[dict],
                                     truncated_ofnames: Union[List[str], None],
                                     plans_manager: PlansManager,
                                     dataset_json: dict,
                                     configuration_manager: ConfigurationManager,
                                     target_queue: Queue,
                                     done_event: Event,
                                     abort_event: Event,
                                     verbose: bool = False):
    """Worker-process entry point: preprocess in-memory cases and push each
    result into target_queue.

    Same queueing protocol as preprocess_fromfiles_save_to_queue: done_event is
    set only after all items were queued, abort_event is set on any exception,
    and the short put timeout lets the worker notice an abort while the queue
    is full.
    """
    try:
        label_manager = plans_manager.get_label_manager(dataset_json)
        preprocessor = configuration_manager.preprocessor_class(verbose=verbose)
        for idx in range(len(list_of_images)):
            data, seg = preprocessor.run_case_npy(list_of_images[idx],
                                                  list_of_segs_from_prev_stage[
                                                      idx] if list_of_segs_from_prev_stage is not None else None,
                                                  list_of_image_properties[idx],
                                                  plans_manager,
                                                  configuration_manager,
                                                  dataset_json)
            # cascade: append the previous stage's segmentation as one-hot channels
            if list_of_segs_from_prev_stage is not None and list_of_segs_from_prev_stage[idx] is not None:
                seg_onehot = convert_labelmap_to_one_hot(seg[0], label_manager.foreground_labels, data.dtype)
                data = np.vstack((data, seg_onehot))
            data = torch.from_numpy(data).contiguous().float()
            item = {'data': data, 'data_properties': list_of_image_properties[idx],
                    'ofile': truncated_ofnames[idx] if truncated_ofnames is not None else None}
            success = False
            while not success:
                try:
                    # bail out quietly if the consumer signalled an abort
                    if abort_event.is_set():
                        return
                    target_queue.put(item, timeout=0.01)
                    success = True
                except queue.Full:
                    pass
        done_event.set()
    except Exception as e:
        # tell the consumer this worker died, then re-raise so the error surfaces
        abort_event.set()
        raise e
def preprocessing_iterator_fromnpy(list_of_images: List[np.ndarray],
                                   list_of_segs_from_prev_stage: Union[List[np.ndarray], None],
                                   list_of_image_properties: List[dict],
                                   truncated_ofnames: Union[List[str], None],
                                   plans_manager: PlansManager,
                                   dataset_json: dict,
                                   configuration_manager: ConfigurationManager,
                                   num_processes: int,
                                   pin_memory: bool = False,
                                   verbose: bool = False):
    """Spawn background workers that preprocess in-memory cases and yield the
    results in submission order (round-robin over the workers).

    Yields dicts with 'data' (torch.Tensor), 'data_properties' and 'ofile'.
    Raises RuntimeError if a worker dies (e.g. killed by the OS due to OOM).
    """
    context = multiprocessing.get_context('spawn')
    manager = Manager()
    num_processes = min(len(list_of_images), num_processes)
    assert num_processes >= 1
    target_queues = []
    processes = []
    done_events = []
    abort_event = manager.Event()
    for i in range(num_processes):
        event = manager.Event()
        # fix: don't shadow the imported 'queue' module with a local variable
        target_queue = manager.Queue(maxsize=1)
        pr = context.Process(target=preprocess_fromnpy_save_to_queue,
                             args=(
                                 list_of_images[i::num_processes],
                                 list_of_segs_from_prev_stage[
                                     i::num_processes] if list_of_segs_from_prev_stage is not None else None,
                                 list_of_image_properties[i::num_processes],
                                 truncated_ofnames[i::num_processes] if truncated_ofnames is not None else None,
                                 plans_manager,
                                 dataset_json,
                                 configuration_manager,
                                 target_queue,
                                 event,
                                 abort_event,
                                 verbose
                             ), daemon=True)
        pr.start()
        done_events.append(event)
        processes.append(pr)
        target_queues.append(target_queue)
    # consume round-robin so output order matches input order
    worker_ctr = 0
    while (not done_events[worker_ctr].is_set()) or (not target_queues[worker_ctr].empty()):
        if not target_queues[worker_ctr].empty():
            item = target_queues[worker_ctr].get()
            worker_ctr = (worker_ctr + 1) % num_processes
        else:
            # queue empty but worker not done: check everyone is still healthy
            all_ok = all(
                [i.is_alive() or j.is_set() for i, j in zip(processes, done_events)]) and not abort_event.is_set()
            if not all_ok:
                raise RuntimeError('Background workers died. Look for the error message further up! If there is '
                                   'none then your RAM was full and the worker was killed by the OS. Use fewer '
                                   'workers or get more RAM in that case!')
            sleep(0.01)
            continue
        if pin_memory:
            [i.pin_memory() for i in item.values() if isinstance(i, torch.Tensor)]
        yield item
    [p.join() for p in processes]
================================================
FILE: Finetune/nnUNet/nnunetv2/inference/examples.py
================================================
if __name__ == '__main__':
    # Worked examples for the nnUNetPredictor Python API. CLI equivalent of the
    # first variant:
    # nnUNetv2_predict -d 3 -f 0 -c 3d_lowres -i imagesTs -o imagesTs_predlowres --continue_prediction
    from nnunetv2.paths import nnUNet_results, nnUNet_raw
    import torch
    from batchgenerators.utilities.file_and_folder_operations import join
    from nnunetv2.inference.predict_from_raw_data import nnUNetPredictor
    from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO

    # instantiate the nnUNetPredictor
    predictor = nnUNetPredictor(
        tile_step_size=0.5,
        use_gaussian=True,
        use_mirroring=True,
        perform_everything_on_gpu=True,
        device=torch.device('cuda', 0),
        verbose=False,
        verbose_preprocessing=False,
        allow_tqdm=True
    )
    # initializes the network architecture, loads the checkpoint
    predictor.initialize_from_trained_model_folder(
        join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_lowres'),
        use_folds=(0,),
        checkpoint_name='checkpoint_final.pth',
    )

    # variant 1: give input and output folders
    predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),
                                 join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres'),
                                 save_probabilities=False, overwrite=False,
                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,
                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)

    # variant 2, use list of files as inputs. Note how we use nested lists!!!
    indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')
    outdir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres')
    predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')],
                                  [join(indir, 'liver_142_0000.nii.gz')]],
                                 [join(outdir, 'liver_152.nii.gz'),
                                  join(outdir, 'liver_142.nii.gz')],
                                 save_probabilities=False, overwrite=True,
                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,
                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)

    # variant 2.5: output filenames omitted -> segmentations are returned
    indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')
    predicted_segmentations = predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')],
                                                            [join(indir, 'liver_142_0000.nii.gz')]],
                                                           None,
                                                           save_probabilities=True, overwrite=True,
                                                           num_processes_preprocessing=2,
                                                           num_processes_segmentation_export=2,
                                                           folder_with_segs_from_prev_stage=None, num_parts=1,
                                                           part_id=0)

    # predict several npy images
    from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO
    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])
    img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])
    img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])
    img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])
    # we do not set output files so that the segmentations will be returned. You can of course also specify output
    # files instead (no return value on that case)
    ret = predictor.predict_from_list_of_npy_arrays([img, img2, img3, img4],
                                                    None,
                                                    [props, props2, props3, props4],
                                                    None, 2, save_probabilities=False,
                                                    num_processes_segmentation_export=2)

    # predict a single numpy array
    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])
    ret = predictor.predict_single_npy_array(img, props, None, None, True)

    # custom iterator
    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])
    img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])
    img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])
    img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])
    # each element returned by data_iterator must be a dict with 'data', 'ofile' and 'data_properties' keys!
    # If 'ofile' is None, the result will be returned instead of written to a file
    # the iterator is responsible for performing the correct preprocessing!
    # note how the iterator here does not use multiprocessing -> preprocessing will be done in the main thread!
    # take a look at the default iterators for predict_from_files and predict_from_list_of_npy_arrays
    # (they both use predictor.predict_from_data_iterator) for inspiration!
    def my_iterator(list_of_input_arrs, list_of_input_props):
        # minimal example iterator: preprocess in the main thread, one case at a time
        preprocessor = predictor.configuration_manager.preprocessor_class(verbose=predictor.verbose)
        for a, p in zip(list_of_input_arrs, list_of_input_props):
            data, seg = preprocessor.run_case_npy(a,
                                                  None,
                                                  p,
                                                  predictor.plans_manager,
                                                  predictor.configuration_manager,
                                                  predictor.dataset_json)
            yield {'data': torch.from_numpy(data).contiguous().pin_memory(), 'data_properties': p, 'ofile': None}

    ret = predictor.predict_from_data_iterator(my_iterator([img, img2, img3, img4], [props, props2, props3, props4]),
                                               save_probabilities=False, num_processes_segmentation_export=3)
================================================
FILE: Finetune/nnUNet/nnunetv2/inference/export_prediction.py
================================================
import os
from copy import deepcopy
from typing import Union, List
import numpy as np
import torch
from acvl_utils.cropping_and_padding.bounding_boxes import bounding_box_to_slice
from batchgenerators.utilities.file_and_folder_operations import load_json, isfile, save_pickle
from nnunetv2.configuration import default_num_processes
from nnunetv2.utilities.label_handling.label_handling import LabelManager
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager
def convert_predicted_logits_to_segmentation_with_correct_shape(predicted_logits: Union[torch.Tensor, np.ndarray],
                                                                plans_manager: PlansManager,
                                                                configuration_manager: ConfigurationManager,
                                                                label_manager: LabelManager,
                                                                properties_dict: dict,
                                                                return_probabilities: bool = False,
                                                                num_threads_torch: int = default_num_processes):
    """Undo preprocessing on predicted logits: resample back to the pre-resampling
    shape, apply the inference nonlinearity, convert to a segmentation, revert
    cropping and transpose back to the original axis order.

    Returns the segmentation array, or (segmentation, probabilities) if
    return_probabilities is True.
    """
    old_threads = torch.get_num_threads()
    torch.set_num_threads(num_threads_torch)
    # fix: restore the caller's torch thread count even if resampling/conversion
    # raises (the original only restored it on the success paths)
    try:
        # resample to original shape. If the configuration's spacing has fewer dims
        # than the data, prepend the case's original first-axis spacing.
        current_spacing = configuration_manager.spacing if \
            len(configuration_manager.spacing) == \
            len(properties_dict['shape_after_cropping_and_before_resampling']) else \
            [properties_dict['spacing'][0], *configuration_manager.spacing]
        predicted_logits = configuration_manager.resampling_fn_probabilities(
            predicted_logits,
            properties_dict['shape_after_cropping_and_before_resampling'],
            current_spacing,
            properties_dict['spacing'])
        # return value of resampling_fn_probabilities can be ndarray or Tensor but that
        # does not matter because apply_inference_nonlin will convert to torch
        predicted_probabilities = label_manager.apply_inference_nonlin(predicted_logits)
        del predicted_logits
        segmentation = label_manager.convert_probabilities_to_segmentation(predicted_probabilities)
        # segmentation may be torch.Tensor but we continue with numpy
        if isinstance(segmentation, torch.Tensor):
            segmentation = segmentation.cpu().numpy()
        # put segmentation in bbox (revert cropping); uint16 when uint8 can't hold the labels
        segmentation_reverted_cropping = np.zeros(properties_dict['shape_before_cropping'],
                                                  dtype=np.uint8 if len(label_manager.foreground_labels) < 255 else np.uint16)
        slicer = bounding_box_to_slice(properties_dict['bbox_used_for_cropping'])
        segmentation_reverted_cropping[slicer] = segmentation
        del segmentation
        # revert transpose
        segmentation_reverted_cropping = segmentation_reverted_cropping.transpose(plans_manager.transpose_backward)
        if not return_probabilities:
            return segmentation_reverted_cropping
        # revert cropping on the probabilities as well
        predicted_probabilities = label_manager.revert_cropping_on_probabilities(predicted_probabilities,
                                                                                 properties_dict[
                                                                                     'bbox_used_for_cropping'],
                                                                                 properties_dict[
                                                                                     'shape_before_cropping'])
        predicted_probabilities = predicted_probabilities.cpu().numpy()
        # revert transpose (channel axis stays first)
        predicted_probabilities = predicted_probabilities.transpose([0] + [i + 1 for i in
                                                                           plans_manager.transpose_backward])
        return segmentation_reverted_cropping, predicted_probabilities
    finally:
        torch.set_num_threads(old_threads)
def export_prediction_from_logits(predicted_array_or_file: Union[np.ndarray, torch.Tensor], properties_dict: dict,
                                  configuration_manager: ConfigurationManager,
                                  plans_manager: PlansManager,
                                  dataset_json_dict_or_file: Union[dict, str], output_file_truncated: str,
                                  save_probabilities: bool = False):
    """Convert predicted logits to a segmentation in original image space and
    write it to disk.

    If save_probabilities is set, the class probabilities (.npz) and the case
    properties (.pkl) are written next to the segmentation.
    """
    if isinstance(dataset_json_dict_or_file, str):
        dataset_json_dict_or_file = load_json(dataset_json_dict_or_file)

    label_manager = plans_manager.get_label_manager(dataset_json_dict_or_file)
    converted = convert_predicted_logits_to_segmentation_with_correct_shape(
        predicted_array_or_file, plans_manager, configuration_manager, label_manager, properties_dict,
        return_probabilities=save_probabilities
    )
    # free the (potentially very large) logits as early as possible
    del predicted_array_or_file

    if save_probabilities:
        segmentation_final, probabilities_final = converted
        np.savez_compressed(output_file_truncated + '.npz', probabilities=probabilities_final)
        save_pickle(properties_dict, output_file_truncated + '.pkl')
        del probabilities_final, converted
    else:
        segmentation_final = converted
        del converted

    writer = plans_manager.image_reader_writer_class()
    writer.write_seg(segmentation_final, output_file_truncated + dataset_json_dict_or_file['file_ending'],
                     properties_dict)
def resample_and_save(predicted: Union[torch.Tensor, np.ndarray], target_shape: List[int], output_file: str,
                      plans_manager: PlansManager, configuration_manager: ConfigurationManager, properties_dict: dict,
                      dataset_json_dict_or_file: Union[dict, str], num_threads_torch: int = default_num_processes) \
        -> None:
    """Resample predicted logits to target_shape (needed for the cascade), convert
    them to a segmentation and save as compressed npz under the key 'seg'."""
    old_threads = torch.get_num_threads()
    torch.set_num_threads(num_threads_torch)
    # fix: restore the caller's torch thread count even if resampling raises
    try:
        if isinstance(dataset_json_dict_or_file, str):
            dataset_json_dict_or_file = load_json(dataset_json_dict_or_file)
        # resample to original shape. NOTE(review): the original computed current and
        # target spacing with the exact same expression, so they are always equal;
        # behavior is preserved here but this looks suspicious — confirm upstream.
        current_spacing = configuration_manager.spacing if \
            len(configuration_manager.spacing) == len(properties_dict['shape_after_cropping_and_before_resampling']) else \
            [properties_dict['spacing'][0], *configuration_manager.spacing]
        target_spacing = current_spacing
        predicted_array_or_file = configuration_manager.resampling_fn_probabilities(predicted,
                                                                                    target_shape,
                                                                                    current_spacing,
                                                                                    target_spacing)
        # create segmentation (argmax, regions, etc)
        label_manager = plans_manager.get_label_manager(dataset_json_dict_or_file)
        segmentation = label_manager.convert_logits_to_segmentation(predicted_array_or_file)
        # segmentation may be torch.Tensor but we continue with numpy
        if isinstance(segmentation, torch.Tensor):
            segmentation = segmentation.cpu().numpy()
        np.savez_compressed(output_file, seg=segmentation.astype(np.uint8))
    finally:
        torch.set_num_threads(old_threads)
================================================
FILE: Finetune/nnUNet/nnunetv2/inference/predict_from_raw_data.py
================================================
import inspect
import itertools
import multiprocessing
import os
import traceback
from copy import deepcopy
from time import sleep
from typing import Tuple, Union, List, Optional
import numpy as np
import torch
from acvl_utils.cropping_and_padding.padding import pad_nd_image
from batchgenerators.dataloading.multi_threaded_augmenter import MultiThreadedAugmenter
from batchgenerators.utilities.file_and_folder_operations import load_json, join, isfile, maybe_mkdir_p, isdir, subdirs, \
save_json
from torch import nn
from torch._dynamo import OptimizedModule
from torch.nn.parallel import DistributedDataParallel
from tqdm import tqdm
import nnunetv2
from nnunetv2.configuration import default_num_processes
from nnunetv2.inference.data_iterators import PreprocessAdapterFromNpy, preprocessing_iterator_fromfiles, \
preprocessing_iterator_fromnpy
from nnunetv2.inference.export_prediction import export_prediction_from_logits, \
convert_predicted_logits_to_segmentation_with_correct_shape
from nnunetv2.inference.sliding_window_prediction import compute_gaussian, \
compute_steps_for_sliding_window
from nnunetv2.utilities.file_path_utilities import get_output_folder, check_workers_alive_and_busy
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
from nnunetv2.utilities.helpers import empty_cache, dummy_context
from nnunetv2.utilities.json_export import recursive_fix_for_json_export
from nnunetv2.utilities.label_handling.label_handling import determine_num_input_channels
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager
from nnunetv2.utilities.utils import create_lists_from_splitted_dataset_folder
class nnUNetPredictor(object):
    """
    Stateful inference driver for trained nnU-Net (v2) models.

    Typical usage: construct, then call either ``initialize_from_trained_model_folder``
    (loads plans/dataset json and fold checkpoints from disk) or ``manual_initialization``
    (used by the trainer for final validation), then one of the ``predict_from_*`` methods.

    All model/plan state (``plans_manager``, ``configuration_manager``,
    ``list_of_parameters``, ``network``, ``dataset_json``, ``trainer_name``,
    ``allowed_mirroring_axes``, ``label_manager``) is ``None`` until one of the
    initialization methods has been called.
    """

    def __init__(self,
                 tile_step_size: float = 0.5,
                 use_gaussian: bool = True,
                 use_mirroring: bool = True,
                 perform_everything_on_gpu: bool = True,
                 device: torch.device = torch.device('cuda'),
                 verbose: bool = False,
                 verbose_preprocessing: bool = False,
                 allow_tqdm: bool = True):
        # tile_step_size: sliding-window step as a fraction of patch size (0.5 = 50% overlap)
        # use_gaussian: weight tile predictions with a Gaussian so tile centers dominate
        # use_mirroring: test-time augmentation via flipping along allowed_mirroring_axes
        # perform_everything_on_gpu: keep the aggregated logits on the GPU (cuda only)
        self.verbose = verbose
        self.verbose_preprocessing = verbose_preprocessing
        self.allow_tqdm = allow_tqdm
        # model/plan state is populated later by initialize_from_trained_model_folder
        # or manual_initialization; everything starts out as None
        self.plans_manager, self.configuration_manager, self.list_of_parameters, self.network, self.dataset_json, \
        self.trainer_name, self.allowed_mirroring_axes, self.label_manager = None, None, None, None, None, None, None, None
        self.tile_step_size = tile_step_size
        self.use_gaussian = use_gaussian
        self.use_mirroring = use_mirroring
        if device.type == 'cuda':
            # device = torch.device(type='cuda', index=0) # set the desired GPU with CUDA_VISIBLE_DEVICES!
            # why would I ever want to do that. Stupid dobby. This kills DDP inference...
            pass
        if device.type != 'cuda':
            # perform_everything_on_gpu only makes sense on cuda; silently downgrade otherwise
            print(f'perform_everything_on_gpu=True is only supported for cuda devices! Setting this to False')
            perform_everything_on_gpu = False
        self.device = device
        self.perform_everything_on_gpu = perform_everything_on_gpu

    def initialize_from_trained_model_folder(self, model_training_output_dir: str,
                                             use_folds: Union[Tuple[Union[int, str]], None],
                                             checkpoint_name: str = 'checkpoint_final.pth'):
        """
        This is used when making predictions with a trained model
        """
        if use_folds is None:
            # no folds specified -> use every fold_X directory that contains the checkpoint
            use_folds = nnUNetPredictor.auto_detect_available_folds(model_training_output_dir, checkpoint_name)
        dataset_json = load_json(join(model_training_output_dir, 'dataset.json'))
        plans = load_json(join(model_training_output_dir, 'plans.json'))
        plans_manager = PlansManager(plans)
        if isinstance(use_folds, str):
            use_folds = [use_folds]
        parameters = []
        for i, f in enumerate(use_folds):
            # folds are ints except the special 'all' fold
            f = int(f) if f != 'all' else f
            checkpoint = torch.load(join(model_training_output_dir, f'fold_{f}', checkpoint_name),
                                    map_location=torch.device('cpu'))
            if i == 0:
                # trainer/configuration/mirroring info is assumed identical across folds,
                # so it is only read from the first checkpoint
                trainer_name = checkpoint['trainer_name']
                configuration_name = checkpoint['init_args']['configuration']
                inference_allowed_mirroring_axes = checkpoint['inference_allowed_mirroring_axes'] if \
                    'inference_allowed_mirroring_axes' in checkpoint.keys() else None
            parameters.append(checkpoint['network_weights'])
        configuration_manager = plans_manager.get_configuration(configuration_name)
        # restore network
        num_input_channels = determine_num_input_channels(plans_manager, configuration_manager, dataset_json)
        # the trainer class is looked up by name so custom trainers keep their custom architectures
        trainer_class = recursive_find_python_class(join(nnunetv2.__path__[0], "training", "nnUNetTrainer"),
                                                    trainer_name, 'nnunetv2.training.nnUNetTrainer')
        network = trainer_class.build_network_architecture(plans_manager, dataset_json, configuration_manager,
                                                           num_input_channels, enable_deep_supervision=False)
        self.plans_manager = plans_manager
        self.configuration_manager = configuration_manager
        self.list_of_parameters = parameters
        self.network = network
        self.dataset_json = dataset_json
        self.trainer_name = trainer_name
        self.allowed_mirroring_axes = inference_allowed_mirroring_axes
        self.label_manager = plans_manager.get_label_manager(dataset_json)
        # optional torch.compile, opt-in via the nnUNet_compile environment variable
        if ('nnUNet_compile' in os.environ.keys()) and (os.environ['nnUNet_compile'].lower() in ('true', '1', 't')) \
                and not isinstance(self.network, OptimizedModule):
            print('compiling network')
            self.network = torch.compile(self.network)

    def manual_initialization(self, network: nn.Module, plans_manager: PlansManager,
                              configuration_manager: ConfigurationManager, parameters: Optional[List[dict]],
                              dataset_json: dict, trainer_name: str,
                              inference_allowed_mirroring_axes: Optional[Tuple[int, ...]]):
        """
        This is used by the nnUNetTrainer to initialize nnUNetPredictor for the final validation
        """
        self.plans_manager = plans_manager
        self.configuration_manager = configuration_manager
        self.list_of_parameters = parameters
        self.network = network
        self.dataset_json = dataset_json
        self.trainer_name = trainer_name
        self.allowed_mirroring_axes = inference_allowed_mirroring_axes
        self.label_manager = plans_manager.get_label_manager(dataset_json)
        # same env-var gated torch.compile as initialize_from_trained_model_folder, but
        # here the network may be wrapped in DDP, in which case the inner module is checked
        allow_compile = True
        allow_compile = allow_compile and ('nnUNet_compile' in os.environ.keys()) and (os.environ['nnUNet_compile'].lower() in ('true', '1', 't'))
        allow_compile = allow_compile and not isinstance(self.network, OptimizedModule)
        if isinstance(self.network, DistributedDataParallel):
            allow_compile = allow_compile and isinstance(self.network.module, OptimizedModule)
        if allow_compile:
            print('compiling network')
            self.network = torch.compile(self.network)

    @staticmethod
    def auto_detect_available_folds(model_training_output_dir, checkpoint_name):
        """Return the integer fold ids whose fold_X directory contains checkpoint_name ('fold_all' is excluded)."""
        print('use_folds is None, attempting to auto detect available folds')
        fold_folders = subdirs(model_training_output_dir, prefix='fold_', join=False)
        fold_folders = [i for i in fold_folders if i != 'fold_all']
        fold_folders = [i for i in fold_folders if isfile(join(model_training_output_dir, i, checkpoint_name))]
        use_folds = [int(i.split('_')[-1]) for i in fold_folders]
        print(f'found the following folds: {use_folds}')
        return use_folds

    def _manage_input_and_output_lists(self, list_of_lists_or_source_folder: Union[str, List[List[str]]],
                                       output_folder_or_list_of_truncated_output_files: Union[None, str, List[str]],
                                       folder_with_segs_from_prev_stage: str = None,
                                       overwrite: bool = True,
                                       part_id: int = 0,
                                       num_parts: int = 1,
                                       save_probabilities: bool = False):
        """
        Resolve input images, truncated output filenames and previous-stage segmentation
        files; shard the work by (part_id, num_parts) and, if overwrite is False, drop
        cases whose outputs already exist.
        """
        if isinstance(list_of_lists_or_source_folder, str):
            list_of_lists_or_source_folder = create_lists_from_splitted_dataset_folder(list_of_lists_or_source_folder,
                                                                                       self.dataset_json['file_ending'])
        print(f'There are {len(list_of_lists_or_source_folder)} cases in the source folder')
        # round-robin sharding for multi-process / multi-node prediction
        list_of_lists_or_source_folder = list_of_lists_or_source_folder[part_id::num_parts]
        # case id = filename minus file ending minus the 5-char channel suffix (_0000)
        caseids = [os.path.basename(i[0])[:-(len(self.dataset_json['file_ending']) + 5)] for i in
                   list_of_lists_or_source_folder]
        print(
            f'I am process {part_id} out of {num_parts} (max process ID is {num_parts - 1}, we start counting with 0!)')
        print(f'There are {len(caseids)} cases that I would like to predict')
        if isinstance(output_folder_or_list_of_truncated_output_files, str):
            output_filename_truncated = [join(output_folder_or_list_of_truncated_output_files, i) for i in caseids]
        else:
            output_filename_truncated = output_folder_or_list_of_truncated_output_files
        seg_from_prev_stage_files = [join(folder_with_segs_from_prev_stage, i + self.dataset_json['file_ending']) if
                                     folder_with_segs_from_prev_stage is not None else None for i in caseids]
        # remove already predicted files form the lists
        if not overwrite and output_filename_truncated is not None:
            tmp = [isfile(i + self.dataset_json['file_ending']) for i in output_filename_truncated]
            if save_probabilities:
                # a case only counts as done if both the segmentation and the .npz exist
                tmp2 = [isfile(i + '.npz') for i in output_filename_truncated]
                tmp = [i and j for i, j in zip(tmp, tmp2)]
            not_existing_indices = [i for i, j in enumerate(tmp) if not j]
            output_filename_truncated = [output_filename_truncated[i] for i in not_existing_indices]
            list_of_lists_or_source_folder = [list_of_lists_or_source_folder[i] for i in not_existing_indices]
            seg_from_prev_stage_files = [seg_from_prev_stage_files[i] for i in not_existing_indices]
            print(f'overwrite was set to {overwrite}, so I am only working on cases that haven\'t been predicted yet. '
                  f'That\'s {len(not_existing_indices)} cases.')
        return list_of_lists_or_source_folder, output_filename_truncated, seg_from_prev_stage_files

    def predict_from_files(self,
                           list_of_lists_or_source_folder: Union[str, List[List[str]]],
                           output_folder_or_list_of_truncated_output_files: Union[str, None, List[str]],
                           save_probabilities: bool = False,
                           overwrite: bool = True,
                           num_processes_preprocessing: int = default_num_processes,
                           num_processes_segmentation_export: int = default_num_processes,
                           folder_with_segs_from_prev_stage: str = None,
                           num_parts: int = 1,
                           part_id: int = 0):
        """
        This is nnU-Net's default function for making predictions. It works best for batch predictions
        (predicting many images at once).
        """
        if isinstance(output_folder_or_list_of_truncated_output_files, str):
            output_folder = output_folder_or_list_of_truncated_output_files
        elif isinstance(output_folder_or_list_of_truncated_output_files, list):
            output_folder = os.path.dirname(output_folder_or_list_of_truncated_output_files[0])
        else:
            output_folder = None
        ########################
        # let's store the input arguments so that its clear what was used to generate the prediction
        if output_folder is not None:
            my_init_kwargs = {}
            # capture this call's arguments by name via the function signature
            for k in inspect.signature(self.predict_from_files).parameters.keys():
                my_init_kwargs[k] = locals()[k]
            my_init_kwargs = deepcopy(
                my_init_kwargs)  # let's not unintentionally change anything in-place. Take this as a
            recursive_fix_for_json_export(my_init_kwargs)
            maybe_mkdir_p(output_folder)
            save_json(my_init_kwargs, join(output_folder, 'predict_from_raw_data_args.json'))
            # we need these two if we want to do things with the predictions like for example apply postprocessing
            save_json(self.dataset_json, join(output_folder, 'dataset.json'), sort_keys=False)
            save_json(self.plans_manager.plans, join(output_folder, 'plans.json'), sort_keys=False)
        #######################
        # check if we need a prediction from the previous stage
        if self.configuration_manager.previous_stage_name is not None:
            assert folder_with_segs_from_prev_stage is not None, \
                f'The requested configuration is a cascaded network. It requires the segmentations of the previous ' \
                f'stage ({self.configuration_manager.previous_stage_name}) as input. Please provide the folder where' \
                f' they are located via folder_with_segs_from_prev_stage'
        # sort out input and output filenames
        list_of_lists_or_source_folder, output_filename_truncated, seg_from_prev_stage_files = \
            self._manage_input_and_output_lists(list_of_lists_or_source_folder,
                                                output_folder_or_list_of_truncated_output_files,
                                                folder_with_segs_from_prev_stage, overwrite, part_id, num_parts,
                                                save_probabilities)
        if len(list_of_lists_or_source_folder) == 0:
            # nothing left to predict (e.g. everything already exists and overwrite=False)
            return
        data_iterator = self._internal_get_data_iterator_from_lists_of_filenames(list_of_lists_or_source_folder,
                                                                                 seg_from_prev_stage_files,
                                                                                 output_filename_truncated,
                                                                                 num_processes_preprocessing)
        return self.predict_from_data_iterator(data_iterator, save_probabilities, num_processes_segmentation_export)

    def _internal_get_data_iterator_from_lists_of_filenames(self,
                                                            input_list_of_lists: List[List[str]],
                                                            seg_from_prev_stage_files: Union[List[str], None],
                                                            output_filenames_truncated: Union[List[str], None],
                                                            num_processes: int):
        """Build the background preprocessing iterator that feeds predict_from_data_iterator."""
        return preprocessing_iterator_fromfiles(input_list_of_lists, seg_from_prev_stage_files,
                                                output_filenames_truncated, self.plans_manager, self.dataset_json,
                                                self.configuration_manager, num_processes, self.device.type == 'cuda',
                                                self.verbose_preprocessing)
        # preprocessor = self.configuration_manager.preprocessor_class(verbose=self.verbose_preprocessing)
        # # hijack batchgenerators, yo
        # # we use the multiprocessing of the batchgenerators dataloader to handle all the background worker stuff. This
        # # way we don't have to reinvent the wheel here.
        # num_processes = max(1, min(num_processes, len(input_list_of_lists)))
        # ppa = PreprocessAdapter(input_list_of_lists, seg_from_prev_stage_files, preprocessor,
        #                         output_filenames_truncated, self.plans_manager, self.dataset_json,
        #                         self.configuration_manager, num_processes)
        # if num_processes == 0:
        #     mta = SingleThreadedAugmenter(ppa, None)
        # else:
        #     mta = MultiThreadedAugmenter(ppa, None, num_processes, 1, None, pin_memory=pin_memory)
        # return mta

    def get_data_iterator_from_raw_npy_data(self,
                                            image_or_list_of_images: Union[np.ndarray, List[np.ndarray]],
                                            segs_from_prev_stage_or_list_of_segs_from_prev_stage: Union[None,
                                                                                                        np.ndarray,
                                                                                                        List[
                                                                                                            np.ndarray]],
                                            properties_or_list_of_properties: Union[dict, List[dict]],
                                            truncated_ofname: Union[str, List[str], None],
                                            num_processes: int = 3):
        """
        Build a preprocessing iterator for in-memory numpy images. Single inputs are
        normalized to one-element lists so the downstream code only deals with lists.
        """
        list_of_images = [image_or_list_of_images] if not isinstance(image_or_list_of_images, list) else \
            image_or_list_of_images
        if isinstance(segs_from_prev_stage_or_list_of_segs_from_prev_stage, np.ndarray):
            segs_from_prev_stage_or_list_of_segs_from_prev_stage = [
                segs_from_prev_stage_or_list_of_segs_from_prev_stage]
        if isinstance(truncated_ofname, str):
            truncated_ofname = [truncated_ofname]
        if isinstance(properties_or_list_of_properties, dict):
            properties_or_list_of_properties = [properties_or_list_of_properties]
        # no point spawning more workers than there are images
        num_processes = min(num_processes, len(list_of_images))
        pp = preprocessing_iterator_fromnpy(
            list_of_images,
            segs_from_prev_stage_or_list_of_segs_from_prev_stage,
            properties_or_list_of_properties,
            truncated_ofname,
            self.plans_manager,
            self.dataset_json,
            self.configuration_manager,
            num_processes,
            self.device.type == 'cuda',
            self.verbose_preprocessing
        )
        return pp

    def predict_from_list_of_npy_arrays(self,
                                        image_or_list_of_images: Union[np.ndarray, List[np.ndarray]],
                                        segs_from_prev_stage_or_list_of_segs_from_prev_stage: Union[None,
                                                                                                    np.ndarray,
                                                                                                    List[
                                                                                                        np.ndarray]],
                                        properties_or_list_of_properties: Union[dict, List[dict]],
                                        truncated_ofname: Union[str, List[str], None],
                                        num_processes: int = 3,
                                        save_probabilities: bool = False,
                                        num_processes_segmentation_export: int = default_num_processes):
        """Convenience wrapper: preprocess in-memory numpy arrays and run the standard prediction loop."""
        iterator = self.get_data_iterator_from_raw_npy_data(image_or_list_of_images,
                                                            segs_from_prev_stage_or_list_of_segs_from_prev_stage,
                                                            properties_or_list_of_properties,
                                                            truncated_ofname,
                                                            num_processes)
        return self.predict_from_data_iterator(iterator, save_probabilities, num_processes_segmentation_export)

    def predict_from_data_iterator(self,
                                   data_iterator,
                                   save_probabilities: bool = False,
                                   num_processes_segmentation_export: int = default_num_processes):
        """
        each element returned by data_iterator must be a dict with 'data', 'ofile' and 'data_properties' keys!
        If 'ofile' is None, the result will be returned instead of written to a file
        """
        # resampling/export runs in a spawn-based worker pool so the GPU loop is not blocked
        with multiprocessing.get_context("spawn").Pool(num_processes_segmentation_export) as export_pool:
            worker_list = [i for i in export_pool._pool]
            r = []
            for preprocessed in data_iterator:
                data = preprocessed['data']
                if isinstance(data, str):
                    # preprocessing may spill data to disk as .npy; load and delete the temp file
                    delfile = data
                    data = torch.from_numpy(np.load(data))
                    os.remove(delfile)
                ofile = preprocessed['ofile']
                if ofile is not None:
                    print(f'\nPredicting {os.path.basename(ofile)}:')
                else:
                    print(f'\nPredicting image of shape {data.shape}:')
                print(f'perform_everything_on_gpu: {self.perform_everything_on_gpu}')
                properties = preprocessed['data_properties']
                # let's not get into a runaway situation where the GPU predicts so fast that the disk has to b swamped with
                # npy files
                proceed = not check_workers_alive_and_busy(export_pool, worker_list, r, allowed_num_queued=2)
                while not proceed:
                    # print('sleeping')
                    sleep(0.1)
                    proceed = not check_workers_alive_and_busy(export_pool, worker_list, r, allowed_num_queued=2)
                prediction = self.predict_logits_from_preprocessed_data(data).cpu()
                if ofile is not None:
                    # this needs to go into background processes
                    # export_prediction_from_logits(prediction, properties, configuration_manager, plans_manager,
                    #                               dataset_json, ofile, save_probabilities)
                    print('sending off prediction to background worker for resampling and export')
                    r.append(
                        export_pool.starmap_async(
                            export_prediction_from_logits,
                            ((prediction, properties, self.configuration_manager, self.plans_manager,
                              self.dataset_json, ofile, save_probabilities),)
                        )
                    )
                else:
                    # convert_predicted_logits_to_segmentation_with_correct_shape(prediction, plans_manager,
                    #                                                             configuration_manager, label_manager,
                    #                                                             properties,
                    #                                                             save_probabilities)
                    print('sending off prediction to background worker for resampling')
                    r.append(
                        export_pool.starmap_async(
                            convert_predicted_logits_to_segmentation_with_correct_shape, (
                                (prediction, self.plans_manager,
                                 self.configuration_manager, self.label_manager,
                                 properties,
                                 save_probabilities),)
                        )
                    )
                if ofile is not None:
                    print(f'done with {os.path.basename(ofile)}')
                else:
                    print(f'\nDone with image of shape {data.shape}:')
            # collect the async results; for ofile=None these are the in-memory segmentations
            ret = [i.get()[0] for i in r]
        if isinstance(data_iterator, MultiThreadedAugmenter):
            data_iterator._finish()
        # clear lru cache
        compute_gaussian.cache_clear()
        # clear device cache
        empty_cache(self.device)
        return ret

    def predict_single_npy_array(self, input_image: np.ndarray, image_properties: dict,
                                 segmentation_previous_stage: np.ndarray = None,
                                 output_file_truncated: str = None,
                                 save_or_return_probabilities: bool = False):
        """
        image_properties must only have a 'spacing' key!
        """
        # single-threaded preprocessing adapter, no worker pool for a single image
        ppa = PreprocessAdapterFromNpy([input_image], [segmentation_previous_stage], [image_properties],
                                       [output_file_truncated],
                                       self.plans_manager, self.dataset_json, self.configuration_manager,
                                       num_threads_in_multithreaded=1, verbose=self.verbose)
        if self.verbose:
            print('preprocessing')
        dct = next(ppa)
        if self.verbose:
            print('predicting')
        predicted_logits = self.predict_logits_from_preprocessed_data(dct['data']).cpu()
        if self.verbose:
            print('resampling to original shape')
        if output_file_truncated is not None:
            # write segmentation (and optionally probabilities) to disk, return nothing
            export_prediction_from_logits(predicted_logits, dct['data_properties'], self.configuration_manager,
                                          self.plans_manager, self.dataset_json, output_file_truncated,
                                          save_or_return_probabilities)
        else:
            ret = convert_predicted_logits_to_segmentation_with_correct_shape(predicted_logits, self.plans_manager,
                                                                              self.configuration_manager,
                                                                              self.label_manager,
                                                                              dct['data_properties'],
                                                                              return_probabilities=
                                                                              save_or_return_probabilities)
            if save_or_return_probabilities:
                # (segmentation, probabilities)
                return ret[0], ret[1]
            else:
                return ret

    def predict_logits_from_preprocessed_data(self, data: torch.Tensor) -> torch.Tensor:
        """
        IMPORTANT! IF YOU ARE RUNNING THE CASCADE, THE SEGMENTATION FROM THE PREVIOUS STAGE MUST ALREADY BE STACKED ON
        TOP OF THE IMAGE AS ONE-HOT REPRESENTATION! SEE PreprocessAdapter ON HOW THIS SHOULD BE DONE!
        RETURNED LOGITS HAVE THE SHAPE OF THE INPUT. THEY MUST BE CONVERTED BACK TO THE ORIGINAL IMAGE SIZE.
        SEE convert_predicted_logits_to_segmentation_with_correct_shape
        """
        # we have some code duplication here but this allows us to run with perform_everything_on_gpu=True as
        # default and not have the entire program crash in case of GPU out of memory. Neat. That should make
        # things a lot faster for some datasets.
        original_perform_everything_on_gpu = self.perform_everything_on_gpu
        with torch.no_grad():
            prediction = None
            if self.perform_everything_on_gpu:
                try:
                    # ensemble over folds: sum logits from each set of weights, then average
                    for params in self.list_of_parameters:
                        # messing with state dict names...
                        if not isinstance(self.network, OptimizedModule):
                            self.network.load_state_dict(params)
                        else:
                            self.network._orig_mod.load_state_dict(params)
                        if prediction is None:
                            prediction = self.predict_sliding_window_return_logits(data)
                        else:
                            prediction += self.predict_sliding_window_return_logits(data)
                    if len(self.list_of_parameters) > 1:
                        prediction /= len(self.list_of_parameters)
                except RuntimeError:
                    # NOTE(review): this catches any RuntimeError, not only CUDA OOM — the
                    # fallback path will also run after unrelated runtime failures
                    print('Prediction with perform_everything_on_gpu=True failed due to insufficient GPU memory. '
                          'Falling back to perform_everything_on_gpu=False. Not a big deal, just slower...')
                    print('Error:')
                    traceback.print_exc()
                    prediction = None
                    self.perform_everything_on_gpu = False
            if prediction is None:
                # CPU-aggregation path: either the GPU path failed or it was disabled from the start
                for params in self.list_of_parameters:
                    # messing with state dict names...
                    if not isinstance(self.network, OptimizedModule):
                        self.network.load_state_dict(params)
                    else:
                        self.network._orig_mod.load_state_dict(params)
                    if prediction is None:
                        prediction = self.predict_sliding_window_return_logits(data)
                    else:
                        prediction += self.predict_sliding_window_return_logits(data)
                if len(self.list_of_parameters) > 1:
                    prediction /= len(self.list_of_parameters)
            print('Prediction done, transferring to CPU if needed')
            prediction = prediction.to('cpu')
            # restore the flag in case the OOM fallback toggled it
            self.perform_everything_on_gpu = original_perform_everything_on_gpu
        return prediction

    def _internal_get_sliding_window_slicers(self, image_size: Tuple[int, ...]):
        """
        Compute the list of slice tuples covering image_size (spatial dims, no channel dim)
        with patches of configuration_manager.patch_size and step tile_step_size.
        Handles both a 3D patch on a 3D image and a 2D patch applied per-slice to a 3D image.
        """
        slicers = []
        if len(self.configuration_manager.patch_size) < len(image_size):
            # 2D patch on a 3D image: slide in-plane, iterate over the first (slice) axis
            assert len(self.configuration_manager.patch_size) == len(
                image_size) - 1, 'if tile_size has less entries than image_size, ' \
                                 'len(tile_size) ' \
                                 'must be one shorter than len(image_size) ' \
                                 '(only dimension ' \
                                 'discrepancy of 1 allowed).'
            steps = compute_steps_for_sliding_window(image_size[1:], self.configuration_manager.patch_size,
                                                     self.tile_step_size)
            if self.verbose: print(f'n_steps {image_size[0] * len(steps[0]) * len(steps[1])}, image size is'
                                   f' {image_size}, tile_size {self.configuration_manager.patch_size}, '
                                   f'tile_step_size {self.tile_step_size}\nsteps:\n{steps}')
            for d in range(image_size[0]):
                for sx in steps[0]:
                    for sy in steps[1]:
                        slicers.append(
                            tuple([slice(None), d, *[slice(si, si + ti) for si, ti in
                                                     zip((sx, sy), self.configuration_manager.patch_size)]]))
        else:
            # 3D patch on a 3D image
            steps = compute_steps_for_sliding_window(image_size, self.configuration_manager.patch_size,
                                                     self.tile_step_size)
            if self.verbose: print(
                f'n_steps {np.prod([len(i) for i in steps])}, image size is {image_size}, tile_size {self.configuration_manager.patch_size}, '
                f'tile_step_size {self.tile_step_size}\nsteps:\n{steps}')
            for sx in steps[0]:
                for sy in steps[1]:
                    for sz in steps[2]:
                        slicers.append(
                            tuple([slice(None), *[slice(si, si + ti) for si, ti in
                                                  zip((sx, sy, sz), self.configuration_manager.patch_size)]]))
        return slicers

    def _internal_maybe_mirror_and_predict(self, x: torch.Tensor) -> torch.Tensor:
        """
        Forward pass with optional mirroring TTA: average the prediction over all
        non-empty subsets of the allowed mirror axes (plus the unmirrored pass).
        """
        mirror_axes = self.allowed_mirroring_axes if self.use_mirroring else None
        prediction = self.network(x)
        if mirror_axes is not None:
            # check for invalid numbers in mirror_axes
            # x should be 5d for 3d images and 4d for 2d. so the max value of mirror_axes cannot exceed len(x.shape) - 3
            assert max(mirror_axes) <= x.ndim - 3, 'mirror_axes does not match the dimension of the input!'
            # +2 converts spatial axis index to tensor axis index (skip batch and channel dims)
            axes_combinations = [
                c for i in range(len(mirror_axes)) for c in itertools.combinations([m + 2 for m in mirror_axes], i + 1)
            ]
            for axes in axes_combinations:
                # flip input, predict, flip prediction back, accumulate
                prediction += torch.flip(self.network(torch.flip(x, (*axes,))), (*axes,))
            prediction /= (len(axes_combinations) + 1)
        return prediction

    def predict_sliding_window_return_logits(self, input_image: torch.Tensor) \
            -> Union[np.ndarray, torch.Tensor]:
        """
        Run Gaussian-weighted sliding-window inference over a preprocessed 4D image
        (c, x, y, z) and return per-class logits with the input's spatial shape
        (padding added for small images is cropped off again before returning).
        """
        assert isinstance(input_image, torch.Tensor)
        self.network = self.network.to(self.device)
        self.network.eval()
        empty_cache(self.device)
        # Autocast is a little bitch.
        # If the device_type is 'cpu' then it's slow as heck on some CPUs (no auto bfloat16 support detection)
        # and needs to be disabled.
        # If the device_type is 'mps' then it will complain that mps is not implemented, even if enabled=False
        # is set. Whyyyyyyy. (this is why we don't make use of enabled=False)
        # So autocast will only be active if we have a cuda device.
        with torch.no_grad():
            with torch.autocast(self.device.type, enabled=True) if self.device.type == 'cuda' else dummy_context():
                assert input_image.ndim == 4, 'input_image must be a 4D np.ndarray or torch.Tensor (c, x, y, z)'
                if self.verbose: print(f'Input shape: {input_image.shape}')
                if self.verbose: print("step_size:", self.tile_step_size)
                if self.verbose: print("mirror_axes:", self.allowed_mirroring_axes if self.use_mirroring else None)
                # if input_image is smaller than tile_size we need to pad it to tile_size.
                data, slicer_revert_padding = pad_nd_image(input_image, self.configuration_manager.patch_size,
                                                           'constant', {'value': 0}, True,
                                                           None)
                slicers = self._internal_get_sliding_window_slicers(data.shape[1:])
                # preallocate results and num_predictions
                results_device = self.device if self.perform_everything_on_gpu else torch.device('cpu')
                if self.verbose: print('preallocating arrays')
                try:
                    data = data.to(self.device)
                    # fp16 accumulators keep memory manageable; see inf check below
                    predicted_logits = torch.zeros((self.label_manager.num_segmentation_heads, *data.shape[1:]),
                                                   dtype=torch.half,
                                                   device=results_device)
                    n_predictions = torch.zeros(data.shape[1:], dtype=torch.half,
                                                device=results_device)
                    if self.use_gaussian:
                        gaussian = compute_gaussian(tuple(self.configuration_manager.patch_size), sigma_scale=1. / 8,
                                                    value_scaling_factor=10,
                                                    device=results_device)
                except RuntimeError:
                    # sometimes the stuff is too large for GPUs. In that case fall back to CPU
                    results_device = torch.device('cpu')
                    data = data.to(results_device)
                    predicted_logits = torch.zeros((self.label_manager.num_segmentation_heads, *data.shape[1:]),
                                                   dtype=torch.half,
                                                   device=results_device)
                    n_predictions = torch.zeros(data.shape[1:], dtype=torch.half,
                                                device=results_device)
                    if self.use_gaussian:
                        gaussian = compute_gaussian(tuple(self.configuration_manager.patch_size), sigma_scale=1. / 8,
                                                    value_scaling_factor=10,
                                                    device=results_device)
                finally:
                    empty_cache(self.device)
                if self.verbose: print('running prediction')
                for sl in tqdm(slicers, disable=not self.allow_tqdm):
                    # [None] adds the batch dimension expected by the network
                    workon = data[sl][None]
                    workon = workon.to(self.device, non_blocking=False)
                    prediction = self._internal_maybe_mirror_and_predict(workon)[0].to(results_device)
                    # Gaussian-weighted accumulation; n_predictions tracks the per-voxel weight sum
                    predicted_logits[sl] += (prediction * gaussian if self.use_gaussian else prediction)
                    n_predictions[sl[1:]] += (gaussian if self.use_gaussian else 1)
                predicted_logits /= n_predictions
        # check for infs
        if torch.any(torch.isinf(predicted_logits)):
            raise RuntimeError('Encountered inf in predicted array. Aborting... If this problem persists, '
                               'reduce value_scaling_factor in compute_gaussian or increase the dtype of '
                               'predicted_logits to fp32')
        empty_cache(self.device)
        # crop off the padding that was added for small images
        return predicted_logits[tuple([slice(None), *slicer_revert_padding[1:]])]
def predict_entry_point_modelfolder():
    """
    CLI entry point for running inference from an explicitly given model folder
    (instead of resolving it via the nnUNet_results environment variable).
    Parses arguments, configures torch threading per device, builds an
    nnUNetPredictor and runs predict_from_files.
    """
    import argparse
    parser = argparse.ArgumentParser(description='Use this to run inference with nnU-Net. This function is used when '
                                                 'you want to manually specify a folder containing a trained nnU-Net '
                                                 'model. This is useful when the nnunet environment variables '
                                                 '(nnUNet_results) are not set.')
    parser.add_argument('-i', type=str, required=True,
                        help='input folder. Remember to use the correct channel numberings for your files (_0000 etc). '
                             'File endings must be the same as the training dataset!')
    parser.add_argument('-o', type=str, required=True,
                        help='Output folder. If it does not exist it will be created. Predicted segmentations will '
                             'have the same name as their source images.')
    parser.add_argument('-m', type=str, required=True,
                        help='Folder in which the trained model is. Must have subfolders fold_X for the different '
                             'folds you trained')
    parser.add_argument('-f', nargs='+', type=str, required=False, default=(0, 1, 2, 3, 4),
                        help='Specify the folds of the trained model that should be used for prediction. '
                             'Default: (0, 1, 2, 3, 4)')
    parser.add_argument('-step_size', type=float, required=False, default=0.5,
                        help='Step size for sliding window prediction. The larger it is the faster but less accurate '
                             'the prediction. Default: 0.5. Cannot be larger than 1. We recommend the default.')
    parser.add_argument('--disable_tta', action='store_true', required=False, default=False,
                        help='Set this flag to disable test time data augmentation in the form of mirroring. Faster, '
                             'but less accurate inference. Not recommended.')
    parser.add_argument('--verbose', action='store_true', help="Set this if you like being talked to. You will have "
                                                               "to be a good listener/reader.")
    parser.add_argument('--save_probabilities', action='store_true',
                        help='Set this to export predicted class "probabilities". Required if you want to ensemble '
                             'multiple configurations.')
    parser.add_argument('--continue_prediction', '--c', action='store_true',
                        help='Continue an aborted previous prediction (will not overwrite existing files)')
    parser.add_argument('-chk', type=str, required=False, default='checkpoint_final.pth',
                        help='Name of the checkpoint you want to use. Default: checkpoint_final.pth')
    parser.add_argument('-npp', type=int, required=False, default=3,
                        help='Number of processes used for preprocessing. More is not always better. Beware of '
                             'out-of-RAM issues. Default: 3')
    parser.add_argument('-nps', type=int, required=False, default=3,
                        help='Number of processes used for segmentation export. More is not always better. Beware of '
                             'out-of-RAM issues. Default: 3')
    parser.add_argument('-prev_stage_predictions', type=str, required=False, default=None,
                        help='Folder containing the predictions of the previous stage. Required for cascaded models.')
    parser.add_argument('-device', type=str, default='cuda', required=False,
                        help="Use this to set the device the inference should run with. Available options are 'cuda' "
                             "(GPU), 'cpu' (CPU) and 'mps' (Apple M1/M2). Do NOT use this to set which GPU ID! "
                             "Use CUDA_VISIBLE_DEVICES=X nnUNetv2_predict [...] instead!")
    print(
        "\n#######################################################################\nPlease cite the following paper "
        "when using nnU-Net:\n"
        "Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). "
        "nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. "
        "Nature methods, 18(2), 203-211.\n#######################################################################\n")
    args = parser.parse_args()
    # fold arguments arrive as strings; keep the special 'all' fold, convert the rest to int
    args.f = [i if i == 'all' else int(i) for i in args.f]
    if not isdir(args.o):
        maybe_mkdir_p(args.o)
    assert args.device in ['cpu', 'cuda',
                           'mps'], f'-device must be either cpu, mps or cuda. Other devices are not tested/supported. Got: {args.device}.'
    if args.device == 'cpu':
        # let's allow torch to use hella threads
        import multiprocessing
        torch.set_num_threads(multiprocessing.cpu_count())
        device = torch.device('cpu')
    elif args.device == 'cuda':
        # multithreading in torch doesn't help nnU-Net if run on GPU
        torch.set_num_threads(1)
        torch.set_num_interop_threads(1)
        device = torch.device('cuda')
    else:
        device = torch.device('mps')
    # perform_everything_on_gpu=True is safe here: the predictor downgrades it itself
    # for non-cuda devices (see nnUNetPredictor.__init__)
    predictor = nnUNetPredictor(tile_step_size=args.step_size,
                                use_gaussian=True,
                                use_mirroring=not args.disable_tta,
                                perform_everything_on_gpu=True,
                                device=device,
                                verbose=args.verbose)
    predictor.initialize_from_trained_model_folder(args.m, args.f, args.chk)
    predictor.predict_from_files(args.i, args.o, save_probabilities=args.save_probabilities,
                                 overwrite=not args.continue_prediction,
                                 num_processes_preprocessing=args.npp,
                                 num_processes_segmentation_export=args.nps,
                                 folder_with_segs_from_prev_stage=args.prev_stage_predictions,
                                 num_parts=1, part_id=0)
def predict_entry_point():
    """CLI entry point for nnUNetv2_predict.

    Parses the command line, resolves the trained-model folder from the
    dataset/trainer/plans/configuration identifiers, configures torch threading
    for the chosen device, then runs sliding-window inference with an
    nnUNetPredictor on the input folder.
    """
    import argparse
    parser = argparse.ArgumentParser(description='Use this to run inference with nnU-Net. This function is used when '
                                                 'you want to manually specify a folder containing a trained nnU-Net '
                                                 'model. This is useful when the nnunet environment variables '
                                                 '(nnUNet_results) are not set.')
    parser.add_argument('-i', type=str, required=True,
                        help='input folder. Remember to use the correct channel numberings for your files (_0000 etc). '
                             'File endings must be the same as the training dataset!')
    parser.add_argument('-o', type=str, required=True,
                        help='Output folder. If it does not exist it will be created. Predicted segmentations will '
                             'have the same name as their source images.')
    parser.add_argument('-d', type=str, required=True,
                        help='Dataset with which you would like to predict. You can specify either dataset name or id')
    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',
                        help='Plans identifier. Specify the plans in which the desired configuration is located. '
                             'Default: nnUNetPlans')
    parser.add_argument('-tr', type=str, required=False, default='nnUNetTrainer',
                        help='What nnU-Net trainer class was used for training? Default: nnUNetTrainer')
    parser.add_argument('-c', type=str, required=True,
                        help='nnU-Net configuration that should be used for prediction. Config must be located '
                             'in the plans specified with -p')
    parser.add_argument('-f', nargs='+', type=str, required=False, default=(0, 1, 2, 3, 4),
                        help='Specify the folds of the trained model that should be used for prediction. '
                             'Default: (0, 1, 2, 3, 4)')
    parser.add_argument('-step_size', type=float, required=False, default=0.5,
                        help='Step size for sliding window prediction. The larger it is the faster but less accurate '
                             'the prediction. Default: 0.5. Cannot be larger than 1. We recommend the default.')
    parser.add_argument('--disable_tta', action='store_true', required=False, default=False,
                        help='Set this flag to disable test time data augmentation in the form of mirroring. Faster, '
                             'but less accurate inference. Not recommended.')
    parser.add_argument('--verbose', action='store_true', help="Set this if you like being talked to. You will have "
                                                               "to be a good listener/reader.")
    parser.add_argument('--save_probabilities', action='store_true',
                        help='Set this to export predicted class "probabilities". Required if you want to ensemble '
                             'multiple configurations.')
    parser.add_argument('--continue_prediction', action='store_true',
                        help='Continue an aborted previous prediction (will not overwrite existing files)')
    parser.add_argument('-chk', type=str, required=False, default='checkpoint_final.pth',
                        help='Name of the checkpoint you want to use. Default: checkpoint_final.pth')
    parser.add_argument('-npp', type=int, required=False, default=3,
                        help='Number of processes used for preprocessing. More is not always better. Beware of '
                             'out-of-RAM issues. Default: 3')
    parser.add_argument('-nps', type=int, required=False, default=3,
                        help='Number of processes used for segmentation export. More is not always better. Beware of '
                             'out-of-RAM issues. Default: 3')
    parser.add_argument('-prev_stage_predictions', type=str, required=False, default=None,
                        help='Folder containing the predictions of the previous stage. Required for cascaded models.')
    parser.add_argument('-num_parts', type=int, required=False, default=1,
                        help='Number of separate nnUNetv2_predict call that you will be making. Default: 1 (= this one '
                             'call predicts everything)')
    parser.add_argument('-part_id', type=int, required=False, default=0,
                        help='If multiple nnUNetv2_predict exist, which one is this? IDs start with 0 can end with '
                             'num_parts - 1. So when you submit 5 nnUNetv2_predict calls you need to set -num_parts '
                             '5 and use -part_id 0, 1, 2, 3 and 4. Simple, right? Note: You are yourself responsible '
                             'to make these run on separate GPUs! Use CUDA_VISIBLE_DEVICES (google, yo!)')
    parser.add_argument('-device', type=str, default='cuda', required=False,
                        help="Use this to set the device the inference should run with. Available options are 'cuda' "
                             "(GPU), 'cpu' (CPU) and 'mps' (Apple M1/M2). Do NOT use this to set which GPU ID! "
                             "Use CUDA_VISIBLE_DEVICES=X nnUNetv2_predict [...] instead!")
    print(
        "\n#######################################################################\nPlease cite the following paper "
        "when using nnU-Net:\n"
        "Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). "
        "nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. "
        "Nature methods, 18(2), 203-211.\n#######################################################################\n")

    args = parser.parse_args()
    # Folds may be the literal string 'all' or integers given as strings.
    args.f = [i if i == 'all' else int(i) for i in args.f]

    model_folder = get_output_folder(args.d, args.tr, args.p, args.c)

    if not isdir(args.o):
        maybe_mkdir_p(args.o)

    # slightly passive aggressive haha
    assert args.part_id < args.num_parts, 'Do you even read the documentation? See nnUNetv2_predict -h.'

    assert args.device in ['cpu', 'cuda',
                           'mps'], f'-device must be either cpu, mps or cuda. Other devices are not tested/supported. Got: {args.device}.'
    if args.device == 'cpu':
        # let's allow torch to use hella threads
        import multiprocessing
        torch.set_num_threads(multiprocessing.cpu_count())
        device = torch.device('cpu')
    elif args.device == 'cuda':
        # multithreading in torch doesn't help nnU-Net if run on GPU
        torch.set_num_threads(1)
        torch.set_num_interop_threads(1)
        device = torch.device('cuda')
    else:
        device = torch.device('mps')

    # NOTE(review): perform_everything_on_gpu is passed as True even for cpu/mps;
    # presumably nnUNetPredictor reconciles this with `device` internally — confirm.
    predictor = nnUNetPredictor(tile_step_size=args.step_size,
                                use_gaussian=True,
                                use_mirroring=not args.disable_tta,
                                perform_everything_on_gpu=True,
                                device=device,
                                verbose=args.verbose,
                                verbose_preprocessing=False)
    predictor.initialize_from_trained_model_folder(
        model_folder,
        args.f,
        checkpoint_name=args.chk
    )
    predictor.predict_from_files(args.i, args.o, save_probabilities=args.save_probabilities,
                                 overwrite=not args.continue_prediction,
                                 num_processes_preprocessing=args.npp,
                                 num_processes_segmentation_export=args.nps,
                                 folder_with_segs_from_prev_stage=args.prev_stage_predictions,
                                 num_parts=args.num_parts,
                                 part_id=args.part_id)
    # Legacy functional API call, kept for reference:
    # r = predict_from_raw_data(args.i,
    #                           args.o,
    #                           model_folder,
    #                           args.f,
    #                           args.step_size,
    #                           use_gaussian=True,
    #                           use_mirroring=not args.disable_tta,
    #                           perform_everything_on_gpu=True,
    #                           verbose=args.verbose,
    #                           save_probabilities=args.save_probabilities,
    #                           overwrite=not args.continue_prediction,
    #                           checkpoint_name=args.chk,
    #                           num_processes_preprocessing=args.npp,
    #                           num_processes_segmentation_export=args.nps,
    #                           folder_with_segs_from_prev_stage=args.prev_stage_predictions,
    #                           num_parts=args.num_parts,
    #                           part_id=args.part_id,
    #                           device=device)
if __name__ == '__main__':
    # Example / smoke-test usage of the nnUNetPredictor API. Requires a trained
    # Dataset003_Liver model under nnUNet_results, the raw data under nnUNet_raw,
    # and a CUDA device. Not executed on import.
    # predict a bunch of files
    from nnunetv2.paths import nnUNet_results, nnUNet_raw

    predictor = nnUNetPredictor(
        tile_step_size=0.5,
        use_gaussian=True,
        use_mirroring=True,
        perform_everything_on_gpu=True,
        device=torch.device('cuda', 0),
        verbose=False,
        verbose_preprocessing=False,
        allow_tqdm=True
    )
    predictor.initialize_from_trained_model_folder(
        join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_lowres'),
        use_folds=(0, ),
        checkpoint_name='checkpoint_final.pth',
    )
    predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),
                                 join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres'),
                                 save_probabilities=False, overwrite=False,
                                 num_processes_preprocessing=2, num_processes_segmentation_export=2,
                                 folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)

    # predict a numpy array
    from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO

    img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTr/liver_63_0000.nii.gz')])
    ret = predictor.predict_single_npy_array(img, props, None, None, False)

    # Same prediction via the custom data-iterator API.
    iterator = predictor.get_data_iterator_from_raw_npy_data([img], None, [props], None, 1)
    ret = predictor.predict_from_data_iterator(iterator, False, 1)

    # Cascade example (kept for reference): predicts the fullres stage using the
    # lowres predictions produced above as previous-stage segmentations.
    # predictor = nnUNetPredictor(
    #     tile_step_size=0.5,
    #     use_gaussian=True,
    #     use_mirroring=True,
    #     perform_everything_on_gpu=True,
    #     device=torch.device('cuda', 0),
    #     verbose=False,
    #     allow_tqdm=True
    # )
    # predictor.initialize_from_trained_model_folder(
    #     join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_cascade_fullres'),
    #     use_folds=(0,),
    #     checkpoint_name='checkpoint_final.pth',
    # )
    # predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),
    #                              join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predCascade'),
    #                              save_probabilities=False, overwrite=False,
    #                              num_processes_preprocessing=2, num_processes_segmentation_export=2,
    #                              folder_with_segs_from_prev_stage='/media/isensee/data/nnUNet_raw/Dataset003_Liver/imagesTs_predlowres',
    #                              num_parts=1, part_id=0)
================================================
FILE: Finetune/nnUNet/nnunetv2/inference/readme.md
================================================
The nnU-Net inference is now much more dynamic than before, allowing you to more seamlessly integrate nnU-Net into
your existing workflows.
This readme will give you a quick rundown of your options. This is not a complete guide. Look into the code to learn
all the details!
# Preface
In terms of speed, the most efficient inference strategy is the one done by the nnU-Net defaults! Images are read on
the fly and preprocessed in background workers. The main process takes the preprocessed images, predicts them and
sends the prediction off to another set of background workers which will resize the resulting logits, convert
them to a segmentation and export the segmentation.
The reason the default setup is the best option is because
1) loading and preprocessing as well as segmentation export are interlaced with the prediction. The main process can
focus on communicating with the compute device (i.e. your GPU) and does not have to do any other processing.
This uses your resources as well as possible!
2) only the images and segmentations that are currently needed are stored in RAM! Imagine predicting many images
and having to store all of them + the results in your system memory
# nnUNetPredictor
The new nnUNetPredictor class encapsulates the inferencing code and makes it simple to switch between modes. Your
code can hold a nnUNetPredictor instance and perform prediction on the fly. Previously this was not possible and each
new prediction request resulted in reloading the parameters and reinstantiating the network architecture. Not ideal.
The nnUNetPredictor must be initialized manually! You will want to use the
`predictor.initialize_from_trained_model_folder` function for 99% of use cases!
New feature: If you do not specify an output folder / output files then the predicted segmentations will be
returned
## Recommended nnU-Net default: predict from source files
tldr:
- loads images on the fly
- performs preprocessing in background workers
- main process focuses only on making predictions
- results are again given to background workers for resampling and (optional) export
pros:
- best suited for predicting a large number of images
- nicer to your RAM
cons:
- not ideal when single images are to be predicted
- requires images to be present as files
Example:
```python
from nnunetv2.paths import nnUNet_results, nnUNet_raw
import torch
from batchgenerators.utilities.file_and_folder_operations import join
from nnunetv2.inference.predict_from_raw_data import nnUNetPredictor
# instantiate the nnUNetPredictor
predictor = nnUNetPredictor(
tile_step_size=0.5,
use_gaussian=True,
use_mirroring=True,
perform_everything_on_gpu=True,
device=torch.device('cuda', 0),
verbose=False,
verbose_preprocessing=False,
allow_tqdm=True
)
# initializes the network architecture, loads the checkpoint
predictor.initialize_from_trained_model_folder(
join(nnUNet_results, 'Dataset003_Liver/nnUNetTrainer__nnUNetPlans__3d_lowres'),
use_folds=(0,),
checkpoint_name='checkpoint_final.pth',
)
# variant 1: give input and output folders
predictor.predict_from_files(join(nnUNet_raw, 'Dataset003_Liver/imagesTs'),
join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres'),
save_probabilities=False, overwrite=False,
num_processes_preprocessing=2, num_processes_segmentation_export=2,
folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)
```
Instead of giving input and output folders you can also give concrete files. If you give concrete files, there is no
need for the _0000 suffix anymore! This can be useful in situations where you have no control over the filenames!
Remember that the files must be given as 'list of lists' where each entry in the outer list is a case to be predicted
and the inner list contains all the files belonging to that case. There is just one file for datasets with just one
input modality (such as CT) but may be more files for others (such as MRI where there is sometimes T1, T2, Flair etc).
IMPORTANT: the order in which the files for each case are given must match the order of the channels as defined in the
dataset.json!
If you give files as input, you need to give individual output files as output!
```python
# variant 2, use list of files as inputs. Note how we use nested lists!!!
indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')
outdir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs_predlowres')
predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')],
[join(indir, 'liver_142_0000.nii.gz')]],
[join(outdir, 'liver_152.nii.gz'),
join(outdir, 'liver_142.nii.gz')],
save_probabilities=False, overwrite=False,
num_processes_preprocessing=2, num_processes_segmentation_export=2,
folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)
```
Did you know? If you do not specify output files, the predicted segmentations will be returned:
```python
# variant 2.5, returns segmentations
indir = join(nnUNet_raw, 'Dataset003_Liver/imagesTs')
predicted_segmentations = predictor.predict_from_files([[join(indir, 'liver_152_0000.nii.gz')],
[join(indir, 'liver_142_0000.nii.gz')]],
None,
save_probabilities=False, overwrite=True,
num_processes_preprocessing=2, num_processes_segmentation_export=2,
folder_with_segs_from_prev_stage=None, num_parts=1, part_id=0)
```
## Prediction from npy arrays
tldr:
- you give images as a list of npy arrays
- performs preprocessing in background workers
- main process focuses only on making predictions
- results are again given to background workers for resampling and (optional) export
pros:
- the correct variant for when you have images in RAM already
- well suited for predicting multiple images
cons:
- uses more ram than the default
- unsuited for large number of images as all images must be held in RAM
```python
from nnunetv2.imageio.simpleitk_reader_writer import SimpleITKIO
img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])
img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])
img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])
img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])
# we do not set output files so that the segmentations will be returned. You can of course also specify output
# files instead (no return value in that case)
ret = predictor.predict_from_list_of_npy_arrays([img, img2, img3, img4],
None,
[props, props2, props3, props4],
None, 2, save_probabilities=False,
num_processes_segmentation_export=2)
```
## Predicting a single npy array
tldr:
- you give one image as npy array
- everything is done in the main process: preprocessing, prediction, resampling, (export)
- no interlacing, slowest variant!
- ONLY USE THIS IF YOU CANNOT GIVE NNUNET MULTIPLE IMAGES AT ONCE FOR SOME REASON
pros:
- no messing with multiprocessing
- no messing with data iterator blabla
cons:
- slow as heck, yo
- never the right choice unless you can only give a single image at a time to nnU-Net
```python
# predict a single numpy array
img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTr/liver_63_0000.nii.gz')])
ret = predictor.predict_single_npy_array(img, props, None, None, False)
```
## Predicting with a custom data iterator
tldr:
- highly flexible
- not for newbies
pros:
- you can do everything yourself
- you have all the freedom you want
- really fast if you remember to use multiprocessing in your iterator
cons:
- you need to do everything yourself
- harder than you might think
```python
img, props = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_147_0000.nii.gz')])
img2, props2 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_146_0000.nii.gz')])
img3, props3 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_145_0000.nii.gz')])
img4, props4 = SimpleITKIO().read_images([join(nnUNet_raw, 'Dataset003_Liver/imagesTs/liver_144_0000.nii.gz')])
# each element returned by data_iterator must be a dict with 'data', 'ofile' and 'data_properties' keys!
# If 'ofile' is None, the result will be returned instead of written to a file
# the iterator is responsible for performing the correct preprocessing!
# note how the iterator here does not use multiprocessing -> preprocessing will be done in the main thread!
# take a look at the default iterators for predict_from_files and predict_from_list_of_npy_arrays
# (they both use predictor.predict_from_data_iterator) for inspiration!
def my_iterator(list_of_input_arrs, list_of_input_props):
preprocessor = predictor.configuration_manager.preprocessor_class(verbose=predictor.verbose)
for a, p in zip(list_of_input_arrs, list_of_input_props):
data, seg = preprocessor.run_case_npy(a,
None,
p,
predictor.plans_manager,
predictor.configuration_manager,
predictor.dataset_json)
yield {'data': torch.from_numpy(data).contiguous().pin_memory(), 'data_properties': p, 'ofile': None}
ret = predictor.predict_from_data_iterator(my_iterator([img, img2, img3, img4], [props, props2, props3, props4]),
save_probabilities=False, num_processes_segmentation_export=3)
```
================================================
FILE: Finetune/nnUNet/nnunetv2/inference/sliding_window_prediction.py
================================================
from functools import lru_cache
import numpy as np
import torch
from typing import Union, Tuple, List
from acvl_utils.cropping_and_padding.padding import pad_nd_image
from scipy.ndimage import gaussian_filter
@lru_cache(maxsize=2)
def _compute_gaussian_cached(tile_size: Tuple[int, ...], sigma_scale: float, value_scaling_factor: float,
                             dtype, device) -> torch.Tensor:
    """Cached worker for compute_gaussian; tile_size must already be a (hashable) tuple."""
    tmp = np.zeros(tile_size)
    center_coords = [i // 2 for i in tile_size]
    sigmas = [i * sigma_scale for i in tile_size]
    tmp[tuple(center_coords)] = 1
    gaussian_importance_map = gaussian_filter(tmp, sigmas, 0, mode='constant', cval=0)
    gaussian_importance_map = torch.from_numpy(gaussian_importance_map)
    gaussian_importance_map = gaussian_importance_map / torch.max(gaussian_importance_map) * value_scaling_factor
    gaussian_importance_map = gaussian_importance_map.type(dtype).to(device)
    # gaussian_importance_map cannot be 0, otherwise we may end up with nans!
    gaussian_importance_map[gaussian_importance_map == 0] = torch.min(
        gaussian_importance_map[gaussian_importance_map != 0])
    return gaussian_importance_map


def compute_gaussian(tile_size: Union[Tuple[int, ...], List[int]], sigma_scale: float = 1. / 8,
                     value_scaling_factor: float = 1, dtype=torch.float16, device=torch.device('cuda', 0)) \
        -> torch.Tensor:
    """Build a Gaussian importance map for weighting overlapping sliding-window predictions.

    The map peaks (value = value_scaling_factor) at the tile center and decays toward the
    borders with per-axis sigma = tile_size * sigma_scale. Zeros are replaced by the
    smallest nonzero value so later divisions cannot produce NaNs.

    BUGFIX: the annotation allows a list, but lru_cache requires hashable arguments and
    raised TypeError on lists. We normalize to a tuple before hitting the cache.

    NOTE(review): the returned tensor is cached and shared between callers — do not
    mutate it in place.
    """
    return _compute_gaussian_cached(tuple(tile_size), sigma_scale, value_scaling_factor, dtype, device)
def compute_steps_for_sliding_window(image_size: Tuple[int, ...], tile_size: Tuple[int, ...], tile_step_size: float) -> \
        List[List[int]]:
    """Compute per-axis start coordinates for sliding-window tiles.

    Args:
        image_size: spatial size of the (padded) image, one entry per axis.
        tile_size: patch size per axis; must not exceed image_size on any axis.
        tile_step_size: step between tiles as a fraction of tile_size, in (0, 1].

    Returns:
        One list of start coordinates per axis; tiles are spread evenly so the last
        tile ends exactly at the image border.

    Raises:
        AssertionError: if a tile is larger than the image or tile_step_size is out of range.
    """
    # BUGFIX: the original asserted a list comprehension, which is truthy whenever
    # non-empty, so the size check could never fire. all(...) actually validates it.
    assert all(i >= j for i, j in zip(image_size, tile_size)), \
        "image size must be as large or larger than patch_size"
    assert 0 < tile_step_size <= 1, 'step_size must be larger than 0 and smaller or equal to 1'

    # our step width is patch_size*step_size at most, but can be narrower. For example if we have image size of
    # 110, patch size of 64 and step_size of 0.5, then we want to make 3 steps starting at coordinate 0, 23, 46
    target_step_sizes_in_voxels = [i * tile_step_size for i in tile_size]

    num_steps = [int(np.ceil((i - k) / j)) + 1 for i, j, k in zip(image_size, target_step_sizes_in_voxels, tile_size)]

    steps = []
    for dim in range(len(tile_size)):
        # the highest step value for this dimension is
        max_step_value = image_size[dim] - tile_size[dim]
        if num_steps[dim] > 1:
            actual_step_size = max_step_value / (num_steps[dim] - 1)
        else:
            actual_step_size = 99999999999  # does not matter because there is only one step at 0
        steps_here = [int(np.round(actual_step_size * i)) for i in range(num_steps[dim])]
        steps.append(steps_here)

    return steps
if __name__ == '__main__':
    # Sanity check: pad_nd_image must pad tensors and numpy arrays identically,
    # touching only the trailing spatial axes.
    sample = torch.rand((4, 2, 32, 23))
    sample_np = sample.numpy()
    padded_tensor = pad_nd_image(sample, new_shape=(48, 27))
    padded_array = pad_nd_image(sample_np, new_shape=(48, 27))
    expected_shape = (4, 2, 48, 27)
    assert tuple(padded_tensor.shape) == expected_shape
    assert tuple(padded_array.shape) == expected_shape
    assert np.all(padded_tensor.numpy() == padded_array)
================================================
FILE: Finetune/nnUNet/nnunetv2/model_sharing/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/model_sharing/entry_points.py
================================================
from nnunetv2.model_sharing.model_download import download_and_install_from_url
from nnunetv2.model_sharing.model_export import export_pretrained_model
from nnunetv2.model_sharing.model_import import install_model_from_zip_file
def print_license_warning():
    """Print a banner reminding users that pretrained weights inherit their dataset's license."""
    banner = '######################################################'
    body = ("Using the pretrained model weights is subject to the license of the dataset they were trained on. Some "
            "allow commercial use, others don't. It is your responsibility to make sure you use them appropriately! Use "
            "nnUNet_print_pretrained_model_info(task_name) to see a summary of the dataset and where to find its license!")
    lines = ['',
             banner,
             '!!!!!!!!!!!!!!!!!!!!!!!!WARNING!!!!!!!!!!!!!!!!!!!!!!!',
             banner,
             body,
             banner,
             '']
    # One joined print produces the exact same byte stream as the original per-line prints.
    print('\n'.join(lines))
def download_by_url():
    """CLI entry point: download and install a pretrained model from a URL.

    The positional `url` argument is forwarded to download_and_install_from_url,
    which downloads the zip to a temp file and extracts it into nnUNet_results.
    """
    import argparse
    # BUGFIX: the description previously had an unbalanced parenthesis
    # ("... plans as the pretrained model." with no closing paren).
    parser = argparse.ArgumentParser(
        description="Use this to download pretrained models. This script is intended to download models via url only. "
                    "CAREFUL: This script will overwrite "
                    "existing models (if they share the same trainer class and plans as "
                    "the pretrained model).")
    parser.add_argument("url", type=str, help='URL of the pretrained model')
    args = parser.parse_args()
    download_and_install_from_url(args.url)
def install_from_zip_entry_point():
    """CLI entry point: install a pretrained model from a local zip file."""
    import argparse
    parser = argparse.ArgumentParser(
        description="Use this to install a zip file containing a pretrained model.")
    parser.add_argument("zip", type=str, help='zip file')
    args = parser.parse_args()
    # Avoid binding the value to a local named `zip`, which shadowed the builtin.
    install_model_from_zip_file(args.zip)
def export_pretrained_model_entry():
    """CLI entry point: bundle a trained model (checkpoints, plans, metadata) into a zip file.

    Forwards the parsed options to export_pretrained_model; --not_strict allows
    missing folds/configurations, --exp_cv_preds additionally exports the
    cross-validation predictions.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description="Use this to export a trained model as a zip file.")
    parser.add_argument('-d', type=str, required=True, help='Dataset name or id')
    parser.add_argument('-o', type=str, required=True, help='Output file name')
    parser.add_argument('-c', nargs='+', type=str, required=False,
                        default=('3d_lowres', '3d_fullres', '2d', '3d_cascade_fullres'),
                        help="List of configuration names")
    parser.add_argument('-tr', required=False, type=str, default='nnUNetTrainer', help='Trainer class')
    parser.add_argument('-p', required=False, type=str, default='nnUNetPlans', help='plans identifier')
    parser.add_argument('-f', required=False, nargs='+', type=str, default=(0, 1, 2, 3, 4), help='list of fold ids')
    parser.add_argument('-chk', required=False, nargs='+', type=str, default=('checkpoint_final.pth', ),
                        help='List of checkpoint names to export. Default: checkpoint_final.pth')
    # BUGFIX: this flag used action='store_false' together with default=False, so
    # args.not_strict was False whether or not the flag was given and strict mode
    # could never be disabled. store_true makes --not_strict actually work.
    parser.add_argument('--not_strict', action='store_true', default=False, required=False,
                        help='Set this to allow missing folds and/or configurations')
    parser.add_argument('--exp_cv_preds', action='store_true', required=False,
                        help='Set this to export the cross-validation predictions as well')
    args = parser.parse_args()
    export_pretrained_model(dataset_name_or_id=args.d, output_file=args.o, configurations=args.c, trainer=args.tr,
                            plans_identifier=args.p, folds=args.f, strict=not args.not_strict,
                            save_checkpoints=args.chk, export_crossval_predictions=args.exp_cv_preds)
================================================
FILE: Finetune/nnUNet/nnunetv2/model_sharing/model_download.py
================================================
from typing import Optional
import requests
from batchgenerators.utilities.file_and_folder_operations import *
from time import time
from nnunetv2.model_sharing.model_import import install_model_from_zip_file
from nnunetv2.paths import nnUNet_results
from tqdm import tqdm
def download_and_install_from_url(url):
    """Download a pretrained-model zip from `url` to a temp file in $HOME, install it, clean up.

    Raises:
        AssertionError: if nnUNet_results is not configured.
        requests.HTTPError: if the download fails (propagated from download_file).
    """
    assert nnUNet_results is not None, "Cannot install model because network_training_output_dir is not " \
                                       "set (RESULTS_FOLDER missing as environment variable, see " \
                                       "Installation instructions)"
    print('Downloading pretrained model from url:', url)
    import http.client
    # Force HTTP/1.0 — workaround for servers that misbehave with HTTP/1.1 keep-alive.
    http.client.HTTPConnection._http_vsn = 10
    http.client.HTTPConnection._http_vsn_str = 'HTTP/1.0'

    import os
    home = os.path.expanduser('~')
    # Timestamp-derived name makes concurrent downloads extremely unlikely to collide.
    random_number = int(time() * 1e7)
    tempfile = join(home, f'.nnunetdownload_{str(random_number)}')

    # The original `except Exception as e: raise e` added nothing over plain
    # propagation; try/finally alone guarantees the temp file is removed.
    try:
        download_file(url=url, local_filename=tempfile, chunk_size=8192 * 16)
        print("Download finished. Extracting...")
        install_model_from_zip_file(tempfile)
        print("Done")
    finally:
        if isfile(tempfile):
            os.remove(tempfile)
def download_file(url: str, local_filename: str, chunk_size: Optional[int] = 8192 * 16) -> str:
    """Stream `url` to `local_filename` with a tqdm progress bar and return the local path.

    Raises:
        requests.HTTPError: on non-2xx responses (via raise_for_status).
    """
    # borrowed from https://stackoverflow.com/questions/16694907/download-large-file-in-python-with-requests
    # NOTE the stream=True parameter below
    with requests.get(url, stream=True, timeout=100) as r:
        r.raise_for_status()
        # BUGFIX: some servers omit Content-Length; int(None) raised TypeError.
        # tqdm accepts total=None and shows an indeterminate bar instead.
        content_length = r.headers.get("Content-Length")
        total = int(content_length) if content_length is not None else None
        with tqdm.wrapattr(open(local_filename, 'wb'), "write", total=total) as f:
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)
    return local_filename
================================================
FILE: Finetune/nnUNet/nnunetv2/model_sharing/model_export.py
================================================
import zipfile
from nnunetv2.utilities.file_path_utilities import *
def export_pretrained_model(dataset_name_or_id: Union[int, str], output_file: str,
                            configurations: Tuple[str] = ("2d", "3d_lowres", "3d_fullres", "3d_cascade_fullres"),
                            trainer: str = 'nnUNetTrainer',
                            plans_identifier: str = 'nnUNetPlans',
                            folds: Tuple[int, ...] = (0, 1, 2, 3, 4),
                            strict: bool = True,
                            save_checkpoints: Tuple[str, ...] = ('checkpoint_final.pth',),
                            export_crossval_predictions: bool = False) -> None:
    """Zip a trained model (checkpoints, plans, metadata, optionally CV predictions) for sharing.

    For each requested configuration the per-fold checkpoints, progress plots,
    plans/fingerprint/dataset json files, cross-validation results and matching
    ensemble folders are written into `output_file` with paths relative to
    nnUNet_results.

    Args:
        dataset_name_or_id: dataset name or integer id; converted via maybe_convert_to_dataset_name.
        output_file: path of the zip file to create.
        configurations: configurations to export.
        trainer: trainer class name used during training.
        plans_identifier: plans identifier used during training.
        folds: folds that must be present (ints or 'all').
        strict: if True, raise RuntimeError when a configuration's trained model is missing;
            otherwise skip it.
        save_checkpoints: checkpoint file names to include per fold.
        export_crossval_predictions: if True, include full validation/crossval folders
            instead of only the summary/postprocessing files.
    """
    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)

    with(zipfile.ZipFile(output_file, 'w', zipfile.ZIP_DEFLATED)) as zipf:
        for c in configurations:
            print(f"Configuration {c}")
            trainer_output_dir = get_output_folder(dataset_name, trainer, plans_identifier, c)

            if not isdir(trainer_output_dir):
                if strict:
                    raise RuntimeError(f"{dataset_name} is missing the trained model of configuration {c}")
                else:
                    continue

            expected_fold_folder = [f"fold_{i}" if i != 'all' else 'fold_all' for i in folds]
            assert all([isdir(join(trainer_output_dir, i)) for i in expected_fold_folder]), \
                f"not all requested folds are present; {dataset_name} {c}; requested folds: {folds}"

            assert isfile(join(trainer_output_dir, "plans.json")), f"plans.json missing, {dataset_name} {c}"

            for fold_folder in expected_fold_folder:
                print(f"Exporting {fold_folder}")
                # debug.json, does not exist yet
                source_file = join(trainer_output_dir, fold_folder, "debug.json")
                if isfile(source_file):
                    zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))
                # all requested checkpoints
                for chk in save_checkpoints:
                    source_file = join(trainer_output_dir, fold_folder, chk)
                    zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))
                # progress.png
                source_file = join(trainer_output_dir, fold_folder, "progress.png")
                zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))
                # if it exists, network architecture.png
                source_file = join(trainer_output_dir, fold_folder, "network_architecture.pdf")
                if isfile(source_file):
                    zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))
                # validation folder with all predicted segmentations etc
                if export_crossval_predictions:
                    source_folder = join(trainer_output_dir, fold_folder, "validation")
                    # .npz/.pkl files (softmax outputs etc.) are excluded to keep the zip small
                    files = [i for i in subfiles(source_folder, join=False) if not i.endswith('.npz') and not i.endswith('.pkl')]
                    for f in files:
                        zipf.write(join(source_folder, f), os.path.relpath(join(source_folder, f), nnUNet_results))
                # just the summary.json file from the validation
                else:
                    source_file = join(trainer_output_dir, fold_folder, "validation", "summary.json")
                    zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))

            # cross-validation results for this configuration (if present)
            source_folder = join(trainer_output_dir, f'crossval_results_folds_{folds_tuple_to_string(folds)}')
            if isdir(source_folder):
                if export_crossval_predictions:
                    source_files = subfiles(source_folder, join=True)
                else:
                    source_files = [
                        join(trainer_output_dir, f'crossval_results_folds_{folds_tuple_to_string(folds)}', i) for i in
                        ['summary.json', 'postprocessing.pkl', 'postprocessing.json']
                    ]
                for s in source_files:
                    if isfile(s):
                        zipf.write(s, os.path.relpath(s, nnUNet_results))
            # plans
            source_file = join(trainer_output_dir, "plans.json")
            zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))
            # fingerprint
            source_file = join(trainer_output_dir, "dataset_fingerprint.json")
            zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))
            # dataset
            source_file = join(trainer_output_dir, "dataset.json")
            zipf.write(source_file, os.path.relpath(source_file, nnUNet_results))

        ensemble_dir = join(nnUNet_results, dataset_name, 'ensembles')

        if not isdir(ensemble_dir):
            print("No ensemble directory found for task", dataset_name_or_id)
            return
        subd = subdirs(ensemble_dir, join=False)
        # figure out whether the models in the ensemble are all within the exported models here
        for ens in subd:
            # NOTE(review): this rebinds `folds`, shadowing the parameter. The parameter
            # is not used after this point so behavior is unaffected, but it is fragile.
            identifiers, folds = convert_ensemble_folder_to_model_identifiers_and_folds(ens)
            ok = True
            for i in identifiers:
                tr, pl, c = convert_identifier_to_trainer_plans_config(i)
                if tr == trainer and pl == plans_identifier and c in configurations:
                    pass
                else:
                    ok = False
            if ok:
                print(f'found matching ensemble: {ens}')
                source_folder = join(ensemble_dir, ens)
                if export_crossval_predictions:
                    source_files = subfiles(source_folder, join=True)
                else:
                    source_files = [
                        join(source_folder, i) for i in
                        ['summary.json', 'postprocessing.pkl', 'postprocessing.json'] if isfile(join(source_folder, i))
                    ]
                for s in source_files:
                    zipf.write(s, os.path.relpath(s, nnUNet_results))
        # dataset-level inference instructions, if they were generated
        inference_information_file = join(nnUNet_results, dataset_name, 'inference_information.json')
        if isfile(inference_information_file):
            zipf.write(inference_information_file, os.path.relpath(inference_information_file, nnUNet_results))
        inference_information_txt_file = join(nnUNet_results, dataset_name, 'inference_information.txt')
        if isfile(inference_information_txt_file):
            zipf.write(inference_information_txt_file, os.path.relpath(inference_information_txt_file, nnUNet_results))
    print('Done')
if __name__ == '__main__':
    # Developer smoke test with hard-coded local paths; not part of the public CLI.
    export_pretrained_model(2, '/home/fabian/temp/dataset2.zip', strict=False, export_crossval_predictions=True, folds=(0, ))
================================================
FILE: Finetune/nnUNet/nnunetv2/model_sharing/model_import.py
================================================
import zipfile
from nnunetv2.paths import nnUNet_results
def install_model_from_zip_file(zip_file: str):
    """
    Install an exported nnU-Net model by extracting the given zip archive into
    the nnUNet_results folder.

    :param zip_file: path to a zip archive created by the model export utilities
    """
    # NOTE(review): ZipFile.extractall writes whatever member paths the archive
    # contains — only install model zips obtained from trusted sources.
    with zipfile.ZipFile(zip_file, 'r') as zip_ref:
        zip_ref.extractall(nnUNet_results)
================================================
FILE: Finetune/nnUNet/nnunetv2/paths.py
================================================
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
"""
PLEASE READ paths.md FOR INFORMATION TO HOW TO SET THIS UP
"""
# Hard-coded machine-specific defaults. Fix: the original assigned string
# literals directly, which (a) made the `is None` warnings below dead code and
# (b) made the paths impossible to configure without editing this file.
# Environment variables of the same name (the standard nnU-Net v2 configuration
# mechanism) now take precedence; behavior is unchanged when they are unset.
base = os.environ.get('nnUNet_base', '/data/linshan/nnunet_data')
nnUNet_raw = os.environ.get('nnUNet_raw', '/data/linshan/nnunet_data/nnUNet_raw')
nnUNet_preprocessed = os.environ.get('nnUNet_preprocessed', '/data/linshan/nnunet_data/nnUNet_preprocessed')
nnUNet_results = os.environ.get('nnUNet_results', '/data/linshan/nnunet_data/nnUNet_results')

# Warn (do not fail) when a path is unavailable: parts of nnU-Net can still be
# used without each of these directories.
if nnUNet_raw is None:
    print("nnUNet_raw is not defined and nnU-Net can only be used on data for which preprocessed files "
          "are already present on your system. nnU-Net cannot be used for experiment planning and preprocessing like "
          "this. If this is not intended, please read documentation/setting_up_paths.md for information on how to set "
          "this up properly.")

if nnUNet_preprocessed is None:
    print("nnUNet_preprocessed is not defined and nnU-Net can not be used for preprocessing "
          "or training. If this is not intended, please read documentation/setting_up_paths.md for information on how "
          "to set this up.")

if nnUNet_results is None:
    print("nnUNet_results is not defined and nnU-Net cannot be used for training or "
          "inference. If this is not intended behavior, please read documentation/setting_up_paths.md for information "
          "on how to set this up.")
================================================
FILE: Finetune/nnUNet/nnunetv2/postprocessing/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/postprocessing/remove_connected_components.py
================================================
import argparse
import multiprocessing
import shutil
from multiprocessing import Pool
from typing import Union, Tuple, List, Callable
import numpy as np
from acvl_utils.morphology.morphology_helper import remove_all_but_largest_component
from batchgenerators.utilities.file_and_folder_operations import load_json, subfiles, maybe_mkdir_p, join, isfile, \
isdir, save_pickle, load_pickle, save_json
from nnunetv2.configuration import default_num_processes
from nnunetv2.evaluation.accumulate_cv_results import accumulate_cv_results
from nnunetv2.evaluation.evaluate_predictions import region_or_label_to_mask, compute_metrics_on_folder, \
load_summary_json, label_or_region_to_key
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from nnunetv2.paths import nnUNet_raw
from nnunetv2.utilities.file_path_utilities import folds_tuple_to_string
from nnunetv2.utilities.json_export import recursive_fix_for_json_export
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager
def remove_all_but_largest_component_from_segmentation(segmentation: np.ndarray,
                                                       labels_or_regions: Union[int, Tuple[int, ...],
                                                                                List[Union[int, Tuple[int, ...]]]],
                                                       background_label: int = 0) -> np.ndarray:
    """
    Keep only the largest connected component of the union of the given labels/regions.

    Voxels that belong to the targeted labels/regions but not to the largest
    connected component are overwritten with background_label. The input array
    is left untouched; a modified copy is returned.
    """
    # normalize the argument to a list of targets
    targets = labels_or_regions if isinstance(labels_or_regions, list) else [labels_or_regions]

    # union of the binary masks of all targeted labels/regions
    combined_mask = np.zeros(segmentation.shape, dtype=bool)
    for target in targets:
        combined_mask = combined_mask | region_or_label_to_mask(segmentation, target)

    largest_only = remove_all_but_largest_component(combined_mask)

    result = segmentation.copy()  # do not modify the input!
    discarded = combined_mask & ~largest_only
    result[discarded] = background_label
    return result
def apply_postprocessing(segmentation: np.ndarray, pp_fns: List[Callable], pp_fn_kwargs: List[dict]):
    """
    Run each postprocessing function over the segmentation in order, feeding the
    output of one step into the next. pp_fns and pp_fn_kwargs are parallel lists.
    """
    result = segmentation
    for step, fn in enumerate(pp_fns):
        result = fn(result, **pp_fn_kwargs[step])
    return result
def load_postprocess_save(segmentation_file: str,
                          output_fname: str,
                          image_reader_writer: BaseReaderWriter,
                          pp_fns: List[Callable],
                          pp_fn_kwargs: List[dict]):
    """
    Worker helper: read one segmentation from disk, apply the postprocessing
    pipeline, and write the result to output_fname.
    """
    loaded_seg, seg_properties = image_reader_writer.read_seg(segmentation_file)
    # read_seg returns (seg, properties); seg carries a leading channel axis
    processed = apply_postprocessing(loaded_seg[0], pp_fns, pp_fn_kwargs)
    image_reader_writer.write_seg(processed, output_fname, seg_properties)
def determine_postprocessing(folder_predictions: str,
                             folder_ref: str,
                             plans_file_or_dict: Union[str, dict],
                             dataset_json_file_or_dict: Union[str, dict],
                             num_processes: int = default_num_processes,
                             keep_postprocessed_files: bool = True):
    """
    Determines nnUNet postprocessing. Its output is a postprocessing.pkl file in folder_predictions which can be
    used with apply_postprocessing_to_folder.

    Strategy: first test whether keeping only the largest connected component of
    the combined foreground improves the mean Dice (accepted only if no single
    class gets worse); then, for each label/region individually, test whether
    largest-component removal improves that label's Dice. Accepted steps are
    accumulated and applied cumulatively.

    Postprocessed files are saved in folder_predictions/postprocessed. Set
    keep_postprocessed_files=False to delete these files after this function is done (temp files will be created
    and deleted regardless).

    If plans_file_or_dict or dataset_json_file_or_dict are None, we will look for them in folder_predictions.

    :return: (pp_fns, pp_fn_kwargs), the accepted postprocessing callables and their kwargs (parallel lists)
    """
    output_folder = join(folder_predictions, 'postprocessed')

    if plans_file_or_dict is None:
        expected_plans_file = join(folder_predictions, 'plans.json')
        if not isfile(expected_plans_file):
            raise RuntimeError(f"Expected plans file missing: {expected_plans_file}. The plans files should have been "
                               f"created while running nnUNetv2_predict. Sadge.")
        plans_file_or_dict = load_json(expected_plans_file)
    plans_manager = PlansManager(plans_file_or_dict)

    if dataset_json_file_or_dict is None:
        expected_dataset_json_file = join(folder_predictions, 'dataset.json')
        if not isfile(expected_dataset_json_file):
            raise RuntimeError(
                f"Expected plans file missing: {expected_dataset_json_file}. The plans files should have been "
                f"created while running nnUNetv2_predict. Sadge.")
        dataset_json_file_or_dict = load_json(expected_dataset_json_file)

    if not isinstance(dataset_json_file_or_dict, dict):
        dataset_json = load_json(dataset_json_file_or_dict)
    else:
        dataset_json = dataset_json_file_or_dict

    rw = plans_manager.image_reader_writer_class()
    label_manager = plans_manager.get_label_manager(dataset_json)
    labels_or_regions = label_manager.foreground_regions if label_manager.has_regions else label_manager.foreground_labels

    predicted_files = subfiles(folder_predictions, suffix=dataset_json['file_ending'], join=False)
    ref_files = subfiles(folder_ref, suffix=dataset_json['file_ending'], join=False)
    # we should print a warning if not all files from folder_ref are present in folder_predictions
    if not all([i in predicted_files for i in ref_files]):
        print(f'WARNING: Not all files in folder_ref were found in folder_predictions. Determining postprocessing '
              f'should always be done on the entire dataset!')

    # before we start we should evaluate the images in the source folder (baseline metrics)
    if not isfile(join(folder_predictions, 'summary.json')):
        compute_metrics_on_folder(folder_ref,
                                  folder_predictions,
                                  join(folder_predictions, 'summary.json'),
                                  rw,
                                  dataset_json['file_ending'],
                                  labels_or_regions,
                                  label_manager.ignore_label,
                                  num_processes)

    # accepted postprocessing functions and their kwargs are accumulated here
    pp_fns = []
    pp_fn_kwargs = []

    # one worker pool is reused for all candidate postprocessing runs
    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        # now let's see whether removing all but the largest foreground region improves the scores
        output_here = join(output_folder, 'temp', 'keep_largest_fg')
        maybe_mkdir_p(output_here)
        pp_fn = remove_all_but_largest_component_from_segmentation
        kwargs = {
            'labels_or_regions': label_manager.foreground_labels,
        }

        pool.starmap(
            load_postprocess_save,
            zip(
                [join(folder_predictions, i) for i in predicted_files],
                [join(output_here, i) for i in predicted_files],
                [rw] * len(predicted_files),
                [[pp_fn]] * len(predicted_files),
                [[kwargs]] * len(predicted_files)
            )
        )
        compute_metrics_on_folder(folder_ref,
                                  output_here,
                                  join(output_here, 'summary.json'),
                                  rw,
                                  dataset_json['file_ending'],
                                  labels_or_regions,
                                  label_manager.ignore_label,
                                  num_processes)
        # now we need to figure out if doing this improved the dice scores. We will implement that defensively in so far
        # that if a single class got worse as a result we won't do this. We can change this in the future but right now I
        # prefer to do it this way
        baseline_results = load_summary_json(join(folder_predictions, 'summary.json'))
        pp_results = load_summary_json(join(output_here, 'summary.json'))
        do_this = pp_results['foreground_mean']['Dice'] > baseline_results['foreground_mean']['Dice']
        if do_this:
            for class_id in pp_results['mean'].keys():
                if pp_results['mean'][class_id]['Dice'] < baseline_results['mean'][class_id]['Dice']:
                    do_this = False
                    break
        if do_this:
            print(f'Results were improved by removing all but the largest foreground region. '
                  f'Mean dice before: {round(baseline_results["foreground_mean"]["Dice"], 5)} '
                  f'after: {round(pp_results["foreground_mean"]["Dice"], 5)}')
            # `source` tracks the best segmentations so far; subsequent candidate
            # steps are applied on top of it
            source = output_here
            pp_fns.append(pp_fn)
            pp_fn_kwargs.append(kwargs)
        else:
            print(f'Removing all but the largest foreground region did not improve results!')
            source = folder_predictions

        # in the old nnU-Net we could just apply all-but-largest component removal to all classes at the same time and
        # then evaluate for each class whether this improved results. This is no longer possible because we now support
        # region-based predictions and regions can overlap, causing interactions
        # in principle the order with which the postprocessing is applied to the regions matters as well and should be
        # investigated, but we stick to the order in which they are declared in dataset.json
        # (think about region_class_order)
        if len(labels_or_regions) > 1:
            for label_or_region in labels_or_regions:
                pp_fn = remove_all_but_largest_component_from_segmentation
                kwargs = {
                    'labels_or_regions': label_or_region,
                }

                output_here = join(output_folder, 'temp', 'keep_largest_perClassOrRegion')
                maybe_mkdir_p(output_here)

                pool.starmap(
                    load_postprocess_save,
                    zip(
                        [join(source, i) for i in predicted_files],
                        [join(output_here, i) for i in predicted_files],
                        [rw] * len(predicted_files),
                        [[pp_fn]] * len(predicted_files),
                        [[kwargs]] * len(predicted_files)
                    )
                )
                compute_metrics_on_folder(folder_ref,
                                          output_here,
                                          join(output_here, 'summary.json'),
                                          rw,
                                          dataset_json['file_ending'],
                                          labels_or_regions,
                                          label_manager.ignore_label,
                                          num_processes)
                baseline_results = load_summary_json(join(source, 'summary.json'))
                pp_results = load_summary_json(join(output_here, 'summary.json'))
                do_this = pp_results['mean'][label_or_region]['Dice'] > baseline_results['mean'][label_or_region]['Dice']
                if do_this:
                    print(f'Results were improved by removing all but the largest component for {label_or_region}. '
                          f'Dice before: {round(baseline_results["mean"][label_or_region]["Dice"], 5)} '
                          f'after: {round(pp_results["mean"][label_or_region]["Dice"], 5)}')
                    # promote this run to the new current best and continue from it
                    if isdir(join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest')):
                        shutil.rmtree(join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest'))
                    shutil.move(output_here, join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest'), )
                    source = join(output_folder, 'temp', 'keep_largest_perClassOrRegion_currentBest')
                    pp_fns.append(pp_fn)
                    pp_fn_kwargs.append(kwargs)
                else:
                    print(f'Removing all but the largest component for {label_or_region} did not improve results! '
                          f'Dice before: {round(baseline_results["mean"][label_or_region]["Dice"], 5)} '
                          f'after: {round(pp_results["mean"][label_or_region]["Dice"], 5)}')

    # copy the final (best) segmentations + summary.json into the output folder
    [shutil.copy(join(source, i), join(output_folder, i)) for i in subfiles(source, join=False)]
    save_pickle((pp_fns, pp_fn_kwargs), join(folder_predictions, 'postprocessing.pkl'))

    baseline_results = load_summary_json(join(folder_predictions, 'summary.json'))
    final_results = load_summary_json(join(output_folder, 'summary.json'))
    tmp = {
        'input_folder': {i: baseline_results[i] for i in ['foreground_mean', 'mean']},
        'postprocessed': {i: final_results[i] for i in ['foreground_mean', 'mean']},
        'postprocessing_fns': [i.__name__ for i in pp_fns],
        'postprocessing_kwargs': pp_fn_kwargs,
    }
    # json cannot handle tuples (regions) as dict keys, so convert them to strings
    tmp['input_folder']['mean'] = {label_or_region_to_key(k): tmp['input_folder']['mean'][k] for k in
                                   tmp['input_folder']['mean'].keys()}
    tmp['postprocessed']['mean'] = {label_or_region_to_key(k): tmp['postprocessed']['mean'][k] for k in
                                    tmp['postprocessed']['mean'].keys()}
    # converts numpy scalars etc. to JSON-serializable builtins in place
    recursive_fix_for_json_export(tmp)
    save_json(tmp, join(folder_predictions, 'postprocessing.json'))

    shutil.rmtree(join(output_folder, 'temp'))

    if not keep_postprocessed_files:
        shutil.rmtree(output_folder)
    return pp_fns, pp_fn_kwargs
def apply_postprocessing_to_folder(input_folder: str,
                                   output_folder: str,
                                   pp_fns: List[Callable],
                                   pp_fn_kwargs: List[dict],
                                   plans_file_or_dict: Union[str, dict] = None,
                                   dataset_json_file_or_dict: Union[str, dict] = None,
                                   num_processes=8) -> None:
    """
    Apply the given postprocessing pipeline to every segmentation file in
    input_folder and write the results into output_folder.

    If plans_file_or_dict or dataset_json_file_or_dict are None, we will look for them in input_folder
    """
    if plans_file_or_dict is None:
        expected_plans_file = join(input_folder, 'plans.json')
        if not isfile(expected_plans_file):
            raise RuntimeError(f"Expected plans file missing: {expected_plans_file}. The plans file should have been "
                               f"created while running nnUNetv2_predict. Sadge. If the folder you want to apply "
                               f"postprocessing to was create from an ensemble then just specify one of the "
                               f"plans files of the ensemble members in plans_file_or_dict")
        plans_file_or_dict = load_json(expected_plans_file)
    plans_manager = PlansManager(plans_file_or_dict)

    if dataset_json_file_or_dict is None:
        expected_dataset_json_file = join(input_folder, 'dataset.json')
        if not isfile(expected_dataset_json_file):
            raise RuntimeError(
                f"Expected plans file missing: {expected_dataset_json_file}. The dataset.json should have been "
                f"copied while running nnUNetv2_predict/nnUNetv2_ensemble. Sadge.")
        dataset_json_file_or_dict = load_json(expected_dataset_json_file)

    # accept either a loaded dict or a path to a json file
    dataset_json = dataset_json_file_or_dict if isinstance(dataset_json_file_or_dict, dict) \
        else load_json(dataset_json_file_or_dict)

    reader_writer = plans_manager.image_reader_writer_class()
    maybe_mkdir_p(output_folder)

    with multiprocessing.get_context("spawn").Pool(num_processes) as worker_pool:
        seg_files = subfiles(input_folder, suffix=dataset_json['file_ending'], join=False)
        num_cases = len(seg_files)
        _ = worker_pool.starmap(load_postprocess_save,
                                zip(
                                    [join(input_folder, f) for f in seg_files],
                                    [join(output_folder, f) for f in seg_files],
                                    [reader_writer] * num_cases,
                                    [pp_fns] * num_cases,
                                    [pp_fn_kwargs] * num_cases
                                )
                                )
def entry_point_determine_postprocessing_folder():
    """CLI wrapper around determine_postprocessing (nnUNetv2 command line)."""
    parser = argparse.ArgumentParser('Writes postprocessing.pkl and postprocessing.json in input_folder.')
    parser.add_argument('-i', type=str, required=True, help='Input folder')
    parser.add_argument('-ref', type=str, required=True, help='Folder with gt labels')
    parser.add_argument('-plans_json', type=str, required=False, default=None,
                        help="plans file to use. If not specified we will look for the plans.json file in the "
                             "input folder (input_folder/plans.json)")
    parser.add_argument('-dataset_json', type=str, required=False, default=None,
                        help="dataset.json file to use. If not specified we will look for the dataset.json file in the "
                             "input folder (input_folder/dataset.json)")
    parser.add_argument('-np', type=int, required=False, default=default_num_processes,
                        help=f"number of processes to use. Default: {default_num_processes}")
    parser.add_argument('--remove_postprocessed', action='store_true', required=False,
                        help='set this is you don\'t want to keep the postprocessed files')
    parsed = parser.parse_args()
    # --remove_postprocessed inverts keep_postprocessed_files
    determine_postprocessing(parsed.i, parsed.ref, parsed.plans_json, parsed.dataset_json, parsed.np,
                             keep_postprocessed_files=not parsed.remove_postprocessed)
def entry_point_apply_postprocessing():
    """
    CLI entry point: applies the postprocessing stored in a postprocessing.pkl file
    to every segmentation in the input folder, writing results to the output folder.
    """
    # fix: typo in the user-facing CLI description ('Apples' -> 'Applies')
    parser = argparse.ArgumentParser('Applies postprocessing specified in pp_pkl_file to input folder.')
    parser.add_argument('-i', type=str, required=True, help='Input folder')
    parser.add_argument('-o', type=str, required=True, help='Output folder')
    parser.add_argument('-pp_pkl_file', type=str, required=True, help='postprocessing.pkl file')
    parser.add_argument('-np', type=int, required=False, default=default_num_processes,
                        help=f"number of processes to use. Default: {default_num_processes}")
    parser.add_argument('-plans_json', type=str, required=False, default=None,
                        help="plans file to use. If not specified we will look for the plans.json file in the "
                             "input folder (input_folder/plans.json)")
    parser.add_argument('-dataset_json', type=str, required=False, default=None,
                        help="dataset.json file to use. If not specified we will look for the dataset.json file in the "
                             "input folder (input_folder/dataset.json)")
    args = parser.parse_args()
    # the pickle contains the (pp_fns, pp_fn_kwargs) pair written by determine_postprocessing
    pp_fns, pp_fn_kwargs = load_pickle(args.pp_pkl_file)
    apply_postprocessing_to_folder(args.i, args.o, pp_fns, pp_fn_kwargs, args.plans_json, args.dataset_json, args.np)
if __name__ == '__main__':
    # Developer smoke test: determine and apply postprocessing for a trained
    # Hippocampus model. NOTE(review): paths are hard-coded to the original
    # author's machine — adjust before running.
    trained_model_folder = '/home/fabian/results/nnUNet_remake/Dataset004_Hippocampus/nnUNetTrainer__nnUNetPlans__3d_fullres'
    labelstr = join(nnUNet_raw, 'Dataset004_Hippocampus', 'labelsTr')
    plans_manager = PlansManager(join(trained_model_folder, 'plans.json'))
    dataset_json = load_json(join(trained_model_folder, 'dataset.json'))
    folds = (0, 1, 2, 3, 4)
    label_manager = plans_manager.get_label_manager(dataset_json)

    # merge the per-fold validation predictions into a single folder
    merged_output_folder = join(trained_model_folder, f'crossval_results_folds_{folds_tuple_to_string(folds)}')
    accumulate_cv_results(trained_model_folder, merged_output_folder, folds, 8, False)

    # search for beneficial postprocessing steps and persist them
    fns, kwargs = determine_postprocessing(merged_output_folder, labelstr, plans_manager.plans,
                                           dataset_json, 8, keep_postprocessed_files=True)
    save_pickle((fns, kwargs), join(trained_model_folder, 'postprocessing.pkl'))

    # round-trip the pickle and apply it to the merged predictions
    fns, kwargs = load_pickle(join(trained_model_folder, 'postprocessing.pkl'))
    apply_postprocessing_to_folder(merged_output_folder, merged_output_folder + '_pp', fns, kwargs,
                                   plans_manager.plans, dataset_json,
                                   8)

    # evaluate the postprocessed predictions against the ground truth
    compute_metrics_on_folder(labelstr,
                              merged_output_folder + '_pp',
                              join(merged_output_folder + '_pp', 'summary.json'),
                              plans_manager.image_reader_writer_class(),
                              dataset_json['file_ending'],
                              label_manager.foreground_regions if label_manager.has_regions else label_manager.foreground_labels,
                              label_manager.ignore_label,
                              8)
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/cropping/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/cropping/cropping.py
================================================
import numpy as np
# Hello! crop_to_nonzero is the function you are looking for. Ignore the rest.
from acvl_utils.cropping_and_padding.bounding_boxes import get_bbox_from_mask, crop_to_bbox, bounding_box_to_slice
def create_nonzero_mask(data):
    """
    Build a boolean mask of the voxels that are nonzero in ANY channel, with
    enclosed holes filled.

    :param data: channel-first array of shape (C, X, Y, Z) or (C, X, Y)
    :return: boolean array of shape data.shape[1:]; True where the data is nonzero
    """
    from scipy.ndimage import binary_fill_holes
    assert data.ndim in (3, 4), "data must have shape (C, X, Y, Z) or shape (C, X, Y)"
    # idiom: vectorized reduction over the channel axis replaces the original
    # per-channel Python loop (same result, one numpy call)
    nonzero_mask = np.any(data != 0, axis=0)
    # fill enclosed holes so the mask is a solid region (e.g. dark cavities
    # inside the body do not punch holes into the crop mask)
    nonzero_mask = binary_fill_holes(nonzero_mask)
    return nonzero_mask
def crop_to_nonzero(data, seg=None, nonzero_label=-1):
    """
    Crop data (and seg, if given) to the bounding box of the nonzero region.

    :param data: channel-first array (C, X, Y[, Z])
    :param seg: optional segmentation with the same spatial shape as data
    :param nonzero_label: this will be written into the segmentation map
    :return: (cropped data, cropped/synthesized seg, bbox) — bbox is whatever
             get_bbox_from_mask returns (assumed per-axis bounds; TODO confirm
             against acvl_utils)
    """
    nonzero_mask = create_nonzero_mask(data)
    bbox = get_bbox_from_mask(nonzero_mask)

    slicer = bounding_box_to_slice(bbox)
    # leading slice(None) keeps the channel axis intact while cropping spatially
    data = data[tuple([slice(None), *slicer])]

    if seg is not None:
        seg = seg[tuple([slice(None), *slicer])]

    # crop the mask too and add a channel axis so it broadcasts against seg
    nonzero_mask = nonzero_mask[slicer][None]
    if seg is not None:
        # mark background voxels outside the nonzero region with nonzero_label so
        # downstream steps can tell them apart from real background (label 0)
        seg[(seg == 0) & (~nonzero_mask)] = nonzero_label
    else:
        # no segmentation given: synthesize one that is nonzero_label outside the
        # nonzero region and 0 inside it
        nonzero_mask = nonzero_mask.astype(np.int8)
        nonzero_mask[nonzero_mask == 0] = nonzero_label
        nonzero_mask[nonzero_mask > 0] = 0
        seg = nonzero_mask
    return data, seg, bbox
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/normalization/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/normalization/default_normalization_schemes.py
================================================
from abc import ABC, abstractmethod
from typing import Type
import numpy as np
from numpy import number
class ImageNormalization(ABC):
    """
    Base class for the per-channel intensity normalization schemes used during
    preprocessing. Subclasses implement run().
    """
    # subclasses declare whether, when use_mask_for_norm is True, voxels outside
    # the mask are left at zero by run()
    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = None

    def __init__(self, use_mask_for_norm: bool = None, intensityproperties: dict = None,
                 target_dtype: Type[number] = np.float32):
        assert use_mask_for_norm is None or isinstance(use_mask_for_norm, bool)
        self.use_mask_for_norm = use_mask_for_norm
        # bugfix: the original unconditionally asserted isinstance(intensityproperties, dict),
        # which contradicts the documented default of None and made every
        # no-argument instantiation fail. Schemes that need the properties
        # (e.g. CTNormalization) still validate their presence in run().
        assert intensityproperties is None or isinstance(intensityproperties, dict)
        self.intensityproperties = intensityproperties
        self.target_dtype = target_dtype

    @abstractmethod
    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
        """
        Image and seg must have the same shape. Seg is not always used
        """
        pass
class ZScoreNormalization(ImageNormalization):
    """Standard z-score normalization, optionally restricted to the nonzero mask."""
    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = True

    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
        """
        seg encodes the 'outside' region with negative values (default -1), e.g.
        the zero background around a brain in BraTS. When use_mask_for_norm is
        set, mean/std are computed and applied only inside that region; outside
        voxels keep their (zero) value. nnU-Net enables this when cropping to the
        nonzero region substantially reduced the image size.
        """
        image = image.astype(self.target_dtype)
        if self.use_mask_for_norm:  # None is falsy, so this matches the original check
            inside = seg >= 0
            inside_values = image[inside]
            mu = inside_values.mean()
            sigma = inside_values.std()
            image[inside] = (inside_values - mu) / max(sigma, 1e-8)
        else:
            mu = image.mean()
            sigma = image.std()
            image = (image - mu) / max(sigma, 1e-8)
        return image
class CTNormalization(ImageNormalization):
    """CT normalization: clip to dataset foreground percentiles, then z-score with global stats."""
    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False

    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
        assert self.intensityproperties is not None, "CTNormalization requires intensity properties"
        props = self.intensityproperties
        image = image.astype(self.target_dtype)
        # clip to the 0.5 / 99.5 foreground intensity percentiles collected during fingerprinting
        image = np.clip(image, props['percentile_00_5'], props['percentile_99_5'])
        # z-score with the dataset-wide foreground mean/std
        return (image - props['mean']) / max(props['std'], 1e-8)
class NoNormalization(ImageNormalization):
    """Identity scheme: no intensity change, only a cast to the target dtype."""
    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False

    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
        casted = image.astype(self.target_dtype)
        return casted
class RescaleTo01Normalization(ImageNormalization):
    """Min-max rescaling of each image to the [0, 1] range."""
    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False

    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
        image = image.astype(self.target_dtype)
        shifted = image - image.min()
        # clip the divisor so constant images do not divide by zero
        denominator = np.clip(shifted.max(), a_min=1e-8, a_max=None)
        return shifted / denominator
class RGBTo01Normalization(ImageNormalization):
    """Map uint8 RGB intensities from [0, 255] to [0, 1]."""
    leaves_pixels_outside_mask_at_zero_if_use_mask_for_norm_is_true = False

    def run(self, image: np.ndarray, seg: np.ndarray = None) -> np.ndarray:
        # sanity-check the assumed uint8 RGB value range before scaling
        assert image.min() >= 0, "RGB images are uint 8, for whatever reason I found pixel values smaller than 0. " \
                                 "Your images do not seem to be RGB images"
        assert image.max() <= 255, "RGB images are uint 8, for whatever reason I found pixel values greater than 255" \
                                   ". Your images do not seem to be RGB images"
        scaled = image.astype(self.target_dtype) / 255.
        return scaled
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/normalization/map_channel_name_to_normalization.py
================================================
from typing import Type
from nnunetv2.preprocessing.normalization.default_normalization_schemes import CTNormalization, NoNormalization, \
ZScoreNormalization, RescaleTo01Normalization, RGBTo01Normalization, ImageNormalization
# Registry mapping the channel name declared in dataset.json (channel_names entry)
# to the normalization class applied to that channel during preprocessing.
channel_name_to_normalization_mapping = {
    'CT': CTNormalization,
    'noNorm': NoNormalization,
    'zscore': ZScoreNormalization,
    'rescale_to_0_1': RescaleTo01Normalization,
    'rgb_to_0_1': RGBTo01Normalization
}
def get_normalization_scheme(channel_name: str) -> Type[ImageNormalization]:
    """
    Look up the normalization class registered for channel_name in
    channel_name_to_normalization_mapping. Unknown channel names fall back to
    the default, ZScoreNormalization.
    """
    return channel_name_to_normalization_mapping.get(channel_name, ZScoreNormalization)
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/normalization/readme.md
================================================
The channel_names entry in dataset.json only determines the normalization scheme. So if you want to use something different,
you can just:
- create a new subclass of ImageNormalization
- map your custom channel identifier to that subclass in channel_name_to_normalization_mapping
- run plan and preprocess again with your custom normalization scheme
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/preprocessors/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/preprocessors/default_preprocessor.py
================================================
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import multiprocessing
import shutil
from time import sleep
from typing import Union, Tuple
import nnunetv2
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw
from nnunetv2.preprocessing.cropping.cropping import crop_to_nonzero
from nnunetv2.preprocessing.resampling.default_resampling import compute_new_shape
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager
from nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \
create_lists_from_splitted_dataset_folder, get_filenames_of_train_images_and_targets
from tqdm import tqdm
class DefaultPreprocessor(object):
def __init__(self, verbose: bool = True):
    """
    Everything we need is in the plans. Those are given when run() is called.

    :param verbose: if True, print per-case shape/spacing information during preprocessing
    """
    # fix: the original placed this docstring AFTER the assignment, making it a
    # no-op string expression instead of the method's docstring
    self.verbose = verbose
def run_case_npy(self, data: np.ndarray, seg: Union[np.ndarray, None], properties: dict,
                 plans_manager: PlansManager, configuration_manager: ConfigurationManager,
                 dataset_json: Union[dict, str]):
    """
    Preprocess one already-loaded case: transpose -> crop to nonzero -> normalize -> resample,
    recording the information needed to invert these steps into `properties`.

    :param data: channel-first image array (C, X, Y[, Z]) — assumed; confirm against reader
    :param seg: matching segmentation or None (test cases)
    :param properties: per-case metadata dict (must contain 'spacing'); updated in place
    :return: (preprocessed data, preprocessed seg as int8/int16)
    """
    # let's not mess up the inputs!
    data = np.copy(data)
    if seg is not None:
        assert data.shape[1:] == seg.shape[1:], "Shape mismatch between image and segmentation. Please fix your dataset and make use of the --verify_dataset_integrity flag to ensure everything is correct"
        seg = np.copy(seg)

    has_seg = seg is not None

    # apply transpose_forward, this also needs to be applied to the spacing!
    data = data.transpose([0, *[i + 1 for i in plans_manager.transpose_forward]])
    if seg is not None:
        seg = seg.transpose([0, *[i + 1 for i in plans_manager.transpose_forward]])
    original_spacing = [properties['spacing'][i] for i in plans_manager.transpose_forward]

    # crop, remember to store size before cropping!
    shape_before_cropping = data.shape[1:]
    properties['shape_before_cropping'] = shape_before_cropping
    # this command will generate a segmentation. This is important because of the nonzero mask which we may need
    data, seg, bbox = crop_to_nonzero(data, seg)
    properties['bbox_used_for_cropping'] = bbox
    properties['shape_after_cropping_and_before_resampling'] = data.shape[1:]

    # resample
    target_spacing = configuration_manager.spacing  # this should already be transposed
    if len(target_spacing) < len(data.shape[1:]):
        # target spacing for 2d has 2 entries but the data and original_spacing have three because everything is 3d
        # in 2d configuration we do not change the spacing between slices
        target_spacing = [original_spacing[0]] + target_spacing
    new_shape = compute_new_shape(data.shape[1:], original_spacing, target_spacing)

    # normalize
    # normalization MUST happen before resampling or we get huge problems with resampled nonzero masks no
    # longer fitting the images perfectly!
    data = self._normalize(data, seg, configuration_manager,
                           plans_manager.foreground_intensity_properties_per_channel)

    old_shape = data.shape[1:]
    data = configuration_manager.resampling_fn_data(data, new_shape, original_spacing, target_spacing)
    seg = configuration_manager.resampling_fn_seg(seg, new_shape, original_spacing, target_spacing)
    if self.verbose:
        print(f'old shape: {old_shape}, new_shape: {new_shape}, old_spacing: {original_spacing}, '
              f'new_spacing: {target_spacing}, fn_data: {configuration_manager.resampling_fn_data}')

    # if we have a segmentation, sample foreground locations for oversampling and add those to properties
    if has_seg:
        # reinstantiating LabelManager for each case is not ideal. We could replace the dataset_json argument
        # with a LabelManager Instance in this function because that's all its used for. Dunno what's better.
        # LabelManager is pretty light computation-wise.
        label_manager = plans_manager.get_label_manager(dataset_json)
        collect_for_this = label_manager.foreground_regions if label_manager.has_regions \
            else label_manager.foreground_labels

        # when using the ignore label we want to sample only from annotated regions. Therefore we also need to
        # collect samples uniformly from all classes (incl background)
        if label_manager.has_ignore_label:
            collect_for_this.append(label_manager.all_labels)

        # no need to filter background in regions because it is already filtered in handle_labels
        properties['class_locations'] = self._sample_foreground_locations(seg, collect_for_this,
                                                                          verbose=self.verbose)
        seg = self.modify_seg_fn(seg, plans_manager, dataset_json, configuration_manager)
    # int8 is sufficient for label maps unless labels exceed the int8 range
    if np.max(seg) > 127:
        seg = seg.astype(np.int16)
    else:
        seg = seg.astype(np.int8)
    return data, seg
def run_case(self, image_files: List[str], seg_file: Union[str, None], plans_manager: PlansManager,
configuration_manager: ConfigurationManager,
dataset_json: Union[dict, str]):
"""
seg file can be none (test cases)
order of operations is: transpose -> crop -> resample
so when we export we need to run the following order: resample -> crop -> transpose (we could also run
transpose at a different place, but reverting the order of operations done during preprocessing seems cleaner)
"""
if isinstance(dataset_json, str):
dataset_json = load_json(dataset_json)
rw = plans_manager.image_reader_writer_class()
# load image(s)
data, data_properties = rw.read_images(image_files)
# if possible, load seg
if seg_file is not None:
seg, _ = rw.read_seg(seg_file)
else:
seg = None
data, seg = self.run_case_npy(data, seg, data_properties, plans_manager, configuration_manager,
dataset_json)
return data, seg, data_properties
def run_case_save(self, output_filename_truncated: str, image_files: List[str], seg_file: str,
plans_manager: PlansManager, configuration_manager: ConfigurationManager,
dataset_json: Union[dict, str]):
data, seg, properties = self.run_case(image_files, seg_file, plans_manager, configuration_manager, dataset_json)
# print('dtypes', data.dtype, seg.dtype)
np.savez_compressed(output_filename_truncated + '.npz', data=data, seg=seg)
write_pickle(properties, output_filename_truncated + '.pkl')
@staticmethod
def _sample_foreground_locations(seg: np.ndarray, classes_or_regions: Union[List[int], List[Tuple[int, ...]]],
seed: int = 1234, verbose: bool = False):
num_samples = 10000
min_percent_coverage = 0.01 # at least 1% of the class voxels need to be selected, otherwise it may be too
# sparse
rndst = np.random.RandomState(seed)
class_locs = {}
for c in classes_or_regions:
k = c if not isinstance(c, list) else tuple(c)
if isinstance(c, (tuple, list)):
mask = seg == c[0]
for cc in c[1:]:
mask = mask | (seg == cc)
all_locs = np.argwhere(mask)
else:
all_locs = np.argwhere(seg == c)
if len(all_locs) == 0:
class_locs[k] = []
continue
target_num_samples = min(num_samples, len(all_locs))
target_num_samples = max(target_num_samples, int(np.ceil(len(all_locs) * min_percent_coverage)))
selected = all_locs[rndst.choice(len(all_locs), target_num_samples, replace=False)]
class_locs[k] = selected
if verbose:
print(c, target_num_samples)
return class_locs
def _normalize(self, data: np.ndarray, seg: np.ndarray, configuration_manager: ConfigurationManager,
foreground_intensity_properties_per_channel: dict) -> np.ndarray:
for c in range(data.shape[0]):
scheme = configuration_manager.normalization_schemes[c]
normalizer_class = recursive_find_python_class(join(nnunetv2.__path__[0], "preprocessing", "normalization"),
scheme,
'nnunetv2.preprocessing.normalization')
if normalizer_class is None:
raise RuntimeError(f'Unable to locate class \'{scheme}\' for normalization')
normalizer = normalizer_class(use_mask_for_norm=configuration_manager.use_mask_for_norm[c],
intensityproperties=foreground_intensity_properties_per_channel[str(c)])
data[c] = normalizer.run(data[c], seg[0])
return data
    def run(self, dataset_name_or_id: Union[int, str], configuration_name: str, plans_identifier: str,
            num_processes: int):
        """
        Preprocess all training cases of a dataset for one configuration and write the results to
        nnUNet_preprocessed/<dataset>/<data_identifier>.

        data identifier = configuration name in plans. EZ.

        NOTE: any existing output directory for this configuration is deleted first!
        """
        dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)

        assert isdir(join(nnUNet_raw, dataset_name)), "The requested dataset could not be found in nnUNet_raw"

        plans_file = join(nnUNet_preprocessed, dataset_name, plans_identifier + '.json')
        assert isfile(plans_file), "Expected plans file (%s) not found. Run corresponding nnUNet_plan_experiment " \
                                   "first." % plans_file
        plans = load_json(plans_file)
        plans_manager = PlansManager(plans)
        configuration_manager = plans_manager.get_configuration(configuration_name)

        if self.verbose:
            print(f'Preprocessing the following configuration: {configuration_name}')
        if self.verbose:
            print(configuration_manager)

        dataset_json_file = join(nnUNet_preprocessed, dataset_name, 'dataset.json')
        dataset_json = load_json(dataset_json_file)

        output_directory = join(nnUNet_preprocessed, dataset_name, configuration_manager.data_identifier)

        # start from a clean output folder so stale cases from previous runs cannot survive
        if isdir(output_directory):
            shutil.rmtree(output_directory)

        maybe_mkdir_p(output_directory)

        dataset = get_filenames_of_train_images_and_targets(join(nnUNet_raw, dataset_name), dataset_json)

        # identifiers = [os.path.basename(i[:-len(dataset_json['file_ending'])]) for i in seg_fnames]
        # output_filenames_truncated = [join(output_directory, i) for i in identifiers]

        # multiprocessing magic.
        # one async job per case; we poll their ready() state below instead of blocking on get()
        r = []
        with multiprocessing.get_context("spawn").Pool(num_processes) as p:
            for k in dataset.keys():
                r.append(p.starmap_async(self.run_case_save,
                                         ((join(output_directory, k), dataset[k]['images'], dataset[k]['label'],
                                           plans_manager, configuration_manager,
                                           dataset_json),)))
            remaining = list(range(len(dataset)))
            # p is pretty nifti. If we kill workers they just respawn but don't do any work.
            # So we need to store the original pool of workers.
            workers = [j for j in p._pool]
            with tqdm(desc=None, total=len(dataset), disable=self.verbose) as pbar:
                while len(remaining) > 0:
                    # if any of the original workers died (e.g. OOM-killed), the corresponding job
                    # would never finish -> fail loudly instead of hanging forever
                    all_alive = all([j.is_alive() for j in workers])
                    if not all_alive:
                        raise RuntimeError('Some background worker is 6 feet under. Yuck. \n'
                                           'OK jokes aside.\n'
                                           'One of your background processes is missing. This could be because of '
                                           'an error (look for an error message) or because it was killed '
                                           'by your OS due to running out of RAM. If you don\'t see '
                                           'an error message, out of RAM is likely the problem. In that case '
                                           'reducing the number of workers might help')
                    done = [i for i in remaining if r[i].ready()]
                    for _ in done:
                        pbar.update()
                    remaining = [i for i in remaining if i not in done]
                    sleep(0.1)
def modify_seg_fn(self, seg: np.ndarray, plans_manager: PlansManager, dataset_json: dict,
configuration_manager: ConfigurationManager) -> np.ndarray:
# this function will be called at the end of self.run_case. Can be used to change the segmentation
# after resampling. Useful for experimenting with sparse annotations: I can introduce sparsity after resampling
# and don't have to create a new dataset each time I modify my experiments
return seg
def example_test_case_preprocessing():
    """
    Demonstrates how to preprocess a single (test) case with DefaultPreprocessor.

    Returns the preprocessed data array, ready to be fed into a prediction function.
    """
    # (paths to files may need adaptations)
    plans_file = '/home/isensee/drives/gpu_data/nnUNet_preprocessed/Dataset219_AMOS2022_postChallenge_task2/nnUNetPlans.json'
    dataset_json_file = '/home/isensee/drives/gpu_data/nnUNet_preprocessed/Dataset219_AMOS2022_postChallenge_task2/dataset.json'
    input_images = ['/home/isensee/drives/e132-rohdaten/nnUNetv2/Dataset219_AMOS2022_postChallenge_task2/imagesTr/amos_0600_0000.nii.gz', ]  # if you only have one channel, you still need a list: ['case000_0000.nii.gz']

    configuration = '3d_fullres'
    preprocessor = DefaultPreprocessor()

    plans_manager = PlansManager(plans_file)
    # _ because this position would be the segmentation if seg_file was not None (training case)
    # even if you have the segmentation, don't put the file there! You should always evaluate in the original
    # resolution. What comes out of the preprocessor might have been resampled to some other image resolution (as
    # specified by plans)
    preprocessed_data, _, case_properties = preprocessor.run_case(
        input_images, seg_file=None, plans_manager=plans_manager,
        configuration_manager=plans_manager.get_configuration(configuration),
        dataset_json=dataset_json_file)

    # voila. Now plug data into your prediction function of choice. We of course recommend nnU-Net's default (TODO)
    return preprocessed_data
if __name__ == '__main__':
    # runs the single-test-case preprocessing demo above (paths inside may need adaptation)
    example_test_case_preprocessing()
    # pp = DefaultPreprocessor()
    # pp.run(2, '2d', 'nnUNetPlans', 8)

    ###########################################################################################################
    # how to process a test cases? This is an example:
    # example_test_case_preprocessing()
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/resampling/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/resampling/default_resampling.py
================================================
from collections import OrderedDict
from typing import Union, Tuple, List
import numpy as np
import pandas as pd
import torch
from batchgenerators.augmentations.utils import resize_segmentation
from scipy.ndimage.interpolation import map_coordinates
from skimage.transform import resize
from nnunetv2.configuration import ANISO_THRESHOLD
def get_do_separate_z(spacing: Union[Tuple[float, ...], List[float], np.ndarray], anisotropy_threshold=ANISO_THRESHOLD):
    """
    Decide whether the low-resolution axis should be resampled separately: True when the ratio of
    largest to smallest spacing exceeds anisotropy_threshold.
    """
    spacing_ratio = np.max(spacing) / np.min(spacing)
    return spacing_ratio > anisotropy_threshold
def get_lowres_axis(new_spacing: Union[Tuple[float, ...], List[float], np.ndarray]):
    """
    Return the indices of the axes whose spacing equals the maximum spacing, i.e. the coarsest
    (anisotropic / out-of-plane) axes. May contain more than one index if several axes share
    the largest spacing.
    """
    spacing_arr = np.array(new_spacing)
    # an axis is 'lowres' when max(spacing) / spacing == 1 (exact float comparison, as before)
    is_coarsest = max(new_spacing) / spacing_arr == 1
    return np.where(is_coarsest)[0]
def compute_new_shape(old_shape: Union[Tuple[int, ...], List[int], np.ndarray],
                      old_spacing: Union[Tuple[float, ...], List[float], np.ndarray],
                      new_spacing: Union[Tuple[float, ...], List[float], np.ndarray]) -> np.ndarray:
    """
    Compute the voxel shape after resampling from old_spacing to new_spacing, keeping the physical
    extent constant: new_dim = round(old_spacing / new_spacing * old_dim), per axis.
    """
    assert len(old_spacing) == len(old_shape)
    assert len(old_shape) == len(new_spacing)
    scaled_dims = [int(round(sp_old / sp_new * dim))
                   for sp_old, sp_new, dim in zip(old_spacing, new_spacing, old_shape)]
    return np.array(scaled_dims)
def resample_data_or_seg_to_spacing(data: np.ndarray,
                                    current_spacing: Union[Tuple[float, ...], List[float], np.ndarray],
                                    new_spacing: Union[Tuple[float, ...], List[float], np.ndarray],
                                    is_seg: bool = False,
                                    order: int = 3, order_z: int = 0,
                                    force_separate_z: Union[bool, None] = False,
                                    separate_z_anisotropy_threshold: float = ANISO_THRESHOLD):
    """
    Resample a 4D array (c, x, y, z) from current_spacing to new_spacing.

    :param data: 4D array, channel first
    :param is_seg: use label-safe resizing instead of spline interpolation
    :param order: in-plane spline order
    :param order_z: order along the separate (low-res) axis, only used if separate resampling happens
    :param force_separate_z: True/False forces (non-)separate resampling of the low-res axis;
                             None lets the anisotropy threshold decide
    :return: the resampled array
    """
    if force_separate_z is not None:
        do_separate_z = force_separate_z
        if force_separate_z:
            axis = get_lowres_axis(current_spacing)
        else:
            axis = None
    else:
        if get_do_separate_z(current_spacing, separate_z_anisotropy_threshold):
            do_separate_z = True
            axis = get_lowres_axis(current_spacing)
        elif get_do_separate_z(new_spacing, separate_z_anisotropy_threshold):
            do_separate_z = True
            axis = get_lowres_axis(new_spacing)
        else:
            do_separate_z = False
            axis = None

    if axis is not None:
        if len(axis) == 3:
            # every axis has the same spacing -> there is no single low-res axis
            do_separate_z = False
        elif len(axis) == 2:
            # this happens for spacings like (0.24, 1.25, 1.25) for example. In that case we do not want to resample
            # separately in the out of plane axis
            do_separate_z = False

    if data is not None:
        assert data.ndim == 4, "data must be c x y z"

    shape = np.array(data[0].shape)
    # BUGFIX: data[0].shape is already the spatial shape (channel dim removed). The previous
    # compute_new_shape(shape[1:], ...) dropped a spatial axis and tripped the length assertion
    # inside compute_new_shape (2 dims vs 3 spacing entries).
    new_shape = compute_new_shape(shape, current_spacing, new_spacing)

    data_reshaped = resample_data_or_seg(data, new_shape, is_seg, axis, order, do_separate_z, order_z=order_z)
    return data_reshaped
def resample_data_or_seg_to_shape(data: Union[torch.Tensor, np.ndarray],
                                  new_shape: Union[Tuple[int, ...], List[int], np.ndarray],
                                  current_spacing: Union[Tuple[float, ...], List[float], np.ndarray],
                                  new_spacing: Union[Tuple[float, ...], List[float], np.ndarray],
                                  is_seg: bool = False,
                                  order: int = 3, order_z: int = 0,
                                  force_separate_z: Union[bool, None] = False,
                                  separate_z_anisotropy_threshold: float = ANISO_THRESHOLD):
    """
    Resample a 4D array (c, x, y, z) directly to a given target shape.

    needed for segmentation export. Stupid, I know. Maybe we can fix that with Leos new resampling functions

    :param force_separate_z: True/False forces (non-)separate handling of the low-res axis;
                             None lets the anisotropy threshold decide based on the spacings
    """
    if isinstance(data, torch.Tensor):
        data = data.cpu().numpy()

    # figure out whether the coarse axis gets its own (low-order) resampling pass
    if force_separate_z is not None:
        do_separate_z = force_separate_z
        axis = get_lowres_axis(current_spacing) if force_separate_z else None
    elif get_do_separate_z(current_spacing, separate_z_anisotropy_threshold):
        do_separate_z = True
        axis = get_lowres_axis(current_spacing)
    elif get_do_separate_z(new_spacing, separate_z_anisotropy_threshold):
        do_separate_z = True
        axis = get_lowres_axis(new_spacing)
    else:
        do_separate_z = False
        axis = None

    # with 2 or 3 equally coarse axes there is no single out-of-plane axis
    # (e.g. spacings like (0.24, 1.25, 1.25)) -> fall back to joint resampling
    if axis is not None and len(axis) != 1:
        do_separate_z = False

    if data is not None:
        assert data.ndim == 4, "data must be c x y z"

    data_reshaped = resample_data_or_seg(data, new_shape, is_seg, axis, order, do_separate_z, order_z=order_z)
    return data_reshaped
def resample_data_or_seg(data: np.ndarray, new_shape: Union[Tuple[float, ...], List[float], np.ndarray],
                         is_seg: bool = False, axis: Union[None, int] = None, order: int = 3,
                         do_separate_z: bool = False, order_z: int = 0):
    """
    Resample a 4D array (c, x, y, z) to new_shape, optionally treating one anisotropic axis separately.

    separate_z=True will resample with order 0 along z
    :param data: 4D array, channel first
    :param new_shape: target spatial shape (3 entries)
    :param is_seg: segmentations are resized with resize_segmentation to avoid mixing label ids
    :param axis: NOTE(review): despite the int annotation, this receives a 1-element sequence
                 (output of get_lowres_axis) — only axis[0] is used. Required if do_separate_z.
    :param order: spline order for the in-plane / joint resize
    :param do_separate_z: if True, resize each slice in-plane first, then resample along `axis`
    :param order_z: only applies if do_separate_z is True
    :return: resampled array cast back to the input dtype (input returned unchanged if shapes match)
    """
    assert data.ndim == 4, "data must be (c, x, y, z)"
    assert len(new_shape) == data.ndim - 1

    if is_seg:
        # label-preserving resize (one-hot based under the hood); takes no extra kwargs
        resize_fn = resize_segmentation
        kwargs = OrderedDict()
    else:
        resize_fn = resize
        kwargs = {'mode': 'edge', 'anti_aliasing': False}
    dtype_data = data.dtype
    shape = np.array(data[0].shape)
    new_shape = np.array(new_shape)
    if np.any(shape != new_shape):
        # interpolation needs float input; dtype is restored at the end
        data = data.astype(float)
        if do_separate_z:
            # print("separate z, order in z is", order_z, "order inplane is", order)
            assert len(axis) == 1, "only one anisotropic axis supported"
            axis = axis[0]
            # target in-plane shape = new_shape with the separate axis removed
            if axis == 0:
                new_shape_2d = new_shape[1:]
            elif axis == 1:
                new_shape_2d = new_shape[[0, 2]]
            else:
                new_shape_2d = new_shape[:-1]

            reshaped_final_data = []
            for c in range(data.shape[0]):
                # step 1: resize every slice along `axis` to the target in-plane shape
                reshaped_data = []
                for slice_id in range(shape[axis]):
                    if axis == 0:
                        reshaped_data.append(resize_fn(data[c, slice_id], new_shape_2d, order, **kwargs))
                    elif axis == 1:
                        reshaped_data.append(resize_fn(data[c, :, slice_id], new_shape_2d, order, **kwargs))
                    else:
                        reshaped_data.append(resize_fn(data[c, :, :, slice_id], new_shape_2d, order, **kwargs))
                reshaped_data = np.stack(reshaped_data, axis)
                if shape[axis] != new_shape[axis]:
                    # step 2: resample along the separate axis with order_z via map_coordinates
                    # The following few lines are blatantly copied and modified from sklearn's resize()
                    rows, cols, dim = new_shape[0], new_shape[1], new_shape[2]
                    orig_rows, orig_cols, orig_dim = reshaped_data.shape

                    row_scale = float(orig_rows) / rows
                    col_scale = float(orig_cols) / cols
                    dim_scale = float(orig_dim) / dim

                    # pixel-center aligned coordinate map
                    map_rows, map_cols, map_dims = np.mgrid[:rows, :cols, :dim]
                    map_rows = row_scale * (map_rows + 0.5) - 0.5
                    map_cols = col_scale * (map_cols + 0.5) - 0.5
                    map_dims = dim_scale * (map_dims + 0.5) - 0.5

                    coord_map = np.array([map_rows, map_cols, map_dims])
                    if not is_seg or order_z == 0:
                        # images, or segs with order 0 (nearest): interpolate directly
                        reshaped_final_data.append(map_coordinates(reshaped_data, coord_map, order=order_z,
                                                                   mode='nearest')[None])
                    else:
                        # segs with order_z > 0: interpolate each label's indicator map separately and
                        # re-assemble, so interpolation can never blend label ids into invalid values
                        unique_labels = np.sort(pd.unique(reshaped_data.ravel()))  # np.unique(reshaped_data)
                        reshaped = np.zeros(new_shape, dtype=dtype_data)

                        for i, cl in enumerate(unique_labels):
                            reshaped_multihot = np.round(
                                map_coordinates((reshaped_data == cl).astype(float), coord_map, order=order_z,
                                                mode='nearest'))
                            reshaped[reshaped_multihot > 0.5] = cl
                        reshaped_final_data.append(reshaped[None])
                else:
                    # length along the separate axis already matches -> in-plane resize was enough
                    reshaped_final_data.append(reshaped_data[None])
            reshaped_final_data = np.vstack(reshaped_final_data)
        else:
            # print("no separate z, order", order)
            reshaped = []
            for c in range(data.shape[0]):
                reshaped.append(resize_fn(data[c], new_shape, order, **kwargs)[None])
            reshaped_final_data = np.vstack(reshaped)
        return reshaped_final_data.astype(dtype_data)
    else:
        # print("no resampling necessary")
        return data
================================================
FILE: Finetune/nnUNet/nnunetv2/preprocessing/resampling/utils.py
================================================
from typing import Callable
import nnunetv2
from batchgenerators.utilities.file_and_folder_operations import join
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
def recursive_find_resampling_fn_by_name(resampling_fn: str) -> Callable:
    """
    Resolve a resampling function by name within the nnunetv2.preprocessing.resampling package.

    :raises RuntimeError: if no function with that name can be found in the package
    """
    found = recursive_find_python_class(join(nnunetv2.__path__[0], "preprocessing", "resampling"), resampling_fn,
                                        'nnunetv2.preprocessing.resampling')
    if found is None:
        raise RuntimeError("Unable to find resampling function named '%s'. Please make sure this fn is located in the "
                           "nnunetv2.preprocessing.resampling module." % resampling_fn)
    return found
================================================
FILE: Finetune/nnUNet/nnunetv2/run/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/run/load_pretrained_weights.py
================================================
import torch
from torch._dynamo import OptimizedModule
from torch.nn.parallel import DistributedDataParallel as DDP
def load_pretrained_weights(network, fname, verbose=False):
    """
    Transfers all weights between matching keys in state_dicts. matching is done by name and we only transfer if the
    shape is also the same. Segmentation layers (the 1x1(x1) layers that produce the segmentation maps)
    identified by keys ending with '.seg_layers') are not transferred!

    If the pretrained weights were obtained with a training outside nnU-Net and DDP or torch.optimize was used,
    you need to change the keys of the pretrained state_dict. DDP adds a 'module.' prefix and torch.optim adds
    '_orig_mod'. You DO NOT need to worry about this if pretraining was done with nnU-Net as
    nnUNetTrainer.save_checkpoint takes care of that!

    :param network: the model to load weights into (may be wrapped in DDP and/or torch.compile)
    :param fname: path to a checkpoint containing a 'network_weights' state dict
    :param verbose: print every transferred key and its shape
    :raises AssertionError: if a non-seg-layer key is missing or has a mismatching shape
    """
    saved_model = torch.load(fname)
    pretrained_dict = saved_model['network_weights']

    skip_strings_in_pretrained = [
        '.seg_layers.',
    ]

    # unwrap DDP and torch.compile so we operate on the raw module's state dict
    if isinstance(network, DDP):
        mod = network.module
    else:
        mod = network
    if isinstance(mod, OptimizedModule):
        mod = mod._orig_mod

    model_dict = mod.state_dict()
    # verify that all but the segmentation layers have the same shape
    for key, _ in model_dict.items():
        if all([i not in key for i in skip_strings_in_pretrained]):
            assert key in pretrained_dict, \
                f"Key {key} is missing in the pretrained model weights. The pretrained weights do not seem to be " \
                f"compatible with your network."
            # BUGFIX: report the network parameter's *shape* (previously the whole tensor was
            # interpolated into the message, flooding the output)
            assert model_dict[key].shape == pretrained_dict[key].shape, \
                f"The shape of the parameters of key {key} is not the same. Pretrained model: " \
                f"{pretrained_dict[key].shape}; your network: {model_dict[key].shape}. The pretrained model " \
                f"does not seem to be compatible with your network."

    # fun fact: in principle this allows loading from parameters that do not cover the entire network. For example pretrained
    # encoders. Not supported by this function though (see assertions above)

    # commenting out this abomination of a dict comprehension for preservation in the archives of 'what not to do'
    # pretrained_dict = {'module.' + k if is_ddp else k: v
    #                    for k, v in pretrained_dict.items()
    #                    if (('module.' + k if is_ddp else k) in model_dict) and
    #                    all([i not in k for i in skip_strings_in_pretrained])}

    pretrained_dict = {k: v for k, v in pretrained_dict.items()
                       if k in model_dict.keys() and all([i not in k for i in skip_strings_in_pretrained])}

    model_dict.update(pretrained_dict)

    print("################### Loading pretrained weights from file ", fname, '###################')
    if verbose:
        print("Below is the list of overlapping blocks in pretrained model and nnUNet architecture:")
        for key, value in pretrained_dict.items():
            print(key, 'shape', value.shape)
    print("################### Done ###################")
    mod.load_state_dict(model_dict)
================================================
FILE: Finetune/nnUNet/nnunetv2/run/run_training.py
================================================
import os
import socket
from typing import Union, Optional
import nnunetv2
import torch.cuda
import torch.distributed as dist
import torch.multiprocessing as mp
from batchgenerators.utilities.file_and_folder_operations import join, isfile, load_json
from nnunetv2.paths import nnUNet_preprocessed
from nnunetv2.run.load_pretrained_weights import load_pretrained_weights
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
from torch.backends import cudnn
def find_free_network_port() -> int:
    """Finds a free port on localhost.

    It is useful in single-node training when we don't want to connect to a real main node but have to set the
    `MASTER_PORT` environment variable.
    """
    # bind to port 0 -> the OS assigns a currently free port, which we read back and release
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.bind(("", 0))
        return sock.getsockname()[1]
def get_trainer_from_args(dataset_name_or_id: Union[int, str],
                          configuration: str,
                          fold: int,
                          trainer_name: str = 'nnUNetTrainer',
                          plans_identifier: str = 'nnUNetPlans',
                          use_compressed: bool = False,
                          device: torch.device = torch.device('cuda')):
    """
    Locate the requested trainer class, resolve the dataset, load plans + dataset.json and
    return an initialized trainer instance.

    :raises RuntimeError: if the trainer class cannot be found
    :raises ValueError: if dataset_name_or_id is neither an int nor a 'DatasetXXX_...' name
    """
    # load nnunet class and do sanity checks
    trainer_class = recursive_find_python_class(join(nnunetv2.__path__[0], "training", "nnUNetTrainer"),
                                                trainer_name, 'nnunetv2.training.nnUNetTrainer')
    if trainer_class is None:
        raise RuntimeError(f'Could not find requested nnunet trainer {trainer_name} in '
                           f'nnunetv2.training.nnUNetTrainer ('
                           f'{join(nnunetv2.__path__[0], "training", "nnUNetTrainer")}). If it is located somewhere '
                           f'else, please move it there.')
    assert issubclass(trainer_class, nnUNetTrainer), 'The requested nnunet trainer class must inherit from ' \
                                                     'nnUNetTrainer'

    # handle dataset input. If it's an ID we need to convert to int from string
    if not dataset_name_or_id.startswith('Dataset'):
        try:
            dataset_name_or_id = int(dataset_name_or_id)
        except ValueError:
            raise ValueError(f'dataset_name_or_id must either be an integer or a valid dataset name with the pattern '
                             f'DatasetXXX_YYY where XXX are the three(!) task ID digits. Your '
                             f'input: {dataset_name_or_id}')

    # initialize nnunet trainer
    preprocessed_dataset_folder_base = join(nnUNet_preprocessed, maybe_convert_to_dataset_name(dataset_name_or_id))
    plans = load_json(join(preprocessed_dataset_folder_base, plans_identifier + '.json'))
    dataset_json = load_json(join(preprocessed_dataset_folder_base, 'dataset.json'))
    return trainer_class(plans=plans, configuration=configuration, fold=fold,
                         dataset_json=dataset_json, unpack_dataset=not use_compressed, device=device)
def maybe_load_checkpoint(nnunet_trainer: nnUNetTrainer, continue_training: bool, validation_only: bool,
                          pretrained_weights_file: str = None):
    """
    Restore trainer state depending on the run mode.

    - continue_training: pick the first existing checkpoint out of final -> latest -> best
      (falls back to a fresh training with a warning if none exists)
    - validation_only: requires checkpoint_final.pth
    - fresh training: optionally load pretrained weights (mutually exclusive with continuing)

    :raises RuntimeError: when continuing AND loading pretrained weights, or validating unfinished training
    """
    if continue_training and pretrained_weights_file is not None:
        raise RuntimeError('Cannot both continue a training AND load pretrained weights. Pretrained weights can only '
                           'be used at the beginning of the training.')
    checkpoint_to_load = None
    if continue_training:
        # prefer final, then latest (e.g. --c after an aborted validation), then best
        for candidate in ('checkpoint_final.pth', 'checkpoint_latest.pth', 'checkpoint_best.pth'):
            candidate_path = join(nnunet_trainer.output_folder, candidate)
            if isfile(candidate_path):
                checkpoint_to_load = candidate_path
                break
        if checkpoint_to_load is None:
            print(f"WARNING: Cannot continue training because there seems to be no checkpoint available to "
                  f"continue from. Starting a new training...")
    elif validation_only:
        checkpoint_to_load = join(nnunet_trainer.output_folder, 'checkpoint_final.pth')
        if not isfile(checkpoint_to_load):
            raise RuntimeError(f"Cannot run validation because the training is not finished yet!")
    else:
        if pretrained_weights_file is not None:
            if not nnunet_trainer.was_initialized:
                nnunet_trainer.initialize()
            load_pretrained_weights(nnunet_trainer.network, pretrained_weights_file, verbose=True)
        # a fresh training (with or without pretrained weights) never loads a checkpoint

    if checkpoint_to_load is not None:
        nnunet_trainer.load_checkpoint(checkpoint_to_load)
def setup_ddp(rank, world_size):
    """Join the NCCL process group for distributed training (MASTER_ADDR/MASTER_PORT must be set)."""
    # initialize the process group
    dist.init_process_group("nccl", rank=rank, world_size=world_size)
def cleanup_ddp():
    """Tear down the distributed process group created by setup_ddp."""
    dist.destroy_process_group()
def run_ddp(rank, dataset_name_or_id, configuration, fold, tr, p, use_compressed, disable_checkpointing, c, val,
            pretrained_weights, npz, val_with_best, world_size):
    """
    Per-process entry point for DDP training (spawned once per GPU by run_training).
    Mirrors the single-GPU path in run_training: set up the process group, build the trainer,
    optionally restore a checkpoint, train and/or validate, then tear the group down.
    """
    setup_ddp(rank, world_size)
    torch.cuda.set_device(torch.device('cuda', dist.get_rank()))

    trainer = get_trainer_from_args(dataset_name_or_id, configuration, fold, tr, p,
                                    use_compressed)

    if disable_checkpointing:
        trainer.disable_checkpointing = disable_checkpointing

    assert not (c and val), f'Cannot set --c and --val flag at the same time. Dummy.'

    maybe_load_checkpoint(trainer, c, val, pretrained_weights)

    if torch.cuda.is_available():
        cudnn.deterministic = False
        cudnn.benchmark = True

    if not val:
        trainer.run_training()

    if val_with_best:
        trainer.load_checkpoint(join(trainer.output_folder, 'checkpoint_best.pth'))
    trainer.perform_actual_validation(npz)
    cleanup_ddp()
def run_training(dataset_name_or_id: Union[str, int],
                 configuration: str, fold: Union[int, str],
                 trainer_class_name: str = 'nnUNetTrainer',
                 plans_identifier: str = 'nnUNetPlans',
                 pretrained_weights: Optional[str] = None,
                 num_gpus: int = 1,
                 use_compressed_data: bool = False,
                 export_validation_probabilities: bool = False,
                 continue_training: bool = False,
                 only_run_validation: bool = False,
                 disable_checkpointing: bool = False,
                 val_with_best: bool = False,
                 device: torch.device = torch.device('cuda')):
    """
    Programmatic entry point for nnU-Net training.

    Dispatches to DDP (mp.spawn of run_ddp, one process per GPU) when num_gpus > 1, otherwise
    trains in-process. fold may be an int or the string 'all'.

    :raises ValueError: if fold is a string other than 'all' that cannot be converted to int
    """
    if isinstance(fold, str):
        if fold != 'all':
            try:
                fold = int(fold)
            except ValueError as e:
                # BUGFIX: corrected typo in the message ('bei' -> 'be')
                print(f'Unable to convert given value for fold to int: {fold}. fold must be either "all" or an integer!')
                raise e

    if val_with_best:
        assert not disable_checkpointing, '--val_best is not compatible with --disable_checkpointing'

    if num_gpus > 1:
        assert device.type == 'cuda', f"DDP training (triggered by num_gpus > 1) is only implemented for cuda devices. Your device: {device}"

        os.environ['MASTER_ADDR'] = 'localhost'
        if 'MASTER_PORT' not in os.environ.keys():
            port = str(find_free_network_port())
            print(f"using port {port}")
            os.environ['MASTER_PORT'] = port  # str(port)

        # note: argument order must match run_ddp's signature (rank is prepended by mp.spawn)
        mp.spawn(run_ddp,
                 args=(
                     dataset_name_or_id,
                     configuration,
                     fold,
                     trainer_class_name,
                     plans_identifier,
                     use_compressed_data,
                     disable_checkpointing,
                     continue_training,
                     only_run_validation,
                     pretrained_weights,
                     export_validation_probabilities,
                     val_with_best,
                     num_gpus),
                 nprocs=num_gpus,
                 join=True)
    else:
        nnunet_trainer = get_trainer_from_args(dataset_name_or_id, configuration, fold, trainer_class_name,
                                               plans_identifier, use_compressed_data, device=device)

        if disable_checkpointing:
            nnunet_trainer.disable_checkpointing = disable_checkpointing

        assert not (continue_training and only_run_validation), f'Cannot set --c and --val flag at the same time. Dummy.'

        maybe_load_checkpoint(nnunet_trainer, continue_training, only_run_validation, pretrained_weights)

        if torch.cuda.is_available():
            cudnn.deterministic = False
            cudnn.benchmark = True

        if not only_run_validation:
            nnunet_trainer.run_training()

        if val_with_best:
            nnunet_trainer.load_checkpoint(join(nnunet_trainer.output_folder, 'checkpoint_best.pth'))
        nnunet_trainer.perform_actual_validation(export_validation_probabilities)
def run_training_entry():
    """CLI entry point (nnUNetv2_train): parse arguments, pick the device, delegate to run_training."""
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('dataset_name_or_id', type=str,
                        help="Dataset name or ID to train with")
    parser.add_argument('configuration', type=str,
                        help="Configuration that should be trained")
    parser.add_argument('fold', type=str,
                        help='Fold of the 5-fold cross-validation. Should be an int between 0 and 4.')
    parser.add_argument('-tr', type=str, required=False, default='nnUNetTrainer',
                        help='[OPTIONAL] Use this flag to specify a custom trainer. Default: nnUNetTrainer')
    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',
                        help='[OPTIONAL] Use this flag to specify a custom plans identifier. Default: nnUNetPlans')
    parser.add_argument('-pretrained_weights', type=str, required=False, default=None,
                        help='[OPTIONAL] path to nnU-Net checkpoint file to be used as pretrained model. Will only '
                             'be used when actually training. Beta. Use with caution.')
    parser.add_argument('-num_gpus', type=int, default=1, required=False,
                        help='Specify the number of GPUs to use for training')
    parser.add_argument("--use_compressed", default=False, action="store_true", required=False,
                        help="[OPTIONAL] If you set this flag the training cases will not be decompressed. Reading compressed "
                             "data is much more CPU and (potentially) RAM intensive and should only be used if you "
                             "know what you are doing")
    parser.add_argument('--npz', action='store_true', required=False,
                        help='[OPTIONAL] Save softmax predictions from final validation as npz files (in addition to predicted '
                             'segmentations). Needed for finding the best ensemble.')
    parser.add_argument('--c', action='store_true', required=False,
                        help='[OPTIONAL] Continue training from latest checkpoint')
    parser.add_argument('--val', action='store_true', required=False,
                        help='[OPTIONAL] Set this flag to only run the validation. Requires training to have finished.')
    parser.add_argument('--val_best', action='store_true', required=False,
                        help='[OPTIONAL] If set, the validation will be performed with the checkpoint_best instead '
                             'of checkpoint_final. NOT COMPATIBLE with --disable_checkpointing! '
                             'WARNING: This will use the same \'validation\' folder as the regular validation '
                             'with no way of distinguishing the two!')
    parser.add_argument('--disable_checkpointing', action='store_true', required=False,
                        help='[OPTIONAL] Set this flag to disable checkpointing. Ideal for testing things out and '
                             'you dont want to flood your hard drive with checkpoints.')
    parser.add_argument('-device', type=str, default='cuda', required=False,
                        help="Use this to set the device the training should run with. Available options are 'cuda' "
                             "(GPU), 'cpu' (CPU) and 'mps' (Apple M1/M2). Do NOT use this to set which GPU ID! "
                             "Use CUDA_VISIBLE_DEVICES=X nnUNetv2_train [...] instead!")
    args = parser.parse_args()

    assert args.device in ['cpu', 'cuda', 'mps'], f'-device must be either cpu, mps or cuda. Other devices are not tested/supported. Got: {args.device}.'
    if args.device == 'cpu':
        # let's allow torch to use hella threads
        import multiprocessing
        torch.set_num_threads(multiprocessing.cpu_count())
        device = torch.device('cpu')
    elif args.device == 'cuda':
        # multithreading in torch doesn't help nnU-Net if run on GPU
        torch.set_num_threads(1)
        torch.set_num_interop_threads(1)
        device = torch.device('cuda')
    else:
        device = torch.device('mps')

    # positional order matches run_training's signature (npz -> export_validation_probabilities, etc.)
    run_training(args.dataset_name_or_id, args.configuration, args.fold, args.tr, args.p, args.pretrained_weights,
                 args.num_gpus, args.use_compressed, args.npz, args.c, args.val, args.disable_checkpointing, args.val_best,
                 device=device)


if __name__ == '__main__':
    run_training_entry()
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/add_lowres_and_cascade.py
================================================
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.paths import nnUNet_preprocessed
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
if __name__ == '__main__':
    # Integration-test helper: injects tiny '3d_lowres' and '3d_cascade_fullres' configurations
    # into the nnUNetPlans.json of each given dataset so the cascade can be exercised quickly.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', nargs='+', type=int, help='List of dataset ids')
    args = parser.parse_args()

    for d in args.d:
        dataset_name = maybe_convert_to_dataset_name(d)
        plans = load_json(join(nnUNet_preprocessed, dataset_name, 'nnUNetPlans.json'))
        # minimal lowres stage that feeds into the cascade
        plans['configurations']['3d_lowres'] = {
            "data_identifier": "nnUNetPlans_3d_lowres",  # do not be a dumbo and forget this. I was a dumbo. And I paid dearly with ~10 min debugging time
            'inherits_from': '3d_fullres',
            "patch_size": [20, 28, 20],
            "median_image_size_in_voxels": [18.0, 25.0, 18.0],
            "spacing": [2.0, 2.0, 2.0],
            "n_conv_per_stage_encoder": [2, 2, 2],
            "n_conv_per_stage_decoder": [2, 2],
            "num_pool_per_axis": [2, 2, 2],
            "pool_op_kernel_sizes": [[1, 1, 1], [2, 2, 2], [2, 2, 2]],
            "conv_kernel_sizes": [[3, 3, 3], [3, 3, 3], [3, 3, 3]],
            "next_stage": "3d_cascade_fullres"
        }
        # cascade stage consuming the lowres predictions
        plans['configurations']['3d_cascade_fullres'] = {
            'inherits_from': '3d_fullres',
            "previous_stage": "3d_lowres"
        }
        # sort_keys=False keeps the configuration order readable/stable
        save_json(plans, join(nnUNet_preprocessed, dataset_name, 'nnUNetPlans.json'), sort_keys=False)
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/cleanup_integration_test.py
================================================
import shutil

from batchgenerators.utilities.file_and_folder_operations import isdir, join
from nnunetv2.paths import nnUNet_raw, nnUNet_results, nnUNet_preprocessed

if __name__ == '__main__':
    # Removes every artifact the integration tests created. Deletes everything!
    dataset_names = (
        'Dataset996_IntegrationTest_Hippocampus_regions_ignore',
        'Dataset997_IntegrationTest_Hippocampus_regions',
        'Dataset998_IntegrationTest_Hippocampus_ignore',
        'Dataset999_IntegrationTest_Hippocampus',
    )
    for base_folder in (nnUNet_raw, nnUNet_preprocessed, nnUNet_results):
        for name in dataset_names:
            target = join(base_folder, name)
            if isdir(target):
                shutil.rmtree(target)
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/lsf_commands.sh
================================================
# Submit the single-GPU integration tests for all four dummy datasets, then the 2-GPU DDP variants.
for d in 996 997 998 999; do
    bsub -q gpu.legacy -gpu num=1:j_exclusive=yes:gmem=1G -L /bin/bash ". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test.sh ${d}"
done
for d in 996 997 998 999; do
    bsub -q gpu.legacy -gpu num=2:j_exclusive=yes:gmem=1G -L /bin/bash ". /home/isensee/load_env_cluster4.sh && cd /home/isensee/git_repos/nnunet_remake && export nnUNet_keep_files_open=True && . nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh ${d}"
done
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/prepare_integration_tests.sh
================================================
# assumes you are in the nnunet repo!
# generate the raw dummy datasets (996-999), all derived from Hippocampus
for ds in Dataset999_IntegrationTest_Hippocampus \
          Dataset998_IntegrationTest_Hippocampus_ignore \
          Dataset997_IntegrationTest_Hippocampus_regions \
          Dataset996_IntegrationTest_Hippocampus_regions_ignore; do
    python "nnunetv2/dataset_conversion/datasets_for_integration_tests/${ds}.py"
done
# run experiment planning without preprocessing
nnUNetv2_plan_and_preprocess -d 996 997 998 999 --no_pp
# add 3d_lowres and cascade configurations to the generated plans
python nnunetv2/tests/integration_tests/add_lowres_and_cascade.py -d 996 997 998 999
# preprocess everything
nnUNetv2_preprocess -d 996 997 998 999 -c 2d 3d_lowres 3d_fullres -np 8 8 8 # no need to preprocess cascade as its the same data as 3d_fullres
# done
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/readme.md
================================================
# Preface
I am just a mortal with many tasks and limited time. Aint nobody got time for unittests.
HOWEVER, at least some integration tests should be performed testing nnU-Net from start to finish.
# Introduction - What the heck is happening?
This test covers all possible labeling scenarios (standard labels, regions, ignore labels and regions with
ignore labels). It runs the entire nnU-Net pipeline from start to finish:
- fingerprint extraction
- experiment planning
- preprocessing
- train all 4 configurations (2d, 3d_lowres, 3d_fullres, 3d_cascade_fullres) as 5-fold CV
- automatically find the best model or ensemble
- determine the postprocessing used for this
- predict some test set
- apply postprocessing to the test set
To speed things up, we do the following:
- pick Dataset004_Hippocampus because it is quadratisch praktisch gut. MNIST of medical image segmentation
- by default this dataset does not have 3d_lowres or cascade. We just manually add them (cool new feature, eh?). See `add_lowres_and_cascade.py` to learn more!
- we use nnUNetTrainer_5epochs for a short training
# How to run it?
Set your pwd to be the nnunet repo folder (the one where the `nnunetv2` folder and the `setup.py` are located!)
Now generate the 4 dummy datasets (ids 996, 997, 998, 999) from dataset 4. This will crash if you don't have Dataset004!
```commandline
bash nnunetv2/tests/integration_tests/prepare_integration_tests.sh
```
Now you can run the integration test for each of the datasets:
```commandline
bash nnunetv2/tests/integration_tests/run_integration_test.sh DATASET_ID
```
use DATASET_ID 996, 997, 998 and 999. You can run these independently on different GPUs/systems to speed things up.
This will take i dunno like 10-30 Minutes!?
Also run
```commandline
bash nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh DATASET_ID
```
to verify DDP is working (needs 2 GPUs!)
# How to check if the test was successful?
If I was not as lazy as I am I would have programmed some automatism that checks if Dice scores etc are in an acceptable range.
So you need to do the following:
1) check that none of your runs crashed (duh)
2) for each run, navigate to `nnUNet_results/DATASET_NAME` and take a look at the `inference_information.json` file.
Does it make sense? If so: NICE!
Once the integration test is completed you can delete all the temporary files associated with it by running:
```commandline
python nnunetv2/tests/integration_tests/cleanup_integration_test.py
```
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/run_integration_test.sh
================================================
# Train all four configurations as a 5-fold CV with the short 5-epoch trainer
# (config order matters for the cascade: 3d_lowres must finish before 3d_cascade_fullres),
# then run best-configuration selection + test-set inference.
for config in 3d_fullres 2d 3d_lowres 3d_cascade_fullres; do
    for fold in 0 1 2 3 4; do
        nnUNetv2_train $1 ${config} ${fold} -tr nnUNetTrainer_5epochs --npz
    done
done
python nnunetv2/tests/integration_tests/run_integration_test_bestconfig_inference.py -d $1
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/run_integration_test_bestconfig_inference.py
================================================
import argparse

import torch
from batchgenerators.utilities.file_and_folder_operations import join, load_pickle

from nnunetv2.ensembling.ensemble import ensemble_folders
from nnunetv2.evaluation.find_best_configuration import find_best_configuration, \
    dumb_trainer_config_plans_to_trained_models_dict
from nnunetv2.inference.predict_from_raw_data import nnUNetPredictor
from nnunetv2.paths import nnUNet_raw, nnUNet_results
from nnunetv2.postprocessing.remove_connected_components import apply_postprocessing_to_folder
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.utilities.file_path_utilities import get_output_folder

if __name__ == '__main__':
    """
    Predicts the imagesTs folder with the best configuration and applies postprocessing
    """
    # limit torch threading; this script's heavy lifting happens in worker processes
    torch.set_num_threads(1)
    torch.set_num_interop_threads(1)

    parser = argparse.ArgumentParser()
    parser.add_argument('-d', type=int, help='dataset id')
    args = parser.parse_args()
    d = args.d

    dataset_name = maybe_convert_to_dataset_name(d)
    source_dir = join(nnUNet_raw, dataset_name, 'imagesTs')
    target_dir_base = join(nnUNet_results, dataset_name)

    # all trainer/configuration/plans combinations the integration test trained
    models = dumb_trainer_config_plans_to_trained_models_dict(['nnUNetTrainer_5epochs'],
                                                              ['2d',
                                                               '3d_lowres',
                                                               '3d_cascade_fullres',
                                                               '3d_fullres'],
                                                              ['nnUNetPlans'])
    ret = find_best_configuration(d, models, allow_ensembling=True, num_processes=8, overwrite=True,
                                  folds=(0, 1, 2, 3, 4), strict=True)

    # more than one selected model means the winner is an ensemble
    has_ensemble = len(ret['best_model_or_ensemble']['selected_model_or_models']) > 1

    # we don't use all folds to speed stuff up
    used_folds = (0, 3)

    output_folders = []
    for im in ret['best_model_or_ensemble']['selected_model_or_models']:
        output_dir = join(target_dir_base, f"pred_{im['configuration']}")
        model_folder = get_output_folder(d, im['trainer'], im['plans_identifier'], im['configuration'])
        # note that if the best model is the ensemble of 3d_lowres and 3d_cascade_fullres then 3d_lowres will be
        # predicted twice (once standalone and once to generate the predictions for the cascade) because we don't
        # reuse the prediction here. The proper way would be to check for that and then give the output of the
        # 3d_lowres inference to the folder_with_segs_from_prev_stage kwarg in predict_from_raw_data. Since we
        # allow for dynamically setting 'previous_stage' in the plans I am too lazy to implement this here. This
        # is just an integration test after all. Take a closer look at how this is handled in predict_from_raw_data
        predictor = nnUNetPredictor(verbose=False, allow_tqdm=False)
        predictor.initialize_from_trained_model_folder(model_folder, used_folds)
        predictor.predict_from_files(source_dir, output_dir, has_ensemble, overwrite=True)
        # predict_from_raw_data(list_of_lists_or_source_folder=source_dir, output_folder=output_dir,
        #                       model_training_output_dir=model_folder, use_folds=used_folds,
        #                       save_probabilities=has_ensemble, verbose=False, overwrite=True)
        output_folders.append(output_dir)

    # if we have an ensemble, we need to ensemble the results
    if has_ensemble:
        ensemble_folders(output_folders, join(target_dir_base, 'ensemble_predictions'), save_merged_probabilities=False)
        folder_for_pp = join(target_dir_base, 'ensemble_predictions')
    else:
        folder_for_pp = output_folders[0]

    # apply the postprocessing that find_best_configuration determined
    pp_fns, pp_fn_kwargs = load_pickle(ret['best_model_or_ensemble']['postprocessing_file'])
    apply_postprocessing_to_folder(folder_for_pp, join(target_dir_base, 'ensemble_predictions_postprocessed'),
                                   pp_fns,
                                   pp_fn_kwargs, plans_file_or_dict=ret['best_model_or_ensemble']['some_plans_file'])
================================================
FILE: Finetune/nnUNet/nnunetv2/tests/integration_tests/run_integration_test_trainingOnly_DDP.sh
================================================
# DDP smoke test: train a single fold of 3d_fullres across 2 GPUs (requires 2 GPUs!)
nnUNetv2_train $1 3d_fullres 0 -tr nnUNetTrainer_10epochs -num_gpus 2
================================================
FILE: Finetune/nnUNet/nnunetv2/training/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/compute_initial_patch_size.py
================================================
import numpy as np


def get_patch_size(final_patch_size, rot_x, rot_y, rot_z, scale_range):
    """
    Compute how large a patch must be sampled BEFORE spatial augmentation so that, after
    the worst-case rotation about each axis and the strongest zoom-out in scale_range,
    a crop of final_patch_size is still fully covered by real data.

    :param final_patch_size: target patch shape (2 or 3 spatial dims)
    :param rot_x, rot_y, rot_z: rotation angle in radians, or a (min, max) range
    :param scale_range: (min, max) scaling factors; the minimum drives the enlargement
    :return: enlarged patch shape as an int array
    """
    # reduce ranges to their worst-case magnitude, then clamp everything to 90 degrees
    quarter_turn = 90 / 360 * 2. * np.pi
    angles = []
    for angle in (rot_x, rot_y, rot_z):
        if isinstance(angle, (tuple, list)):
            angle = max(np.abs(angle))
        angles.append(min(quarter_turn, angle))
    rot_x, rot_y, rot_z = angles

    from batchgenerators.augmentations.utils import rotate_coords_3d, rotate_coords_2d
    coords = np.array(final_patch_size)
    final_shape = np.copy(coords)
    if len(coords) == 3:
        # take, per axis, the largest extent produced by any single-axis rotation
        for rotated in (rotate_coords_3d(coords, rot_x, 0, 0),
                        rotate_coords_3d(coords, 0, rot_y, 0),
                        rotate_coords_3d(coords, 0, 0, rot_z)):
            final_shape = np.maximum(np.abs(rotated), final_shape)
    elif len(coords) == 2:
        final_shape = np.maximum(np.abs(rotate_coords_2d(coords, rot_x)), final_shape)
    # the smallest scale factor (strongest zoom-out) requires the biggest source patch
    final_shape /= min(scale_range)
    return final_shape.astype(int)
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/cascade_transforms.py
================================================
from typing import Union, List, Tuple, Callable
import numpy as np
from acvl_utils.morphology.morphology_helper import label_with_component_sizes
from batchgenerators.transforms.abstract_transforms import AbstractTransform
from skimage.morphology import ball
from skimage.morphology.binary import binary_erosion, binary_dilation, binary_closing, binary_opening
class MoveSegAsOneHotToData(AbstractTransform):
    def __init__(self, index_in_origin: int, all_labels: Union[Tuple[int, ...], List[int]],
                 key_origin="seg", key_target="data", remove_from_origin=True):
        """
        Takes data_dict[key_origin][:, index_in_origin], converts it to a one hot encoding over
        all_labels and appends the result to data_dict[key_target]. Optionally removes channel
        index_in_origin from data_dict[key_origin] afterwards.
        """
        self.index_in_origin = index_in_origin
        self.all_labels = all_labels
        self.key_origin = key_origin
        self.key_target = key_target
        self.remove_from_origin = remove_from_origin

    def __call__(self, **data_dict):
        origin = data_dict[self.key_origin]
        # slice with a range so the channel axis is preserved
        seg = origin[:, self.index_in_origin:self.index_in_origin + 1]
        onehot = np.zeros((seg.shape[0], len(self.all_labels), *seg.shape[2:]),
                          dtype=data_dict[self.key_target].dtype)
        for channel, label in enumerate(self.all_labels):
            onehot[:, channel][seg[:, 0] == label] = 1
        data_dict[self.key_target] = np.concatenate((data_dict[self.key_target], onehot), 1)
        if self.remove_from_origin:
            keep = [c for c in range(origin.shape[1]) if c != self.index_in_origin]
            data_dict[self.key_origin] = origin[:, keep]
        return data_dict
class RemoveRandomConnectedComponentFromOneHotEncodingTransform(AbstractTransform):
    def __init__(self, channel_idx: Union[int, List[int]], key: str = "data", p_per_sample: float = 0.2,
                 fill_with_other_class_p: float = 0.25,
                 dont_do_if_covers_more_than_x_percent: float = 0.25, p_per_label: float = 1):
        """
        Randomly removes connected components in the specified channel_idx of data_dict[key]. Only considers
        components smaller than dont_do_if_covers_more_than_x_percent of the sample. Also has the option of
        simulating misclassification as another class (fill_with_other_class_p).

        :param channel_idx: channel(s) of data_dict[key] to operate on; expected to be one hot encoded
        :param key: key in data_dict whose array is modified in place
        :param p_per_sample: probability of applying the transform to a given batch element
        :param fill_with_other_class_p: probability that a removed component is re-assigned to another channel
            from channel_idx instead of being left as background
        :param dont_do_if_covers_more_than_x_percent: components covering more than this fraction of the
            sample's voxels are never removed
        :param p_per_label: per-channel probability once a sample was selected
        """
        self.p_per_label = p_per_label
        self.dont_do_if_covers_more_than_x_percent = dont_do_if_covers_more_than_x_percent
        self.fill_with_other_class_p = fill_with_other_class_p
        self.p_per_sample = p_per_sample
        self.key = key
        if not isinstance(channel_idx, (list, tuple)):
            channel_idx = [channel_idx]
        self.channel_idx = channel_idx

    def __call__(self, **data_dict):
        data = data_dict.get(self.key)
        for b in range(data.shape[0]):
            if np.random.uniform() < self.p_per_sample:
                for c in self.channel_idx:
                    if np.random.uniform() < self.p_per_label:
                        # print(np.unique(data[b, c])) ## should be [0, 1]
                        workon = data[b, c].astype(bool)
                        if not np.any(workon):
                            # channel is empty, nothing to remove
                            continue
                        num_voxels = np.prod(workon.shape, dtype=np.uint64)
                        # label the channel's connected components and get their sizes
                        lab, component_sizes = label_with_component_sizes(workon.astype(bool))
                        if len(component_sizes) > 0:
                            # only components small enough (relative to the whole sample) may be removed
                            valid_component_ids = [i for i, j in component_sizes.items() if j <
                                                   num_voxels * self.dont_do_if_covers_more_than_x_percent]
                            # print('RemoveRandomConnectedComponentFromOneHotEncodingTransform', c,
                            #       np.unique(data[b, c]), len(component_sizes), valid_component_ids,
                            #       len(valid_component_ids))
                            if len(valid_component_ids) > 0:
                                # erase one randomly chosen component from this channel
                                random_component = np.random.choice(valid_component_ids)
                                data[b, c][lab == random_component] = 0
                                if np.random.uniform() < self.fill_with_other_class_p:
                                    # simulate misclassification: assign the erased voxels to another channel
                                    other_ch = [i for i in self.channel_idx if i != c]
                                    if len(other_ch) > 0:
                                        other_class = np.random.choice(other_ch)
                                        data[b, other_class][lab == random_component] = 1
        data_dict[self.key] = data
        return data_dict
class ApplyRandomBinaryOperatorTransform(AbstractTransform):
    def __init__(self,
                 channel_idx: Union[int, List[int], Tuple[int, ...]],
                 p_per_sample: float = 0.3,
                 any_of_these: Tuple[Callable] = (binary_dilation, binary_erosion, binary_closing, binary_opening),
                 key: str = "data",
                 strel_size: Tuple[int, int] = (1, 10),
                 p_per_label: float = 1):
        """
        Applies random binary operations (specified by any_of_these) with random ball size (radius is uniformly
        sampled from interval strel_size) to specified channels. Expects the channel_idx to correspond to a one hot
        encoded segmentation (see for example MoveSegAsOneHotToData).

        :param channel_idx: channel(s) of data_dict[key] to operate on
        :param p_per_sample: probability of applying the transform to a given batch element
        :param any_of_these: pool of binary morphology operations to sample from
        :param key: key in data_dict whose array is modified in place
        :param strel_size: (min, max) radius range for the ball structuring element
        :param p_per_label: per-channel probability once a sample was selected
        """
        self.p_per_label = p_per_label
        self.strel_size = strel_size
        self.key = key
        self.any_of_these = any_of_these
        self.p_per_sample = p_per_sample
        if not isinstance(channel_idx, (list, tuple)):
            channel_idx = [channel_idx]
        # BUGFIX: store as a list. The annotation allows a tuple, but np.random.shuffle (used in
        # __call__) requires a mutable sequence and raises TypeError on tuples.
        self.channel_idx = list(channel_idx)

    def __call__(self, **data_dict):
        for b in range(data_dict[self.key].shape[0]):
            if np.random.uniform() < self.p_per_sample:
                # the operations need to be applied to the channels in random order; shuffle a copy
                # so repeated calls don't mutate the transform's shared state
                channels = list(self.channel_idx)
                np.random.shuffle(channels)
                for c in channels:
                    if np.random.uniform() < self.p_per_label:
                        operation = np.random.choice(self.any_of_these)
                        selem = ball(np.random.uniform(*self.strel_size))
                        workon = data_dict[self.key][b, c].astype(bool)
                        if not np.any(workon):
                            # channel is empty, nothing to morph
                            continue
                        res = operation(workon, selem).astype(data_dict[self.key].dtype)
                        data_dict[self.key][b, c] = res
                        # if voxels were added to this channel, remove them from ALL other channels
                        # to keep the one hot encoding property
                        other_ch = [i for i in channels if i != c]
                        if len(other_ch) > 0:
                            was_added_mask = (res - workon) > 0
                            for oc in other_ch:
                                data_dict[self.key][b, oc][was_added_mask] = 0
                        # if voxels were removed, we deliberately leave them at background
        return data_dict
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/deep_supervision_donwsampling.py
================================================
from typing import Tuple, Union, List
from batchgenerators.augmentations.utils import resize_segmentation
from batchgenerators.transforms.abstract_transforms import AbstractTransform
import numpy as np
class DownsampleSegForDSTransform2(AbstractTransform):
    '''
    data_dict['output_key'] will be a list of segmentations scaled according to ds_scales
    '''
    def __init__(self, ds_scales: Union[List, Tuple],
                 order: int = 0, input_key: str = "seg",
                 output_key: str = "seg", axes: Tuple[int] = None):
        """
        Downscales data_dict[input_key] according to ds_scales. Each entry in ds_scales specifies one deep
        supervision output and its resolution relative to the original data, for example 0.25 specifies 1/4 of
        the original shape. ds_scales can also be a tuple of tuples, for example ((1, 1, 1), (0.5, 0.5, 0.5))
        to specify the downsampling for each axis independently.

        :param ds_scales: one scale (scalar or per-axis tuple) per deep supervision output
        :param order: resize order passed to resize_segmentation (0 = nearest-style, safe for label maps)
        :param input_key: key of the segmentation to downsample
        :param output_key: key under which the list of downsampled segmentations is stored
        :param axes: axes to scale; defaults to all spatial axes (everything after batch and channel)
        """
        self.axes = axes
        self.output_key = output_key
        self.input_key = input_key
        self.order = order
        self.ds_scales = ds_scales

    def __call__(self, **data_dict):
        if self.axes is None:
            # default: all spatial axes (skip batch and channel dims)
            axes = list(range(2, data_dict[self.input_key].ndim))
        else:
            axes = self.axes

        output = []
        for s in self.ds_scales:
            if not isinstance(s, (tuple, list)):
                # a scalar scale applies uniformly to every axis
                s = [s] * len(axes)
            else:
                assert len(s) == len(axes), f'If ds_scales is a tuple for each resolution (one downsampling factor ' \
                                            f'for each axis) then the number of entried in that tuple (here ' \
                                            f'{len(s)}) must be the same as the number of axes (here {len(axes)}).'

            if all([i == 1 for i in s]):
                # scale 1 on every axis: pass the input through unchanged (no copy is made)
                output.append(data_dict[self.input_key])
            else:
                new_shape = np.array(data_dict[self.input_key].shape).astype(float)
                for i, a in enumerate(axes):
                    new_shape[a] *= s[i]
                new_shape = np.round(new_shape).astype(int)
                out_seg = np.zeros(new_shape, dtype=data_dict[self.input_key].dtype)
                # resize each (batch, channel) slice; resize_segmentation is batchgenerators' label-safe resize
                for b in range(data_dict[self.input_key].shape[0]):
                    for c in range(data_dict[self.input_key].shape[1]):
                        out_seg[b, c] = resize_segmentation(data_dict[self.input_key][b, c], new_shape[2:], self.order)
                output.append(out_seg)
        data_dict[self.output_key] = output
        return data_dict
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/limited_length_multithreaded_augmenter.py
================================================
from batchgenerators.dataloading.nondet_multi_threaded_augmenter import NonDetMultiThreadedAugmenter
class LimitedLenWrapper(NonDetMultiThreadedAugmenter):
    """
    NonDetMultiThreadedAugmenter that reports a fixed, caller-chosen length instead of
    deriving one, so it can be used where a sized iterable is expected (e.g. to define
    how many batches make up one epoch).
    """
    def __init__(self, my_imaginary_length, *args, **kwargs):
        # my_imaginary_length: the value __len__ should return; all other args go to the base class
        super().__init__(*args, **kwargs)
        self.len = my_imaginary_length

    def __len__(self):
        return self.len
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/manipulating_data_dict.py
================================================
from batchgenerators.transforms.abstract_transforms import AbstractTransform
class RemoveKeyTransform(AbstractTransform):
    """Drops a single key from the data dict; silently does nothing if the key is absent."""

    def __init__(self, key_to_remove: str):
        self.key_to_remove = key_to_remove

    def __call__(self, **data_dict):
        # pop with a default so a missing key is not an error
        data_dict.pop(self.key_to_remove, None)
        return data_dict
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/masking.py
================================================
from typing import List
from batchgenerators.transforms.abstract_transforms import AbstractTransform
class MaskTransform(AbstractTransform):
    def __init__(self, apply_to_channels: List[int], mask_idx_in_seg: int = 0, set_outside_to: int = 0,
                 data_key: str = "data", seg_key: str = "seg"):
        """
        Sets everything outside the mask to set_outside_to. CAREFUL! 'outside' is defined as mask < 0,
        not mask == 0 (in the mask channel)!
        """
        self.apply_to_channels = apply_to_channels
        self.mask_idx_in_seg = mask_idx_in_seg
        self.set_outside_to = set_outside_to
        self.data_key = data_key
        self.seg_key = seg_key

    def __call__(self, **data_dict):
        # negative values in the chosen seg channel mark voxels outside the mask
        outside = data_dict[self.seg_key][:, self.mask_idx_in_seg] < 0
        target = data_dict[self.data_key]
        for channel in self.apply_to_channels:
            target[:, channel][outside] = self.set_outside_to
        return data_dict
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/region_based_training.py
================================================
from typing import List, Tuple, Union
from batchgenerators.transforms.abstract_transforms import AbstractTransform
import numpy as np
class ConvertSegmentationToRegionsTransform(AbstractTransform):
    def __init__(self, regions: Union[List, Tuple],
                 seg_key: str = "seg", output_key: str = "seg", seg_channel: int = 0):
        """
        regions is a tuple of tuples where each inner tuple holds the class indices that are merged into one
        region, e.g. regions=((1, 2), (2,)) yields two region channels: labels 1&2 combined, and label 2 alone.
        :param regions:
        :param seg_key:
        :param output_key:
        """
        self.regions = regions
        self.seg_key = seg_key
        self.output_key = output_key
        self.seg_channel = seg_channel

    def __call__(self, **data_dict):
        seg = data_dict.get(self.seg_key)
        if seg is None:
            # nothing to convert; leave the dict untouched
            return data_dict
        batch, _, *spatial = seg.shape
        region_output = np.zeros((batch, len(self.regions), *spatial), dtype=seg.dtype)
        for batch_idx in range(batch):
            src = seg[batch_idx, self.seg_channel]
            for region_idx, source_labels in enumerate(self.regions):
                if not isinstance(source_labels, (list, tuple)):
                    source_labels = (source_labels, )
                # a voxel belongs to a region if it carries any of the region's source labels
                for label_value in source_labels:
                    region_output[batch_idx, region_idx][src == label_value] = 1
        data_dict[self.output_key] = region_output
        return data_dict
================================================
FILE: Finetune/nnUNet/nnunetv2/training/data_augmentation/custom_transforms/transforms_for_dummy_2d.py
================================================
from typing import Tuple, Union, List
from batchgenerators.transforms.abstract_transforms import AbstractTransform
class Convert3DTo2DTransform(AbstractTransform):
    def __init__(self, apply_to_keys: Union[List[str], Tuple[str]] = ('data', 'seg')):
        """
        Transforms a 5D array (b, c, x, y, z) to a 4D array (b, c * x, y, z) by overloading the color channel
        """
        self.apply_to_keys = apply_to_keys

    def __call__(self, **data_dict):
        for key in self.apply_to_keys:
            shp = data_dict[key].shape
            assert len(shp) == 5, 'This transform only works on 3D data, so expects 5D tensor (b, c, x, y, z) as input.'
            # fold the first spatial axis into the channel axis
            data_dict[key] = data_dict[key].reshape((shp[0], shp[1] * shp[2], shp[3], shp[4]))
            # remember the pre-fold shape so Convert2DTo3DTransform can undo this
            shape_key = f'orig_shape_{key}'
            assert shape_key not in data_dict.keys(), f'Convert3DTo2DTransform needs to store the original shape. ' \
                                                      f'It does that using the {shape_key} key. That key is ' \
                                                      f'already taken. Bummer.'
            data_dict[shape_key] = shp
        return data_dict
class Convert2DTo3DTransform(AbstractTransform):
    def __init__(self, apply_to_keys: Union[List[str], Tuple[str]] = ('data', 'seg')):
        """
        Reverts Convert3DTo2DTransform by transforming a 4D array (b, c * x, y, z) back to 5D (b, c, x, y, z)
        """
        self.apply_to_keys = apply_to_keys

    def __call__(self, **data_dict):
        for key in self.apply_to_keys:
            shape_key = f'orig_shape_{key}'
            assert shape_key in data_dict.keys(), f'Did not find key {shape_key} in data_dict. Shitty. ' \
                                                  f'Convert2DTo3DTransform only works in tandem with ' \
                                                  f'Convert3DTo2DTransform and you probably forgot to add ' \
                                                  f'Convert3DTo2DTransform to your pipeline. (Convert3DTo2DTransform ' \
                                                  f'is where the missing key is generated)'
            # restore b, c, x from the stored shape; keep the (possibly changed) current y, z
            stored = data_dict[shape_key]
            current = data_dict[key].shape
            data_dict[key] = data_dict[key].reshape((stored[0], stored[1], stored[2],
                                                     current[-2], current[-1]))
        return data_dict
================================================
FILE: Finetune/nnUNet/nnunetv2/training/dataloading/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/dataloading/base_data_loader.py
================================================
from typing import Union, Tuple
from batchgenerators.dataloading.data_loader import DataLoader
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset
from nnunetv2.utilities.label_handling.label_handling import LabelManager
class nnUNetDataLoaderBase(DataLoader):
    def __init__(self,
                 data: nnUNetDataset,
                 batch_size: int,
                 patch_size: Union[List[int], Tuple[int, ...], np.ndarray],
                 final_patch_size: Union[List[int], Tuple[int, ...], np.ndarray],
                 label_manager: LabelManager,
                 oversample_foreground_percent: float = 0.0,
                 sampling_probabilities: Union[List[int], Tuple[int, ...], np.ndarray] = None,
                 pad_sides: Union[List[int], Tuple[int, ...], np.ndarray] = None,
                 probabilistic_oversampling: bool = False):
        """
        Base loader that samples patches of patch_size from an nnUNetDataset.

        :param data: the dataset to draw cases from (must be an nnUNetDataset)
        :param batch_size: number of patches per batch
        :param patch_size: patch size the loader returns (may exceed final_patch_size so that spatial
            augmentation can crop back down without border artifacts)
        :param final_patch_size: patch size the network will ultimately receive
        :param label_manager: provides all_labels and has_ignore_label used for foreground sampling
        :param oversample_foreground_percent: fraction/probability of samples forced to contain foreground
        :param sampling_probabilities: optional per-case sampling weights, forwarded to the base DataLoader
        :param pad_sides: optional extra padding added on top of need_to_pad
        :param probabilistic_oversampling: if True, each sample is independently forced-foreground with
            probability oversample_foreground_percent; if False, the last samples of each batch are forced
        """
        # positional args mirror batchgenerators' DataLoader signature
        # -- NOTE(review): verify the positional meaning against the installed batchgenerators version
        super().__init__(data, batch_size, 1, None, True, False, True, sampling_probabilities)
        assert isinstance(data, nnUNetDataset), 'nnUNetDataLoaderBase only supports dictionaries as data'
        self.indices = list(data.keys())

        self.oversample_foreground_percent = oversample_foreground_percent
        self.final_patch_size = final_patch_size
        self.patch_size = patch_size
        self.list_of_keys = list(self._data.keys())
        # need_to_pad denotes by how much we need to pad the data so that if we sample a patch of size final_patch_size
        # (which is what the network will get) these patches will also cover the border of the images
        self.need_to_pad = (np.array(patch_size) - np.array(final_patch_size)).astype(int)
        if pad_sides is not None:
            if not isinstance(pad_sides, np.ndarray):
                pad_sides = np.array(pad_sides)
            self.need_to_pad += pad_sides
        self.num_channels = None
        self.pad_sides = pad_sides
        # shapes of the arrays one batch will be written into: (batch, channels, *patch_size)
        self.data_shape, self.seg_shape = self.determine_shapes()
        self.sampling_probabilities = sampling_probabilities
        # key used to look up locations of any annotated voxel when an ignore label exists
        self.annotated_classes_key = tuple(label_manager.all_labels)
        self.has_ignore = label_manager.has_ignore_label
        # pick the oversampling strategy once, at construction time
        self.get_do_oversample = self._oversample_last_XX_percent if not probabilistic_oversampling \
            else self._probabilistic_oversampling
def _oversample_last_XX_percent(self, sample_idx: int) -> bool:
"""
determines whether sample sample_idx in a minibatch needs to be guaranteed foreground
"""
return not sample_idx < round(self.batch_size * (1 - self.oversample_foreground_percent))
    def _probabilistic_oversampling(self, sample_idx: int) -> bool:
        # sample_idx is ignored: each sample is independently forced to contain foreground
        # with probability oversample_foreground_percent
        # print('YEAH BOIIIIII')
        return np.random.uniform() < self.oversample_foreground_percent
    def determine_shapes(self):
        """Derive the (batch, channels, *patch_size) shapes for data and seg by inspecting one case."""
        # load one case; channel counts are assumed constant across the dataset
        data, seg, properties = self._data.load_case(self.indices[0])
        num_color_channels = data.shape[0]

        data_shape = (self.batch_size, num_color_channels, *self.patch_size)
        seg_shape = (self.batch_size, seg.shape[0], *self.patch_size)
        return data_shape, seg_shape
def get_bbox(self, data_shape: np.ndarray, force_fg: bool, class_locations: Union[dict, None],
             overwrite_class: Union[int, Tuple[int, ...]] = None, verbose: bool = False):
    """
    Sample a patch bounding box for one case.

    :param data_shape: spatial shape of the case (no channel dimension)
    :param force_fg: if True, the patch must be centered on a foreground voxel
    :param class_locations: per-class/-region arrays of foreground voxel coordinates
        (first coordinate column is the channel/sample index, hence the i + 1 below)
    :param overwrite_class: if set, prefer this specific class/region as the center
    :param verbose: print a note when a case has no foreground at all
    :return: (bbox_lbs, bbox_ubs) per spatial dim. Bounds may lie outside the
        image (negative lb, or ub > shape); callers crop the valid part and pad
        the remainder to patch_size.
    """
    # in dataloader 2d we need to select the slice prior to this and also modify the class_locations to only have
    # locations for the given slice
    need_to_pad = self.need_to_pad.copy()
    dim = len(data_shape)

    for d in range(dim):
        # if case_all_data.shape + need_to_pad is still < patch size we need to pad more! We pad on both sides
        # always
        if need_to_pad[d] + data_shape[d] < self.patch_size[d]:
            need_to_pad[d] = self.patch_size[d] - data_shape[d]

    # we can now choose the bbox from -need_to_pad // 2 to shape - patch_size + need_to_pad // 2. Here we
    # define what the upper and lower bound can be to then sample form them with np.random.randint
    lbs = [- need_to_pad[i] // 2 for i in range(dim)]
    ubs = [data_shape[i] + need_to_pad[i] // 2 + need_to_pad[i] % 2 - self.patch_size[i] for i in range(dim)]

    # if not force_fg then we can just sample the bbox randomly from lb and ub. Else we need to make sure we get
    # at least one of the foreground classes in the patch
    if not force_fg and not self.has_ignore:
        bbox_lbs = [np.random.randint(lbs[i], ubs[i] + 1) for i in range(dim)]
        # print('I want a random location')
    else:
        if not force_fg and self.has_ignore:
            # with an ignore label we must at least center the patch on an annotated voxel
            selected_class = self.annotated_classes_key
            if len(class_locations[selected_class]) == 0:
                # no annotated pixels in this case. Not good. But we can hardly skip it here
                print('Warning! No annotated pixels in image!')
                selected_class = None
            # print(f'I have ignore labels and want to pick a labeled area. annotated_classes_key: {self.annotated_classes_key}')
        elif force_fg:
            assert class_locations is not None, 'if force_fg is set class_locations cannot be None'
            if overwrite_class is not None:
                assert overwrite_class in class_locations.keys(), 'desired class ("overwrite_class") does not ' \
                                                                  'have class_locations (missing key)'
            # this saves us a np.unique. Preprocessing already did that for all cases. Neat.
            # class_locations keys can also be tuple
            eligible_classes_or_regions = [i for i in class_locations.keys() if len(class_locations[i]) > 0]

            # if we have annotated_classes_key locations and other classes are present, remove the annotated_classes_key from the list
            # strange formulation needed to circumvent
            # ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
            tmp = [i == self.annotated_classes_key if isinstance(i, tuple) else False for i in eligible_classes_or_regions]
            if any(tmp):
                if len(eligible_classes_or_regions) > 1:
                    eligible_classes_or_regions.pop(np.where(tmp)[0][0])

            if len(eligible_classes_or_regions) == 0:
                # this only happens if some image does not contain foreground voxels at all
                selected_class = None
                if verbose:
                    print('case does not contain any foreground classes')
            else:
                # I hate myself. Future me aint gonna be happy to read this
                # 2022_11_25: had to read it today. Wasn't too bad
                selected_class = eligible_classes_or_regions[np.random.choice(len(eligible_classes_or_regions))] if \
                    (overwrite_class is None or (overwrite_class not in eligible_classes_or_regions)) else overwrite_class
            # print(f'I want to have foreground, selected class: {selected_class}')
        else:
            raise RuntimeError('lol what!?')
        voxels_of_that_class = class_locations[selected_class] if selected_class is not None else None

        if voxels_of_that_class is not None and len(voxels_of_that_class) > 0:
            selected_voxel = voxels_of_that_class[np.random.choice(len(voxels_of_that_class))]
            # selected voxel is center voxel. Subtract half the patch size to get lower bbox voxel.
            # Make sure it is within the bounds of lb and ub
            # i + 1 because we have first dimension 0!
            bbox_lbs = [max(lbs[i], selected_voxel[i + 1] - self.patch_size[i] // 2) for i in range(dim)]
        else:
            # If the image does not contain any foreground classes, we fall back to random cropping
            bbox_lbs = [np.random.randint(lbs[i], ubs[i] + 1) for i in range(dim)]

    # upper bound follows directly from the lower bound and the fixed patch size
    bbox_ubs = [bbox_lbs[i] + self.patch_size[i] for i in range(dim)]

    return bbox_lbs, bbox_ubs
================================================
FILE: Finetune/nnUNet/nnunetv2/training/dataloading/data_loader_2d.py
================================================
import numpy as np
from nnunetv2.training.dataloading.base_data_loader import nnUNetDataLoaderBase
from nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset
class nnUNetDataLoader2D(nnUNetDataLoaderBase):
    """
    2D variant of the nnU-Net dataloader: for every sampled case it first picks a
    class/region (depending on oversampling), then a slice in which that class is
    present, and finally crops/pads a 2D patch from that slice.
    """
    def generate_train_batch(self):
        """
        Build one minibatch.

        :return: dict with 'data' (float32, self.data_shape), 'seg' (int16,
            self.seg_shape, padded regions filled with -1), 'properties' (list of
            per-case property dicts) and 'keys' (the sampled case identifiers).
        """
        selected_keys = self.get_indices()
        # preallocate memory for data and seg
        data_all = np.zeros(self.data_shape, dtype=np.float32)
        seg_all = np.zeros(self.seg_shape, dtype=np.int16)
        case_properties = []

        for j, current_key in enumerate(selected_keys):
            # oversampling foreground will improve stability of model training, especially if many patches are empty
            # (Lung for example)
            force_fg = self.get_do_oversample(j)

            data, seg, properties = self._data.load_case(current_key)
            case_properties.append(properties)

            # select a class/region first, then a slice where this class is present, then crop to that area
            if not force_fg:
                if self.has_ignore:
                    selected_class_or_region = self.annotated_classes_key
                else:
                    selected_class_or_region = None
            else:
                # filter out all classes that are not present here
                eligible_classes_or_regions = [i for i in properties['class_locations'].keys() if len(properties['class_locations'][i]) > 0]

                # if we have annotated_classes_key locations and other classes are present, remove the annotated_classes_key from the list
                # strange formulation needed to circumvent
                # ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
                tmp = [i == self.annotated_classes_key if isinstance(i, tuple) else False for i in eligible_classes_or_regions]
                if any(tmp):
                    if len(eligible_classes_or_regions) > 1:
                        eligible_classes_or_regions.pop(np.where(tmp)[0][0])

                selected_class_or_region = eligible_classes_or_regions[np.random.choice(len(eligible_classes_or_regions))] if \
                    len(eligible_classes_or_regions) > 0 else None
            if selected_class_or_region is not None:
                # column 1 of class_locations holds the slice index of each foreground voxel
                selected_slice = np.random.choice(properties['class_locations'][selected_class_or_region][:, 1])
            else:
                selected_slice = np.random.choice(len(data[0]))

            data = data[:, selected_slice]
            seg = seg[:, selected_slice]

            # the line of death lol
            # this needs to be a separate variable because we could otherwise permanently overwrite
            # properties['class_locations']
            # selected_class_or_region is:
            # - None if we do not have an ignore label and force_fg is False OR if force_fg is True but there is no foreground in the image
            # - A tuple of all (non-ignore) labels if there is an ignore label and force_fg is False
            # - a class or region if force_fg is True
            class_locations = {
                selected_class_or_region: properties['class_locations'][selected_class_or_region][properties['class_locations'][selected_class_or_region][:, 1] == selected_slice][:, (0, 2, 3)]
            } if (selected_class_or_region is not None) else None

            # print(properties)
            shape = data.shape[1:]
            dim = len(shape)
            bbox_lbs, bbox_ubs = self.get_bbox(shape, force_fg if selected_class_or_region is not None else None,
                                               class_locations, overwrite_class=selected_class_or_region)

            # whoever wrote this knew what he was doing (hint: it was me). We first crop the data to the region of the
            # bbox that actually lies within the data. This will result in a smaller array which is then faster to pad.
            # valid_bbox is just the coord that lied within the data cube. It will be padded to match the patch size
            # later
            valid_bbox_lbs = [max(0, bbox_lbs[i]) for i in range(dim)]
            valid_bbox_ubs = [min(shape[i], bbox_ubs[i]) for i in range(dim)]

            # At this point you might ask yourself why we would treat seg differently from seg_from_previous_stage.
            # Why not just concatenate them here and forget about the if statements? Well that's because segneeds to
            # be padded with -1 constant whereas seg_from_previous_stage needs to be padded with 0s (we could also
            # remove label -1 in the data augmentation but this way it is less error prone)
            this_slice = tuple([slice(0, data.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])
            data = data[this_slice]

            this_slice = tuple([slice(0, seg.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])
            seg = seg[this_slice]

            # pad the cropped region out to patch_size; seg uses -1 so the padded area is ignored downstream
            padding = [(-min(0, bbox_lbs[i]), max(bbox_ubs[i] - shape[i], 0)) for i in range(dim)]
            data_all[j] = np.pad(data, ((0, 0), *padding), 'constant', constant_values=0)
            seg_all[j] = np.pad(seg, ((0, 0), *padding), 'constant', constant_values=-1)

        return {'data': data_all, 'seg': seg_all, 'properties': case_properties, 'keys': selected_keys}
if __name__ == '__main__':
    # manual smoke test with a hard-coded developer path; not part of the library API
    folder = '/media/fabian/data/nnUNet_preprocessed/Dataset004_Hippocampus/2d'
    # NOTE(review): with num_images_properties_loading_threshold=1000 the properties
    # WILL be preloaded for datasets with <= 1000 cases (see nnUNetDataset.__init__)
    ds = nnUNetDataset(folder, None, 1000)
    dl = nnUNetDataLoader2D(ds, 366, (65, 65), (56, 40), 0.33, None, None)
    a = next(dl)
================================================
FILE: Finetune/nnUNet/nnunetv2/training/dataloading/data_loader_3d.py
================================================
import numpy as np
from nnunetv2.training.dataloading.base_data_loader import nnUNetDataLoaderBase
from nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset
class nnUNetDataLoader3D(nnUNetDataLoaderBase):
    """
    3D variant of the nnU-Net dataloader: samples a 3D bounding box per case
    (foreground-centered if oversampling demands it), then crops and pads to
    patch_size.
    """
    def generate_train_batch(self):
        """
        Build one minibatch.

        :return: dict with 'data' (float32, self.data_shape), 'seg' (int16,
            self.seg_shape, padded regions filled with -1), 'properties' (list of
            per-case property dicts) and 'keys' (the sampled case identifiers).
        """
        selected_keys = self.get_indices()
        # preallocate memory for data and seg
        data_all = np.zeros(self.data_shape, dtype=np.float32)
        seg_all = np.zeros(self.seg_shape, dtype=np.int16)
        case_properties = []

        for j, i in enumerate(selected_keys):
            # oversampling foreground will improve stability of model training, especially if many patches are empty
            # (Lung for example)
            force_fg = self.get_do_oversample(j)

            data, seg, properties = self._data.load_case(i)
            case_properties.append(properties)

            # If we are doing the cascade then the segmentation from the previous stage will already have been loaded by
            # self._data.load_case(i) (see nnUNetDataset.load_case)
            shape = data.shape[1:]
            dim = len(shape)
            bbox_lbs, bbox_ubs = self.get_bbox(shape, force_fg, properties['class_locations'])

            # whoever wrote this knew what he was doing (hint: it was me). We first crop the data to the region of the
            # bbox that actually lies within the data. This will result in a smaller array which is then faster to pad.
            # valid_bbox is just the coord that lied within the data cube. It will be padded to match the patch size
            # later
            valid_bbox_lbs = [max(0, bbox_lbs[i]) for i in range(dim)]
            valid_bbox_ubs = [min(shape[i], bbox_ubs[i]) for i in range(dim)]

            # At this point you might ask yourself why we would treat seg differently from seg_from_previous_stage.
            # Why not just concatenate them here and forget about the if statements? Well that's because segneeds to
            # be padded with -1 constant whereas seg_from_previous_stage needs to be padded with 0s (we could also
            # remove label -1 in the data augmentation but this way it is less error prone)
            this_slice = tuple([slice(0, data.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])
            data = data[this_slice]

            this_slice = tuple([slice(0, seg.shape[0])] + [slice(i, j) for i, j in zip(valid_bbox_lbs, valid_bbox_ubs)])
            seg = seg[this_slice]

            # pad the cropped region out to patch_size; seg uses -1 so the padded area is ignored downstream
            padding = [(-min(0, bbox_lbs[i]), max(bbox_ubs[i] - shape[i], 0)) for i in range(dim)]
            data_all[j] = np.pad(data, ((0, 0), *padding), 'constant', constant_values=0)
            seg_all[j] = np.pad(seg, ((0, 0), *padding), 'constant', constant_values=-1)

        return {'data': data_all, 'seg': seg_all, 'properties': case_properties, 'keys': selected_keys}
if __name__ == '__main__':
    # manual smoke test with a hard-coded developer path; not part of the library API
    folder = '/media/fabian/data/nnUNet_preprocessed/Dataset002_Heart/3d_fullres'
    ds = nnUNetDataset(folder, 0)  # this should not load the properties!
    dl = nnUNetDataLoader3D(ds, 5, (16, 16, 16), (16, 16, 16), 0.33, None, None)
    a = next(dl)
================================================
FILE: Finetune/nnUNet/nnunetv2/training/dataloading/nnunet_dataset.py
================================================
import os
from typing import List
import numpy as np
import shutil
from batchgenerators.utilities.file_and_folder_operations import join, load_pickle, isfile
from nnunetv2.training.dataloading.utils import get_case_identifiers
class nnUNetDataset(object):
    def __init__(self, folder: str, case_identifiers: List[str] = None,
                 num_images_properties_loading_threshold: int = 0,
                 folder_with_segs_from_previous_stage: str = None):
        """
        This does not actually load the dataset. It merely creates a dictionary where the keys are training case names and
        the values are dictionaries containing the relevant information for that case.
        dataset[training_case] -> info
        Info has the following key:value pairs:
        - dataset[case_identifier]['properties']['data_file'] -> the full path to the npz file associated with the training case
        - dataset[case_identifier]['properties']['properties_file'] -> the pkl file containing the case properties

        In addition, if the total number of cases is < num_images_properties_loading_threshold we load all the pickle files
        (containing auxiliary information). This is done for small datasets so that we don't spend too much CPU time on
        reading pkl files on the fly during training. However, for large datasets storing all the aux info (which also
        contains locations of foreground voxels in the images) can cause too much RAM utilization. In that
        case is it better to load on the fly.

        If properties are loaded into the RAM, the info dicts each will have an additional entry:
        - dataset[case_identifier]['properties'] -> pkl file content

        IMPORTANT! THIS CLASS ITSELF IS READ-ONLY. YOU CANNOT ADD KEY:VALUE PAIRS WITH nnUNetDataset[key] = value
        USE THIS INSTEAD:
        nnUNetDataset.dataset[key] = value
        (not sure why you'd want to do that though. So don't do it)
        """
        super().__init__()
        # print('loading dataset')
        if case_identifiers is None:
            case_identifiers = get_case_identifiers(folder)
        case_identifiers.sort()

        # map each case id to the files that belong to it
        self.dataset = {}
        for c in case_identifiers:
            self.dataset[c] = {}
            self.dataset[c]['data_file'] = join(folder, f"{c}.npz")
            self.dataset[c]['properties_file'] = join(folder, f"{c}.pkl")
            if folder_with_segs_from_previous_stage is not None:
                self.dataset[c]['seg_from_prev_stage_file'] = join(folder_with_segs_from_previous_stage, f"{c}.npz")

        # small datasets: preload all properties so we don't hit the disk during training
        if len(case_identifiers) <= num_images_properties_loading_threshold:
            for i in self.dataset.keys():
                self.dataset[i]['properties'] = load_pickle(self.dataset[i]['properties_file'])

        # opt-in via environment variable: keep mmapped npy handles cached per case
        self.keep_files_open = ('nnUNet_keep_files_open' in os.environ.keys()) and \
                               (os.environ['nnUNet_keep_files_open'].lower() in ('true', '1', 't'))
        # print(f'nnUNetDataset.keep_files_open: {self.keep_files_open}')

    def __getitem__(self, key):
        # return a COPY of the info dict, lazily loading properties if not preloaded
        ret = {**self.dataset[key]}
        if 'properties' not in ret.keys():
            ret['properties'] = load_pickle(ret['properties_file'])
        return ret

    def __setitem__(self, key, value):
        return self.dataset.__setitem__(key, value)

    def keys(self):
        return self.dataset.keys()

    def __len__(self):
        return self.dataset.__len__()

    def items(self):
        return self.dataset.items()

    def values(self):
        return self.dataset.values()

    def load_case(self, key):
        """
        Load image, segmentation and properties for one case.

        Prefers memory-mapped .npy files (created by unpack_dataset) over decompressing
        the .npz archive; open mmap handles are cached on the entry when
        keep_files_open is enabled. If a previous-stage segmentation file is registered
        it is stacked onto the segmentation as an extra channel.

        :return: tuple (data, seg, properties)
        """
        entry = self[key]
        if 'open_data_file' in entry.keys():
            data = entry['open_data_file']
            # print('using open data file')
        elif isfile(entry['data_file'][:-4] + ".npy"):
            data = np.load(entry['data_file'][:-4] + ".npy", 'r')
            if self.keep_files_open:
                self.dataset[key]['open_data_file'] = data
                # print('saving open data file')
        else:
            data = np.load(entry['data_file'])['data']

        if 'open_seg_file' in entry.keys():
            seg = entry['open_seg_file']
            # print('using open data file')
        elif isfile(entry['data_file'][:-4] + "_seg.npy"):
            seg = np.load(entry['data_file'][:-4] + "_seg.npy", 'r')
            if self.keep_files_open:
                self.dataset[key]['open_seg_file'] = seg
                # print('saving open seg file')
        else:
            seg = np.load(entry['data_file'])['seg']

        if 'seg_from_prev_stage_file' in entry.keys():
            if isfile(entry['seg_from_prev_stage_file'][:-4] + ".npy"):
                seg_prev = np.load(entry['seg_from_prev_stage_file'][:-4] + ".npy", 'r')
            else:
                seg_prev = np.load(entry['seg_from_prev_stage_file'])['seg']
            seg = np.vstack((seg, seg_prev[None]))

        return data, seg, entry['properties']
if __name__ == '__main__':
    # this is a mini test. Todo: We can move this to tests in the future (requires simulated dataset)
    # It exercises lazy vs preloaded properties loading using a hard-coded developer path.
    folder = '/media/fabian/data/nnUNet_preprocessed/Dataset003_Liver/3d_lowres'
    ds = nnUNetDataset(folder, num_images_properties_loading_threshold=0)  # this should not load the properties!
    # this SHOULD HAVE the properties (lazily loaded by __getitem__)
    ks = ds['liver_0'].keys()
    assert 'properties' in ks
    # amazing. I am the best.

    # this should have the properties (preloaded because threshold is large)
    ds = nnUNetDataset(folder, num_images_properties_loading_threshold=1000)
    # now rename the properties file so that it does not exist anymore
    shutil.move(join(folder, 'liver_0.pkl'), join(folder, 'liver_XXX.pkl'))
    # now we should still be able to access the properties because they have already been loaded
    ks = ds['liver_0'].keys()
    assert 'properties' in ks
    # move file back
    shutil.move(join(folder, 'liver_XXX.pkl'), join(folder, 'liver_0.pkl'))

    # this should not have the properties
    ds = nnUNetDataset(folder, num_images_properties_loading_threshold=0)
    # now rename the properties file so that it does not exist anymore
    shutil.move(join(folder, 'liver_0.pkl'), join(folder, 'liver_XXX.pkl'))
    # now this should crash because lazy loading hits the missing pkl
    try:
        ks = ds['liver_0'].keys()
        raise RuntimeError('we should not have come here')
    except FileNotFoundError:
        print('all good')
    # move file back
    shutil.move(join(folder, 'liver_XXX.pkl'), join(folder, 'liver_0.pkl'))
================================================
FILE: Finetune/nnUNet/nnunetv2/training/dataloading/utils.py
================================================
from __future__ import annotations
import multiprocessing
import os
from typing import List
from pathlib import Path
from warnings import warn
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import isfile, subfiles
from nnunetv2.configuration import default_num_processes
def find_broken_image_and_labels(
        path_to_data_dir: str | Path,
) -> tuple[set[str], set[str]]:
    """
    Iterates through all numpys and tries to read them once to see if a ValueError is raised.
    If so, the case id is added to the respective set and returned for potential fixing.

    :param path_to_data_dir: Path/str to the preprocessed directory containing the npys and npzs.
    :returns: Tuple of a set containing the case ids of the broken npy images and a set of the
        case ids of broken npy segmentations.
    """
    # BUGFIX: the annotation advertises str | Path, but the `/` joins below require a
    # Path object. Coerce once so str arguments no longer raise TypeError.
    path_to_data_dir = Path(path_to_data_dir)
    content = os.listdir(path_to_data_dir)
    unique_ids = [c[:-4] for c in content if c.endswith(".npz")]
    failed_data_ids = set()
    failed_seg_ids = set()
    for unique_id in unique_ids:
        # Try reading data; only ValueError (corrupt file) marks the case as broken.
        # A missing .npy (FileNotFoundError) propagates, as before.
        try:
            np.load(path_to_data_dir / (unique_id + ".npy"), "r")
        except ValueError:
            failed_data_ids.add(unique_id)
        # Try reading seg
        try:
            np.load(path_to_data_dir / (unique_id + "_seg.npy"), "r")
        except ValueError:
            failed_seg_ids.add(unique_id)

    return failed_data_ids, failed_seg_ids
def try_fix_broken_npy(path_do_data_dir: Path, case_ids: set[str], fix_image: bool):
    """
    Receives broken case ids and tries to fix them by re-extracting the npz file (up to 5 times).

    :param case_ids: Set of case ids that are broken.
    :param path_do_data_dir: Path to the preprocessed directory containing the npys and npzs.
    :param fix_image: True to re-extract the image array ('data' -> <id>.npy),
        False for the segmentation ('seg' -> <id>_seg.npy).
    :raises ValueError: If the npy file could not be unpacked after 5 tries.
    """
    # key/suffix only depend on fix_image, so compute them once up front
    key, suffix = ("data", ".npy") if fix_image else ("seg", "_seg.npy")
    for case_id in case_ids:
        for attempt in range(5):
            try:
                extracted = np.load(path_do_data_dir / (case_id + ".npz"), "r")[key]
                np.save(path_do_data_dir / (case_id + suffix), extracted)
                # verify the freshly written npy is readable before declaring success
                np.load(path_do_data_dir / (case_id + suffix), "r")
                break
            except ValueError:
                if attempt == 4:
                    raise ValueError(
                        f"Could not unpack {case_id + suffix} after 5 tries!"
                    )
def verify_or_stratify_npys(path_to_data_dir: str | Path) -> None:
    """
    Re-reads the npy files after unpacking. Should there be a loading issue with any, it will try
    to unpack the affected file again and overwrite the existing one. If the new file does not get
    saved correctly within 5 attempts, a ValueError naming the file is raised. Applies to both
    images and segmentations.

    :param path_to_data_dir: Path to the preprocessed directory containing the npys and npzs.
    :raises ValueError: If an npy file could not be unpacked after 5 tries. Otherwise an obscured
        error would be raised later during training (whenever the broken file is sampled).
    """
    path_to_data_dir = Path(path_to_data_dir)

    # detect unreadable image and segmentation npys
    failed_data_ids, failed_seg_ids = find_broken_image_and_labels(path_to_data_dir)

    if failed_data_ids or failed_seg_ids:
        warn(
            f"Found {len(failed_data_ids)} faulty data npys and {len(failed_seg_ids)}!\n"
            + f"Faulty images: {failed_data_ids}; Faulty segmentations: {failed_seg_ids})\n"
            + "Trying to fix them now."
        )
        # Try to fix the broken npys by re-extracting the npz. If that fails, raise error
        try_fix_broken_npy(path_to_data_dir, failed_data_ids, fix_image=True)
        try_fix_broken_npy(path_to_data_dir, failed_seg_ids, fix_image=False)
def _convert_to_npy(npz_file: str, unpack_segmentation: bool = True, overwrite_existing: bool = False) -> None:
    """
    Unpack one npz archive into flat npy files (<case>.npy for 'data', <case>_seg.npy for 'seg')
    so that later loading can memory-map them instead of decompressing the npz each time.

    :param npz_file: path to the .npz archive
    :param unpack_segmentation: also extract the 'seg' array
    :param overwrite_existing: re-extract even if the npy file already exists
    """
    try:
        a = np.load(npz_file)  # inexpensive, no compression is done here. This just reads metadata
        if overwrite_existing or not isfile(npz_file[:-3] + "npy"):
            np.save(npz_file[:-3] + "npy", a['data'])
        if unpack_segmentation and (overwrite_existing or not isfile(npz_file[:-4] + "_seg.npy")):
            np.save(npz_file[:-4] + "_seg.npy", a['seg'])
    except KeyboardInterrupt:
        # delete potentially half-written outputs so no truncated npy survives, then re-raise
        if isfile(npz_file[:-3] + "npy"):
            os.remove(npz_file[:-3] + "npy")
        if isfile(npz_file[:-4] + "_seg.npy"):
            os.remove(npz_file[:-4] + "_seg.npy")
        raise KeyboardInterrupt
def unpack_dataset(folder: str, unpack_segmentation: bool = True, overwrite_existing: bool = False,
                   num_processes: int = default_num_processes):
    """
    Unpack every npz file in `folder` into npy files (see _convert_to_npy), using a
    'spawn'-context process pool for parallelism.

    :param folder: all npz files in this folder belong to the dataset; all are unpacked
    :param unpack_segmentation: also extract segmentation arrays
    :param overwrite_existing: re-extract even if npy files already exist
    :param num_processes: size of the worker pool
    """
    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        npz_files = subfiles(folder, True, None, ".npz", True)
        n = len(npz_files)
        pool.starmap(_convert_to_npy,
                     zip(npz_files,
                         [unpack_segmentation] * n,
                         [overwrite_existing] * n))
def get_case_identifiers(folder: str) -> List[str]:
    """
    Find all npz files in the given folder and reconstruct the training case names
    from their file names (previous-stage segmentation files are excluded).
    """
    return [
        fname[:-4]
        for fname in os.listdir(folder)
        if fname.endswith("npz") and "segFromPrevStage" not in fname
    ]
if __name__ == '__main__':
    # manual smoke test with a hard-coded developer path; not part of the library API
    unpack_dataset('/media/fabian/data/nnUNet_preprocessed/Dataset002_Heart/2d')
================================================
FILE: Finetune/nnUNet/nnunetv2/training/logging/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/logging/nnunet_logger.py
================================================
import matplotlib
from batchgenerators.utilities.file_and_folder_operations import join
matplotlib.use('agg')
import seaborn as sns
import matplotlib.pyplot as plt
class nnUNetLogger(object):
    """
    This class is really trivial. Don't expect cool functionality here. This is my makeshift solution to problems
    arising from out-of-sync epoch numbers and numbers of logged loss values. It also simplifies the trainer class a
    little

    YOU MUST LOG EXACTLY ONE VALUE PER EPOCH FOR EACH OF THE LOGGING ITEMS! DONT FUCK IT UP
    """
    def __init__(self, verbose: bool = False):
        # one list per tracked quantity; index == epoch number
        self.my_fantastic_logging = {
            'mean_fg_dice': list(),
            'ema_fg_dice': list(),
            'dice_per_class_or_region': list(),
            'train_losses': list(),
            'val_losses': list(),
            'lrs': list(),
            'epoch_start_timestamps': list(),
            'epoch_end_timestamps': list()
        }
        self.verbose = verbose
        # shut up, this logging is great

    def log(self, key, value, epoch: int):
        """
        Record `value` for `key` at `epoch`. Appends when the list is one short,
        overwrites when the entry already exists, and asserts otherwise.
        sometimes shit gets messed up. We try to catch that here
        """
        assert key in self.my_fantastic_logging.keys() and isinstance(self.my_fantastic_logging[key], list), \
            'This function is only intended to log stuff to lists and to have one entry per epoch'

        if self.verbose: print(f'logging {key}: {value} for epoch {epoch}')

        if len(self.my_fantastic_logging[key]) < (epoch + 1):
            self.my_fantastic_logging[key].append(value)
        else:
            assert len(self.my_fantastic_logging[key]) == (epoch + 1), 'something went horribly wrong. My logging ' \
                                                                       'lists length is off by more than 1'
            print(f'maybe some logging issue!? logging {key} and {value}')
            self.my_fantastic_logging[key][epoch] = value

        # handle the ema_fg_dice special case! It is automatically logged when we add a new mean_fg_dice
        # (EMA with decay 0.9; first entry is the raw value)
        if key == 'mean_fg_dice':
            new_ema_pseudo_dice = self.my_fantastic_logging['ema_fg_dice'][epoch - 1] * 0.9 + 0.1 * value \
                if len(self.my_fantastic_logging['ema_fg_dice']) > 0 else value
            self.log('ema_fg_dice', new_ema_pseudo_dice, epoch)

    def plot_progress_png(self, output_folder):
        """Render progress.png (losses + pseudo dice, epoch durations, learning rate)."""
        # we infer the epoch form our internal logging
        epoch = min([len(i) for i in self.my_fantastic_logging.values()]) - 1  # lists of epoch 0 have len 1
        sns.set(font_scale=2.5)
        fig, ax_all = plt.subplots(3, 1, figsize=(30, 54))
        # regular progress.png as we are used to from previous nnU-Net versions
        ax = ax_all[0]
        ax2 = ax.twinx()
        x_values = list(range(epoch + 1))
        ax.plot(x_values, self.my_fantastic_logging['train_losses'][:epoch + 1], color='b', ls='-', label="loss_tr", linewidth=4)
        ax.plot(x_values, self.my_fantastic_logging['val_losses'][:epoch + 1], color='r', ls='-', label="loss_val", linewidth=4)
        ax2.plot(x_values, self.my_fantastic_logging['mean_fg_dice'][:epoch + 1], color='g', ls='dotted', label="pseudo dice",
                 linewidth=3)
        ax2.plot(x_values, self.my_fantastic_logging['ema_fg_dice'][:epoch + 1], color='g', ls='-', label="pseudo dice (mov. avg.)",
                 linewidth=4)
        ax.set_xlabel("epoch")
        ax.set_ylabel("loss")
        ax2.set_ylabel("pseudo dice")
        ax.legend(loc=(0, 1))
        ax2.legend(loc=(0.2, 1))

        # epoch times to see whether the training speed is consistent (inconsistent means there are other jobs
        # clogging up the system)
        ax = ax_all[1]
        # NOTE(review): the [:epoch + 1] slice is applied after the comprehension; zip already
        # truncates to the shorter input, so this is harmless but looks misplaced
        ax.plot(x_values, [i - j for i, j in zip(self.my_fantastic_logging['epoch_end_timestamps'][:epoch + 1],
                                                 self.my_fantastic_logging['epoch_start_timestamps'])][:epoch + 1], color='b',
                ls='-', label="epoch duration", linewidth=4)
        ylim = [0] + [ax.get_ylim()[1]]
        ax.set(ylim=ylim)
        ax.set_xlabel("epoch")
        ax.set_ylabel("time [s]")
        ax.legend(loc=(0, 1))

        # learning rate
        ax = ax_all[2]
        ax.plot(x_values, self.my_fantastic_logging['lrs'][:epoch + 1], color='b', ls='-', label="learning rate", linewidth=4)
        ax.set_xlabel("epoch")
        ax.set_ylabel("learning rate")
        ax.legend(loc=(0, 1))

        plt.tight_layout()
        fig.savefig(join(output_folder, "progress.png"))
        plt.close()

    def get_checkpoint(self):
        # the raw logging dict is the checkpoint payload
        return self.my_fantastic_logging

    def load_checkpoint(self, checkpoint: dict):
        # restore the raw logging dict from a checkpoint
        self.my_fantastic_logging = checkpoint
================================================
FILE: Finetune/nnUNet/nnunetv2/training/loss/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/loss/compound_losses.py
================================================
import torch
from nnunetv2.training.loss.dice import SoftDiceLoss, MemoryEfficientSoftDiceLoss
from nnunetv2.training.loss.robust_ce_loss import RobustCrossEntropyLoss, TopKLoss
from nnunetv2.utilities.helpers import softmax_helper_dim1
from torch import nn
class DC_and_CE_loss(nn.Module):
    def __init__(self, soft_dice_kwargs, ce_kwargs, weight_ce=1, weight_dice=1, ignore_label=None,
                 dice_class=SoftDiceLoss):
        """
        Weighted sum of soft Dice loss and cross-entropy.
        Weights for CE and Dice do not need to sum to one. You can set whatever you want.

        :param soft_dice_kwargs: forwarded to dice_class
        :param ce_kwargs: forwarded to RobustCrossEntropyLoss; ignore_index is injected
            when ignore_label is set
        :param weight_ce: multiplier for the CE term (0 skips computing CE)
        :param weight_dice: multiplier for the Dice term (0 skips computing Dice)
        :param ignore_label: label value excluded from the loss (label-based targets only)
        :param dice_class: dice implementation to instantiate
        """
        super(DC_and_CE_loss, self).__init__()
        if ignore_label is not None:
            ce_kwargs['ignore_index'] = ignore_label

        self.weight_dice = weight_dice
        self.weight_ce = weight_ce
        self.ignore_label = ignore_label

        self.ce = RobustCrossEntropyLoss(**ce_kwargs)
        self.dc = dice_class(apply_nonlin=softmax_helper_dim1, **soft_dice_kwargs)

    def forward(self, net_output: torch.Tensor, target: torch.Tensor):
        """
        target must be b, c, x, y(, z) with c=1
        :param net_output:
        :param target:
        :return: weight_ce * CE + weight_dice * Dice
        """
        if self.ignore_label is not None:
            assert target.shape[1] == 1, 'ignore label is not implemented for one hot encoded target variables ' \
                                         '(DC_and_CE_loss)'
            mask = target != self.ignore_label
            # remove ignore label from target, replace with one of the known labels. It doesn't matter because we
            # ignore gradients in those areas anyway
            target_dice = torch.where(mask, target, 0)
            num_fg = mask.sum()
        else:
            target_dice = target
            mask = None

        dc_loss = self.dc(net_output, target_dice, loss_mask=mask) \
            if self.weight_dice != 0 else 0
        # CE is skipped entirely when the whole target is ignore label (num_fg == 0)
        ce_loss = self.ce(net_output, target[:, 0]) \
            if self.weight_ce != 0 and (self.ignore_label is None or num_fg > 0) else 0

        result = self.weight_ce * ce_loss + self.weight_dice * dc_loss
        return result
class DC_and_BCE_loss(nn.Module):
    def __init__(self, bce_kwargs, soft_dice_kwargs, weight_ce=1, weight_dice=1, use_ignore_label: bool = False,
                 dice_class=MemoryEfficientSoftDiceLoss):
        """
        Weighted sum of soft Dice loss and BCE-with-logits for region-based
        (multi-label / one-hot) training.

        DO NOT APPLY NONLINEARITY IN YOUR NETWORK!

        target mut be one hot encoded
        IMPORTANT: We assume use_ignore_label is located in target[:, -1]!!!

        :param soft_dice_kwargs: forwarded to dice_class
        :param bce_kwargs: forwarded to nn.BCEWithLogitsLoss; reduction is forced to
            'none' when use_ignore_label is set so the mask can be applied per element
        :param weight_ce: multiplier for the BCE term
        :param weight_dice: multiplier for the Dice term
        :param use_ignore_label: treat the last target channel as an ignore mask
        :param dice_class: dice implementation to instantiate
        """
        super(DC_and_BCE_loss, self).__init__()
        if use_ignore_label:
            bce_kwargs['reduction'] = 'none'

        self.weight_dice = weight_dice
        self.weight_ce = weight_ce
        self.use_ignore_label = use_ignore_label

        self.ce = nn.BCEWithLogitsLoss(**bce_kwargs)
        self.dc = dice_class(apply_nonlin=torch.sigmoid, **soft_dice_kwargs)

    def forward(self, net_output: torch.Tensor, target: torch.Tensor):
        if self.use_ignore_label:
            # target is one hot encoded here. invert it so that it is True wherever we can compute the loss
            mask = (1 - target[:, -1:]).bool()
            # remove ignore channel now that we have the mask
            target_regions = torch.clone(target[:, :-1])
        else:
            target_regions = target
            mask = None

        dc_loss = self.dc(net_output, target_regions, loss_mask=mask)
        if mask is not None:
            # masked mean of the per-element BCE
            # NOTE(review): mask.sum() is an integer tensor; confirm torch.clip(..., min=1e-8)
            # promotes/behaves as intended on the torch versions in use
            ce_loss = (self.ce(net_output, target_regions) * mask).sum() / torch.clip(mask.sum(), min=1e-8)
        else:
            ce_loss = self.ce(net_output, target_regions)
        result = self.weight_ce * ce_loss + self.weight_dice * dc_loss
        return result
class DC_and_topk_loss(nn.Module):
    def __init__(self, soft_dice_kwargs, ce_kwargs, weight_ce=1, weight_dice=1, ignore_label=None):
        """
        Weighted sum of soft Dice loss and top-k cross-entropy.
        Weights for CE and Dice do not need to sum to one. You can set whatever you want.

        :param soft_dice_kwargs: forwarded to SoftDiceLoss
        :param ce_kwargs: forwarded to TopKLoss; ignore_index is injected when
            ignore_label is set
        :param weight_ce: multiplier for the top-k CE term (0 skips computing it)
        :param weight_dice: multiplier for the Dice term (0 skips computing it)
        :param ignore_label: label value excluded from the loss (label-based targets only)
        """
        super().__init__()
        if ignore_label is not None:
            ce_kwargs['ignore_index'] = ignore_label

        self.weight_dice = weight_dice
        self.weight_ce = weight_ce
        self.ignore_label = ignore_label

        self.ce = TopKLoss(**ce_kwargs)
        self.dc = SoftDiceLoss(apply_nonlin=softmax_helper_dim1, **soft_dice_kwargs)

    def forward(self, net_output: torch.Tensor, target: torch.Tensor):
        """
        target must be b, c, x, y(, z) with c=1
        :param net_output:
        :param target:
        :return: weight_ce * TopK-CE + weight_dice * Dice
        """
        if self.ignore_label is not None:
            assert target.shape[1] == 1, 'ignore label is not implemented for one hot encoded target variables ' \
                                         '(DC_and_CE_loss)'
            mask = (target != self.ignore_label).bool()
            # remove ignore label from target, replace with one of the known labels. It doesn't matter because we
            # ignore gradients in those areas anyway
            target_dice = torch.clone(target)
            target_dice[target == self.ignore_label] = 0
            num_fg = mask.sum()
        else:
            target_dice = target
            mask = None

        dc_loss = self.dc(net_output, target_dice, loss_mask=mask) \
            if self.weight_dice != 0 else 0
        # CE is skipped entirely when the whole target is ignore label (num_fg == 0);
        # TopKLoss itself handles ignore_index for partially ignored targets
        ce_loss = self.ce(net_output, target) \
            if self.weight_ce != 0 and (self.ignore_label is None or num_fg > 0) else 0

        result = self.weight_ce * ce_loss + self.weight_dice * dc_loss
        return result
================================================
FILE: Finetune/nnUNet/nnunetv2/training/loss/deep_supervision.py
================================================
import torch
from torch import nn
class DeepSupervisionWrapper(nn.Module):
    def __init__(self, loss, weight_factors=None):
        """
        Wraps a loss function so that it can be applied to multiple outputs. Forward accepts an arbitrary number of
        inputs. Each input is expected to be a tuple/list. Each tuple/list must have the same length. The loss is then
        applied to each entry like this:
        l = w0 * loss(input0[0], input1[0], ...) +  w1 * loss(input0[1], input1[1], ...) + ...
        If weights are None, all w will be 1.
        """
        super(DeepSupervisionWrapper, self).__init__()
        # Bugfix: the docstring promises weight_factors=None => all weights 1, but the
        # previous code unconditionally ran any(...)/tuple(...) on it and raised TypeError.
        if weight_factors is None:
            self.weight_factors = None  # forward() treats None as "weight 1 for every output"
        else:
            assert any([x != 0 for x in weight_factors]), "At least one weight factor should be != 0.0"
            self.weight_factors = tuple(weight_factors)
        self.loss = loss

    def forward(self, *args):
        assert all([isinstance(i, (tuple, list)) for i in args]), \
            f"all args must be either tuple or list, got {[type(i) for i in args]}"
        # we could check for equal lengths here as well, but we really shouldn't overdo it with checks because
        # this code is executed a lot of times!
        if self.weight_factors is None:
            weights = (1, ) * len(args[0])
        else:
            weights = self.weight_factors
        # outputs with weight 0 are skipped entirely so their loss is never computed
        return sum([weights[i] * self.loss(*inputs) for i, inputs in enumerate(zip(*args)) if weights[i] != 0.0])
================================================
FILE: Finetune/nnUNet/nnunetv2/training/loss/dice.py
================================================
from typing import Callable
import torch
from nnunetv2.utilities.ddp_allgather import AllGatherGrad
from torch import nn
class SoftDiceLoss(nn.Module):
    def __init__(self, apply_nonlin: Callable = None, batch_dice: bool = False, do_bg: bool = True, smooth: float = 1.,
                 ddp: bool = True, clip_tp: float = None):
        """
        Soft Dice loss computed from the tp/fp/fn statistics of get_tp_fp_fn_tn.
        Returns the negative Dice score so that minimizing the loss maximizes Dice.

        :param apply_nonlin: optional nonlinearity (e.g. softmax) applied to the prediction first
        :param batch_dice: if True, statistics are additionally aggregated over the batch dimension
        :param do_bg: if False, channel 0 (background) is excluded from the Dice computation
        :param smooth: additive smoothing for numerator and denominator
        :param ddp: if True (and batch_dice), tp/fp/fn are all-gathered across DDP workers
        :param clip_tp: optional lower clip value applied to tp
        """
        super(SoftDiceLoss, self).__init__()
        self.do_bg = do_bg
        self.batch_dice = batch_dice
        self.apply_nonlin = apply_nonlin
        self.smooth = smooth
        self.clip_tp = clip_tp
        self.ddp = ddp

    def forward(self, x, y, loss_mask=None):
        # sum over spatial dims; include the batch dim as well when batch_dice is set
        spatial = list(range(2, x.ndim))
        reduce_axes = [0] + spatial if self.batch_dice else spatial

        if self.apply_nonlin is not None:
            x = self.apply_nonlin(x)

        tp, fp, fn, _ = get_tp_fp_fn_tn(x, y, reduce_axes, loss_mask, False)

        if self.ddp and self.batch_dice:
            # aggregate the per-worker statistics so every worker sees the global Dice
            tp = AllGatherGrad.apply(tp).sum(0)
            fp = AllGatherGrad.apply(fp).sum(0)
            fn = AllGatherGrad.apply(fn).sum(0)

        if self.clip_tp is not None:
            tp = torch.clip(tp, min=self.clip_tp, max=None)

        numerator = 2 * tp + self.smooth
        denominator = torch.clip(2 * tp + fp + fn + self.smooth, 1e-8)
        dc = numerator / denominator

        if not self.do_bg:
            # drop the background channel; its index depends on whether batch dim was reduced
            dc = dc[1:] if self.batch_dice else dc[:, 1:]

        return -dc.mean()
class MemoryEfficientSoftDiceLoss(nn.Module):
    def __init__(self, apply_nonlin: Callable = None, batch_dice: bool = False, do_bg: bool = True, smooth: float = 1.,
                 ddp: bool = True):
        """
        Soft Dice loss that works directly on intersect/sum_pred/sum_gt instead of
        materializing full tp/fp/fn tensors.
        saves 1.6 GB on Dataset017 3d_lowres
        """
        super(MemoryEfficientSoftDiceLoss, self).__init__()
        self.do_bg = do_bg
        self.batch_dice = batch_dice
        self.apply_nonlin = apply_nonlin
        self.smooth = smooth
        self.ddp = ddp

    def forward(self, x, y, loss_mask=None):
        if self.apply_nonlin is not None:
            x = self.apply_nonlin(x)

        # all reductions happen over the spatial axes; result shape is (b, c)
        reduce_axes = tuple(range(2, x.ndim))

        def masked_sum(t):
            # sum over spatial axes, restricted to valid voxels when a mask is given
            return t.sum(reduce_axes) if loss_mask is None else (t * loss_mask).sum(reduce_axes)

        # the ground-truth one-hot encoding carries no gradients
        with torch.no_grad():
            if x.ndim != y.ndim:
                y = y.view((y.shape[0], 1, *y.shape[1:]))
            if x.shape == y.shape:
                # if this is the case then gt is probably already a one hot encoding
                y_onehot = y
            else:
                y_onehot = torch.zeros(x.shape, device=x.device, dtype=torch.bool)
                y_onehot.scatter_(1, y.long(), 1)
            if not self.do_bg:
                y_onehot = y_onehot[:, 1:]
            sum_gt = masked_sum(y_onehot)

        # prediction-dependent terms MUST stay on the autograd graph (outside no_grad)
        if not self.do_bg:
            x = x[:, 1:]
        intersect = masked_sum(x * y_onehot)
        sum_pred = masked_sum(x)

        if self.batch_dice:
            if self.ddp:
                intersect = AllGatherGrad.apply(intersect).sum(0)
                sum_pred = AllGatherGrad.apply(sum_pred).sum(0)
                sum_gt = AllGatherGrad.apply(sum_gt).sum(0)
            # collapse the batch dimension -> per-class statistics
            intersect = intersect.sum(0)
            sum_pred = sum_pred.sum(0)
            sum_gt = sum_gt.sum(0)

        dc = (2 * intersect + self.smooth) / torch.clip(sum_gt + sum_pred + self.smooth, 1e-8)
        return -dc.mean()
def get_tp_fp_fn_tn(net_output, gt, axes=None, mask=None, square=False):
    """
    net_output must be (b, c, x, y(, z)))
    gt must be a label map (shape (b, 1, x, y(, z)) OR shape (b, x, y(, z))) or one hot encoding (b, c, x, y(, z))
    if mask is provided it must have shape (b, 1, x, y(, z)))
    :param net_output: (soft) prediction
    :param gt: ground truth
    :param axes: axes to sum over; can be (, ) = no summation
    :param mask: mask must be 1 for valid pixels and 0 for invalid pixels
    :param square: if True then fp, tp and fn will be squared before summation
    :return: tp, fp, fn, tn
    """
    if axes is None:
        axes = tuple(range(2, net_output.ndim))

    # build the one-hot ground truth without tracking gradients
    with torch.no_grad():
        gt_ = gt.view((gt.shape[0], 1, *gt.shape[1:])) if net_output.ndim != gt.ndim else gt
        if net_output.shape == gt_.shape:
            # gt is probably already a one hot encoding
            y_onehot = gt_
        else:
            y_onehot = torch.zeros(net_output.shape, device=net_output.device)
            y_onehot.scatter_(1, gt_.long(), 1)

    inv_output = 1 - net_output
    inv_onehot = 1 - y_onehot
    stats = [net_output * y_onehot,   # tp
             net_output * inv_onehot, # fp
             inv_output * y_onehot,   # fn
             inv_output * inv_onehot] # tn

    if mask is not None:
        with torch.no_grad():
            # broadcast the single-channel mask across all classes
            mask_rep = torch.tile(mask, (1, stats[0].shape[1], *[1 for _ in range(2, stats[0].ndim)]))
        stats = [s * mask_rep for s in stats]

    if square:
        stats = [s ** 2 for s in stats]

    if len(axes) > 0:
        stats = [s.sum(dim=axes, keepdim=False) for s in stats]

    return tuple(stats)
if __name__ == '__main__':
    # quick smoke test: both Dice implementations should produce (near) identical values
    # on random data when configured the same way
    from nnunetv2.utilities.helpers import softmax_helper_dim1
    # random logits and integer labels for a 3-class 3D toy problem
    pred = torch.rand((2, 3, 32, 32, 32))
    ref = torch.randint(0, 3, (2, 32, 32, 32))
    dl_old = SoftDiceLoss(apply_nonlin=softmax_helper_dim1, batch_dice=True, do_bg=False, smooth=0, ddp=False)
    dl_new = MemoryEfficientSoftDiceLoss(apply_nonlin=softmax_helper_dim1, batch_dice=True, do_bg=False, smooth=0, ddp=False)
    res_old = dl_old(pred, ref)
    res_new = dl_new(pred, ref)
    # results are printed for visual comparison, not asserted
    print(res_old, res_new)
================================================
FILE: Finetune/nnUNet/nnunetv2/training/loss/robust_ce_loss.py
================================================
import torch
from torch import nn, Tensor
import numpy as np
class RobustCrossEntropyLoss(nn.CrossEntropyLoss):
    """
    this is just a compatibility layer because my target tensor is float and has an extra dimension
    input must be logits, not probabilities!
    """
    def forward(self, input: Tensor, target: Tensor) -> Tensor:
        # targets may arrive as (b, 1, ...) float tensors from the dataloaders; squeeze
        # the channel dim so nn.CrossEntropyLoss receives the expected (b, ...) long tensor
        if target.ndim == input.ndim:
            assert target.shape[1] == 1
            target = target[:, 0]
        return super().forward(input, target.long())


class TopKLoss(RobustCrossEntropyLoss):
    """
    Cross-entropy evaluated only on the k% hardest (highest-loss) voxels.
    input must be logits, not probabilities!
    """
    def __init__(self, weight=None, ignore_index: int = -100, k: float = 10, label_smoothing: float = 0):
        # k: percentage (0-100] of voxels to keep
        self.k = k
        # reduction='none' keeps the per-voxel losses so the top-k selection can happen in
        # forward(). This replaces the deprecated size_average/reduce legacy arguments that
        # the previous code passed positionally (they resolve to reduction='none' anyway).
        super(TopKLoss, self).__init__(weight=weight, ignore_index=ignore_index, reduction='none',
                                       label_smoothing=label_smoothing)

    def forward(self, inp, target):
        # target is expected as (b, 1, ...); drop the channel dim
        target = target[:, 0].long()
        res = super(TopKLoss, self).forward(inp, target)
        num_voxels = res.numel()
        # keep only the k% largest per-voxel losses and average them
        res, _ = torch.topk(res.view((-1, )), int(num_voxels * self.k / 100), sorted=False)
        return res.mean()
================================================
FILE: Finetune/nnUNet/nnunetv2/training/lr_scheduler/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/lr_scheduler/polylr.py
================================================
from torch.optim.lr_scheduler import _LRScheduler


class PolyLRScheduler(_LRScheduler):
    """Polynomial lr decay: lr = initial_lr * (1 - step / max_steps) ** exponent."""

    def __init__(self, optimizer, initial_lr: float, max_steps: int, exponent: float = 0.9, current_step: int = None):
        self.optimizer = optimizer
        self.initial_lr = initial_lr
        self.max_steps = max_steps
        self.exponent = exponent
        # internal step counter, used (and advanced) when step() is called without an index
        self.ctr = 0
        # NOTE(review): _LRScheduler.__init__ calls self.step() once at the end of
        # construction, which will consume/advance self.ctr — confirm this is intended
        super().__init__(optimizer, current_step if current_step is not None else -1, False)

    def step(self, current_step=None):
        # fall back to the internal counter when called without an explicit step index
        # (also covers the base class's implicit step(-1) during __init__)
        if current_step is None or current_step == -1:
            current_step = self.ctr
            self.ctr += 1

        # polynomial decay; intentionally bypasses the base-class get_lr machinery and
        # writes the new lr straight into the optimizer's param groups
        new_lr = self.initial_lr * (1 - current_step / self.max_steps) ** self.exponent
        for param_group in self.optimizer.param_groups:
            param_group['lr'] = new_lr
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/nnUNetTrainer.py
================================================
import inspect
import multiprocessing
import os
import shutil
import sys
import warnings
from copy import deepcopy
from datetime import datetime
from time import time, sleep
from typing import Union, Tuple, List
import numpy as np
import torch
from batchgenerators.dataloading.single_threaded_augmenter import SingleThreadedAugmenter
from batchgenerators.transforms.abstract_transforms import AbstractTransform, Compose
from batchgenerators.transforms.color_transforms import BrightnessMultiplicativeTransform, \
ContrastAugmentationTransform, GammaTransform
from batchgenerators.transforms.noise_transforms import GaussianNoiseTransform, GaussianBlurTransform
from batchgenerators.transforms.resample_transforms import SimulateLowResolutionTransform
from batchgenerators.transforms.spatial_transforms import SpatialTransform, MirrorTransform
from batchgenerators.transforms.utility_transforms import RemoveLabelTransform, RenameTransform, NumpyToTensor
from batchgenerators.utilities.file_and_folder_operations import join, load_json, isfile, save_json, maybe_mkdir_p
from torch._dynamo import OptimizedModule
from nnunetv2.configuration import ANISO_THRESHOLD, default_num_processes
from nnunetv2.evaluation.evaluate_predictions import compute_metrics_on_folder
from nnunetv2.inference.export_prediction import export_prediction_from_logits, resample_and_save
from nnunetv2.inference.predict_from_raw_data import nnUNetPredictor
from nnunetv2.inference.sliding_window_prediction import compute_gaussian
from nnunetv2.paths import nnUNet_preprocessed, nnUNet_results
from nnunetv2.training.data_augmentation.compute_initial_patch_size import get_patch_size
from nnunetv2.training.data_augmentation.custom_transforms.cascade_transforms import MoveSegAsOneHotToData, \
ApplyRandomBinaryOperatorTransform, RemoveRandomConnectedComponentFromOneHotEncodingTransform
from nnunetv2.training.data_augmentation.custom_transforms.deep_supervision_donwsampling import \
DownsampleSegForDSTransform2
from nnunetv2.training.data_augmentation.custom_transforms.limited_length_multithreaded_augmenter import \
LimitedLenWrapper
from nnunetv2.training.data_augmentation.custom_transforms.masking import MaskTransform
from nnunetv2.training.data_augmentation.custom_transforms.region_based_training import \
ConvertSegmentationToRegionsTransform
from nnunetv2.training.data_augmentation.custom_transforms.transforms_for_dummy_2d import Convert2DTo3DTransform, \
Convert3DTo2DTransform
from nnunetv2.training.dataloading.data_loader_2d import nnUNetDataLoader2D
from nnunetv2.training.dataloading.data_loader_3d import nnUNetDataLoader3D
from nnunetv2.training.dataloading.nnunet_dataset import nnUNetDataset
from nnunetv2.training.dataloading.utils import get_case_identifiers, unpack_dataset
from nnunetv2.training.logging.nnunet_logger import nnUNetLogger
from nnunetv2.training.loss.compound_losses import DC_and_CE_loss, DC_and_BCE_loss
from nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper
from nnunetv2.training.loss.dice import get_tp_fp_fn_tn, MemoryEfficientSoftDiceLoss
from nnunetv2.training.lr_scheduler.polylr import PolyLRScheduler
from nnunetv2.utilities.collate_outputs import collate_outputs
from nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA
from nnunetv2.utilities.file_path_utilities import check_workers_alive_and_busy
from nnunetv2.utilities.get_network_from_plans import get_network_from_plans
from nnunetv2.utilities.helpers import empty_cache, dummy_context
from nnunetv2.utilities.label_handling.label_handling import convert_labelmap_to_one_hot, determine_num_input_channels
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager
from sklearn.model_selection import KFold
from torch import autocast, nn
from torch import distributed as dist
from torch.cuda import device_count
from torch.cuda.amp import GradScaler
from torch.nn.parallel import DistributedDataParallel as DDP
class nnUNetTrainer(object):
    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,
                 device: torch.device = torch.device('cuda')):
        """
        Base nnU-Net v2 trainer. This constructor only sets up folders, hyperparameters,
        label handling and logging; the expensive parts (network, optimizer, loss,
        dataloaders) are created later in self.initialize() / on_train_start.

        :param plans: content of the plans.json as a dict
        :param configuration: name of the configuration within the plans (e.g. '3d_fullres')
        :param fold: cross-validation fold to train, or 'all'
        :param dataset_json: content of the dataset.json as a dict
        :param unpack_dataset: whether to unpack the preprocessed data before training
        :param device: device to train on; under DDP it is overridden by the local rank
        """
        # From https://grugbrain.dev/. Worth a read ya big brains ;-)
        # apex predator of grug is complexity
        # complexity bad
        # say again:
        # complexity very bad
        # you say now:
        # complexity very, very bad
        # given choice between complexity or one on one against t-rex, grug take t-rex: at least grug see t-rex
        # complexity is spirit demon that enter codebase through well-meaning but ultimately very clubbable non grug-brain developers and project managers who not fear complexity spirit demon or even know about sometime
        # one day code base understandable and grug can get work done, everything good!
        # next day impossible: complexity demon spirit has entered code and very dangerous situation!
        # OK OK I am guilty. But I tried.
        # https://www.osnews.com/images/comics/wtfm.jpg
        # https://i.pinimg.com/originals/26/b2/50/26b250a738ea4abc7a5af4d42ad93af0.jpg
        self.is_ddp = dist.is_available() and dist.is_initialized()
        self.local_rank = 0 if not self.is_ddp else dist.get_rank()
        self.device = device
        # print what device we are using
        if self.is_ddp:  # implicitly it's clear that we use cuda in this case
            print(f"I am local rank {self.local_rank}. {device_count()} GPUs are available. The world size is "
                  f"{dist.get_world_size()}."
                  f"Setting device to {self.device}")
            self.device = torch.device(type='cuda', index=self.local_rank)
        else:
            if self.device.type == 'cuda':
                # we might want to let the user pick this but for now please pick the correct GPU with CUDA_VISIBLE_DEVICES=X
                self.device = torch.device(type='cuda', index=0)
            print(f"Using device: {self.device}")
        # loading and saving this class for continuing from checkpoint should not happen based on pickling. This
        # would also pickle the network etc. Bad, bad. Instead we just reinstantiate and then load the checkpoint we
        # need. So let's save the init args
        self.my_init_kwargs = {}
        for k in inspect.signature(self.__init__).parameters.keys():
            # captures the constructor arguments from locals() so checkpoints can re-create
            # the trainer with identical settings
            self.my_init_kwargs[k] = locals()[k]

        ###  Saving all the init args into class variables for later access
        self.plans_manager = PlansManager(plans)
        self.configuration_manager = self.plans_manager.get_configuration(configuration)
        self.configuration_name = configuration
        self.dataset_json = dataset_json
        self.fold = fold
        self.unpack_dataset = unpack_dataset

        ### Setting all the folder names. We need to make sure things don't crash in case we are just running
        # inference and some of the folders may not be defined!
        self.preprocessed_dataset_folder_base = join(nnUNet_preprocessed, self.plans_manager.dataset_name) \
            if nnUNet_preprocessed is not None else None
        self.output_folder_base = join(nnUNet_results, self.plans_manager.dataset_name,
                                       self.__class__.__name__ + '__' + self.plans_manager.plans_name + "__" + configuration) \
            if nnUNet_results is not None else None
        self.output_folder = join(self.output_folder_base, f'fold_{fold}')

        self.preprocessed_dataset_folder = join(self.preprocessed_dataset_folder_base,
                                                self.configuration_manager.data_identifier)
        # unlike the previous nnunet folder_with_segs_from_previous_stage is now part of the plans. For now it has to
        # be a different configuration in the same plans
        # IMPORTANT! the mapping must be bijective, so lowres must point to fullres and vice versa (using
        # "previous_stage" and "next_stage"). Otherwise it won't work!
        self.is_cascaded = self.configuration_manager.previous_stage_name is not None
        self.folder_with_segs_from_previous_stage = \
            join(nnUNet_results, self.plans_manager.dataset_name,
                 self.__class__.__name__ + '__' + self.plans_manager.plans_name + "__" +
                 self.configuration_manager.previous_stage_name, 'predicted_next_stage', self.configuration_name) \
            if self.is_cascaded else None

        ### Some hyperparameters for you to fiddle with
        self.initial_lr = 1e-2
        self.weight_decay = 3e-5
        self.oversample_foreground_percent = 0.33
        self.num_iterations_per_epoch = 250
        self.num_val_iterations_per_epoch = 50
        self.num_epochs = 1000
        self.current_epoch = 0
        self.enable_deep_supervision = True

        ### Dealing with labels/regions
        self.label_manager = self.plans_manager.get_label_manager(dataset_json)
        # labels can either be a list of int (regular training) or a list of tuples of int (region-based training)
        # needed for predictions. We do sigmoid in case of (overlapping) regions

        # the following are populated later by self.initialize()
        self.num_input_channels = None  # -> self.initialize()
        self.network = None  # -> self._get_network()
        self.optimizer = self.lr_scheduler = None  # -> self.initialize
        self.grad_scaler = GradScaler() if self.device.type == 'cuda' else None
        self.loss = None  # -> self.initialize

        ### Simple logging. Don't take that away from me!
        # initialize log file. This is just our log for the print statements etc. Not to be confused with lightning
        # logging
        timestamp = datetime.now()
        maybe_mkdir_p(self.output_folder)
        self.log_file = join(self.output_folder, "training_log_%d_%d_%d_%02.0d_%02.0d_%02.0d.txt" %
                             (timestamp.year, timestamp.month, timestamp.day, timestamp.hour, timestamp.minute,
                              timestamp.second))
        self.logger = nnUNetLogger()

        ### placeholders
        self.dataloader_train = self.dataloader_val = None  # see on_train_start

        ### initializing stuff for remembering things and such
        self._best_ema = None

        ### inference things
        self.inference_allowed_mirroring_axes = None  # this variable is set in
        # self.configure_rotation_dummyDA_mirroring_and_inital_patch_size and will be saved in checkpoints

        ### checkpoint saving stuff
        self.save_every = 50
        self.disable_checkpointing = False

        ## DDP batch size and oversampling can differ between workers and needs adaptation
        # we need to change the batch size in DDP because we don't use any of those distributed samplers
        self._set_batch_size_and_oversample()

        self.was_initialized = False

        self.print_to_log_file("\n#######################################################################\n"
                               "Please cite the following paper when using nnU-Net:\n"
                               "Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). "
                               "nnU-Net: a self-configuring method for deep learning-based biomedical image segmentation. "
                               "Nature methods, 18(2), 203-211.\n"
                               "#######################################################################\n",
                               also_print_to_console=True, add_timestamp=False)
def initialize(self):
if not self.was_initialized:
self.num_input_channels = determine_num_input_channels(self.plans_manager, self.configuration_manager,
self.dataset_json)
self.network = self.build_network_architecture(
self.plans_manager,
self.dataset_json,
self.configuration_manager,
self.num_input_channels,
self.enable_deep_supervision,
).to(self.device)
# compile network for free speedup
if self._do_i_compile():
self.print_to_log_file('Compiling network...')
self.network = torch.compile(self.network)
self.optimizer, self.lr_scheduler = self.configure_optimizers()
# if ddp, wrap in DDP wrapper
if self.is_ddp:
self.network = torch.nn.SyncBatchNorm.convert_sync_batchnorm(self.network)
self.network = DDP(self.network, device_ids=[self.local_rank])
self.loss = self._build_loss()
self.was_initialized = True
else:
raise RuntimeError("You have called self.initialize even though the trainer was already initialized. "
"That should not happen.")
def _do_i_compile(self):
return ('nnUNet_compile' in os.environ.keys()) and (os.environ['nnUNet_compile'].lower() in ('true', '1', 't'))
    def _save_debug_information(self):
        """
        Dumps a snapshot of the trainer's attributes plus environment info (hostname,
        torch/cudnn versions, GPU name) to debug.json in the output folder. Rank 0 only.
        """
        # saving some debug information
        if self.local_rank == 0:
            dct = {}
            for k in self.__dir__():
                if not k.startswith("__"):
                    # non-callables (and the loss object) are stringified; the network is
                    # reduced to its class name to keep the json small
                    if not callable(getattr(self, k)) or k in ['loss', ]:
                        dct[k] = str(getattr(self, k))
                    elif k in ['network', ]:
                        dct[k] = str(getattr(self, k).__class__.__name__)
                    else:
                        # print(k)
                        pass
                    # dataloaders get a few extra introspected fields if present
                    if k in ['dataloader_train', 'dataloader_val']:
                        if hasattr(getattr(self, k), 'generator'):
                            dct[k + '.generator'] = str(getattr(self, k).generator)
                        if hasattr(getattr(self, k), 'num_processes'):
                            dct[k + '.num_processes'] = str(getattr(self, k).num_processes)
                        if hasattr(getattr(self, k), 'transform'):
                            dct[k + '.transform'] = str(getattr(self, k).transform)
            import subprocess
            # NOTE(review): subprocess.getoutput expects a string command; passing a list
            # happens to work on POSIX (first element becomes the command) but is fragile
            hostname = subprocess.getoutput(['hostname'])
            dct['hostname'] = hostname
            torch_version = torch.__version__
            if self.device.type == 'cuda':
                gpu_name = torch.cuda.get_device_name()
                dct['gpu_name'] = gpu_name
                cudnn_version = torch.backends.cudnn.version()
            else:
                cudnn_version = 'None'
            dct['device'] = str(self.device)
            dct['torch_version'] = torch_version
            dct['cudnn_version'] = cudnn_version
            save_json(dct, join(self.output_folder, "debug.json"))
    @staticmethod
    def build_network_architecture(plans_manager: PlansManager,
                                   dataset_json,
                                   configuration_manager: ConfigurationManager,
                                   num_input_channels,
                                   enable_deep_supervision: bool = True) -> nn.Module:
        """
        This is where you build the architecture according to the plans. There is no obligation to use
        get_network_from_plans, this is just a utility we use for the nnU-Net default architectures. You can do what
        you want. Even ignore the plans and just return something static (as long as it can process the requested
        patch size)
        but don't bug us with your bugs arising from fiddling with this :-P
        This is the function that is called in inference as well! This is needed so that all network architecture
        variants can be loaded at inference time (inference will use the same nnUNetTrainer that was used for
        training, so if you change the network architecture during training by deriving a new trainer class then
        inference will know about it).
        If you need to know how many segmentation outputs your custom architecture needs to have, use the following snippet:
        > label_manager = plans_manager.get_label_manager(dataset_json)
        > label_manager.num_segmentation_heads
        (why so complicated? -> We can have either classical training (classes) or regions. If we have regions,
        the number of outputs is != the number of classes. Also there is the ignore label for which no output
        should be generated. label_manager takes care of all that for you.)
        """
        # default implementation: delegate to the plans-driven architecture factory
        return get_network_from_plans(plans_manager, dataset_json, configuration_manager,
                                      num_input_channels, deep_supervision=enable_deep_supervision)
def _get_deep_supervision_scales(self):
if self.enable_deep_supervision:
deep_supervision_scales = list(list(i) for i in 1 / np.cumprod(np.vstack(
self.configuration_manager.pool_op_kernel_sizes), axis=0))[:-1]
else:
deep_supervision_scales = None # for train and val_transforms
return deep_supervision_scales
    def _set_batch_size_and_oversample(self):
        """
        Sets self.batch_size (and, under DDP, self.oversample_foreground_percent) for this
        worker. In DDP the global batch size from the plans is split across workers; since
        nnU-Net oversamples foreground only for the last fraction of each global batch,
        every worker gets an oversample percentage matching the slice of the global batch
        it is responsible for.
        """
        if not self.is_ddp:
            # set batch size to what the plan says, leave oversample untouched
            self.batch_size = self.configuration_manager.batch_size
        else:
            # batch size is distributed over DDP workers and we need to change oversample_percent for each worker
            batch_sizes = []
            oversample_percents = []
            world_size = dist.get_world_size()
            my_rank = dist.get_rank()
            global_batch_size = self.configuration_manager.batch_size
            assert global_batch_size >= world_size, 'Cannot run DDP if the batch size is smaller than the number of ' \
                                                    'GPUs... Duh.'
            batch_size_per_GPU = np.ceil(global_batch_size / world_size).astype(int)
            for rank in range(world_size):
                # the last rank(s) may get a smaller batch so the global batch size is matched exactly
                if (rank + 1) * batch_size_per_GPU > global_batch_size:
                    batch_size = batch_size_per_GPU - ((rank + 1) * batch_size_per_GPU - global_batch_size)
                else:
                    batch_size = batch_size_per_GPU
                batch_sizes.append(batch_size)
                # [sample_id_low, sample_id_high) = global sample index range held by this rank
                sample_id_low = 0 if len(batch_sizes) == 0 else np.sum(batch_sizes[:-1])
                sample_id_high = np.sum(batch_sizes)
                if sample_id_high / global_batch_size < (1 - self.oversample_foreground_percent):
                    # rank lies entirely in the non-oversampled head of the global batch
                    oversample_percents.append(0.0)
                elif sample_id_low / global_batch_size > (1 - self.oversample_foreground_percent):
                    # rank lies entirely in the oversampled tail
                    oversample_percents.append(1.0)
                else:
                    # rank straddles the oversampling boundary: oversample only the tail fraction
                    percent_covered_by_this_rank = sample_id_high / global_batch_size - sample_id_low / global_batch_size
                    oversample_percent_here = 1 - (((1 - self.oversample_foreground_percent) -
                                                    sample_id_low / global_batch_size) / percent_covered_by_this_rank)
                    oversample_percents.append(oversample_percent_here)
            print("worker", my_rank, "oversample", oversample_percents[my_rank])
            print("worker", my_rank, "batch_size", batch_sizes[my_rank])
            # self.print_to_log_file("worker", my_rank, "oversample", oversample_percents[my_rank])
            # self.print_to_log_file("worker", my_rank, "batch_size", batch_sizes[my_rank])
            self.batch_size = batch_sizes[my_rank]
            self.oversample_foreground_percent = oversample_percents[my_rank]
    def _build_loss(self):
        """
        Builds the training loss: Dice+BCE for region-based training (overlapping regions
        use sigmoid), Dice+CE otherwise. When deep supervision is enabled the loss is
        wrapped so it is applied to all outputs with exponentially decaying weights.
        """
        if self.label_manager.has_regions:
            loss = DC_and_BCE_loss({},
                                   {'batch_dice': self.configuration_manager.batch_dice,
                                    'do_bg': True, 'smooth': 1e-5, 'ddp': self.is_ddp},
                                   use_ignore_label=self.label_manager.ignore_label is not None,
                                   dice_class=MemoryEfficientSoftDiceLoss)
        else:
            loss = DC_and_CE_loss({'batch_dice': self.configuration_manager.batch_dice,
                                   'smooth': 1e-5, 'do_bg': False, 'ddp': self.is_ddp}, {}, weight_ce=1, weight_dice=1,
                                  ignore_label=self.label_manager.ignore_label, dice_class=MemoryEfficientSoftDiceLoss)
        # we give each output a weight which decreases exponentially (division by 2) as the resolution decreases
        # this gives higher resolution outputs more weight in the loss
        if self.enable_deep_supervision:
            deep_supervision_scales = self._get_deep_supervision_scales()
            weights = np.array([1 / (2**i) for i in range(len(deep_supervision_scales))])
            weights[-1] = 0
            # we don't use the lowest 2 outputs. Normalize weights so that they sum to 1
            # (the very lowest output is already excluded by _get_deep_supervision_scales'
            # [:-1]; zeroing weights[-1] here drops the second lowest as well)
            weights = weights / weights.sum()
            # now wrap the loss
            loss = DeepSupervisionWrapper(loss, weights)
        return loss
    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        """
        This function is stupid and certainly one of the weakest spots of this implementation. Not entirely sure how we can fix it.

        Returns (rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes):
        per-axis rotation ranges in radians, whether strongly anisotropic 3D data should be
        augmented slice-wise as 2D, the enlarged patch size to sample so rotation/scaling
        can crop without padding, and the mirroring axes. Also stores the mirroring axes in
        self.inference_allowed_mirroring_axes for use at inference time.
        """
        patch_size = self.configuration_manager.patch_size
        dim = len(patch_size)
        # todo rotation should be defined dynamically based on patch size (more isotropic patch sizes = more rotation)
        if dim == 2:
            do_dummy_2d_data_aug = False
            # todo revisit this parametrization
            if max(patch_size) / min(patch_size) > 1.5:
                # elongated 2D patches: restrict rotation to +/- 15 degrees
                rotation_for_DA = {
                    'x': (-15. / 360 * 2. * np.pi, 15. / 360 * 2. * np.pi),
                    'y': (0, 0),
                    'z': (0, 0)
                }
            else:
                # near-square 2D patches: full +/- 180 degree rotation
                rotation_for_DA = {
                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. * np.pi),
                    'y': (0, 0),
                    'z': (0, 0)
                }
            mirror_axes = (0, 1)
        elif dim == 3:
            # todo this is not ideal. We could also have patch_size (64, 16, 128) in which case a full 180deg 2d rot would be bad
            # order of the axes is determined by spacing, not image size
            do_dummy_2d_data_aug = (max(patch_size) / patch_size[0]) > ANISO_THRESHOLD
            if do_dummy_2d_data_aug:
                # why do we rotate 180 deg here all the time? We should also restrict it
                rotation_for_DA = {
                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. * np.pi),
                    'y': (0, 0),
                    'z': (0, 0)
                }
            else:
                # isotropic 3D: moderate +/- 30 degree rotation around all axes
                rotation_for_DA = {
                    'x': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),
                    'y': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),
                    'z': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),
                }
            mirror_axes = (0, 1, 2)
        else:
            raise RuntimeError()
        # todo this function is stupid. It doesn't even use the correct scale range (we keep things as they were in the
        #  old nnunet for now)
        initial_patch_size = get_patch_size(patch_size[-dim:],
                                            *rotation_for_DA.values(),
                                            (0.85, 1.25))
        if do_dummy_2d_data_aug:
            # the out-of-plane axis is never rotated/scaled, so no enlargement needed there
            initial_patch_size[0] = patch_size[0]
        self.print_to_log_file(f'do_dummy_2d_data_aug: {do_dummy_2d_data_aug}')
        self.inference_allowed_mirroring_axes = mirror_axes
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes
def print_to_log_file(self, *args, also_print_to_console=True, add_timestamp=True):
if self.local_rank == 0:
timestamp = time()
dt_object = datetime.fromtimestamp(timestamp)
if add_timestamp:
args = (f"{dt_object}:", *args)
successful = False
max_attempts = 5
ctr = 0
while not successful and ctr < max_attempts:
try:
with open(self.log_file, 'a+') as f:
for a in args:
f.write(str(a))
f.write(" ")
f.write("\n")
successful = True
except IOError:
print(f"{datetime.fromtimestamp(timestamp)}: failed to log: ", sys.exc_info())
sleep(0.5)
ctr += 1
if also_print_to_console:
print(*args)
elif also_print_to_console:
print(*args)
def print_plans(self):
if self.local_rank == 0:
dct = deepcopy(self.plans_manager.plans)
del dct['configurations']
self.print_to_log_file(f"\nThis is the configuration used by this "
f"training:\nConfiguration name: {self.configuration_name}\n",
self.configuration_manager, '\n', add_timestamp=False)
self.print_to_log_file('These are the global plan.json settings:\n', dct, '\n', add_timestamp=False)
def configure_optimizers(self):
optimizer = torch.optim.SGD(self.network.parameters(), self.initial_lr, weight_decay=self.weight_decay,
momentum=0.99, nesterov=True)
lr_scheduler = PolyLRScheduler(optimizer, self.initial_lr, self.num_epochs)
return optimizer, lr_scheduler
    def plot_network_architecture(self):
        """
        Best-effort attempt to render the network architecture to a pdf via hiddenlayer
        (rank 0 only). Failures are logged, never raised. Skipped entirely when
        nnUNet_compile is enabled because the compiled module cannot be traced this way.
        """
        if self._do_i_compile():
            self.print_to_log_file("Unable to plot network architecture: nnUNet_compile is enabled!")
            return
        if self.local_rank == 0:
            try:
                # raise NotImplementedError('hiddenlayer no longer works and we do not have a viable alternative :-(')
                # pip install git+https://github.com/saugatkandel/hiddenlayer.git
                # from torchviz import make_dot
                # # not viable.
                # make_dot(tuple(self.network(torch.rand((1, self.num_input_channels,
                #                                         *self.configuration_manager.patch_size),
                #                                        device=self.device)))).render(
                #     join(self.output_folder, "network_architecture.pdf"), format='pdf')
                # self.optimizer.zero_grad()
                # broken.
                import hiddenlayer as hl
                # traces one dummy forward pass with a single random patch
                g = hl.build_graph(self.network,
                                   torch.rand((1, self.num_input_channels,
                                               *self.configuration_manager.patch_size),
                                              device=self.device),
                                   transforms=None)
                g.save(join(self.output_folder, "network_architecture.pdf"))
                del g
            except Exception as e:
                # plotting is optional; log the failure and keep training
                self.print_to_log_file("Unable to plot network architecture:")
                self.print_to_log_file(e)
                # self.print_to_log_file("\nprinting the network instead:\n")
                # self.print_to_log_file(self.network)
                # self.print_to_log_file("\n")
            finally:
                # the dummy forward pass may have allocated GPU memory; release it
                empty_cache(self.device)
    def do_split(self):
        """
        The default split is a 5-fold CV on all available training cases. nnU-Net will create a split (it is seeded,
        so always the same) and save it as splits_final.json in the preprocessed data directory (note: the code below
        reads/writes json, not the pkl mentioned in older documentation).

        Sometimes you may want to create your own split for various reasons. For this you will need to create your own
        splits_final.json file. If this file is present, nnU-Net is going to use it and whatever splits are defined in
        it. You can create as many splits in this file as you want. Note that if you define only 4 splits (fold 0-3)
        and then set fold=4 when training (that would be the fifth split), nnU-Net will print a warning and proceed to
        use a random 80:20 data split.
        :return: (tr_keys, val_keys) lists of case identifiers
        """
        if self.fold == "all":
            # if fold==all then we use all images for training and validation
            case_identifiers = get_case_identifiers(self.preprocessed_dataset_folder)
            tr_keys = case_identifiers
            val_keys = tr_keys
        else:
            splits_file = join(self.preprocessed_dataset_folder_base, "splits_final.json")
            # dataset is only used here to enumerate the available case identifiers
            dataset = nnUNetDataset(self.preprocessed_dataset_folder, case_identifiers=None,
                                    num_images_properties_loading_threshold=0,
                                    folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage)
            # if the split file does not exist we need to create it
            if not isfile(splits_file):
                self.print_to_log_file("Creating new 5-fold cross-validation split...")
                splits = []
                # sort for determinism before the seeded KFold shuffle
                all_keys_sorted = np.sort(list(dataset.keys()))
                kfold = KFold(n_splits=5, shuffle=True, random_state=12345)
                for i, (train_idx, test_idx) in enumerate(kfold.split(all_keys_sorted)):
                    train_keys = np.array(all_keys_sorted)[train_idx]
                    test_keys = np.array(all_keys_sorted)[test_idx]
                    splits.append({})
                    splits[-1]['train'] = list(train_keys)
                    splits[-1]['val'] = list(test_keys)
                save_json(splits, splits_file)
            else:
                self.print_to_log_file("Using splits from existing split file:", splits_file)
                splits = load_json(splits_file)
                self.print_to_log_file(f"The split file contains {len(splits)} splits.")

            self.print_to_log_file("Desired fold for training: %d" % self.fold)
            if self.fold < len(splits):
                tr_keys = splits[self.fold]['train']
                val_keys = splits[self.fold]['val']
                self.print_to_log_file("This split has %d training and %d validation cases."
                                       % (len(tr_keys), len(val_keys)))
            else:
                self.print_to_log_file("INFO: You requested fold %d for training but splits "
                                       "contain only %d folds. I am now creating a "
                                       "random (but seeded) 80:20 split!" % (self.fold, len(splits)))
                # if we request a fold that is not in the split file, create a random 80:20 split
                # seed depends on the requested fold so different out-of-range folds differ
                rnd = np.random.RandomState(seed=12345 + self.fold)
                keys = np.sort(list(dataset.keys()))
                idx_tr = rnd.choice(len(keys), int(len(keys) * 0.8), replace=False)
                idx_val = [i for i in range(len(keys)) if i not in idx_tr]
                tr_keys = [keys[i] for i in idx_tr]
                val_keys = [keys[i] for i in idx_val]
                self.print_to_log_file("This random 80:20 split has %d training and %d validation cases."
                                       % (len(tr_keys), len(val_keys)))
            if any([i in val_keys for i in tr_keys]):
                self.print_to_log_file('WARNING: Some validation cases are also in the training set. Please check the '
                                       'splits.json or ignore if this is intentional.')
        return tr_keys, val_keys
def get_tr_and_val_datasets(self):
# create dataset split
tr_keys, val_keys = self.do_split()
# load the datasets for training and validation. Note that we always draw random samples so we really don't
# care about distributing training cases across GPUs.
dataset_tr = nnUNetDataset(self.preprocessed_dataset_folder, tr_keys,
folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage,
num_images_properties_loading_threshold=0)
dataset_val = nnUNetDataset(self.preprocessed_dataset_folder, val_keys,
folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage,
num_images_properties_loading_threshold=0)
return dataset_tr, dataset_val
    def get_dataloaders(self):
        """
        Build the augmented train/val batch generators.

        Returns a pair of generator-like objects yielding batches: either
        SingleThreadedAugmenters (when no background workers are allowed) or
        LimitedLenWrappers that run the transforms in background processes.
        """
        # we use the patch size to determine whether we need 2D or 3D dataloaders. We also use it to determine whether
        # we need to use dummy 2D augmentation (in case of 3D training) and what our initial patch size should be
        patch_size = self.configuration_manager.patch_size
        dim = len(patch_size)

        # needed for deep supervision: how much do we need to downscale the segmentation targets for the different
        # outputs?
        deep_supervision_scales = self._get_deep_supervision_scales()

        (
            rotation_for_DA,
            do_dummy_2d_data_aug,
            initial_patch_size,
            mirror_axes,
        ) = self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()

        # training pipeline
        tr_transforms = self.get_training_transforms(
            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,
            order_resampling_data=3, order_resampling_seg=1,
            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,
            is_cascaded=self.is_cascaded, foreground_labels=self.label_manager.foreground_labels,
            regions=self.label_manager.foreground_regions if self.label_manager.has_regions else None,
            ignore_label=self.label_manager.ignore_label)

        # validation pipeline
        val_transforms = self.get_validation_transforms(deep_supervision_scales,
                                                        is_cascaded=self.is_cascaded,
                                                        foreground_labels=self.label_manager.foreground_labels,
                                                        regions=self.label_manager.foreground_regions if
                                                        self.label_manager.has_regions else None,
                                                        ignore_label=self.label_manager.ignore_label)

        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)

        allowed_num_processes = get_allowed_n_proc_DA()
        if allowed_num_processes == 0:
            # no background workers allowed -> run augmentation in the main process
            mt_gen_train = SingleThreadedAugmenter(dl_tr, tr_transforms)
            mt_gen_val = SingleThreadedAugmenter(dl_val, val_transforms)
        else:
            # multiprocess augmentation; the wrappers also fix the number of batches per epoch
            mt_gen_train = LimitedLenWrapper(self.num_iterations_per_epoch, data_loader=dl_tr, transform=tr_transforms,
                                             num_processes=allowed_num_processes, num_cached=6, seeds=None,
                                             pin_memory=self.device.type == 'cuda', wait_time=0.02)
            mt_gen_val = LimitedLenWrapper(self.num_val_iterations_per_epoch, data_loader=dl_val,
                                           transform=val_transforms, num_processes=max(1, allowed_num_processes // 2),
                                           num_cached=3, seeds=None, pin_memory=self.device.type == 'cuda',
                                           wait_time=0.02)
        return mt_gen_train, mt_gen_val
def get_plain_dataloaders(self, initial_patch_size: Tuple[int, ...], dim: int):
dataset_tr, dataset_val = self.get_tr_and_val_datasets()
if dim == 2:
dl_tr = nnUNetDataLoader2D(dataset_tr, self.batch_size,
initial_patch_size,
self.configuration_manager.patch_size,
self.label_manager,
oversample_foreground_percent=self.oversample_foreground_percent,
sampling_probabilities=None, pad_sides=None)
dl_val = nnUNetDataLoader2D(dataset_val, self.batch_size,
self.configuration_manager.patch_size,
self.configuration_manager.patch_size,
self.label_manager,
oversample_foreground_percent=self.oversample_foreground_percent,
sampling_probabilities=None, pad_sides=None)
else:
dl_tr = nnUNetDataLoader3D(dataset_tr, self.batch_size,
initial_patch_size,
self.configuration_manager.patch_size,
self.label_manager,
oversample_foreground_percent=self.oversample_foreground_percent,
sampling_probabilities=None, pad_sides=None)
dl_val = nnUNetDataLoader3D(dataset_val, self.batch_size,
self.configuration_manager.patch_size,
self.configuration_manager.patch_size,
self.label_manager,
oversample_foreground_percent=self.oversample_foreground_percent,
sampling_probabilities=None, pad_sides=None)
return dl_tr, dl_val
    @staticmethod
    def get_training_transforms(
            patch_size: Union[np.ndarray, Tuple[int]],
            rotation_for_DA: dict,
            deep_supervision_scales: Union[List, Tuple, None],
            mirror_axes: Tuple[int, ...],
            do_dummy_2d_data_aug: bool,
            order_resampling_data: int = 3,
            order_resampling_seg: int = 1,
            border_val_seg: int = -1,
            use_mask_for_norm: List[bool] = None,
            is_cascaded: bool = False,
            foreground_labels: Union[Tuple[int, ...], List[int]] = None,
            regions: List[Union[List[int], Tuple[int, ...], int]] = None,
            ignore_label: int = None,
    ) -> AbstractTransform:
        """
        Assemble the training augmentation pipeline.

        Order matters: spatial transforms first (optionally applied slice-wise via the
        2D<->3D conversion pair), then intensity transforms, mirroring, masking,
        cascade-specific augmentations, target renaming/region conversion and finally
        deep-supervision downsampling and tensor conversion.

        :param rotation_for_DA: dict with keys 'x', 'y', 'z' giving rotation angle ranges
        :param do_dummy_2d_data_aug: if True, spatial augmentation is done in 2D on the
            in-plane axes (used for very anisotropic 3D data)
        :return: a Compose of all configured transforms
        """
        tr_transforms = []
        if do_dummy_2d_data_aug:
            # axis 0 is the (pseudo-)slice axis and is excluded from low-res simulation
            ignore_axes = (0,)
            tr_transforms.append(Convert3DTo2DTransform())
            patch_size_spatial = patch_size[1:]
        else:
            patch_size_spatial = patch_size
            ignore_axes = None

        tr_transforms.append(SpatialTransform(
            patch_size_spatial, patch_center_dist_from_border=None,
            do_elastic_deform=False, alpha=(0, 0), sigma=(0, 0),
            do_rotation=True, angle_x=rotation_for_DA['x'], angle_y=rotation_for_DA['y'], angle_z=rotation_for_DA['z'],
            p_rot_per_axis=1,  # todo experiment with this
            do_scale=True, scale=(0.7, 1.4),
            border_mode_data="constant", border_cval_data=0, order_data=order_resampling_data,
            border_mode_seg="constant", border_cval_seg=border_val_seg, order_seg=order_resampling_seg,
            random_crop=False,  # random cropping is part of our dataloaders
            p_el_per_sample=0, p_scale_per_sample=0.2, p_rot_per_sample=0.2,
            independent_scale_for_each_axis=False  # todo experiment with this
        ))

        if do_dummy_2d_data_aug:
            # restore the original 3D layout after the slice-wise spatial transform
            tr_transforms.append(Convert2DTo3DTransform())

        # intensity augmentations, each applied with its own per-sample probability
        tr_transforms.append(GaussianNoiseTransform(p_per_sample=0.1))
        tr_transforms.append(GaussianBlurTransform((0.5, 1.), different_sigma_per_channel=True, p_per_sample=0.2,
                                                   p_per_channel=0.5))
        tr_transforms.append(BrightnessMultiplicativeTransform(multiplier_range=(0.75, 1.25), p_per_sample=0.15))
        tr_transforms.append(ContrastAugmentationTransform(p_per_sample=0.15))
        tr_transforms.append(SimulateLowResolutionTransform(zoom_range=(0.5, 1), per_channel=True,
                                                            p_per_channel=0.5,
                                                            order_downsample=0, order_upsample=3, p_per_sample=0.25,
                                                            ignore_axes=ignore_axes))
        # gamma once inverted, once not
        tr_transforms.append(GammaTransform((0.7, 1.5), True, True, retain_stats=True, p_per_sample=0.1))
        tr_transforms.append(GammaTransform((0.7, 1.5), False, True, retain_stats=True, p_per_sample=0.3))

        if mirror_axes is not None and len(mirror_axes) > 0:
            tr_transforms.append(MirrorTransform(mirror_axes))

        if use_mask_for_norm is not None and any(use_mask_for_norm):
            # zero out everything outside the nonzero mask for the channels that were
            # normalized within the mask
            tr_transforms.append(MaskTransform([i for i in range(len(use_mask_for_norm)) if use_mask_for_norm[i]],
                                               mask_idx_in_seg=0, set_outside_to=0))

        tr_transforms.append(RemoveLabelTransform(-1, 0))

        if is_cascaded:
            assert foreground_labels is not None, 'We need foreground_labels for cascade augmentations'
            # previous-stage segmentation becomes extra one-hot input channels which are
            # then corrupted to prevent the network from trusting them blindly
            tr_transforms.append(MoveSegAsOneHotToData(1, foreground_labels, 'seg', 'data'))
            tr_transforms.append(ApplyRandomBinaryOperatorTransform(
                channel_idx=list(range(-len(foreground_labels), 0)),
                p_per_sample=0.4,
                key="data",
                strel_size=(1, 8),
                p_per_label=1))
            tr_transforms.append(
                RemoveRandomConnectedComponentFromOneHotEncodingTransform(
                    channel_idx=list(range(-len(foreground_labels), 0)),
                    key="data",
                    p_per_sample=0.2,
                    fill_with_other_class_p=0,
                    dont_do_if_covers_more_than_x_percent=0.15))

        tr_transforms.append(RenameTransform('seg', 'target', True))

        if regions is not None:
            # the ignore label must also be converted
            tr_transforms.append(ConvertSegmentationToRegionsTransform(list(regions) + [ignore_label]
                                                                       if ignore_label is not None else regions,
                                                                       'target', 'target'))

        if deep_supervision_scales is not None:
            tr_transforms.append(DownsampleSegForDSTransform2(deep_supervision_scales, 0, input_key='target',
                                                              output_key='target'))
        tr_transforms.append(NumpyToTensor(['data', 'target'], 'float'))
        tr_transforms = Compose(tr_transforms)
        return tr_transforms
@staticmethod
def get_validation_transforms(
deep_supervision_scales: Union[List, Tuple, None],
is_cascaded: bool = False,
foreground_labels: Union[Tuple[int, ...], List[int]] = None,
regions: List[Union[List[int], Tuple[int, ...], int]] = None,
ignore_label: int = None,
) -> AbstractTransform:
val_transforms = []
val_transforms.append(RemoveLabelTransform(-1, 0))
if is_cascaded:
val_transforms.append(MoveSegAsOneHotToData(1, foreground_labels, 'seg', 'data'))
val_transforms.append(RenameTransform('seg', 'target', True))
if regions is not None:
# the ignore label must also be converted
val_transforms.append(ConvertSegmentationToRegionsTransform(list(regions) + [ignore_label]
if ignore_label is not None else regions,
'target', 'target'))
if deep_supervision_scales is not None:
val_transforms.append(DownsampleSegForDSTransform2(deep_supervision_scales, 0, input_key='target',
output_key='target'))
val_transforms.append(NumpyToTensor(['data', 'target'], 'float'))
val_transforms = Compose(val_transforms)
return val_transforms
def set_deep_supervision_enabled(self, enabled: bool):
"""
This function is specific for the default architecture in nnU-Net. If you change the architecture, there are
chances you need to change this as well!
"""
if self.is_ddp:
self.network.module.decoder.deep_supervision = enabled
else:
self.network.decoder.deep_supervision = enabled
    def on_train_start(self):
        """
        One-time setup before the first epoch: lazy initialization, dataset unpacking
        (rank 0 only, with a DDP barrier), dataloader creation and persisting
        plans/dataset metadata needed later for inference.
        """
        if not self.was_initialized:
            self.initialize()

        maybe_mkdir_p(self.output_folder)

        # make sure deep supervision is on in the network
        self.set_deep_supervision_enabled(self.enable_deep_supervision)

        self.print_plans()
        empty_cache(self.device)

        # maybe unpack
        if self.unpack_dataset and self.local_rank == 0:
            self.print_to_log_file('unpacking dataset...')
            unpack_dataset(self.preprocessed_dataset_folder, unpack_segmentation=True, overwrite_existing=False,
                           num_processes=max(1, round(get_allowed_n_proc_DA() // 2)))
            self.print_to_log_file('unpacking done...')

        if self.is_ddp:
            # all ranks wait until rank 0 has finished unpacking
            dist.barrier()

        # dataloaders must be instantiated here because they need access to the training data which may not be present
        # when doing inference
        self.dataloader_train, self.dataloader_val = self.get_dataloaders()

        # copy plans and dataset.json so that they can be used for restoring everything we need for inference
        save_json(self.plans_manager.plans, join(self.output_folder_base, 'plans.json'), sort_keys=False)
        save_json(self.dataset_json, join(self.output_folder_base, 'dataset.json'), sort_keys=False)

        # we don't really need the fingerprint but its still handy to have it with the others
        shutil.copy(join(self.preprocessed_dataset_folder_base, 'dataset_fingerprint.json'),
                    join(self.output_folder_base, 'dataset_fingerprint.json'))

        # produces a pdf in output folder
        self.plot_network_architecture()

        self._save_debug_information()

        # print(f"batch size: {self.batch_size}")
        # print(f"oversample: {self.oversample_foreground_percent}")
    def on_train_end(self):
        """
        Write the final checkpoint, remove the now-redundant 'latest' checkpoint and
        shut down the background dataloader workers.
        """
        # dirty hack because on_epoch_end increments the epoch counter and this is executed afterwards.
        # This will lead to the wrong current epoch to be stored
        self.current_epoch -= 1
        self.save_checkpoint(join(self.output_folder, "checkpoint_final.pth"))
        self.current_epoch += 1

        # now we can delete latest
        if self.local_rank == 0 and isfile(join(self.output_folder, "checkpoint_latest.pth")):
            os.remove(join(self.output_folder, "checkpoint_latest.pth"))

        # shut down dataloaders; stdout is redirected to devnull to silence their
        # shutdown chatter, then restored
        old_stdout = sys.stdout
        with open(os.devnull, 'w') as f:
            sys.stdout = f
            if self.dataloader_train is not None:
                self.dataloader_train._finish()
            if self.dataloader_val is not None:
                self.dataloader_val._finish()
            sys.stdout = old_stdout

        empty_cache(self.device)
        self.print_to_log_file("Training done.")
def on_train_epoch_start(self):
self.network.train()
self.lr_scheduler.step(self.current_epoch)
self.print_to_log_file('')
self.print_to_log_file(f'Epoch {self.current_epoch}')
self.print_to_log_file(
f"Current learning rate: {np.round(self.optimizer.param_groups[0]['lr'], decimals=5)}")
# lrs are the same for all workers so we don't need to gather them in case of DDP training
self.logger.log('lrs', self.optimizer.param_groups[0]['lr'], self.current_epoch)
    def train_step(self, batch: dict) -> dict:
        """
        Run one optimization step on a single batch.

        :param batch: dict with 'data' (input tensor) and 'target' (tensor, or list of
            tensors when deep supervision is enabled)
        :return: dict with key 'loss' holding the detached loss as a numpy array
        """
        data = batch['data']
        target = batch['target']

        data = data.to(self.device, non_blocking=True)
        if isinstance(target, list):
            # deep supervision: one target per output resolution
            target = [i.to(self.device, non_blocking=True) for i in target]
        else:
            target = target.to(self.device, non_blocking=True)

        self.optimizer.zero_grad(set_to_none=True)
        # autocast caveats: on 'cpu' it is very slow and should be disabled; on 'mps' it
        # raises NotImplementedError even with enabled=False. Therefore mixed precision
        # is only activated on cuda devices, with a no-op context everywhere else.
        with autocast(self.device.type, enabled=True) if self.device.type == 'cuda' else dummy_context():
            output = self.network(data)
            # del data
            l = self.loss(output, target)

        if self.grad_scaler is not None:
            # AMP path, order matters: scale -> backward -> unscale -> clip -> step -> update
            self.grad_scaler.scale(l).backward()
            self.grad_scaler.unscale_(self.optimizer)
            torch.nn.utils.clip_grad_norm_(self.network.parameters(), 12)
            self.grad_scaler.step(self.optimizer)
            self.grad_scaler.update()
        else:
            l.backward()
            torch.nn.utils.clip_grad_norm_(self.network.parameters(), 12)
            self.optimizer.step()
        return {'loss': l.detach().cpu().numpy()}
    def on_train_epoch_end(self, train_outputs: List[dict]):
        """
        Aggregate the per-batch training losses of this epoch (across all ranks when
        running DDP) and log the mean.

        :param train_outputs: list of per-batch dicts as returned by train_step
        """
        outputs = collate_outputs(train_outputs)

        if self.is_ddp:
            # gather the loss arrays from every rank and average over everything
            losses_tr = [None for _ in range(dist.get_world_size())]
            dist.all_gather_object(losses_tr, outputs['loss'])
            loss_here = np.vstack(losses_tr).mean()
        else:
            loss_here = np.mean(outputs['loss'])

        self.logger.log('train_losses', loss_here, self.current_epoch)
def on_validation_epoch_start(self):
self.network.eval()
    def validation_step(self, batch: dict) -> dict:
        """
        Forward one validation batch and compute the loss plus per-class tp/fp/fn counts
        used for the online 'pseudo dice' (the green line in the progress plot).

        :param batch: dict with 'data' and 'target' (tensor or list when DS is enabled)
        :return: dict with 'loss' and per-class 'tp_hard'/'fp_hard'/'fn_hard' numpy arrays
        """
        data = batch['data']
        target = batch['target']

        data = data.to(self.device, non_blocking=True)
        if isinstance(target, list):
            target = [i.to(self.device, non_blocking=True) for i in target]
        else:
            target = target.to(self.device, non_blocking=True)

        # autocast caveats: very slow on 'cpu', raises on 'mps' even with enabled=False,
        # so mixed precision is only used on cuda devices
        with autocast(self.device.type, enabled=True) if self.device.type == 'cuda' else dummy_context():
            output = self.network(data)
            del data
            l = self.loss(output, target)

        # we only need the output with the highest output resolution (if DS enabled)
        if self.enable_deep_supervision:
            output = output[0]
            target = target[0]

        # the following is needed for online evaluation. Fake dice (green line)
        # sum over batch and spatial axes, keep the class axis (1)
        axes = [0] + list(range(2, output.ndim))

        if self.label_manager.has_regions:
            # region training uses sigmoid + per-region thresholding
            predicted_segmentation_onehot = (torch.sigmoid(output) > 0.5).long()
        else:
            # no need for softmax
            output_seg = output.argmax(1)[:, None]
            predicted_segmentation_onehot = torch.zeros(output.shape, device=output.device, dtype=torch.float32)
            predicted_segmentation_onehot.scatter_(1, output_seg, 1)
            del output_seg

        if self.label_manager.has_ignore_label:
            if not self.label_manager.has_regions:
                mask = (target != self.label_manager.ignore_label).float()
                # CAREFUL that you don't rely on target after this line!
                target[target == self.label_manager.ignore_label] = 0
            else:
                # the last region channel encodes the ignore label
                mask = 1 - target[:, -1:]
                # CAREFUL that you don't rely on target after this line!
                target = target[:, :-1]
        else:
            mask = None

        tp, fp, fn, _ = get_tp_fp_fn_tn(predicted_segmentation_onehot, target, axes=axes, mask=mask)

        tp_hard = tp.detach().cpu().numpy()
        fp_hard = fp.detach().cpu().numpy()
        fn_hard = fn.detach().cpu().numpy()
        if not self.label_manager.has_regions:
            # if we train with regions all segmentation heads predict some kind of foreground. In conventional
            # (softmax training) there needs to be one output for the background. We are not interested in the
            # background Dice
            # [1:] in order to remove background
            tp_hard = tp_hard[1:]
            fp_hard = fp_hard[1:]
            fn_hard = fn_hard[1:]

        return {'loss': l.detach().cpu().numpy(), 'tp_hard': tp_hard, 'fp_hard': fp_hard, 'fn_hard': fn_hard}
    def on_validation_epoch_end(self, val_outputs: List[dict]):
        """
        Sum tp/fp/fn over the epoch (and over all ranks when running DDP), compute the
        global per-class Dice and log Dice and validation loss.

        :param val_outputs: list of per-batch dicts as returned by validation_step
        """
        outputs_collated = collate_outputs(val_outputs)
        tp = np.sum(outputs_collated['tp_hard'], 0)
        fp = np.sum(outputs_collated['fp_hard'], 0)
        fn = np.sum(outputs_collated['fn_hard'], 0)

        if self.is_ddp:
            world_size = dist.get_world_size()

            tps = [None for _ in range(world_size)]
            dist.all_gather_object(tps, tp)
            tp = np.vstack([i[None] for i in tps]).sum(0)

            fps = [None for _ in range(world_size)]
            dist.all_gather_object(fps, fp)
            fp = np.vstack([i[None] for i in fps]).sum(0)

            fns = [None for _ in range(world_size)]
            dist.all_gather_object(fns, fn)
            fn = np.vstack([i[None] for i in fns]).sum(0)

            losses_val = [None for _ in range(world_size)]
            dist.all_gather_object(losses_val, outputs_collated['loss'])
            loss_here = np.vstack(losses_val).mean()
        else:
            loss_here = np.mean(outputs_collated['loss'])

        # 'global' Dice: computed from the summed counts, not averaged per-case Dice
        global_dc_per_class = [i for i in [2 * i / (2 * i + j + k) for i, j, k in zip(tp, fp, fn)]]
        mean_fg_dice = np.nanmean(global_dc_per_class)
        self.logger.log('mean_fg_dice', mean_fg_dice, self.current_epoch)
        self.logger.log('dice_per_class_or_region', global_dc_per_class, self.current_epoch)
        self.logger.log('val_losses', loss_here, self.current_epoch)
def on_epoch_start(self):
self.logger.log('epoch_start_timestamps', time(), self.current_epoch)
    def on_epoch_end(self):
        """
        Log the epoch statistics, write the periodic 'latest' checkpoint and the 'best'
        checkpoint (based on EMA pseudo Dice), plot progress and advance the epoch counter.
        """
        self.logger.log('epoch_end_timestamps', time(), self.current_epoch)

        self.print_to_log_file('train_loss', np.round(self.logger.my_fantastic_logging['train_losses'][-1], decimals=4))
        self.print_to_log_file('val_loss', np.round(self.logger.my_fantastic_logging['val_losses'][-1], decimals=4))
        self.print_to_log_file('Pseudo dice', [np.round(i, decimals=4) for i in
                                               self.logger.my_fantastic_logging['dice_per_class_or_region'][-1]])
        self.print_to_log_file(
            f"Epoch time: {np.round(self.logger.my_fantastic_logging['epoch_end_timestamps'][-1] - self.logger.my_fantastic_logging['epoch_start_timestamps'][-1], decimals=2)} s")

        # handling periodic checkpointing; the final epoch is covered by checkpoint_final
        current_epoch = self.current_epoch
        if (current_epoch + 1) % self.save_every == 0 and current_epoch != (self.num_epochs - 1):
            self.save_checkpoint(join(self.output_folder, 'checkpoint_latest.pth'))

        # handle 'best' checkpointing. ema_fg_dice is computed by the logger and can be accessed like this
        if self._best_ema is None or self.logger.my_fantastic_logging['ema_fg_dice'][-1] > self._best_ema:
            self._best_ema = self.logger.my_fantastic_logging['ema_fg_dice'][-1]
            self.print_to_log_file(f"Yayy! New best EMA pseudo Dice: {np.round(self._best_ema, decimals=4)}")
            self.save_checkpoint(join(self.output_folder, 'checkpoint_best.pth'))

        if self.local_rank == 0:
            self.logger.plot_progress_png(self.output_folder)

        self.current_epoch += 1
    def save_checkpoint(self, filename: str) -> None:
        """
        Serialize the full trainer state to `filename`.

        Only rank 0 writes; a no-op when checkpointing is disabled. Weights are saved
        from the unwrapped module so they can be restored without DDP/torch.compile.
        """
        if self.local_rank == 0:
            if not self.disable_checkpointing:
                if self.is_ddp:
                    mod = self.network.module
                else:
                    mod = self.network
                if isinstance(mod, OptimizedModule):
                    # torch.compile wraps the real module; save the original's weights
                    mod = mod._orig_mod

                checkpoint = {
                    'network_weights': mod.state_dict(),
                    'optimizer_state': self.optimizer.state_dict(),
                    'grad_scaler_state': self.grad_scaler.state_dict() if self.grad_scaler is not None else None,
                    'logging': self.logger.get_checkpoint(),
                    '_best_ema': self._best_ema,
                    # +1 so that resuming continues with the next epoch
                    'current_epoch': self.current_epoch + 1,
                    'init_args': self.my_init_kwargs,
                    'trainer_name': self.__class__.__name__,
                    'inference_allowed_mirroring_axes': self.inference_allowed_mirroring_axes,
                }
                torch.save(checkpoint, filename)
            else:
                self.print_to_log_file('No checkpoint written, checkpointing is disabled')
def load_checkpoint(self, filename_or_checkpoint: Union[dict, str]) -> None:
if not self.was_initialized:
self.initialize()
if isinstance(filename_or_checkpoint, str):
checkpoint = torch.load(filename_or_checkpoint, map_location=self.device)
# if state dict comes from nn.DataParallel but we use non-parallel model here then the state dict keys do not
# match. Use heuristic to make it match
new_state_dict = {}
for k, value in checkpoint['network_weights'].items():
key = k
if key not in self.network.state_dict().keys() and key.startswith('module.'):
key = key[7:]
new_state_dict[key] = value
self.my_init_kwargs = checkpoint['init_args']
self.current_epoch = checkpoint['current_epoch']
self.logger.load_checkpoint(checkpoint['logging'])
self._best_ema = checkpoint['_best_ema']
self.inference_allowed_mirroring_axes = checkpoint[
'inference_allowed_mirroring_axes'] if 'inference_allowed_mirroring_axes' in checkpoint.keys() else self.inference_allowed_mirroring_axes
# messing with state dict naming schemes. Facepalm.
if self.is_ddp:
if isinstance(self.network.module, OptimizedModule):
self.network.module._orig_mod.load_state_dict(new_state_dict)
else:
self.network.module.load_state_dict(new_state_dict)
else:
if isinstance(self.network, OptimizedModule):
self.network._orig_mod.load_state_dict(new_state_dict)
else:
self.network.load_state_dict(new_state_dict)
self.optimizer.load_state_dict(checkpoint['optimizer_state'])
if self.grad_scaler is not None:
if checkpoint['grad_scaler_state'] is not None:
self.grad_scaler.load_state_dict(checkpoint['grad_scaler_state'])
    def perform_actual_validation(self, save_probabilities: bool = False):
        """
        Run full sliding-window inference on this fold's validation cases, export the
        predictions in background processes and compute the final validation metrics.

        In DDP, the validation keys are strided across ranks and only rank 0 computes
        the summary metrics. Deep supervision is disabled for inference and re-enabled
        at the end.

        :param save_probabilities: if True, also export the predicted probabilities
        """
        self.set_deep_supervision_enabled(False)
        self.network.eval()

        predictor = nnUNetPredictor(tile_step_size=0.5, use_gaussian=True, use_mirroring=True,
                                    perform_everything_on_gpu=True, device=self.device, verbose=False,
                                    verbose_preprocessing=False, allow_tqdm=False)
        predictor.manual_initialization(self.network, self.plans_manager, self.configuration_manager, None,
                                        self.dataset_json, self.__class__.__name__,
                                        self.inference_allowed_mirroring_axes)

        with multiprocessing.get_context("spawn").Pool(default_num_processes) as segmentation_export_pool:
            worker_list = [i for i in segmentation_export_pool._pool]
            validation_output_folder = join(self.output_folder, 'validation')
            maybe_mkdir_p(validation_output_folder)

            # we cannot use self.get_tr_and_val_datasets() here because we might be DDP and then we have to distribute
            # the validation keys across the workers.
            _, val_keys = self.do_split()
            if self.is_ddp:
                val_keys = val_keys[self.local_rank:: dist.get_world_size()]

            dataset_val = nnUNetDataset(self.preprocessed_dataset_folder, val_keys,
                                        folder_with_segs_from_previous_stage=self.folder_with_segs_from_previous_stage,
                                        num_images_properties_loading_threshold=0)

            next_stages = self.configuration_manager.next_stage_names

            if next_stages is not None:
                _ = [maybe_mkdir_p(join(self.output_folder_base, 'predicted_next_stage', n)) for n in next_stages]

            results = []

            for k in dataset_val.keys():
                # throttle: wait until the export pool has capacity before predicting more
                proceed = not check_workers_alive_and_busy(segmentation_export_pool, worker_list, results,
                                                           allowed_num_queued=2)
                while not proceed:
                    sleep(0.1)
                    proceed = not check_workers_alive_and_busy(segmentation_export_pool, worker_list, results,
                                                               allowed_num_queued=2)

                self.print_to_log_file(f"predicting {k}")
                data, seg, properties = dataset_val.load_case(k)

                if self.is_cascaded:
                    # cascade: previous-stage segmentation becomes extra one-hot input channels
                    data = np.vstack((data, convert_labelmap_to_one_hot(seg[-1], self.label_manager.foreground_labels,
                                                                        output_dtype=data.dtype)))
                with warnings.catch_warnings():
                    # ignore 'The given NumPy array is not writable' warning
                    warnings.simplefilter("ignore")
                    data = torch.from_numpy(data)

                output_filename_truncated = join(validation_output_folder, k)

                try:
                    prediction = predictor.predict_sliding_window_return_logits(data)
                except RuntimeError:
                    # likely GPU OOM -> retry once on CPU, then restore the GPU setting
                    predictor.perform_everything_on_gpu = False
                    prediction = predictor.predict_sliding_window_return_logits(data)
                    predictor.perform_everything_on_gpu = True

                prediction = prediction.cpu()

                # this needs to go into background processes
                results.append(
                    segmentation_export_pool.starmap_async(
                        export_prediction_from_logits, (
                            (prediction, properties, self.configuration_manager, self.plans_manager,
                             self.dataset_json, output_filename_truncated, save_probabilities),
                        )
                    )
                )
                # for debug purposes
                # export_prediction(prediction_for_export, properties, self.configuration, self.plans, self.dataset_json,
                #              output_filename_truncated, save_probabilities)

                # if needed, export the softmax prediction for the next stage
                if next_stages is not None:
                    for n in next_stages:
                        next_stage_config_manager = self.plans_manager.get_configuration(n)
                        expected_preprocessed_folder = join(nnUNet_preprocessed, self.plans_manager.dataset_name,
                                                            next_stage_config_manager.data_identifier)

                        try:
                            # we do this so that we can use load_case and do not have to hard code how loading training cases is implemented
                            tmp = nnUNetDataset(expected_preprocessed_folder, [k],
                                                num_images_properties_loading_threshold=0)
                            d, s, p = tmp.load_case(k)
                        except FileNotFoundError:
                            self.print_to_log_file(
                                f"Predicting next stage {n} failed for case {k} because the preprocessed file is missing! "
                                f"Run the preprocessing for this configuration first!")
                            continue

                        target_shape = d.shape[1:]
                        output_folder = join(self.output_folder_base, 'predicted_next_stage', n)
                        output_file = join(output_folder, k + '.npz')

                        # resample_and_save(prediction, target_shape, output_file, self.plans_manager, self.configuration_manager, properties,
                        #                   self.dataset_json)
                        results.append(segmentation_export_pool.starmap_async(
                            resample_and_save, (
                                (prediction, target_shape, output_file, self.plans_manager,
                                 self.configuration_manager,
                                 properties,
                                 self.dataset_json),
                            )
                        ))

            # block until all background exports have finished
            _ = [r.get() for r in results]

        if self.is_ddp:
            dist.barrier()

        if self.local_rank == 0:
            metrics = compute_metrics_on_folder(join(self.preprocessed_dataset_folder_base, 'gt_segmentations'),
                                                validation_output_folder,
                                                join(validation_output_folder, 'summary.json'),
                                                self.plans_manager.image_reader_writer_class(),
                                                self.dataset_json["file_ending"],
                                                self.label_manager.foreground_regions if self.label_manager.has_regions else
                                                self.label_manager.foreground_labels,
                                                self.label_manager.ignore_label, chill=True)
            self.print_to_log_file("Validation complete", also_print_to_console=True)
            self.print_to_log_file("Mean Validation Dice: ", (metrics['foreground_mean']["Dice"]), also_print_to_console=True)

        self.set_deep_supervision_enabled(True)
        compute_gaussian.cache_clear()
def run_training(self):
self.on_train_start()
for epoch in range(self.current_epoch, self.num_epochs):
self.on_epoch_start()
self.on_train_epoch_start()
train_outputs = []
for batch_id in range(self.num_iterations_per_epoch):
train_outputs.append(self.train_step(next(self.dataloader_train)))
self.on_train_epoch_end(train_outputs)
with torch.no_grad():
self.on_validation_epoch_start()
val_outputs = []
for batch_id in range(self.num_val_iterations_per_epoch):
val_outputs.append(self.validation_step(next(self.dataloader_val)))
self.on_validation_epoch_end(val_outputs)
self.on_epoch_end()
self.on_train_end()
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/nnUNetTrainer_swin.py
================================================
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
import torch
from dynamic_network_architectures.architectures.unet import ResidualEncoderUNet, PlainConvUNet
from dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_batchnorm
from dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0, InitWeights_He
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from nnunetv2.utilities.plans_handling.plans_handler import ConfigurationManager, PlansManager
from torch import nn
from nnunetv2.training.nnUNetTrainer.vit import Swin
class nnUNetTrainer_swin(nnUNetTrainer):
    """
    nnU-Net trainer variant that replaces the default U-Net with the Swin network
    (nnunetv2.training.nnUNetTrainer.vit.Swin). Deep supervision is disabled because
    the Swin network is built with a single segmentation output.
    """

    def __init__(
        self,
        plans: dict,
        configuration: str,
        fold: int,
        dataset_json: dict,
        unpack_dataset: bool = True,
        device: torch.device = torch.device("cuda"),
    ):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        # Swin does not use nnU-Net's multi-resolution heads
        self.enable_deep_supervision = False
        self.num_epochs = 250

    @staticmethod
    def build_network_architecture(plans_manager: PlansManager,
                                   dataset_json,
                                   configuration_manager: ConfigurationManager,
                                   num_input_channels,
                                   enable_deep_supervision=False) -> nn.Module:
        """
        Build the Swin segmentation network.

        Unlike the default nnU-Net builder, the architecture is fixed to Swin, so the
        plan-derived kernel/stage configuration and the ResidualEncoderUNet-specific
        initialization are not needed (the original code computed them but never used
        them).
        """
        label_manager = plans_manager.get_label_manager(dataset_json)
        model = Swin(
            input_channels=num_input_channels,
            num_classes=label_manager.num_segmentation_heads,
        )
        model.apply(InitWeights_He(1e-2))
        return model

    def set_deep_supervision_enabled(self, enabled: bool):
        # Swin has no nnU-Net-style decoder with a deep_supervision flag -> no-op
        return
class nnUNetTrainer_swin_pre(nnUNetTrainer):
    """Swin trainer that initializes the backbone from VoCo self-supervised weights."""

    # Hard-coded checkpoint location, lifted out of the method so it can be
    # overridden in subclasses without copying the whole builder.
    PRETRAINED_WEIGHTS_PATH = "/home/linshan/VoCo/runs/logs_10k_swinv2_abdomen/current_model.pth"

    def __init__(
        self,
        plans: dict,
        configuration: str,
        fold: int,
        dataset_json: dict,
        unpack_dataset: bool = True,
        device: torch.device = torch.device("cuda"),
    ):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        # Swin produces a single output head, so deep supervision stays off.
        self.enable_deep_supervision = False
        self.num_epochs = 250

    @staticmethod
    def build_network_architecture(plans_manager: PlansManager,
                                   dataset_json,
                                   configuration_manager: ConfigurationManager,
                                   num_input_channels,
                                   enable_deep_supervision: bool = False) -> nn.Module:
        """Build the Swin network and load the VoCo pre-trained encoder weights.

        The checkpoint is loaded with strict=False so only matching (encoder)
        parameters are initialized; the decoder keeps its fresh weights.

        :raises ValueError: if the pre-trained checkpoint cannot be loaded.
        """
        label_manager = plans_manager.get_label_manager(dataset_json)
        segmentation_network_class_name = 'swin'
        mapping = {
            'swin': Swin
        }
        assert segmentation_network_class_name in mapping.keys(), \
            'The network architecture specified by the plans file is non-standard (maybe your own?). ' \
            'You\'ll have to dive into either this function (get_network_from_plans) or ' \
            'the init of your nnUNetModule to accommodate that.'
        network_class = mapping[segmentation_network_class_name]

        # network class name!!
        model = network_class(
            input_channels=num_input_channels,
            num_classes=label_manager.num_segmentation_heads,
        )
        try:
            model_dict = torch.load(nnUNetTrainer_swin_pre.PRETRAINED_WEIGHTS_PATH,
                                    map_location=torch.device('cpu'))
            # Bugfix: the original `try: state_dict = model_dict / except:` was dead
            # code (a plain assignment cannot raise), so a checkpoint wrapped under
            # "state_dict" was never unwrapped. Unwrap explicitly instead.
            if isinstance(model_dict, dict) and "state_dict" in model_dict:
                state_dict = model_dict["state_dict"]
            else:
                state_dict = model_dict
            # if mri
            state_dict = delete_patch_embed(state_dict)
            if state_dict and "module." in next(iter(state_dict)):
                print("Tag 'module.' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("module.", "")] = state_dict.pop(key)
            if state_dict and "swin_vit" in next(iter(state_dict)):
                print("Tag 'swin_vit' found in state dict - fixing!")
                for key in list(state_dict.keys()):
                    state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
            # We now load model weights, setting param `strict` to False, i.e.:
            # this load the encoder weights (Swin-ViT, SSL pre-trained), but leaves
            # the decoder weights untouched (CNN UNet decoder).
            model.load_state_dict(state_dict, strict=False)
            print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
        except (FileNotFoundError, RuntimeError) as e:
            # Bugfix: torch.load raises FileNotFoundError / RuntimeError, never
            # ValueError, so the original `except ValueError` could never trigger
            # and the intended message was never shown.
            raise ValueError("Self-supervised pre-trained weights not available") from e
        return model

    def set_deep_supervision_enabled(self, enabled: bool):
        """No-op: deep supervision is permanently disabled for this trainer."""
        return
def delete_patch_embed(state_dict):
    """Invalidate patch-embedding and first-encoder keys in a checkpoint state dict.

    Keys containing "swinViT.patch_embed" or "encoder1.layer" are renamed to a
    "bad"-prefixed name so a later load_state_dict(strict=False) will skip them;
    all other keys pass through unchanged (relative order is preserved).
    """
    for marker in ("swinViT.patch_embed", "encoder1.layer"):
        for name in list(state_dict):
            # pop + reinsert keeps the original relative key order
            state_dict[name.replace(marker, "bad")] = state_dict.pop(name)
    return state_dict
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/benchmarking/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs.py
================================================
import torch
from batchgenerators.utilities.file_and_folder_operations import save_json, join, isfile, load_json
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from torch import distributed as dist
class nnUNetTrainerBenchmark_5epochs(nnUNetTrainer):
    """Trainer used purely for speed benchmarking.

    Runs 5 epochs at fold 0 with checkpointing and final validation disabled,
    then appends the fastest epoch time (plus environment info) to
    benchmark_result.json in the output folder.
    """

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict, unpack_dataset: bool = True,
                 device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        assert self.fold == 0, "It makes absolutely no sense to specify a certain fold. Stick with 0 so that we can parse the results."
        self.disable_checkpointing = True
        self.num_epochs = 5
        assert torch.cuda.is_available(), "This only works on GPU"
        # set by run_training when a RuntimeError occurs (typically CUDA OOM)
        self.crashed_with_runtime_error = False

    def perform_actual_validation(self, save_probabilities: bool = False):
        # validation results are irrelevant for a speed benchmark
        pass

    def save_checkpoint(self, filename: str) -> None:
        # do not trust people to remember that self.disable_checkpointing must be True for this trainer
        pass

    def run_training(self):
        try:
            super().run_training()
        except RuntimeError:
            # Record the failure (usually insufficient VRAM) instead of crashing,
            # so on_train_end can still write a benchmark entry.
            self.crashed_with_runtime_error = True

    def on_train_end(self):
        super().on_train_end()

        # only one process writes the result file under DDP
        if not self.is_ddp or self.local_rank == 0:
            torch_version = torch.__version__
            cudnn_version = torch.backends.cudnn.version()
            gpu_name = torch.cuda.get_device_name()
            if self.crashed_with_runtime_error:
                fastest_epoch = 'Not enough VRAM!'
            else:
                epoch_times = [i - j for i, j in zip(self.logger.my_fantastic_logging['epoch_end_timestamps'],
                                                     self.logger.my_fantastic_logging['epoch_start_timestamps'])]
                fastest_epoch = min(epoch_times)
            if self.is_ddp:
                num_gpus = dist.get_world_size()
            else:
                num_gpus = 1

            benchmark_result_file = join(self.output_folder, 'benchmark_result.json')
            if isfile(benchmark_result_file):
                old_results = load_json(benchmark_result_file)
            else:
                old_results = {}
            # generate some unique key
            my_key = f"{cudnn_version}__{torch_version.replace(' ', '')}__{gpu_name.replace(' ', '')}__gpus_{num_gpus}"
            old_results[my_key] = {
                'torch_version': torch_version,
                'cudnn_version': cudnn_version,
                'gpu_name': gpu_name,
                'fastest_epoch': fastest_epoch,
                'num_gpus': num_gpus,
            }
            # reuse the already-computed path instead of join()-ing a second time
            save_json(old_results, benchmark_result_file)
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs_noDataLoading.py
================================================
import torch
from nnunetv2.training.nnUNetTrainer.variants.benchmarking.nnUNetTrainerBenchmark_5epochs import (
nnUNetTrainerBenchmark_5epochs,
)
from nnunetv2.utilities.label_handling.label_handling import determine_num_input_channels
class nnUNetTrainerBenchmark_5epochs_noDataLoading(nnUNetTrainerBenchmark_5epochs):
    """Benchmark trainer that replaces the dataloaders with one cached dummy batch,
    measuring pure GPU training speed without any data-loading overhead."""

    def __init__(
        self,
        plans: dict,
        configuration: str,
        fold: int,
        dataset_json: dict,
        unpack_dataset: bool = True,
        device: torch.device = torch.device("cuda"),
    ):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self._set_batch_size_and_oversample()
        num_input_channels = determine_num_input_channels(
            self.plans_manager, self.configuration_manager, self.dataset_json
        )
        patch_size = self.configuration_manager.patch_size
        dummy_data = torch.rand((self.batch_size, num_input_channels, *patch_size), device=self.device)
        if self.enable_deep_supervision:
            # one random target per deep-supervision resolution, scaled by the ds factors
            dummy_target = [
                torch.round(
                    torch.rand((self.batch_size, 1, *[int(i * j) for i, j in zip(patch_size, k)]), device=self.device)
                    * max(self.label_manager.all_labels)
                )
                for k in self._get_deep_supervision_scales()
            ]
        else:
            # Bugfix: the original raised NotImplementedError("This trainer does not
            # support deep supervision") in THIS branch, i.e. it crashed precisely
            # when deep supervision was disabled (the message contradicted the
            # condition). Build a single full-resolution dummy target instead, so
            # both modes can be benchmarked.
            dummy_target = torch.round(
                torch.rand((self.batch_size, 1, *patch_size), device=self.device)
                * max(self.label_manager.all_labels)
            )
        self.dummy_batch = {"data": dummy_data, "target": dummy_target}

    def get_dataloaders(self):
        # no real data loading in this benchmark
        return None, None

    def run_training(self):
        """Training loop driven entirely by the cached dummy batch."""
        try:
            self.on_train_start()
            for epoch in range(self.current_epoch, self.num_epochs):
                self.on_epoch_start()
                self.on_train_epoch_start()
                train_outputs = []
                for batch_id in range(self.num_iterations_per_epoch):
                    train_outputs.append(self.train_step(self.dummy_batch))
                self.on_train_epoch_end(train_outputs)
                with torch.no_grad():
                    self.on_validation_epoch_start()
                    val_outputs = []
                    for batch_id in range(self.num_val_iterations_per_epoch):
                        val_outputs.append(self.validation_step(self.dummy_batch))
                    self.on_validation_epoch_end(val_outputs)
                self.on_epoch_end()
            self.on_train_end()
        except RuntimeError:
            # inherited flag: lets the benchmark report 'Not enough VRAM!'
            self.crashed_with_runtime_error = True
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py
================================================
from typing import List, Union, Tuple
import numpy as np
import torch
from batchgenerators.dataloading.single_threaded_augmenter import SingleThreadedAugmenter
from batchgenerators.transforms.abstract_transforms import AbstractTransform, Compose
from batchgenerators.transforms.color_transforms import BrightnessTransform, ContrastAugmentationTransform, \
GammaTransform
from batchgenerators.transforms.local_transforms import BrightnessGradientAdditiveTransform, LocalGammaTransform
from batchgenerators.transforms.noise_transforms import MedianFilterTransform, GaussianBlurTransform, \
GaussianNoiseTransform, BlankRectangleTransform, SharpeningTransform
from batchgenerators.transforms.resample_transforms import SimulateLowResolutionTransform
from batchgenerators.transforms.spatial_transforms import SpatialTransform, Rot90Transform, TransposeAxesTransform, \
MirrorTransform
from batchgenerators.transforms.utility_transforms import OneOfTransform, RemoveLabelTransform, RenameTransform, \
NumpyToTensor
from nnunetv2.configuration import ANISO_THRESHOLD
from nnunetv2.training.data_augmentation.compute_initial_patch_size import get_patch_size
from nnunetv2.training.data_augmentation.custom_transforms.cascade_transforms import MoveSegAsOneHotToData, \
ApplyRandomBinaryOperatorTransform, RemoveRandomConnectedComponentFromOneHotEncodingTransform
from nnunetv2.training.data_augmentation.custom_transforms.deep_supervision_donwsampling import \
DownsampleSegForDSTransform2
from nnunetv2.training.data_augmentation.custom_transforms.limited_length_multithreaded_augmenter import \
LimitedLenWrapper
from nnunetv2.training.data_augmentation.custom_transforms.masking import MaskTransform
from nnunetv2.training.data_augmentation.custom_transforms.region_based_training import \
ConvertSegmentationToRegionsTransform
from nnunetv2.training.data_augmentation.custom_transforms.transforms_for_dummy_2d import Convert3DTo2DTransform, \
Convert2DTo3DTransform
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA
class nnUNetTrainerDA5(nnUNetTrainer):
    """nnU-Net trainer variant with a heavier 'DA5' data-augmentation pipeline.

    Compared to the default trainer it adds, among others: median filtering,
    Rot90/transpose augmentation for (near-)isotropic patches, blank-rectangle
    masking, local brightness-gradient and local-gamma transforms, and
    sharpening.
    """

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        """
        This function is stupid and certainly one of the weakest spots of this implementation. Not entirely sure how we can fix it.
        """
        # Rotation ranges, the dummy-2D decision and the mirror axes are all
        # derived from the patch size and its anisotropy.
        patch_size = self.configuration_manager.patch_size
        dim = len(patch_size)
        # todo rotation should be defined dynamically based on patch size (more isotropic patch sizes = more rotation)
        if dim == 2:
            do_dummy_2d_data_aug = False
            # todo revisit this parametrization
            if max(patch_size) / min(patch_size) > 1.5:
                # strongly anisotropic 2D patch: restrict in-plane rotation to +/- 15 deg
                rotation_for_DA = {
                    'x': (-15. / 360 * 2. * np.pi, 15. / 360 * 2. * np.pi),
                    'y': (0, 0),
                    'z': (0, 0)
                }
            else:
                # near-isotropic 2D patch: allow full +/- 180 deg in-plane rotation
                rotation_for_DA = {
                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. * np.pi),
                    'y': (0, 0),
                    'z': (0, 0)
                }
            mirror_axes = (0, 1)
        elif dim == 3:
            # todo this is not ideal. We could also have patch_size (64, 16, 128) in which case a full 180deg 2d rot would be bad
            # order of the axes is determined by spacing, not image size
            do_dummy_2d_data_aug = (max(patch_size) / patch_size[0]) > ANISO_THRESHOLD
            if do_dummy_2d_data_aug:
                # why do we rotate 180 deg here all the time? We should also restrict it
                rotation_for_DA = {
                    'x': (-180. / 360 * 2. * np.pi, 180. / 360 * 2. * np.pi),
                    'y': (0, 0),
                    'z': (0, 0)
                }
            else:
                # isotropic 3D patch: moderate rotation around all three axes
                rotation_for_DA = {
                    'x': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),
                    'y': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),
                    'z': (-30. / 360 * 2. * np.pi, 30. / 360 * 2. * np.pi),
                }
            mirror_axes = (0, 1, 2)
        else:
            raise RuntimeError()

        # todo this function is stupid. It doesn't even use the correct scale range (we keep things as they were in the
        # old nnunet for now)
        # The sampled patch must be larger than the final one so that rotation
        # and scaling do not introduce out-of-bounds regions.
        initial_patch_size = get_patch_size(patch_size[-dim:],
                                            *rotation_for_DA.values(),
                                            (0.7, 1.43))
        if do_dummy_2d_data_aug:
            # the (pseudo-2D) through-plane axis is not spatially augmented
            initial_patch_size[0] = patch_size[0]

        self.print_to_log_file(f'do_dummy_2d_data_aug: {do_dummy_2d_data_aug}')
        self.inference_allowed_mirroring_axes = mirror_axes

        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes

    @staticmethod
    def get_training_transforms(patch_size: Union[np.ndarray, Tuple[int]],
                                rotation_for_DA: dict,
                                deep_supervision_scales: Union[List, Tuple, None],
                                mirror_axes: Tuple[int, ...],
                                do_dummy_2d_data_aug: bool,
                                order_resampling_data: int = 3,
                                order_resampling_seg: int = 1,
                                border_val_seg: int = -1,
                                use_mask_for_norm: List[bool] = None,
                                is_cascaded: bool = False,
                                foreground_labels: Union[Tuple[int, ...], List[int]] = None,
                                regions: List[Union[List[int], Tuple[int, ...], int]] = None,
                                ignore_label: int = None) -> AbstractTransform:
        """Assemble the DA5 training-augmentation pipeline as a single Compose.

        The transform order is deliberate (spatial first, intensity later,
        label bookkeeping last) — do not reorder casually.
        """
        # axes whose lengths match the most other axes; Rot90/Transpose are only
        # valid between axes of equal length
        matching_axes = np.array([sum([i == j for j in patch_size]) for i in patch_size])
        valid_axes = list(np.where(matching_axes == np.max(matching_axes))[0])

        tr_transforms = []

        if do_dummy_2d_data_aug:
            # collapse the anisotropic axis so spatial augmentation runs in 2D
            ignore_axes = (0,)
            tr_transforms.append(Convert3DTo2DTransform())
            patch_size_spatial = patch_size[1:]
        else:
            patch_size_spatial = patch_size
            ignore_axes = None

        # rotation + scaling (no elastic deformation, no random crop)
        tr_transforms.append(
            SpatialTransform(
                patch_size_spatial,
                patch_center_dist_from_border=None,
                do_elastic_deform=False,
                do_rotation=True,
                angle_x=rotation_for_DA['x'],
                angle_y=rotation_for_DA['y'],
                angle_z=rotation_for_DA['z'],
                p_rot_per_axis=0.5,
                do_scale=True,
                scale=(0.7, 1.43),
                border_mode_data="constant",
                border_cval_data=0,
                order_data=order_resampling_data,
                border_mode_seg="constant",
                border_cval_seg=-1,
                order_seg=order_resampling_seg,
                random_crop=False,
                p_el_per_sample=0.2,
                p_scale_per_sample=0.2,
                p_rot_per_sample=0.4,
                independent_scale_for_each_axis=True,
            )
        )

        if do_dummy_2d_data_aug:
            # restore the original 3D layout after the 2D spatial transform
            tr_transforms.append(Convert2DTo3DTransform())

        # 90-degree rotations / axis transposes, only between equal-length axes
        if np.any(matching_axes > 1):
            tr_transforms.append(
                Rot90Transform(
                    (0, 1, 2, 3), axes=valid_axes, data_key='data', label_key='seg', p_per_sample=0.5
                ),
            )

        if np.any(matching_axes > 1):
            tr_transforms.append(
                TransposeAxesTransform(valid_axes, data_key='data', label_key='seg', p_per_sample=0.5)
            )

        # blur: either median filtering or gaussian blur, never both
        tr_transforms.append(OneOfTransform([
            MedianFilterTransform(
                (2, 8),
                same_for_each_channel=False,
                p_per_sample=0.2,
                p_per_channel=0.5
            ),
            GaussianBlurTransform((0.3, 1.5),
                                  different_sigma_per_channel=True,
                                  p_per_sample=0.2,
                                  p_per_channel=0.5)
        ]))

        tr_transforms.append(GaussianNoiseTransform(p_per_sample=0.1))

        tr_transforms.append(BrightnessTransform(0,
                                                 0.5,
                                                 per_channel=True,
                                                 p_per_sample=0.1,
                                                 p_per_channel=0.5
                                                 )
                             )

        # contrast: one of range-preserving / non-range-preserving
        tr_transforms.append(OneOfTransform(
            [
                ContrastAugmentationTransform(
                    contrast_range=(0.5, 2),
                    preserve_range=True,
                    per_channel=True,
                    data_key='data',
                    p_per_sample=0.2,
                    p_per_channel=0.5
                ),
                ContrastAugmentationTransform(
                    contrast_range=(0.5, 2),
                    preserve_range=False,
                    per_channel=True,
                    data_key='data',
                    p_per_sample=0.2,
                    p_per_channel=0.5
                ),
            ]
        ))

        tr_transforms.append(
            SimulateLowResolutionTransform(zoom_range=(0.25, 1),
                                           per_channel=True,
                                           p_per_channel=0.5,
                                           order_downsample=0,
                                           order_upsample=3,
                                           p_per_sample=0.15,
                                           ignore_axes=ignore_axes
                                           )
        )

        # NOTE(review): both GammaTransform instances below are identical
        # (invert_image=True). The default nnU-Net pipeline pairs an inverted
        # with a non-inverted gamma — confirm this duplication is intentional.
        tr_transforms.append(
            GammaTransform((0.7, 1.5), invert_image=True, per_channel=True, retain_stats=True, p_per_sample=0.1))
        tr_transforms.append(
            GammaTransform((0.7, 1.5), invert_image=True, per_channel=True, retain_stats=True, p_per_sample=0.1))

        if mirror_axes is not None and len(mirror_axes) > 0:
            tr_transforms.append(MirrorTransform(mirror_axes))

        # blank out random rectangles (filled with the mean intensity)
        tr_transforms.append(
            BlankRectangleTransform([[max(1, p // 10), p // 3] for p in patch_size],
                                    rectangle_value=np.mean,
                                    num_rectangles=(1, 5),
                                    force_square=False,
                                    p_per_sample=0.4,
                                    p_per_channel=0.5
                                    )
        )

        # local intensity perturbations; the *_transform_scale / strength /
        # gamma callables are module-level helpers defined below in this file
        tr_transforms.append(
            BrightnessGradientAdditiveTransform(
                _brightnessadditive_localgamma_transform_scale,
                (-0.5, 1.5),
                max_strength=_brightness_gradient_additive_max_strength,
                mean_centered=False,
                same_for_all_channels=False,
                p_per_sample=0.3,
                p_per_channel=0.5
            )
        )

        tr_transforms.append(
            LocalGammaTransform(
                _brightnessadditive_localgamma_transform_scale,
                (-0.5, 1.5),
                _local_gamma_gamma,
                same_for_all_channels=False,
                p_per_sample=0.3,
                p_per_channel=0.5
            )
        )

        tr_transforms.append(
            SharpeningTransform(
                strength=(0.1, 1),
                same_for_each_channel=False,
                p_per_sample=0.2,
                p_per_channel=0.5
            )
        )

        # zero out data outside the nonzero mask for channels normalized that way
        if use_mask_for_norm is not None and any(use_mask_for_norm):
            tr_transforms.append(MaskTransform([i for i in range(len(use_mask_for_norm)) if use_mask_for_norm[i]],
                                               mask_idx_in_seg=0, set_outside_to=0))

        tr_transforms.append(RemoveLabelTransform(-1, 0))

        if is_cascaded:
            if ignore_label is not None:
                raise NotImplementedError('ignore label not yet supported in cascade')
            assert foreground_labels is not None, 'We need all_labels for cascade augmentations'
            # feed the previous stage's segmentation as extra one-hot channels
            # and corrupt it so the network cannot blindly trust it
            use_labels = [i for i in foreground_labels if i != 0]
            tr_transforms.append(MoveSegAsOneHotToData(1, use_labels, 'seg', 'data'))
            tr_transforms.append(ApplyRandomBinaryOperatorTransform(
                channel_idx=list(range(-len(use_labels), 0)),
                p_per_sample=0.4,
                key="data",
                strel_size=(1, 8),
                p_per_label=1))
            tr_transforms.append(
                RemoveRandomConnectedComponentFromOneHotEncodingTransform(
                    channel_idx=list(range(-len(use_labels), 0)),
                    key="data",
                    p_per_sample=0.2,
                    fill_with_other_class_p=0,
                    dont_do_if_covers_more_than_x_percent=0.15))

        tr_transforms.append(RenameTransform('seg', 'target', True))

        if regions is not None:
            # the ignore label must also be converted
            tr_transforms.append(ConvertSegmentationToRegionsTransform(list(regions) + [ignore_label]
                                                                       if ignore_label is not None else regions,
                                                                       'target', 'target'))

        if deep_supervision_scales is not None:
            # produce one downsampled target per deep-supervision output
            tr_transforms.append(DownsampleSegForDSTransform2(deep_supervision_scales, 0, input_key='target',
                                                              output_key='target'))

        tr_transforms.append(NumpyToTensor(['data', 'target'], 'float'))
        tr_transforms = Compose(tr_transforms)
        return tr_transforms
class nnUNetTrainerDA5ord0(nnUNetTrainerDA5):
    def get_dataloaders(self):
        """
        changed order_resampling_data, order_resampling_seg
        """
        # Patch size decides 2D vs 3D handling and whether dummy-2D augmentation applies.
        patch_size = self.configuration_manager.patch_size
        dim = len(patch_size)

        # Downscaling factors for the deep-supervision targets.
        deep_supervision_scales = self._get_deep_supervision_scales()

        (rotation_for_DA, do_dummy_2d_data_aug,
         initial_patch_size, mirror_axes) = self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()

        label_mgr = self.label_manager
        regions = label_mgr.foreground_regions if label_mgr.has_regions else None

        # Training pipeline: nearest-neighbour resampling for both data and seg.
        tr_transforms = self.get_training_transforms(
            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,
            order_resampling_data=0, order_resampling_seg=0,
            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,
            is_cascaded=self.is_cascaded, foreground_labels=label_mgr.all_labels,
            regions=regions, ignore_label=label_mgr.ignore_label)

        # Validation pipeline.
        val_transforms = self.get_validation_transforms(
            deep_supervision_scales, is_cascaded=self.is_cascaded,
            foreground_labels=label_mgr.all_labels, regions=regions,
            ignore_label=label_mgr.ignore_label)

        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)

        n_proc = get_allowed_n_proc_DA()
        if n_proc == 0:
            # no multiprocessing allowed: augment in the main process
            return (SingleThreadedAugmenter(dl_tr, tr_transforms),
                    SingleThreadedAugmenter(dl_val, val_transforms))
        return (LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,
                                  n_proc, 6, None, True, 0.02),
                LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,
                                  max(1, n_proc // 2), 3, None, True, 0.02))
def _brightnessadditive_localgamma_transform_scale(x, y):
return np.exp(np.random.uniform(np.log(x[y] // 6), np.log(x[y])))
def _brightness_gradient_additive_max_strength(_x, _y):
return np.random.uniform(-5, -1) if np.random.uniform() < 0.5 else np.random.uniform(1, 5)
def _local_gamma_gamma():
return np.random.uniform(0.01, 0.8) if np.random.uniform() < 0.5 else np.random.uniform(1.5, 4)
class nnUNetTrainerDA5Segord0(nnUNetTrainerDA5):
    def get_dataloaders(self):
        """
        changed order_resampling_data, order_resampling_seg
        """
        # Patch size decides 2D vs 3D handling and whether dummy-2D augmentation applies.
        patch_size = self.configuration_manager.patch_size
        dim = len(patch_size)

        # Downscaling factors for the deep-supervision targets.
        deep_supervision_scales = self._get_deep_supervision_scales()

        (rotation_for_DA, do_dummy_2d_data_aug,
         initial_patch_size, mirror_axes) = self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()

        label_mgr = self.label_manager
        regions = label_mgr.foreground_regions if label_mgr.has_regions else None

        # Training pipeline: cubic resampling for data, nearest-neighbour for seg.
        tr_transforms = self.get_training_transforms(
            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,
            order_resampling_data=3, order_resampling_seg=0,
            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,
            is_cascaded=self.is_cascaded, foreground_labels=label_mgr.all_labels,
            regions=regions, ignore_label=label_mgr.ignore_label)

        # Validation pipeline.
        val_transforms = self.get_validation_transforms(
            deep_supervision_scales, is_cascaded=self.is_cascaded,
            foreground_labels=label_mgr.all_labels, regions=regions,
            ignore_label=label_mgr.ignore_label)

        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)

        n_proc = get_allowed_n_proc_DA()
        if n_proc == 0:
            # no multiprocessing allowed: augment in the main process
            return (SingleThreadedAugmenter(dl_tr, tr_transforms),
                    SingleThreadedAugmenter(dl_val, val_transforms))
        return (LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,
                                  n_proc, 6, None, True, 0.02),
                LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,
                                  max(1, n_proc // 2), 3, None, True, 0.02))
class nnUNetTrainerDA5_10epochs(nnUNetTrainerDA5):
    """DA5 trainer shortened to 10 epochs (for quick experiments)."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        # only change vs. the parent: train for 10 epochs instead of the default
        self.num_epochs = 10
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDAOrd0.py
================================================
from batchgenerators.dataloading.single_threaded_augmenter import SingleThreadedAugmenter
from nnunetv2.training.data_augmentation.custom_transforms.limited_length_multithreaded_augmenter import \
LimitedLenWrapper
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from nnunetv2.utilities.default_n_proc_DA import get_allowed_n_proc_DA
class nnUNetTrainerDAOrd0(nnUNetTrainer):
    def get_dataloaders(self):
        """
        changed order_resampling_data, order_resampling_seg
        """
        # Patch size decides 2D vs 3D handling and whether dummy-2D augmentation applies.
        patch_size = self.configuration_manager.patch_size
        dim = len(patch_size)

        # Downscaling factors for the deep-supervision targets.
        deep_supervision_scales = self._get_deep_supervision_scales()

        (rotation_for_DA, do_dummy_2d_data_aug,
         initial_patch_size, mirror_axes) = self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()

        label_mgr = self.label_manager
        regions = label_mgr.foreground_regions if label_mgr.has_regions else None

        # Training pipeline: nearest-neighbour resampling for both data and seg.
        tr_transforms = self.get_training_transforms(
            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,
            order_resampling_data=0, order_resampling_seg=0,
            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,
            is_cascaded=self.is_cascaded, foreground_labels=label_mgr.all_labels,
            regions=regions, ignore_label=label_mgr.ignore_label)

        # Validation pipeline.
        val_transforms = self.get_validation_transforms(
            deep_supervision_scales, is_cascaded=self.is_cascaded,
            foreground_labels=label_mgr.all_labels, regions=regions,
            ignore_label=label_mgr.ignore_label)

        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)

        n_proc = get_allowed_n_proc_DA()
        if n_proc == 0:
            # no multiprocessing allowed: augment in the main process
            return (SingleThreadedAugmenter(dl_tr, tr_transforms),
                    SingleThreadedAugmenter(dl_val, val_transforms))
        return (LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,
                                  n_proc, 6, None, True, 0.02),
                LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,
                                  max(1, n_proc // 2), 3, None, True, 0.02))
class nnUNetTrainer_DASegOrd0(nnUNetTrainer):
    def get_dataloaders(self):
        """
        changed order_resampling_data, order_resampling_seg
        """
        # Patch size decides 2D vs 3D handling and whether dummy-2D augmentation applies.
        patch_size = self.configuration_manager.patch_size
        dim = len(patch_size)

        # Downscaling factors for the deep-supervision targets.
        deep_supervision_scales = self._get_deep_supervision_scales()

        (rotation_for_DA, do_dummy_2d_data_aug,
         initial_patch_size, mirror_axes) = self.configure_rotation_dummyDA_mirroring_and_inital_patch_size()

        label_mgr = self.label_manager
        regions = label_mgr.foreground_regions if label_mgr.has_regions else None

        # Training pipeline: cubic resampling for data, nearest-neighbour for seg.
        tr_transforms = self.get_training_transforms(
            patch_size, rotation_for_DA, deep_supervision_scales, mirror_axes, do_dummy_2d_data_aug,
            order_resampling_data=3, order_resampling_seg=0,
            use_mask_for_norm=self.configuration_manager.use_mask_for_norm,
            is_cascaded=self.is_cascaded, foreground_labels=label_mgr.all_labels,
            regions=regions, ignore_label=label_mgr.ignore_label)

        # Validation pipeline.
        val_transforms = self.get_validation_transforms(
            deep_supervision_scales, is_cascaded=self.is_cascaded,
            foreground_labels=label_mgr.all_labels, regions=regions,
            ignore_label=label_mgr.ignore_label)

        dl_tr, dl_val = self.get_plain_dataloaders(initial_patch_size, dim)

        n_proc = get_allowed_n_proc_DA()
        if n_proc == 0:
            # no multiprocessing allowed: augment in the main process
            return (SingleThreadedAugmenter(dl_tr, tr_transforms),
                    SingleThreadedAugmenter(dl_val, val_transforms))
        return (LimitedLenWrapper(self.num_iterations_per_epoch, dl_tr, tr_transforms,
                                  n_proc, 6, None, True, 0.02),
                LimitedLenWrapper(self.num_val_iterations_per_epoch, dl_val, val_transforms,
                                  max(1, n_proc // 2), 3, None, True, 0.02))
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoDA.py
================================================
from typing import Union, Tuple, List
from batchgenerators.transforms.abstract_transforms import AbstractTransform
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
import numpy as np
class nnUNetTrainerNoDA(nnUNetTrainer):
    """Trainer without data augmentation: training uses the validation transforms."""

    @staticmethod
    def get_training_transforms(patch_size: Union[np.ndarray, Tuple[int]],
                                rotation_for_DA: dict,
                                deep_supervision_scales: Union[List, Tuple, None],
                                mirror_axes: Tuple[int, ...],
                                do_dummy_2d_data_aug: bool,
                                order_resampling_data: int = 1,
                                order_resampling_seg: int = 0,
                                border_val_seg: int = -1,
                                use_mask_for_norm: List[bool] = None,
                                is_cascaded: bool = False,
                                foreground_labels: Union[Tuple[int, ...], List[int]] = None,
                                regions: List[Union[List[int], Tuple[int, ...], int]] = None,
                                ignore_label: int = None) -> AbstractTransform:
        # no augmentation at all: reuse the (augmentation-free) validation pipeline
        return nnUNetTrainer.get_validation_transforms(deep_supervision_scales, is_cascaded, foreground_labels,
                                                       regions, ignore_label)

    def get_plain_dataloaders(self, initial_patch_size: Tuple[int, ...], dim: int):
        # Without spatial augmentation there is no need for an enlarged initial
        # patch: sample directly at the final patch size.
        return super().get_plain_dataloaders(
            initial_patch_size=self.configuration_manager.patch_size,
            dim=dim
        )

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        # we need to disable mirroring here so that no mirroring will be applied in inferene!
        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, _ = \
            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()
        self.inference_allowed_mirroring_axes = None
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, None
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoMirroring.py
================================================
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
class nnUNetTrainerNoMirroring(nnUNetTrainer):
    """Same DA setup as the base trainer, but with all mirroring disabled."""

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, _ = \
            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()
        # also disable test-time mirroring so inference matches training
        self.inference_allowed_mirroring_axes = None
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, None
class nnUNetTrainer_onlyMirror01(nnUNetTrainer):
    """
    Only mirrors along spatial axes 0 and 1 for 3D and 0 for 2D
    """

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, _ = \
            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()
        # drop the last spatial axis from mirroring (2D: keep 0; 3D: keep 0 and 1)
        mirror_axes = (0,) if len(self.configuration_manager.patch_size) == 2 else (0, 1)
        self.inference_allowed_mirroring_axes = mirror_axes
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, mirror_axes
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerCELoss.py
================================================
import torch
from nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from nnunetv2.training.loss.robust_ce_loss import RobustCrossEntropyLoss
import numpy as np
class nnUNetTrainerCELoss(nnUNetTrainer):
    def _build_loss(self):
        """Plain cross-entropy loss (no dice), optionally wrapped for deep supervision."""
        assert not self.label_manager.has_regions, "regions not supported by this trainer"
        ignore_index = self.label_manager.ignore_label if self.label_manager.has_ignore_label else -100
        loss = RobustCrossEntropyLoss(weight=None, ignore_index=ignore_index)

        if not self.enable_deep_supervision:
            return loss

        ds_scales = self._get_deep_supervision_scales()
        # exponentially decaying weights per resolution: the highest-resolution
        # output dominates; the lowest-resolution output is not used at all
        weights = np.array([1 / (2 ** i) for i in range(len(ds_scales))])
        weights[-1] = 0
        # normalize so the weights sum to 1, then wrap the loss
        weights = weights / weights.sum()
        return DeepSupervisionWrapper(loss, weights)
class nnUNetTrainerCELoss_5epochs(nnUNetTrainerCELoss):
    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device("cuda")):
        """used for debugging plans etc"""
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        # drastically shortened schedule; results are not meant to be competitive
        self.num_epochs = 5
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerDiceLoss.py
================================================
import numpy as np
import torch
from nnunetv2.training.loss.compound_losses import DC_and_BCE_loss, DC_and_CE_loss
from nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper
from nnunetv2.training.loss.dice import MemoryEfficientSoftDiceLoss
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from nnunetv2.utilities.helpers import softmax_helper_dim1
class nnUNetTrainerDiceLoss(nnUNetTrainer):
    """Trainer using a pure memory-efficient soft Dice loss (no CE term)."""

    def _build_loss(self):
        """Build the Dice loss; sigmoid for region-based training, softmax otherwise."""
        nonlin = torch.sigmoid if self.label_manager.has_regions else softmax_helper_dim1
        dice = MemoryEfficientSoftDiceLoss(batch_dice=self.configuration_manager.batch_dice,
                                           do_bg=self.label_manager.has_regions,
                                           smooth=1e-5,
                                           ddp=self.is_ddp,
                                           apply_nonlin=nonlin)
        if not self.enable_deep_supervision:
            return dice
        scales = self._get_deep_supervision_scales()
        # Halve the weight per resolution level so high-resolution outputs dominate.
        weights = np.array([2.0 ** -i for i in range(len(scales))])
        weights[-1] = 0.0  # lowest-resolution output is excluded
        # normalize to sum 1 and wrap the loss for deep supervision
        return DeepSupervisionWrapper(dice, weights / weights.sum())
class nnUNetTrainerDiceCELoss_noSmooth(nnUNetTrainer):
    """Default compound Dice+CE (or Dice+BCE for regions) loss with smoothing set to 0."""

    def _build_loss(self):
        """Build the compound loss with smooth=0; wrap for deep supervision if enabled."""
        if self.label_manager.has_regions:
            loss = DC_and_BCE_loss({},
                                   {'batch_dice': self.configuration_manager.batch_dice,
                                    'do_bg': True, 'smooth': 0, 'ddp': self.is_ddp},
                                   use_ignore_label=self.label_manager.ignore_label is not None,
                                   dice_class=MemoryEfficientSoftDiceLoss)
        else:
            loss = DC_and_CE_loss({'batch_dice': self.configuration_manager.batch_dice,
                                   'smooth': 0, 'do_bg': False, 'ddp': self.is_ddp},
                                  {}, weight_ce=1, weight_dice=1,
                                  ignore_label=self.label_manager.ignore_label,
                                  dice_class=MemoryEfficientSoftDiceLoss)
        if not self.enable_deep_supervision:
            return loss
        scales = self._get_deep_supervision_scales()
        # Exponentially decaying weights; highest resolution dominates.
        weights = np.array([2.0 ** -i for i in range(len(scales))])
        weights[-1] = 0.0  # drop the lowest-resolution output
        # normalize to sum 1, then wrap
        return DeepSupervisionWrapper(loss, weights / weights.sum())
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerTopkLoss.py
================================================
from nnunetv2.training.loss.compound_losses import DC_and_topk_loss
from nnunetv2.training.loss.deep_supervision import DeepSupervisionWrapper
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
import numpy as np
from nnunetv2.training.loss.robust_ce_loss import TopKLoss
class nnUNetTrainerTopk10Loss(nnUNetTrainer):
    """Trainer that optimizes a top-k (k=10%) cross-entropy loss."""

    def _build_loss(self):
        """Build the TopK loss; wrap for deep supervision if enabled."""
        assert not self.label_manager.has_regions, "regions not supported by this trainer"
        topk = TopKLoss(
            ignore_index=self.label_manager.ignore_label if self.label_manager.has_ignore_label else -100, k=10
        )
        if not self.enable_deep_supervision:
            return topk
        scales = self._get_deep_supervision_scales()
        # Weight halves each resolution level; highest resolution dominates.
        weights = np.array([2.0 ** -i for i in range(len(scales))])
        weights[-1] = 0.0  # drop the lowest-resolution output
        # normalize to sum 1 before wrapping
        return DeepSupervisionWrapper(topk, weights / weights.sum())
class nnUNetTrainerTopk10LossLS01(nnUNetTrainer):
    """Top-k (k=10%) CE loss with label smoothing 0.1."""

    def _build_loss(self):
        """Build the smoothed TopK loss; wrap for deep supervision if enabled."""
        assert not self.label_manager.has_regions, "regions not supported by this trainer"
        topk = TopKLoss(
            ignore_index=self.label_manager.ignore_label if self.label_manager.has_ignore_label else -100,
            k=10,
            label_smoothing=0.1,
        )
        if not self.enable_deep_supervision:
            return topk
        scales = self._get_deep_supervision_scales()
        # Weight halves each resolution level; highest resolution dominates.
        weights = np.array([2.0 ** -i for i in range(len(scales))])
        weights[-1] = 0.0  # drop the lowest-resolution output
        # normalize to sum 1 before wrapping
        return DeepSupervisionWrapper(topk, weights / weights.sum())
class nnUNetTrainerDiceTopK10Loss(nnUNetTrainer):
    """Trainer combining soft Dice with a top-k (k=10%) CE term."""

    def _build_loss(self):
        """Build the compound Dice+TopK loss; wrap for deep supervision if enabled."""
        assert not self.label_manager.has_regions, "regions not supported by this trainer"
        compound = DC_and_topk_loss(
            {"batch_dice": self.configuration_manager.batch_dice, "smooth": 1e-5, "do_bg": False, "ddp": self.is_ddp},
            {"k": 10, "label_smoothing": 0.0},
            weight_ce=1,
            weight_dice=1,
            ignore_label=self.label_manager.ignore_label,
        )
        if not self.enable_deep_supervision:
            return compound
        scales = self._get_deep_supervision_scales()
        # Weight halves each resolution level; highest resolution dominates.
        weights = np.array([2.0 ** -i for i in range(len(scales))])
        weights[-1] = 0.0  # drop the lowest-resolution output
        # normalize to sum 1 before wrapping
        return DeepSupervisionWrapper(compound, weights / weights.sum())
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/lr_schedule/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/lr_schedule/nnUNetTrainerCosAnneal.py
================================================
import torch
from torch.optim.lr_scheduler import CosineAnnealingLR
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
class nnUNetTrainerCosAnneal(nnUNetTrainer):
    """Default SGD setup, but with cosine annealing instead of the poly LR schedule."""

    def configure_optimizers(self):
        opt = torch.optim.SGD(self.network.parameters(), self.initial_lr,
                              weight_decay=self.weight_decay, momentum=0.99, nesterov=True)
        sched = CosineAnnealingLR(opt, T_max=self.num_epochs)
        return opt, sched
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/network_architecture/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/network_architecture/nnUNetTrainerBN.py
================================================
from dynamic_network_architectures.architectures.unet import ResidualEncoderUNet, PlainConvUNet
from dynamic_network_architectures.building_blocks.helper import convert_dim_to_conv_op, get_matching_batchnorm
from dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0, InitWeights_He
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from nnunetv2.utilities.plans_handling.plans_handler import ConfigurationManager, PlansManager
from torch import nn
class nnUNetTrainerBN(nnUNetTrainer):
    """nnUNetTrainer variant that builds its U-Net with BatchNorm instead of the
    default InstanceNorm (all other architecture settings come from the plans)."""

    @staticmethod
    def build_network_architecture(plans_manager: PlansManager,
                                   dataset_json,
                                   configuration_manager: ConfigurationManager,
                                   num_input_channels,
                                   enable_deep_supervision: bool = True) -> nn.Module:
        """Instantiate the plan-specified U-Net with BatchNorm layers.

        :param plans_manager: provides the label manager for this dataset
        :param dataset_json: raw dataset.json content
        :param configuration_manager: supplies kernel sizes, strides, feature counts, etc.
        :param num_input_channels: number of input image channels
        :param enable_deep_supervision: whether the decoder emits auxiliary outputs
        :return: the initialized network
        """
        num_stages = len(configuration_manager.conv_kernel_sizes)

        # dimensionality (2D/3D) is inferred from the kernel size of the first stage
        dim = len(configuration_manager.conv_kernel_sizes[0])
        conv_op = convert_dim_to_conv_op(dim)

        label_manager = plans_manager.get_label_manager(dataset_json)

        segmentation_network_class_name = configuration_manager.UNet_class_name
        mapping = {
            'PlainConvUNet': PlainConvUNet,
            'ResidualEncoderUNet': ResidualEncoderUNet
        }
        # Both architectures take identical norm/nonlin kwargs; the only deviation
        # from the default trainer is get_matching_batchnorm instead of instance norm.
        kwargs = {
            'PlainConvUNet': {
                'conv_bias': True,
                'norm_op': get_matching_batchnorm(conv_op),
                'norm_op_kwargs': {'eps': 1e-5, 'affine': True},
                'dropout_op': None, 'dropout_op_kwargs': None,
                'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},
            },
            'ResidualEncoderUNet': {
                'conv_bias': True,
                'norm_op': get_matching_batchnorm(conv_op),
                'norm_op_kwargs': {'eps': 1e-5, 'affine': True},
                'dropout_op': None, 'dropout_op_kwargs': None,
                'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},
            }
        }
        # fixed: typo ("Yo'll") and wrong function name in the original message
        assert segmentation_network_class_name in mapping.keys(), \
            'The network architecture specified by the plans file is non-standard (maybe your own?). ' \
            'You\'ll have to dive into either this function (build_network_architecture) or the init ' \
            'of your nnUNetModule to accommodate that.'
        network_class = mapping[segmentation_network_class_name]

        # the residual variant names its per-stage block count differently
        conv_or_blocks_per_stage = {
            'n_conv_per_stage'
            if network_class != ResidualEncoderUNet else 'n_blocks_per_stage': configuration_manager.n_conv_per_stage_encoder,
            'n_conv_per_stage_decoder': configuration_manager.n_conv_per_stage_decoder
        }
        model = network_class(
            input_channels=num_input_channels,
            n_stages=num_stages,
            # feature count doubles per stage, capped at unet_max_num_features
            features_per_stage=[min(configuration_manager.UNet_base_num_features * 2 ** i,
                                    configuration_manager.unet_max_num_features) for i in range(num_stages)],
            conv_op=conv_op,
            kernel_sizes=configuration_manager.conv_kernel_sizes,
            strides=configuration_manager.pool_op_kernel_sizes,
            num_classes=label_manager.num_segmentation_heads,
            deep_supervision=enable_deep_supervision,
            **conv_or_blocks_per_stage,
            **kwargs[segmentation_network_class_name]
        )
        model.apply(InitWeights_He(1e-2))
        if network_class == ResidualEncoderUNet:
            # zero-init the last BN of each residual block so blocks start as identity
            model.apply(init_last_bn_before_add_to_0)
        return model
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/network_architecture/nnUNetTrainerNoDeepSupervision.py
================================================
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
import torch
class nnUNetTrainerNoDeepSupervision(nnUNetTrainer):
    """Identical to nnUNetTrainer except deep supervision is disabled."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device("cuda")):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.enable_deep_supervision = False
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/optimizer/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdam.py
================================================
import torch
from torch.optim import Adam, AdamW
from nnunetv2.training.lr_scheduler.polylr import PolyLRScheduler
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
class nnUNetTrainerAdam(nnUNetTrainer):
    """Trainer using AdamW (with AMSGrad) in place of the default SGD."""

    def configure_optimizers(self):
        opt = AdamW(self.network.parameters(),
                    lr=self.initial_lr,
                    weight_decay=self.weight_decay,
                    amsgrad=True)
        sched = PolyLRScheduler(opt, self.initial_lr, self.num_epochs)
        return opt, sched
class nnUNetTrainerVanillaAdam(nnUNetTrainer):
    """Trainer using plain Adam (no AMSGrad) in place of the default SGD."""

    def configure_optimizers(self):
        opt = Adam(self.network.parameters(),
                   lr=self.initial_lr,
                   weight_decay=self.weight_decay)
        sched = PolyLRScheduler(opt, self.initial_lr, self.num_epochs)
        return opt, sched
class nnUNetTrainerVanillaAdam1en3(nnUNetTrainerVanillaAdam):
    """Vanilla Adam with initial LR 1e-3."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.initial_lr = 1e-3
class nnUNetTrainerVanillaAdam3en4(nnUNetTrainerVanillaAdam):
    """Vanilla Adam with initial LR 3e-4."""
    # https://twitter.com/karpathy/status/801621764144971776?lang=en

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.initial_lr = 3e-4
class nnUNetTrainerAdam1en3(nnUNetTrainerAdam):
    """AdamW trainer with initial LR 1e-3."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.initial_lr = 1e-3
class nnUNetTrainerAdam3en4(nnUNetTrainerAdam):
    """AdamW trainer with initial LR 3e-4."""
    # https://twitter.com/karpathy/status/801621764144971776?lang=en

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.initial_lr = 3e-4
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdan.py
================================================
import torch
from nnunetv2.training.lr_scheduler.polylr import PolyLRScheduler
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
from torch.optim.lr_scheduler import CosineAnnealingLR
# adan_pytorch is an optional third-party dependency. Keep the import failure
# non-fatal so the other trainers in this module stay importable; the Adan
# trainers below check for None and raise a helpful error at configure time.
try:
    from adan_pytorch import Adan
except ImportError:
    Adan = None  # sentinel: checked in configure_optimizers
class nnUNetTrainerAdan(nnUNetTrainer):
    """Trainer using the optional Adan optimizer with the poly LR schedule.

    Raises RuntimeError at optimizer-configuration time if adan_pytorch is missing.
    """

    def configure_optimizers(self):
        if Adan is None:
            raise RuntimeError('This trainer requires adan_pytorch to be installed, install with "pip install adan-pytorch"')
        opt = Adan(self.network.parameters(),
                   lr=self.initial_lr,
                   weight_decay=self.weight_decay)
        sched = PolyLRScheduler(opt, self.initial_lr, self.num_epochs)
        return opt, sched
class nnUNetTrainerAdan1en3(nnUNetTrainerAdan):
    """Adan trainer with initial LR 1e-3."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.initial_lr = 1e-3
class nnUNetTrainerAdan3en4(nnUNetTrainerAdan):
    """Adan trainer with initial LR 3e-4."""
    # https://twitter.com/karpathy/status/801621764144971776?lang=en

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.initial_lr = 3e-4
class nnUNetTrainerAdan1en1(nnUNetTrainerAdan):
    """Adan trainer with initial LR 1e-1 (known to diverge to NaN; kept for reference)."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.initial_lr = 1e-1
class nnUNetTrainerAdanCosAnneal(nnUNetTrainerAdan):
    """Adan optimizer paired with a cosine-annealing LR schedule."""

    def configure_optimizers(self):
        if Adan is None:
            raise RuntimeError('This trainer requires adan_pytorch to be installed, install with "pip install adan-pytorch"')
        opt = Adan(self.network.parameters(),
                   lr=self.initial_lr,
                   weight_decay=self.weight_decay)
        sched = CosineAnnealingLR(opt, T_max=self.num_epochs)
        return opt, sched
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/sampling/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/sampling/nnUNetTrainer_probabilisticOversampling.py
================================================
from typing import Tuple
import torch
from nnunetv2.training.dataloading.data_loader_2d import nnUNetDataLoader2D
from nnunetv2.training.dataloading.data_loader_3d import nnUNetDataLoader3D
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
import numpy as np
class nnUNetTrainer_probabilisticOversampling(nnUNetTrainer):
    """Oversamples foreground probabilistically per sample instead of forcing the
    last N samples of each batch to contain foreground.

    The constructor recomputes oversample_foreground_percent as the effective
    fraction the default (deterministic) scheme would have produced for this
    batch size, so behavior stays as consistent as possible with nnUNetTrainer.
    Subclasses may then pin it to a constant (e.g. 0.33).
    """

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        batch_size = self.configuration_manager.batch_size
        # index below which the default trainer would NOT force foreground
        cutoff = round(batch_size * (1 - self.oversample_foreground_percent))
        # fraction of batch positions at or beyond the cutoff == effective fg rate
        self.oversample_foreground_percent = float(np.mean(
            [sample_idx >= cutoff for sample_idx in range(batch_size)]))
        self.print_to_log_file(f"self.oversample_foreground_percent {self.oversample_foreground_percent}")

    def get_plain_dataloaders(self, initial_patch_size: Tuple[int, ...], dim: int):
        """Build train/val loaders with probabilistic oversampling enabled."""
        dataset_tr, dataset_val = self.get_tr_and_val_datasets()
        loader_cls = nnUNetDataLoader2D if dim == 2 else nnUNetDataLoader3D
        final_patch_size = self.configuration_manager.patch_size
        shared = dict(oversample_foreground_percent=self.oversample_foreground_percent,
                      sampling_probabilities=None, pad_sides=None,
                      probabilistic_oversampling=True)
        dl_tr = loader_cls(dataset_tr, self.batch_size, initial_patch_size,
                           final_patch_size, self.label_manager, **shared)
        # validation always crops at the final patch size (no DA padding needed)
        dl_val = loader_cls(dataset_val, self.batch_size, final_patch_size,
                            final_patch_size, self.label_manager, **shared)
        return dl_tr, dl_val
class nnUNetTrainer_probabilisticOversampling_033(nnUNetTrainer_probabilisticOversampling):
    """Probabilistic oversampling with a fixed foreground rate of 33%."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.oversample_foreground_percent = 0.33
class nnUNetTrainer_probabilisticOversampling_010(nnUNetTrainer_probabilisticOversampling):
    """Probabilistic oversampling with a fixed foreground rate of 10%."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.oversample_foreground_percent = 0.1
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/training_length/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs.py
================================================
import torch
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
class nnUNetTrainer_5epochs(nnUNetTrainer):
    """5-epoch trainer; intended for debugging plans etc."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 5
class nnUNetTrainer_1epoch(nnUNetTrainer):
    """1-epoch trainer; intended for debugging plans etc."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 1
class nnUNetTrainer_10epochs(nnUNetTrainer):
    """10-epoch trainer; intended for debugging plans etc."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 10
class nnUNetTrainer_20epochs(nnUNetTrainer):
    """Trainer limited to 20 epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 20
class nnUNetTrainer_50epochs(nnUNetTrainer):
    """Trainer limited to 50 epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 50
class nnUNetTrainer_100epochs(nnUNetTrainer):
    """Trainer limited to 100 epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 100
class nnUNetTrainer_250epochs(nnUNetTrainer):
    """Trainer limited to 250 epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 250
class nnUNetTrainer_2000epochs(nnUNetTrainer):
    """Extended training: 2000 epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 2000
class nnUNetTrainer_4000epochs(nnUNetTrainer):
    """Extended training: 4000 epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 4000
class nnUNetTrainer_8000epochs(nnUNetTrainer):
    """Extended training: 8000 epochs."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 8000
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs_NoMirroring.py
================================================
import torch
from nnunetv2.training.nnUNetTrainer.nnUNetTrainer import nnUNetTrainer
class nnUNetTrainer_250epochs_NoMirroring(nnUNetTrainer):
    """250-epoch training with mirroring disabled for both DA and inference TTA."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 250

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, _ = \
            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()
        # disable mirroring entirely (training augmentation and test-time)
        self.inference_allowed_mirroring_axes = None
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, None
class nnUNetTrainer_2000epochs_NoMirroring(nnUNetTrainer):
    """2000-epoch training with mirroring disabled for both DA and inference TTA."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 2000

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, _ = \
            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()
        # disable mirroring entirely (training augmentation and test-time)
        self.inference_allowed_mirroring_axes = None
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, None
class nnUNetTrainer_4000epochs_NoMirroring(nnUNetTrainer):
    """4000-epoch training with mirroring disabled for both DA and inference TTA."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 4000

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, _ = \
            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()
        # disable mirroring entirely (training augmentation and test-time)
        self.inference_allowed_mirroring_axes = None
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, None
class nnUNetTrainer_8000epochs_NoMirroring(nnUNetTrainer):
    """8000-epoch training with mirroring disabled for both DA and inference TTA."""

    def __init__(self, plans: dict, configuration: str, fold: int, dataset_json: dict,
                 unpack_dataset: bool = True, device: torch.device = torch.device('cuda')):
        super().__init__(plans, configuration, fold, dataset_json, unpack_dataset, device)
        self.num_epochs = 8000

    def configure_rotation_dummyDA_mirroring_and_inital_patch_size(self):
        rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, _ = \
            super().configure_rotation_dummyDA_mirroring_and_inital_patch_size()
        # disable mirroring entirely (training augmentation and test-time)
        self.inference_allowed_mirroring_axes = None
        return rotation_for_DA, do_dummy_2d_data_aug, initial_patch_size, None
================================================
FILE: Finetune/nnUNet/nnunetv2/training/nnUNetTrainer/vit.py
================================================
from typing import Union, Type, List, Tuple
import torch
from dynamic_network_architectures.building_blocks.residual_encoders import ResidualEncoder
from dynamic_network_architectures.building_blocks.residual import BasicBlockD, BottleneckD
from torch import nn
from torch.nn.modules.conv import _ConvNd
from torch.nn.modules.dropout import _DropoutNd
from dynamic_network_architectures.building_blocks.plain_conv_encoder import PlainConvEncoder
from dynamic_network_architectures.building_blocks.unet_decoder import UNetDecoder
from dynamic_network_architectures.building_blocks.helper import convert_conv_op_to_dim
import numpy as np
from monai.networks.nets.swin_unetr import *
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
import argparse
import torch.nn.functional as F
class Swin(nn.Module):
    """SwinUNETR-style segmentation network: a Swin ViT encoder (MONAI SwinTransformer,
    use_v2=True) whose multi-scale hidden states are refined by UnetrBasicBlocks and
    fused by a chain of UnetrUpBlocks into a full-resolution segmentation output.

    :param input_channels: number of input image channels
    :param num_classes: number of output segmentation channels
    """
    def __init__(self, input_channels: int,
                 num_classes: int):
        super(Swin, self).__init__()
        # fixed 3D configuration with base feature width 48 (SwinUNETR defaults)
        spatial_dims = 3
        feature_size = 48
        patch_size = ensure_tuple_rep(2, spatial_dims)
        window_size = ensure_tuple_rep(7, spatial_dims)
        # backbone: produces a list of hidden states at successively lower resolutions
        self.swinViT = SwinViT(
            in_chans=input_channels,
            embed_dim=feature_size,
            window_size=window_size,
            patch_size=patch_size,
            depths=[2, 2, 2, 2],
            num_heads=[3, 6, 12, 24],
            mlp_ratio=4.0,
            qkv_bias=True,
            drop_rate=0.0,
            attn_drop_rate=0.0,
            drop_path_rate=0.0,
            norm_layer=torch.nn.LayerNorm,
            use_checkpoint=True,  # gradient checkpointing to reduce memory
            spatial_dims=spatial_dims,
            use_v2=True,
        )
        norm_name = 'instance'
        # encoder1 works on the raw input; encoder2..4 refine backbone hidden states
        self.encoder1 = UnetrBasicBlock(
            spatial_dims=spatial_dims,
            in_channels=input_channels,
            out_channels=feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        self.encoder2 = UnetrBasicBlock(
            spatial_dims=spatial_dims,
            in_channels=feature_size,
            out_channels=feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        self.encoder3 = UnetrBasicBlock(
            spatial_dims=spatial_dims,
            in_channels=2 * feature_size,
            out_channels=2 * feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        self.encoder4 = UnetrBasicBlock(
            spatial_dims=spatial_dims,
            in_channels=4 * feature_size,
            out_channels=4 * feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        # bottleneck refinement of the deepest hidden state (16x feature width)
        self.encoder10 = UnetrBasicBlock(
            spatial_dims=spatial_dims,
            in_channels=16 * feature_size,
            out_channels=16 * feature_size,
            kernel_size=3,
            stride=1,
            norm_name=norm_name,
            res_block=True,
        )
        # decoder chain: each block upsamples 2x and fuses the matching skip
        self.decoder5 = UnetrUpBlock(
            spatial_dims=spatial_dims,
            in_channels=16 * feature_size,
            out_channels=8 * feature_size,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder4 = UnetrUpBlock(
            spatial_dims=spatial_dims,
            in_channels=feature_size * 8,
            out_channels=feature_size * 4,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder3 = UnetrUpBlock(
            spatial_dims=spatial_dims,
            in_channels=feature_size * 4,
            out_channels=feature_size * 2,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder2 = UnetrUpBlock(
            spatial_dims=spatial_dims,
            in_channels=feature_size * 2,
            out_channels=feature_size,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        self.decoder1 = UnetrUpBlock(
            spatial_dims=spatial_dims,
            in_channels=feature_size,
            out_channels=feature_size,
            kernel_size=3,
            upsample_kernel_size=2,
            norm_name=norm_name,
            res_block=True,
        )
        # 1x1 conv head mapping feature_size channels to num_classes
        self.out = UnetOutBlock(spatial_dims=spatial_dims, in_channels=feature_size, out_channels=num_classes)

    def forward(self, x_in):
        """Run the Swin backbone, then fuse skips through the UNETR decoder.

        :param x_in: input image batch
        :return: segmentation logits at input resolution
        """
        hidden_states_out = self.swinViT(x_in)
        enc0 = self.encoder1(x_in)                      # skip at full resolution
        enc1 = self.encoder2(hidden_states_out[0])
        enc2 = self.encoder3(hidden_states_out[1])
        enc3 = self.encoder4(hidden_states_out[2])
        dec4 = self.encoder10(hidden_states_out[4])     # refined bottleneck
        # hidden_states_out[3] is consumed directly as the deepest skip
        dec3 = self.decoder5(dec4, hidden_states_out[3])
        dec2 = self.decoder4(dec3, enc3)
        dec1 = self.decoder3(dec2, enc2)
        dec0 = self.decoder2(dec1, enc1)
        out = self.decoder1(dec0, enc0)
        return self.out(out)

    def compute_conv_feature_map_size(self, input_size):
        """
        IMPORTANT: input_size is the input_size of the encoder!
        :param input_size:
        :return:

        NOTE(review): this method references self.encoder, self.stages,
        self.deep_supervision and self.num_classes, none of which are defined on
        this class — it appears copied from a conv-UNet implementation and would
        raise AttributeError if called. Verify before relying on it.
        """
        # first we need to compute the skip sizes. Skip bottleneck because all output feature maps of our ops will at
        # least have the size of the skip above that (therefore -1)
        skip_sizes = []
        for s in range(len(self.encoder.strides) - 1):
            skip_sizes.append([i // j for i, j in zip(input_size, self.encoder.strides[s])])
            input_size = skip_sizes[-1]
        # print(skip_sizes)

        assert len(skip_sizes) == len(self.stages)

        # our ops are the other way around, so let's match things up
        output = np.int64(0)
        for s in range(len(self.stages)):
            # print(skip_sizes[-(s+1)], self.encoder.output_channels[-(s+2)])
            # conv blocks
            output += self.stages[s].compute_conv_feature_map_size(skip_sizes[-(s+1)])
            # trans conv
            output += np.prod([self.encoder.output_channels[-(s+2)], *skip_sizes[-(s+1)]], dtype=np.int64)
            # segmentation
            if self.deep_supervision or (s == (len(self.stages) - 1)):
                output += np.prod([self.num_classes, *skip_sizes[-(s+1)]], dtype=np.int64)
        return output
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/collate_outputs.py
================================================
from typing import List
import numpy as np
def collate_outputs(outputs: List[dict]):
    """
    Collate a list of per-step output dicts (as produced by the default
    train_step / validation_step) into a single dict.

    All dicts must share the same keys. Scalar values are collected into a
    list, ndarrays are stacked over a new leading axis, and lists are
    concatenated. Any other value type raises ValueError.
    """
    collated = {}
    for key in outputs[0].keys():
        first_value = outputs[0][key]
        if np.isscalar(first_value):
            collated[key] = [entry[key] for entry in outputs]
        elif isinstance(first_value, np.ndarray):
            collated[key] = np.vstack([entry[key][None] for entry in outputs])
        elif isinstance(first_value, list):
            collated[key] = [item for entry in outputs for item in entry[key]]
        else:
            raise ValueError(f'Cannot collate input of type {type(outputs[0][key])}. '
                             f'Modify collate_outputs to add this functionality')
    return collated
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/dataset_name_id_conversion.py
================================================
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
from nnunetv2.paths import nnUNet_preprocessed, nnUNet_raw, nnUNet_results
from batchgenerators.utilities.file_and_folder_operations import *
import numpy as np
def find_candidate_datasets(dataset_id: int):
    """
    Collect the unique dataset folder names matching dataset_id across the
    preprocessed, raw and results directories (any of which may be unset or
    missing on disk). Returns a sorted array of unique folder names.
    """
    prefix = "Dataset%03.0d" % dataset_id
    candidates = []
    # search all three known locations in the same order as before
    for base_dir in (nnUNet_preprocessed, nnUNet_raw, nnUNet_results):
        if base_dir is not None and isdir(base_dir):
            candidates += subdirs(base_dir, prefix=prefix, join=False)
    # np.unique also deduplicates names that appear in several locations
    return np.unique(candidates)
def convert_id_to_dataset_name(dataset_id: int):
    """
    Resolve an integer dataset id to its unique 'DatasetXXX_name' folder name.

    Raises RuntimeError when no dataset matches the id, or when the id is
    ambiguous (several folders share the same 3-digit prefix).
    """
    matches = find_candidate_datasets(dataset_id)
    if len(matches) == 0:
        raise RuntimeError(f"Could not find a dataset with the ID {dataset_id}. Make sure the requested dataset ID "
                           f"exists and that nnU-Net knows where raw and preprocessed data are located "
                           f"(see Documentation - Installation). Here are your currently defined folders:\n"
                           f"nnUNet_preprocessed={os.environ.get('nnUNet_preprocessed') if os.environ.get('nnUNet_preprocessed') is not None else 'None'}\n"
                           f"nnUNet_results={os.environ.get('nnUNet_results') if os.environ.get('nnUNet_results') is not None else 'None'}\n"
                           f"nnUNet_raw={os.environ.get('nnUNet_raw') if os.environ.get('nnUNet_raw') is not None else 'None'}\n"
                           f"If something is not right, adapt your environment variables.")
    if len(matches) > 1:
        raise RuntimeError("More than one dataset name found for dataset id %d. Please correct that. (I looked in the "
                           "following folders:\n%s\n%s\n%s" % (dataset_id, nnUNet_raw, nnUNet_preprocessed, nnUNet_results))
    return matches[0]
def convert_dataset_name_to_id(dataset_name: str):
    """Extract the integer id from a 'DatasetXXX_...' folder name (XXX = exactly
    three digits right after the 'Dataset' prefix)."""
    assert dataset_name.startswith("Dataset")
    # characters 7..9 hold the zero-padded 3-digit id
    return int(dataset_name[7:10])
def maybe_convert_to_dataset_name(dataset_name_or_id: Union[int, str]) -> str:
    """
    Return a full dataset name, converting an id (int or numeric string) when
    necessary.

    Strings already starting with 'Dataset' are returned unchanged. Any other
    string must parse as an integer id, otherwise ValueError is raised.
    """
    if isinstance(dataset_name_or_id, str):
        if dataset_name_or_id.startswith("Dataset"):
            return dataset_name_or_id
        try:
            dataset_name_or_id = int(dataset_name_or_id)
        except ValueError:
            raise ValueError("dataset_name_or_id was a string and did not start with 'Dataset' so we tried to "
                             "convert it to a dataset ID (int). That failed, however. Please give an integer number "
                             "('1', '2', etc) or a correct dataset name. Your input: %s" % dataset_name_or_id)
    return convert_id_to_dataset_name(dataset_name_or_id)
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/ddp_allgather.py
================================================
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Optional, Tuple
import torch
from torch import distributed
def print_if_rank0(*args):
    """Forward *args to print(), but only on the distributed rank-0 process
    (requires torch.distributed to be initialized)."""
    if distributed.get_rank() != 0:
        return
    print(*args)
class AllGatherGrad(torch.autograd.Function):
    """
    Differentiable all_gather: the forward pass gathers a tensor from every
    rank into one stacked tensor; the backward pass sums gradients across
    ranks and returns this rank's slice. (adapted from pytorch lightning)
    """

    @staticmethod
    def forward(
        ctx: Any,
        tensor: torch.Tensor,
        group: Optional["torch.distributed.ProcessGroup"] = None,
    ) -> torch.Tensor:
        # remember the process group so backward reduces over the same one
        ctx.group = group
        world_size = torch.distributed.get_world_size()
        buckets = [torch.zeros_like(tensor) for _ in range(world_size)]
        torch.distributed.all_gather(buckets, tensor, group=group)
        # stack along a fresh leading dim -> (world_size, *tensor.shape)
        return torch.stack(buckets, dim=0)

    @staticmethod
    def backward(ctx: Any, *grad_output: torch.Tensor) -> Tuple[torch.Tensor, None]:
        # sum the gathered gradients across ranks, then keep only this rank's slice
        grads = torch.cat(grad_output)
        torch.distributed.all_reduce(grads, op=torch.distributed.ReduceOp.SUM, async_op=False, group=ctx.group)
        return grads[torch.distributed.get_rank()], None
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/default_n_proc_DA.py
================================================
import subprocess
import os
def get_allowed_n_proc_DA():
    """
    Number of data-augmentation worker processes to use PER GPU.

    Resolution order:
      1. If the environment variable nnUNet_n_proc_DA is set, it overwrites
         anything in this function.
      2. Otherwise a hostname lookup table specific to the DKFZ cluster
         infrastructure (adapt to your own systems as needed).
      3. Unknown hosts fall back to 12.

    The result is always capped at os.cpu_count().

    :return: int, number of DA processes per GPU
    """
    if 'nnUNet_n_proc_DA' in os.environ.keys():
        use_this = int(os.environ['nnUNet_n_proc_DA'])
    else:
        # bugfix: getoutput expects a command *string*, not a list. Passing a
        # list only happened to work on POSIX (shell=True takes the first
        # element as the command) and is not portable.
        hostname = subprocess.getoutput('hostname')
        if hostname in ['Fabian', ]:
            use_this = 12
        elif hostname in ['hdf19-gpu16', 'hdf19-gpu17', 'hdf19-gpu18', 'hdf19-gpu19', 'e230-AMDworkstation']:
            use_this = 16
        elif hostname.startswith('e230-dgx1'):
            use_this = 10
        elif hostname.startswith('hdf18-gpu') or hostname.startswith('e132-comp'):
            use_this = 16
        elif hostname.startswith('e230-dgx2'):
            use_this = 6
        elif hostname.startswith('e230-dgxa100-'):
            use_this = 28
        elif hostname.startswith('lsf22-gpu'):
            use_this = 28
        elif hostname.startswith('hdf19-gpu') or hostname.startswith('e071-gpu'):
            use_this = 12
        else:
            use_this = 12  # default value
    # never exceed the number of available CPU threads
    use_this = min(use_this, os.cpu_count())
    return use_this
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/file_path_utilities.py
================================================
from multiprocessing import Pool
from typing import Union, Tuple
import numpy as np
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.configuration import default_num_processes
from nnunetv2.paths import nnUNet_results
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
def convert_trainer_plans_config_to_identifier(trainer_name, plans_identifier, configuration):
    """Join trainer, plans and configuration into the canonical
    'trainer__plans__configuration' folder identifier."""
    return '{}__{}__{}'.format(trainer_name, plans_identifier, configuration)
def convert_identifier_to_trainer_plans_config(identifier: str):
    """Split a (possibly path-qualified) 'trainer__plans__configuration'
    identifier back into its three components."""
    folder_name = os.path.basename(identifier)
    return folder_name.split('__')
def get_output_folder(dataset_name_or_id: Union[str, int], trainer_name: str = 'nnUNetTrainer',
                      plans_identifier: str = 'nnUNetPlans', configuration: str = '3d_fullres',
                      fold: Union[str, int] = None) -> str:
    """
    Build the results folder for a trained model:
    nnUNet_results/DatasetXXX_name/trainer__plans__configuration[/fold_X]
    The fold subfolder is only appended when fold is not None.
    """
    dataset_folder = maybe_convert_to_dataset_name(dataset_name_or_id)
    model_folder = convert_trainer_plans_config_to_identifier(trainer_name, plans_identifier, configuration)
    output = join(nnUNet_results, dataset_folder, model_folder)
    if fold is not None:
        output = join(output, f'fold_{fold}')
    return output
def parse_dataset_trainer_plans_configuration_from_path(path: str):
    """
    Extract (dataset_name, trainer, plans_identifier, configuration) from a
    path into an nnU-Net results tree. Works with and without a trailing
    'fold_X' component.

    NOTE(review): when neither heuristic below matches (e.g. a fold_X folder is
    present but the folder two levels up does not start with 'Dataset'), the
    function falls through without an explicit return and yields None —
    callers should be prepared for that.
    """
    folders = split_path(path)
    # this here can be a little tricky because we are making assumptions. Let's hope this never fails lol
    # safer to make this depend on two conditions, the fold_x and the DatasetXXX
    # first let's see if some fold_X is present
    fold_x_present = [i.startswith('fold_') for i in folders]
    if any(fold_x_present):
        idx = fold_x_present.index(True)
        # OK now two entries before that there should be DatasetXXX
        assert len(folders[:idx]) >= 2, 'Bad path, cannot extract what I need. Your path needs to be at least ' \
                                        'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'
        if folders[idx - 2].startswith('Dataset'):
            split = folders[idx - 1].split('__')
            assert len(split) == 3, 'Bad path, cannot extract what I need. Your path needs to be at least ' \
                                    'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'
            return folders[idx - 2], *split
    else:
        # we can only check for dataset followed by a string that is separable into three strings by splitting with '__'
        # look for DatasetXXX
        dataset_folder = [i.startswith('Dataset') for i in folders]
        if any(dataset_folder):
            idx = dataset_folder.index(True)
            assert len(folders) >= (idx + 1), 'Bad path, cannot extract what I need. Your path needs to be at least ' \
                                              'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'
            split = folders[idx + 1].split('__')
            assert len(split) == 3, 'Bad path, cannot extract what I need. Your path needs to be at least ' \
                                    'DatasetXXX/MODULE__PLANS__CONFIGURATION for this to work'
            return folders[idx], *split
def get_ensemble_name(model1_folder, model2_folder, folds: Tuple[int, ...]):
    """Canonical folder name for an ensemble of two models evaluated on the
    given folds: 'ensemble___<model1>___<model2>___<folds>'."""
    parts = ('ensemble', os.path.basename(model1_folder), os.path.basename(model2_folder),
             folds_tuple_to_string(folds))
    return '___'.join(parts)
def get_ensemble_name_from_d_tr_c(dataset, tr1, p1, c1, tr2, p2, c2, folds: Tuple[int, ...]):
    """
    Convenience wrapper around get_ensemble_name that first resolves the two
    model output folders from (trainer, plans, configuration) triples.

    Bugfix: the previous version computed the ensemble name but never returned
    it, so the function always returned None.
    """
    model1_folder = get_output_folder(dataset, tr1, p1, c1)
    model2_folder = get_output_folder(dataset, tr2, p2, c2)
    return get_ensemble_name(model1_folder, model2_folder, folds)
def convert_ensemble_folder_to_model_identifiers_and_folds(ensemble_folder: str):
    """Invert get_ensemble_name: return ([model_identifier_1, model_identifier_2],
    folds_string) parsed from the ensemble folder name."""
    components = os.path.basename(ensemble_folder).split('___')
    # components[0] is the 'ensemble' prefix, the last entry is the folds string
    return components[1:-1], components[-1]
def folds_tuple_to_string(folds: Union[List[int], Tuple[int, ...]]):
    """Render a sequence of folds as 'f0_f1_...' (entries may be ints or 'all')."""
    return '_'.join(str(fold) for fold in folds)
def folds_string_to_tuple(folds_string: str):
    """Parse 'f0_f1_...' back into a list; numeric entries become ints, others
    (e.g. 'all') stay strings. (Despite the name, a list is returned.)"""
    parsed = []
    for token in folds_string.split('_'):
        try:
            parsed.append(int(token))
        except ValueError:
            parsed.append(token)
    return parsed
def check_workers_alive_and_busy(export_pool: Pool, worker_list: List, results_list: List, allowed_num_queued: int = 0):
    """
    Return True when the export pool is saturated, i.e. the number of pending
    (not-ready) results reaches the number of pool workers plus
    allowed_num_queued. Raises RuntimeError if any background worker has died.
    """
    if not all(worker.is_alive() for worker in worker_list):
        raise RuntimeError('Some background workers are no longer alive')
    num_pending = sum(not result.ready() for result in results_list)
    return num_pending >= len(export_pool._pool) + allowed_num_queued
if __name__ == '__main__':
    ### quick smoke tests for parse_dataset_trainer_plans_configuration_from_path
    example_paths = [
        '/home/fabian/results/nnUNet_remake/Dataset002_Heart/nnUNetModule__nnUNetPlans__3d_fullres',
        'Dataset002_Heart/nnUNetModule__nnUNetPlans__3d_fullres',
        '/home/fabian/results/nnUNet_remake/Dataset002_Heart/nnUNetModule__nnUNetPlans__3d_fullres/fold_all',
    ]
    for path in example_paths:
        print(parse_dataset_trainer_plans_configuration_from_path(path))
    # a path without the model identifier must trip the assertion
    try:
        path = '/home/fabian/results/nnUNet_remake/Dataset002_Heart/'
        print(parse_dataset_trainer_plans_configuration_from_path(path))
    except AssertionError:
        print('yayy, assertion works')
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/find_class_by_name.py
================================================
import importlib
import pkgutil
from batchgenerators.utilities.file_and_folder_operations import *
def recursive_find_python_class(folder: str, class_name: str, current_module: str):
    """
    Search `folder` (importable as `current_module`) for a class named
    `class_name`: first in the package's direct modules, then depth-first in
    its subpackages. Returns the class object, or None if not found.
    """
    # pass 1: plain modules directly inside this package
    for _importer, module_name, is_pkg in pkgutil.iter_modules([folder]):
        if is_pkg:
            continue
        module = importlib.import_module(f"{current_module}.{module_name}")
        if hasattr(module, class_name):
            return getattr(module, class_name)
    # pass 2: recurse into subpackages
    for _importer, module_name, is_pkg in pkgutil.iter_modules([folder]):
        if not is_pkg:
            continue
        found = recursive_find_python_class(join(folder, module_name), class_name,
                                            current_module=f"{current_module}.{module_name}")
        if found is not None:
            return found
    return None
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/get_network_from_plans.py
================================================
from dynamic_network_architectures.architectures.unet import PlainConvUNet, ResidualEncoderUNet
from dynamic_network_architectures.building_blocks.helper import get_matching_instancenorm, convert_dim_to_conv_op
from dynamic_network_architectures.initialization.weight_init import init_last_bn_before_add_to_0
from nnunetv2.utilities.network_initialization import InitWeights_He
from nnunetv2.utilities.plans_handling.plans_handler import ConfigurationManager, PlansManager
from torch import nn
def get_network_from_plans(plans_manager: PlansManager,
                           dataset_json: dict,
                           configuration_manager: ConfigurationManager,
                           num_input_channels: int,
                           deep_supervision: bool = True):
    """
    Instantiate the U-Net described by the plans/configuration and initialize
    its weights (He init; residual encoders additionally get their last norm
    before each residual add zero-initialized).

    we may have to change this in the future to accommodate other plans -> network mappings

    num_input_channels can differ depending on whether we do cascade. Its best to make this info available in the
    trainer rather than inferring it again from the plans here.
    """
    # one network stage per conv kernel size entry; 2d vs 3d follows from the
    # length of a single kernel size
    num_stages = len(configuration_manager.conv_kernel_sizes)

    dim = len(configuration_manager.conv_kernel_sizes[0])
    conv_op = convert_dim_to_conv_op(dim)

    label_manager = plans_manager.get_label_manager(dataset_json)

    segmentation_network_class_name = configuration_manager.UNet_class_name
    # only these two architectures are supported by the plans -> network mapping
    mapping = {
        'PlainConvUNet': PlainConvUNet,
        'ResidualEncoderUNet': ResidualEncoderUNet
    }
    # shared defaults: instance norm, leaky ReLU, no dropout
    kwargs = {
        'PlainConvUNet': {
            'conv_bias': True,
            'norm_op': get_matching_instancenorm(conv_op),
            'norm_op_kwargs': {'eps': 1e-5, 'affine': True},
            'dropout_op': None, 'dropout_op_kwargs': None,
            'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},
        },
        'ResidualEncoderUNet': {
            'conv_bias': True,
            'norm_op': get_matching_instancenorm(conv_op),
            'norm_op_kwargs': {'eps': 1e-5, 'affine': True},
            'dropout_op': None, 'dropout_op_kwargs': None,
            'nonlin': nn.LeakyReLU, 'nonlin_kwargs': {'inplace': True},
        }
    }
    assert segmentation_network_class_name in mapping.keys(), 'The network architecture specified by the plans file ' \
                                                              'is non-standard (maybe your own?). Yo\'ll have to dive ' \
                                                              'into either this ' \
                                                              'function (get_network_from_plans) or ' \
                                                              'the init of your nnUNetModule to accommodate that.'
    network_class = mapping[segmentation_network_class_name]
    # residual encoders name the encoder depth argument differently
    conv_or_blocks_per_stage = {
        'n_conv_per_stage'
        if network_class != ResidualEncoderUNet else 'n_blocks_per_stage': configuration_manager.n_conv_per_stage_encoder,
        'n_conv_per_stage_decoder': configuration_manager.n_conv_per_stage_decoder
    }
    # network class name!!
    model = network_class(
        input_channels=num_input_channels,
        n_stages=num_stages,
        # feature count doubles per stage, capped at unet_max_num_features
        features_per_stage=[min(configuration_manager.UNet_base_num_features * 2 ** i,
                                configuration_manager.unet_max_num_features) for i in range(num_stages)],
        conv_op=conv_op,
        kernel_sizes=configuration_manager.conv_kernel_sizes,
        strides=configuration_manager.pool_op_kernel_sizes,
        num_classes=label_manager.num_segmentation_heads,
        deep_supervision=deep_supervision,
        **conv_or_blocks_per_stage,
        **kwargs[segmentation_network_class_name]
    )
    model.apply(InitWeights_He(1e-2))
    if network_class == ResidualEncoderUNet:
        model.apply(init_last_bn_before_add_to_0)
    return model
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/helpers.py
================================================
import torch
def softmax_helper_dim0(x: torch.Tensor) -> torch.Tensor:
    """Softmax over dim 0 (the class/region axis for (c, x, y(, z)) tensors)."""
    return x.softmax(dim=0)
def softmax_helper_dim1(x: torch.Tensor) -> torch.Tensor:
    """Softmax over dim 1 (the class axis for batched (b, c, ...) tensors)."""
    return x.softmax(dim=1)
def empty_cache(device: torch.device):
    """Release cached allocator memory on the given device. No-op for CPU (and
    any device type other than cuda/mps)."""
    if device.type == 'cuda':
        torch.cuda.empty_cache()
        return
    if device.type == 'mps':
        # imported lazily: torch.mps is not available in all builds
        from torch import mps
        mps.empty_cache()
class dummy_context(object):
    """A no-op context manager (stand-in for contextlib.nullcontext), useful
    when a `with` statement is required but nothing needs managing."""

    def __enter__(self):
        # nothing to set up; `with dummy_context() as x:` binds x to None
        return None

    def __exit__(self, exc_type, exc_val, exc_tb):
        # returning None does not suppress exceptions
        return None
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/json_export.py
================================================
from collections.abc import Iterable
import numpy as np
import torch
def recursive_fix_for_json_export(my_dict: dict):
    """
    In-place conversion of numpy/torch values inside `my_dict` so the dict can
    be serialized with the stdlib json module ('cannot serialize object of
    type bool_/int64/float64').

    - numpy integer keys become python ints
    - numpy scalar values (bool_/integer/floating) become bool/int/float
    - 1d ndarrays become lists; lists/tuples are converted element-wise
    - torch.device values become strings
    - nested dicts are fixed recursively; anything else is left as-is

    Generalized over the previous version: all numpy integer/floating widths
    are handled via the np.integer/np.floating abstract base classes (the old
    explicit tuples missed e.g. np.int16, np.uint32).
    """
    keys = list(my_dict.keys())  # cannot iterate over keys() if we change keys....
    for k in keys:
        if isinstance(k, np.integer):
            # re-insert under a plain python int key
            tmp = my_dict[k]
            del my_dict[k]
            my_dict[int(k)] = tmp
            del tmp
            k = int(k)

        if isinstance(my_dict[k], dict):
            recursive_fix_for_json_export(my_dict[k])
        elif isinstance(my_dict[k], np.ndarray):
            assert my_dict[k].ndim == 1, 'only 1d arrays are supported'
            my_dict[k] = fix_types_iterable(my_dict[k], output_type=list)
        elif isinstance(my_dict[k], (np.bool_,)):
            my_dict[k] = bool(my_dict[k])
        elif isinstance(my_dict[k], np.integer):
            my_dict[k] = int(my_dict[k])
        elif isinstance(my_dict[k], np.floating):
            my_dict[k] = float(my_dict[k])
        elif isinstance(my_dict[k], list):
            my_dict[k] = fix_types_iterable(my_dict[k], output_type=type(my_dict[k]))
        elif isinstance(my_dict[k], tuple):
            my_dict[k] = fix_types_iterable(my_dict[k], output_type=tuple)
        elif isinstance(my_dict[k], torch.device):
            my_dict[k] = str(my_dict[k])
        else:
            pass  # pray it can be serialized
def fix_types_iterable(iterable, output_type):
    """
    Convert the elements of `iterable` to json-serializable python types and
    return them as `output_type` (e.g. list or tuple).

    numpy scalars become python bool/int/float (all widths, via the abstract
    np.bool_/np.integer/np.floating checks — the old exact type() comparisons
    missed e.g. np.int16), dicts are fixed in place via
    recursive_fix_for_json_export, nested iterables are converted recursively,
    strings pass through unchanged, and anything else is kept as-is.

    NOTE: hacky and nnU-Net-specific; keep your hands off of this for anything
    outside nnU-Net.
    """
    out = []
    for i in iterable:
        if isinstance(i, np.bool_):
            # must be checked before the integer branch so bools stay bools
            out.append(bool(i))
        elif isinstance(i, np.integer):
            out.append(int(i))
        elif isinstance(i, dict):
            recursive_fix_for_json_export(i)
            out.append(i)
        elif isinstance(i, np.floating):
            out.append(float(i))
        elif isinstance(i, str):
            out.append(i)
        elif isinstance(i, Iterable):
            # print('recursive call on', i, type(i))
            out.append(fix_types_iterable(i, type(i)))
        else:
            out.append(i)
    return output_type(out)
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/label_handling/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/label_handling/label_handling.py
================================================
from __future__ import annotations
from time import time
from typing import Union, List, Tuple, Type
import numpy as np
import torch
from acvl_utils.cropping_and_padding.bounding_boxes import bounding_box_to_slice
from batchgenerators.utilities.file_and_folder_operations import join
import nnunetv2
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
from nnunetv2.utilities.helpers import softmax_helper_dim0
from typing import TYPE_CHECKING
# see https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
if TYPE_CHECKING:
from nnunetv2.utilities.plans_handling.plans_handler import PlansManager, ConfigurationManager
class LabelManager(object):
    """
    Interprets a dataset's label_dict: plain label-based training vs
    region-based training (where one output channel represents a group of
    label values), optional 'ignore' label handling, and the inference
    nonlinearity (sigmoid for regions, softmax over the channel axis
    otherwise).
    """

    def __init__(self, label_dict: dict, regions_class_order: Union[List[int], None], force_use_labels: bool = False,
                 inference_nonlin=None):
        """
        :param label_dict: maps label name -> int value, or -> tuple/list of
            values for region-based training. Must contain 'background' -> 0.
        :param regions_class_order: for region-based training, the label value
            written for each region (in order) when converting probabilities
            back to a segmentation map. May be None otherwise.
        :param force_use_labels: treat the dataset as label-based even when
            label_dict contains regions.
        :param inference_nonlin: override for the nonlinearity applied to
            logits; defaults to torch.sigmoid (regions) or softmax over dim 0.
        """
        self._sanity_check(label_dict)
        self.label_dict = label_dict
        self.regions_class_order = regions_class_order
        self._force_use_labels = force_use_labels

        if force_use_labels:
            self._has_regions = False
        else:
            # region-based iff any label maps to a multi-entry tuple/list
            self._has_regions: bool = any(
                [isinstance(i, (tuple, list)) and len(i) > 1 for i in self.label_dict.values()])

        self._ignore_label: Union[None, int] = self._determine_ignore_label()
        self._all_labels: List[int] = self._get_all_labels()

        self._regions: Union[None, List[Union[int, Tuple[int, ...]]]] = self._get_regions()

        if self.has_ignore_label:
            assert self.ignore_label == max(
                self.all_labels) + 1, 'If you use the ignore label it must have the highest ' \
                                      'label value! It cannot be 0 or in between other labels. ' \
                                      'Sorry bro.'

        if inference_nonlin is None:
            self.inference_nonlin = torch.sigmoid if self.has_regions else softmax_helper_dim0
        else:
            self.inference_nonlin = inference_nonlin

    def _sanity_check(self, label_dict: dict):
        """Validate that label_dict declares a scalar background label of 0."""
        if not 'background' in label_dict.keys():
            raise RuntimeError('Background label not declared (remember that this should be label 0!)')
        bg_label = label_dict['background']
        if isinstance(bg_label, (tuple, list)):
            raise RuntimeError(f"Background label must be 0. Not a list. Not a tuple. Your background label: {bg_label}")
        assert int(bg_label) == 0, f"Background label must be 0. Your background label: {bg_label}"
        # not sure if we want to allow regions that contain background. I don't immediately see how this could cause
        # problems so we allow it for now. That doesn't mean that this is explicitly supported. It could be that this
        # just crashes.

    def _get_all_labels(self) -> List[int]:
        """Return all integer label values (flattening regions), sorted and
        deduplicated; the 'ignore' entry is excluded."""
        all_labels = []
        for k, r in self.label_dict.items():
            # ignore label is not going to be used, hence the name. Duh.
            if k == 'ignore':
                continue
            if isinstance(r, (tuple, list)):
                for ri in r:
                    all_labels.append(int(ri))
            else:
                all_labels.append(int(r))
        all_labels = list(np.unique(all_labels))
        all_labels.sort()
        return all_labels

    def _get_regions(self) -> Union[None, List[Union[int, Tuple[int, ...]]]]:
        """Return the list of regions (ints or tuples of ints) in label_dict
        order, or None when training is label-based. Background-only regions
        and the 'ignore' entry are skipped."""
        if not self._has_regions or self._force_use_labels:
            return None
        else:
            assert self.regions_class_order is not None, 'if region-based training is requested then you need to ' \
                                                         'define regions_class_order!'
            regions = []
            for k, r in self.label_dict.items():
                # ignore ignore label
                if k == 'ignore':
                    continue
                # ignore regions that are background
                if (np.isscalar(r) and r == 0) \
                        or \
                        (isinstance(r, (tuple, list)) and len(np.unique(r)) == 1 and np.unique(r)[0] == 0):
                    continue
                if isinstance(r, list):
                    r = tuple(r)
                regions.append(r)
            assert len(self.regions_class_order) == len(regions), 'regions_class_order must have as ' \
                                                                  'many entries as there are ' \
                                                                  'regions'
            return regions

    def _determine_ignore_label(self) -> Union[None, int]:
        """Return the integer 'ignore' label from label_dict, or None when not
        declared. Regions are not allowed as ignore labels."""
        ignore_label = self.label_dict.get('ignore')
        if ignore_label is not None:
            assert isinstance(ignore_label, int), f'Ignore label has to be an integer. It cannot be a region ' \
                                                  f'(list/tuple). Got {type(ignore_label)}.'
        return ignore_label

    @property
    def has_regions(self) -> bool:
        # True when region-based training is active (and not forced off)
        return self._has_regions

    @property
    def has_ignore_label(self) -> bool:
        # True when label_dict declared an 'ignore' entry
        return self.ignore_label is not None

    @property
    def all_regions(self) -> Union[None, List[Union[int, Tuple[int, ...]]]]:
        # None for label-based training
        return self._regions

    @property
    def all_labels(self) -> List[int]:
        # sorted unique integer labels, ignore label excluded
        return self._all_labels

    @property
    def ignore_label(self) -> Union[None, int]:
        return self._ignore_label

    def apply_inference_nonlin(self, logits: Union[np.ndarray, torch.Tensor]) -> \
            Union[np.ndarray, torch.Tensor]:
        """
        Apply the configured inference nonlinearity (sigmoid/softmax) to raw
        network outputs. numpy input is converted to a torch.Tensor first.

        logits has to have shape (c, x, y(, z)) where c is the number of classes/regions
        """
        if isinstance(logits, np.ndarray):
            logits = torch.from_numpy(logits)

        with torch.no_grad():
            # softmax etc is not implemented for half
            logits = logits.float()
            probabilities = self.inference_nonlin(logits)

        return probabilities

    def convert_probabilities_to_segmentation(self, predicted_probabilities: Union[np.ndarray, torch.Tensor]) -> \
            Union[np.ndarray, torch.Tensor]:
        """
        Turn per-channel probabilities into a segmentation map: argmax over
        channels for label-based training, or thresholding each region at 0.5
        and writing regions_class_order values (later regions overwrite
        earlier ones) for region-based training.

        assumes that inference_nonlinearity was already applied!

        predicted_probabilities has to have shape (c, x, y(, z)) where c is the number of classes/regions
        """
        if not isinstance(predicted_probabilities, (np.ndarray, torch.Tensor)):
            raise RuntimeError(f"Unexpected input type. Expected np.ndarray or torch.Tensor,"
                               f" got {type(predicted_probabilities)}")

        if self.has_regions:
            assert self.regions_class_order is not None, 'if region-based training is requested then you need to ' \
                                                         'define regions_class_order!'
        # check correct number of outputs
        assert predicted_probabilities.shape[0] == self.num_segmentation_heads, \
            f'unexpected number of channels in predicted_probabilities. Expected {self.num_segmentation_heads}, ' \
            f'got {predicted_probabilities.shape[0]}. Remember that predicted_probabilities should have shape ' \
            f'(c, x, y(, z)).'

        if self.has_regions:
            if isinstance(predicted_probabilities, np.ndarray):
                segmentation = np.zeros(predicted_probabilities.shape[1:], dtype=np.uint16)
            else:
                # no uint16 in torch
                segmentation = torch.zeros(predicted_probabilities.shape[1:], dtype=torch.int16,
                                           device=predicted_probabilities.device)
            for i, c in enumerate(self.regions_class_order):
                segmentation[predicted_probabilities[i] > 0.5] = c
        else:
            segmentation = predicted_probabilities.argmax(0)

        return segmentation

    def convert_logits_to_segmentation(self, predicted_logits: Union[np.ndarray, torch.Tensor]) -> \
            Union[np.ndarray, torch.Tensor]:
        """Nonlinearity + probabilities->segmentation in one step; numpy input
        yields numpy output."""
        input_is_numpy = isinstance(predicted_logits, np.ndarray)
        probabilities = self.apply_inference_nonlin(predicted_logits)
        if input_is_numpy and isinstance(probabilities, torch.Tensor):
            probabilities = probabilities.cpu().numpy()
        return self.convert_probabilities_to_segmentation(probabilities)

    def revert_cropping_on_probabilities(self, predicted_probabilities: Union[torch.Tensor, np.ndarray],
                                         bbox: List[List[int]],
                                         original_shape: Union[List[int], Tuple[int, ...]]):
        """
        Pad cropped probabilities back to original_shape, placing them at bbox.

        ONLY USE THIS WITH PROBABILITIES, DO NOT USE LOGITS AND DO NOT USE FOR SEGMENTATION MAPS!!!

        predicted_probabilities must be (c, x, y(, z))

        Why do we do this here? Well if we pad probabilities we need to make sure that convert_logits_to_segmentation
        correctly returns background in the padded areas. Also we want to ba able to look at the padded probabilities
        and not have strange artifacts.
        Only LabelManager knows how this needs to be done. So let's let him/her do it, ok?
        """
        # revert cropping
        probs_reverted_cropping = np.zeros((predicted_probabilities.shape[0], *original_shape),
                                           dtype=predicted_probabilities.dtype) \
            if isinstance(predicted_probabilities, np.ndarray) else \
            torch.zeros((predicted_probabilities.shape[0], *original_shape), dtype=predicted_probabilities.dtype)

        if not self.has_regions:
            # label-based: padded area must be certain background (channel 0)
            probs_reverted_cropping[0] = 1

        slicer = bounding_box_to_slice(bbox)
        probs_reverted_cropping[tuple([slice(None)] + list(slicer))] = predicted_probabilities
        return probs_reverted_cropping

    @staticmethod
    def filter_background(classes_or_regions: Union[List[int], List[Union[int, Tuple[int, ...]]]]):
        """Drop background entries: scalar 0 and regions consisting only of 0s."""
        # heck yeah
        # This is definitely taking list comprehension too far. Enjoy.
        return [i for i in classes_or_regions if
                ((not isinstance(i, (tuple, list))) and i != 0)
                or
                (isinstance(i, (tuple, list)) and not (
                        len(np.unique(i)) == 1 and np.unique(i)[0] == 0))]

    @property
    def foreground_regions(self):
        # all_regions minus background-only entries; None-safe only for regions mode
        return self.filter_background(self.all_regions)

    @property
    def foreground_labels(self):
        # all_labels minus the background label 0
        return self.filter_background(self.all_labels)

    @property
    def num_segmentation_heads(self):
        # number of network output channels: one per foreground region, or one
        # per label (including background) for label-based training
        if self.has_regions:
            return len(self.foreground_regions)
        else:
            return len(self.all_labels)
def get_labelmanager_class_from_plans(plans: dict) -> Type[LabelManager]:
    """Return the label manager class named by the plans' 'label_manager' key,
    falling back to the default LabelManager when the key is absent."""
    if 'label_manager' not in plans.keys():
        print('No label manager specified in plans. Using default: LabelManager')
        return LabelManager
    # look the named class up inside nnunetv2.utilities.label_handling
    search_folder = join(nnunetv2.__path__[0], "utilities", "label_handling")
    return recursive_find_python_class(search_folder, plans['label_manager'],
                                       current_module="nnunetv2.utilities.label_handling")
def convert_labelmap_to_one_hot(segmentation: Union[np.ndarray, torch.Tensor],
                                all_labels: Union[List, torch.Tensor, np.ndarray, tuple],
                                output_dtype=None) -> Union[np.ndarray, torch.Tensor]:
    """
    One-hot encode a label map along a new leading channel axis.

    The output has shape (len(all_labels), *segmentation.shape). When
    output_dtype is None, uint8 is used (np.uint8 / torch.uint8). torch input
    stays on its device; LongTensor input is fastest because scatter_ needs
    long indices. np.ndarray input is generally faster than torch.Tensor.

    IMPORTANT: labels must be consecutive integers starting at 0
    (0, 1, 2, ...) — do NOT use this with sparse labels like 0, 32, 255.
    """
    if isinstance(segmentation, torch.Tensor):
        onehot = torch.zeros((len(all_labels), *segmentation.shape),
                             dtype=output_dtype if output_dtype is not None else torch.uint8,
                             device=segmentation.device)
        # scatter_ writes a 1 at [label, *voxel] for every voxel (fastest torch
        # variant; indices must be long)
        onehot.scatter_(0, segmentation[None].long(), 1)
        return onehot
    onehot = np.zeros((len(all_labels), *segmentation.shape),
                      dtype=output_dtype if output_dtype is not None else np.uint8)
    # per-label boolean comparison: fastest numpy variant in testing
    for channel, label in enumerate(all_labels):
        onehot[channel] = segmentation == label
    return onehot
def determine_num_input_channels(plans_manager: PlansManager,
                                 configuration_or_config_manager: Union[str, ConfigurationManager],
                                 dataset_json: dict) -> int:
    """
    Number of input channels the network sees: the number of image modalities
    (from dataset_json 'modality' or 'channel_names'), plus one extra channel
    per foreground label when this configuration is a cascade stage (the
    previous stage's segmentation is concatenated to the input).
    """
    if isinstance(configuration_or_config_manager, str):
        config_manager = plans_manager.get_configuration(configuration_or_config_manager)
    else:
        config_manager = configuration_or_config_manager

    label_manager = plans_manager.get_label_manager(dataset_json)

    if 'modality' in dataset_json.keys():
        num_modalities = len(dataset_json['modality'])
    else:
        num_modalities = len(dataset_json['channel_names'])

    if config_manager.previous_stage_name is None:
        return num_modalities
    # cascade has different number of input channels
    return num_modalities + len(label_manager.foreground_labels)
if __name__ == '__main__':
    # micro-benchmark comparing the numpy and torch one-hot variants and
    # verifying they agree
    num_labels = 7
    seg = np.random.randint(0, num_labels, size=(256, 256, 256), dtype=np.uint8)
    seg_torch = torch.from_numpy(seg)
    labels = np.arange(num_labels)

    timestamps = [time()]
    onehot_npy = convert_labelmap_to_one_hot(seg, labels)
    timestamps.append(time())
    onehot_npy2 = convert_labelmap_to_one_hot(seg, labels)
    timestamps.append(time())
    onehot_torch = convert_labelmap_to_one_hot(seg_torch, labels)
    timestamps.append(time())
    onehot_torch2 = convert_labelmap_to_one_hot(seg_torch, labels)
    timestamps.append(time())

    durations = [later - earlier for earlier, later in zip(timestamps, timestamps[1:])]
    print(
        f'np: {durations[0]}, np2: {durations[1]}, torch: {durations[2]}, torch2: {durations[3]}')
    onehot_torch = onehot_torch.numpy()
    onehot_torch2 = onehot_torch2.numpy()
    print(np.all(onehot_torch == onehot_npy))
    print(np.all(onehot_torch2 == onehot_npy))
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/network_initialization.py
================================================
from torch import nn
class InitWeights_He(object):
    """Weight initializer callable, intended for ``nn.Module.apply``.

    Applies Kaiming/He normal initialization to the weights of 2d/3d
    (transposed) convolutions and zeros their biases. Other module types
    are left untouched.
    """

    # module types this initializer acts on
    _CONV_TYPES = (nn.Conv2d, nn.Conv3d, nn.ConvTranspose2d, nn.ConvTranspose3d)

    def __init__(self, neg_slope=1e-2):
        # negative slope of the leaky ReLU assumed by the kaiming init
        self.neg_slope = neg_slope

    def __call__(self, module):
        if isinstance(module, self._CONV_TYPES):
            module.weight = nn.init.kaiming_normal_(module.weight, a=self.neg_slope)
            if module.bias is not None:
                module.bias = nn.init.constant_(module.bias, 0)
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/overlay_plots.py
================================================
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import multiprocessing
from multiprocessing.pool import Pool
from typing import Tuple, Union
import numpy as np
import pandas as pd
from batchgenerators.utilities.file_and_folder_operations import *
from nnunetv2.configuration import default_num_processes
from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
from nnunetv2.imageio.reader_writer_registry import determine_reader_writer_from_dataset_json
from nnunetv2.paths import nnUNet_raw, nnUNet_preprocessed
from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name
from nnunetv2.utilities.utils import get_identifiers_from_splitted_dataset_folder, \
get_filenames_of_train_images_and_targets
# Hex RGB colours used to tint the segmentation labels in the overlays.
# Index 0 (black) is used for the background label, so overlaying it leaves
# the image unchanged.
color_cycle = (
    "000000",
    "4363d8",
    "f58231",
    "3cb44b",
    "e6194B",
    "911eb4",
    "ffe119",
    "bfef45",
    "42d4f4",
    "f032e6",
    "000075",
    "9A6324",
    "808000",
    "800000",
    "469990",
)
def hex_to_rgb(hex: str):
    """Convert a 6-character hex colour string (e.g. ``"4363d8"``) into an (r, g, b) tuple of ints."""
    assert len(hex) == 6
    return tuple(int(hex[2 * c:2 * c + 2], 16) for c in range(3))
def generate_overlay(input_image: np.ndarray, segmentation: np.ndarray, mapping: dict = None,
                     color_cycle: Tuple[str, ...] = color_cycle,
                     overlay_intensity: float = 0.6):
    """
    image can be 2d greyscale or 2d RGB (color channel in last dimension!)
    Segmentation must be label map of same shape as image (w/o color channels)
    mapping can be label_id -> idx_in_cycle or None
    returned image is scaled to [0, 255] (uint8)!!!
    """
    # work on a copy, brought into (H, W, 3) shape
    image = np.copy(input_image)
    if image.ndim == 2:
        image = np.tile(image[:, :, None], (1, 1, 3))
    elif image.ndim == 3:
        if image.shape[2] != 1:
            raise RuntimeError(f'if 3d image is given the last dimension must be the color channels (3 channels). '
                               f'Only 2D images are supported. Your image shape: {image.shape}')
        image = np.tile(image, (1, 1, 3))
    else:
        raise RuntimeError("unexpected image shape. only 2D images and 2D images with color channels (color in "
                           "last dimension) are supported")

    # rescale image to [0, 255]
    image = image - image.min()
    image = image / image.max() * 255

    if mapping is None:
        # assign colour indices by sorted order of the labels present
        uniques = np.sort(pd.unique(segmentation.ravel()))  # np.unique(segmentation)
        mapping = {label: idx for idx, label in enumerate(uniques)}

    # additively blend each label's colour into the image
    for label, idx in mapping.items():
        image[segmentation == label] += overlay_intensity * np.array(hex_to_rgb(color_cycle[idx]))

    # rescale result to [0, 255]
    image = image / image.max() * 255
    return image.astype(np.uint8)
def select_slice_to_plot(image: np.ndarray, segmentation: np.ndarray) -> int:
    """
    image and segmentation are expected to be 3D

    Selects the slice (along axis 0) containing the largest number of
    foreground voxels, regardless of which label they belong to.
    image is passed so that this function can easily be replaced if needed.
    """
    fg_voxels_per_slice = (segmentation != 0).sum((1, 2))
    return int(np.argmax(fg_voxels_per_slice))
def select_slice_to_plot2(image: np.ndarray, segmentation: np.ndarray) -> int:
    """
    image and segmentation are expected to be 3D (or 1, x, y)

    Selects the slice (along axis 0) in which the foreground classes are best
    represented: for each class, compute the fraction of that class' voxels
    that falls into each slice, then pick the slice with the highest average
    fraction over all classes.
    image is passed so that this function can easily be replaced if needed.
    """
    classes = [i for i in np.sort(pd.unique(segmentation.ravel())) if i != 0]
    if len(classes) == 0:
        # no foreground at all -> every slice is equally uninteresting. The previous
        # implementation produced a nan-mean RuntimeWarning here and returned 0 anyway.
        return 0
    fg_fraction_per_slice = np.zeros((image.shape[0], len(classes)))
    for i, c in enumerate(classes):
        fg_mask = segmentation == c
        fg_fraction_per_slice[:, i] = fg_mask.sum((1, 2))
        # normalize by THIS class' total voxel count. The original divided by the
        # running sum of the whole array (which already contained the normalized
        # earlier columns), systematically down-weighting later classes.
        fg_fraction_per_slice[:, i] /= fg_fraction_per_slice[:, i].sum()
    return int(np.argmax(fg_fraction_per_slice.mean(1)))
def plot_overlay(image_file: str, segmentation_file: str, image_reader_writer: BaseReaderWriter, output_file: str,
                 overlay_intensity: float = 0.6):
    """Read one image/segmentation pair, pick the most informative slice and save a png overlay.

    image_file/segmentation_file are read via the given reader/writer; the
    overlay is written to output_file with matplotlib's imsave.
    Only 3D single-channel images are supported.
    """
    # lazy import so that importing this module does not require matplotlib
    import matplotlib.pyplot as plt
    image, props = image_reader_writer.read_images((image_file, ))
    image = image[0]  # first (and only) channel
    seg, props_seg = image_reader_writer.read_seg(segmentation_file)
    seg = seg[0]
    assert image.shape == seg.shape, "image and seg do not have the same shape: %s, %s" % (
        image_file, segmentation_file)
    assert image.ndim == 3, 'only 3D images/segs are supported'
    selected_slice = select_slice_to_plot2(image, seg)
    # print(image.shape, selected_slice)
    overlay = generate_overlay(image[selected_slice], seg[selected_slice], overlay_intensity=overlay_intensity)
    plt.imsave(output_file, overlay)
def plot_overlay_preprocessed(case_file: str, output_file: str, overlay_intensity: float = 0.6, channel_idx=0):
    """Save a png overlay for one preprocessed (.npz) training case.

    case_file is an npz archive with 'data' (c, x, y, z) and 'seg' (1, x, y, z)
    arrays; channel_idx selects which data channel is plotted.
    """
    # lazy import so that importing this module does not require matplotlib
    import matplotlib.pyplot as plt
    # open the archive once; the original called np.load(case_file) twice,
    # reading and decompressing the file twice
    with np.load(case_file) as npz:
        data = npz['data']
        seg = npz['seg'][0]
    assert channel_idx < (data.shape[0]), 'This dataset only supports channel index up to %d' % (data.shape[0] - 1)
    image = data[channel_idx]
    seg[seg < 0] = 0  # negative values (ignore/outside region) are treated as background
    selected_slice = select_slice_to_plot2(image, seg)
    overlay = generate_overlay(image[selected_slice], seg[selected_slice], overlay_intensity=overlay_intensity)
    plt.imsave(output_file, overlay)
def multiprocessing_plot_overlay(list_of_image_files, list_of_seg_files, image_reader_writer,
                                 list_of_output_files, overlay_intensity,
                                 num_processes=8):
    """Render all raw-data overlays in parallel using a spawn-based process pool."""
    n = len(list_of_output_files)
    job_args = zip(list_of_image_files, list_of_seg_files, [image_reader_writer] * n,
                   list_of_output_files, [overlay_intensity] * n)
    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        # .get() blocks until all jobs are done and re-raises worker exceptions
        pool.starmap_async(plot_overlay, job_args).get()
def multiprocessing_plot_overlay_preprocessed(list_of_case_files, list_of_output_files, overlay_intensity,
                                              num_processes=8, channel_idx=0):
    """Render all preprocessed-data overlays in parallel using a spawn-based process pool."""
    n = len(list_of_output_files)
    job_args = zip(list_of_case_files, list_of_output_files, [overlay_intensity] * n,
                   [channel_idx] * n)
    with multiprocessing.get_context("spawn").Pool(num_processes) as pool:
        # .get() blocks until all jobs are done and re-raises worker exceptions
        pool.starmap_async(plot_overlay_preprocessed, job_args).get()
def generate_overlays_from_raw(dataset_name_or_id: Union[int, str], output_folder: str,
                               num_processes: int = 8, channel_idx: int = 0, overlay_intensity: float = 0.6):
    """Create one overlay png per training case of a raw nnU-Net dataset.

    Reads image/label file names from dataset.json (or folder layout), then
    plots the most informative slice of each case in parallel.
    """
    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)
    folder = join(nnUNet_raw, dataset_name)
    dataset_json = load_json(join(folder, 'dataset.json'))
    dataset = get_filenames_of_train_images_and_targets(folder, dataset_json)
    # only the requested channel of each case is plotted
    image_files = [v['images'][channel_idx] for v in dataset.values()]
    seg_files = [v['label'] for v in dataset.values()]
    assert all([isfile(i) for i in image_files])
    assert all([isfile(i) for i in seg_files])
    maybe_mkdir_p(output_folder)
    output_files = [join(output_folder, i + '.png') for i in dataset.keys()]
    # reader/writer class is inferred from dataset.json and the first image file
    image_reader_writer = determine_reader_writer_from_dataset_json(dataset_json, image_files[0])()
    multiprocessing_plot_overlay(image_files, seg_files, image_reader_writer, output_files, overlay_intensity, num_processes)
def generate_overlays_from_preprocessed(dataset_name_or_id: Union[int, str], output_folder: str,
                                        num_processes: int = 8, channel_idx: int = 0,
                                        configuration: str = None,
                                        plans_identifier: str = 'nnUNetPlans',
                                        overlay_intensity: float = 0.6):
    """Create one overlay png per case of a preprocessed nnU-Net dataset.

    configuration defaults to '3d_fullres' when available, else '2d'.
    Raises RuntimeError when preprocessing has not been run for the dataset
    or the requested configuration.
    """
    dataset_name = maybe_convert_to_dataset_name(dataset_name_or_id)
    folder = join(nnUNet_preprocessed, dataset_name)
    if not isdir(folder): raise RuntimeError("run preprocessing for that task first")
    plans = load_json(join(folder, plans_identifier + '.json'))
    if configuration is None:
        # prefer the 3d full resolution configuration when the plans contain it
        if '3d_fullres' in plans['configurations'].keys():
            configuration = '3d_fullres'
        else:
            configuration = '2d'
    data_identifier = plans['configurations'][configuration]["data_identifier"]
    preprocessed_folder = join(folder, data_identifier)
    if not isdir(preprocessed_folder):
        raise RuntimeError(f"Preprocessed data folder for configuration {configuration} of plans identifier "
                           f"{plans_identifier} ({dataset_name}) does not exist. Run preprocessing for this "
                           f"configuration first!")
    # case identifiers = npz file names without the '.npz' ending
    identifiers = [i[:-4] for i in subfiles(preprocessed_folder, suffix='.npz', join=False)]
    output_files = [join(output_folder, i + '.png') for i in identifiers]
    image_files = [join(preprocessed_folder, i + ".npz") for i in identifiers]
    maybe_mkdir_p(output_folder)
    multiprocessing_plot_overlay_preprocessed(image_files, output_files, overlay_intensity=overlay_intensity,
                                              num_processes=num_processes, channel_idx=channel_idx)
def entry_point_generate_overlay():
    """CLI entry point: parse arguments and dispatch to the raw or preprocessed overlay generator."""
    import argparse
    parser = argparse.ArgumentParser("Plots png overlays of the slice with the most foreground. Note that this "
                                     "disregards spacing information!")
    parser.add_argument('-d', type=str, help="Dataset name or id", required=True)
    parser.add_argument('-o', type=str, help="output folder", required=True)
    parser.add_argument('-np', type=int, default=default_num_processes, required=False,
                        help=f"number of processes used. Default: {default_num_processes}")
    parser.add_argument('-channel_idx', type=int, default=0, required=False,
                        help="channel index used (0 = _0000). Default: 0")
    parser.add_argument('--use_raw', action='store_true', required=False, help="if set then we use raw data. else "
                                                                               "we use preprocessed")
    parser.add_argument('-p', type=str, required=False, default='nnUNetPlans',
                        help='plans identifier. Only used if --use_raw is not set! Default: nnUNetPlans')
    parser.add_argument('-c', type=str, required=False, default=None,
                        help='configuration name. Only used if --use_raw is not set! Default: None = '
                             '3d_fullres if available, else 2d')
    parser.add_argument('-overlay_intensity', type=float, required=False, default=0.6,
                        help='overlay intensity. Higher = brighter/less transparent')

    args = parser.parse_args()

    # raw mode reads from nnUNet_raw, preprocessed mode from nnUNet_preprocessed
    if args.use_raw:
        generate_overlays_from_raw(args.d, args.o, args.np, args.channel_idx,
                                   overlay_intensity=args.overlay_intensity)
    else:
        generate_overlays_from_preprocessed(args.d, args.o, args.np, args.channel_idx, args.c, args.p,
                                            overlay_intensity=args.overlay_intensity)


if __name__ == '__main__':
    entry_point_generate_overlay()
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/plans_handling/__init__.py
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/plans_handling/plans_handler.py
================================================
from __future__ import annotations

from copy import deepcopy
from functools import cached_property, lru_cache, partial
from typing import TYPE_CHECKING, Callable, List, Tuple, Type, Union

import numpy as np
import torch
from torch import nn

import dynamic_network_architectures
from batchgenerators.utilities.file_and_folder_operations import load_json, join

import nnunetv2
from nnunetv2.imageio.reader_writer_registry import recursive_find_reader_writer_by_name
from nnunetv2.preprocessing.resampling.utils import recursive_find_resampling_fn_by_name
from nnunetv2.utilities.find_class_by_name import recursive_find_python_class
from nnunetv2.utilities.label_handling.label_handling import get_labelmanager_class_from_plans

# see https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
if TYPE_CHECKING:
    from nnunetv2.utilities.label_handling.label_handling import LabelManager
    from nnunetv2.imageio.base_reader_writer import BaseReaderWriter
    from nnunetv2.preprocessing.preprocessors.default_preprocessor import DefaultPreprocessor
    from nnunetv2.experiment_planning.experiment_planners.default_experiment_planner import ExperimentPlanner
class ConfigurationManager(object):
    """Typed accessor around one configuration dict from the plans file.

    Plain dict entries are exposed as properties. Expensive lookups (resolving
    classes and resampling functions by name) are cached per instance with
    ``cached_property``. The previous ``@property`` + ``@lru_cache(maxsize=1)``
    combination kept instances alive in a global cache (ruff B019) and, with
    maxsize=1, thrashed as soon as two instances were accessed alternately.
    """

    def __init__(self, configuration_dict: dict):
        self.configuration = configuration_dict

    def __repr__(self):
        return self.configuration.__repr__()

    @property
    def data_identifier(self) -> str:
        return self.configuration['data_identifier']

    @property
    def preprocessor_name(self) -> str:
        return self.configuration['preprocessor_name']

    @cached_property
    def preprocessor_class(self) -> Type[DefaultPreprocessor]:
        # resolved once per instance by name lookup within nnunetv2.preprocessing
        preprocessor_class = recursive_find_python_class(join(nnunetv2.__path__[0], "preprocessing"),
                                                         self.preprocessor_name,
                                                         current_module="nnunetv2.preprocessing")
        return preprocessor_class

    @property
    def batch_size(self) -> int:
        return self.configuration['batch_size']

    @property
    def patch_size(self) -> List[int]:
        return self.configuration['patch_size']

    @property
    def median_image_size_in_voxels(self) -> List[int]:
        return self.configuration['median_image_size_in_voxels']

    @property
    def spacing(self) -> List[float]:
        return self.configuration['spacing']

    @property
    def normalization_schemes(self) -> List[str]:
        return self.configuration['normalization_schemes']

    @property
    def use_mask_for_norm(self) -> List[bool]:
        return self.configuration['use_mask_for_norm']

    @property
    def UNet_class_name(self) -> str:
        return self.configuration['UNet_class_name']

    @cached_property
    def UNet_class(self) -> Type[nn.Module]:
        # resolved once per instance by name lookup within dynamic_network_architectures
        unet_class = recursive_find_python_class(join(dynamic_network_architectures.__path__[0], "architectures"),
                                                 self.UNet_class_name,
                                                 current_module="dynamic_network_architectures.architectures")
        if unet_class is None:
            raise RuntimeError('The network architecture specified by the plans file '
                               'is non-standard (maybe your own?). Fix this by not using '
                               'ConfigurationManager.UNet_class to instantiate '
                               'it (probably just overwrite build_network_architecture of your trainer.')
        return unet_class

    @property
    def UNet_base_num_features(self) -> int:
        return self.configuration['UNet_base_num_features']

    @property
    def n_conv_per_stage_encoder(self) -> List[int]:
        return self.configuration['n_conv_per_stage_encoder']

    @property
    def n_conv_per_stage_decoder(self) -> List[int]:
        return self.configuration['n_conv_per_stage_decoder']

    @property
    def num_pool_per_axis(self) -> List[int]:
        return self.configuration['num_pool_per_axis']

    @property
    def pool_op_kernel_sizes(self) -> List[List[int]]:
        return self.configuration['pool_op_kernel_sizes']

    @property
    def conv_kernel_sizes(self) -> List[List[int]]:
        return self.configuration['conv_kernel_sizes']

    @property
    def unet_max_num_features(self) -> int:
        return self.configuration['unet_max_num_features']

    @cached_property
    def resampling_fn_data(self) -> Callable[
        [Union[torch.Tensor, np.ndarray],
         Union[Tuple[int, ...], List[int], np.ndarray],
         Union[Tuple[float, ...], List[float], np.ndarray],
         Union[Tuple[float, ...], List[float], np.ndarray]
         ],
        Union[torch.Tensor, np.ndarray]]:
        # resampling function for image data, with its kwargs pre-bound from the plans
        fn = recursive_find_resampling_fn_by_name(self.configuration['resampling_fn_data'])
        fn = partial(fn, **self.configuration['resampling_fn_data_kwargs'])
        return fn

    @cached_property
    def resampling_fn_probabilities(self) -> Callable[
        [Union[torch.Tensor, np.ndarray],
         Union[Tuple[int, ...], List[int], np.ndarray],
         Union[Tuple[float, ...], List[float], np.ndarray],
         Union[Tuple[float, ...], List[float], np.ndarray]
         ],
        Union[torch.Tensor, np.ndarray]]:
        # resampling function for predicted probabilities, with its kwargs pre-bound from the plans
        fn = recursive_find_resampling_fn_by_name(self.configuration['resampling_fn_probabilities'])
        fn = partial(fn, **self.configuration['resampling_fn_probabilities_kwargs'])
        return fn

    @cached_property
    def resampling_fn_seg(self) -> Callable[
        [Union[torch.Tensor, np.ndarray],
         Union[Tuple[int, ...], List[int], np.ndarray],
         Union[Tuple[float, ...], List[float], np.ndarray],
         Union[Tuple[float, ...], List[float], np.ndarray]
         ],
        Union[torch.Tensor, np.ndarray]]:
        # resampling function for segmentation maps, with its kwargs pre-bound from the plans
        fn = recursive_find_resampling_fn_by_name(self.configuration['resampling_fn_seg'])
        fn = partial(fn, **self.configuration['resampling_fn_seg_kwargs'])
        return fn

    @property
    def batch_dice(self) -> bool:
        return self.configuration['batch_dice']

    @property
    def next_stage_names(self) -> Union[List[str], None]:
        # 'next_stage' may be a single name or a list; always return a list (or None)
        ret = self.configuration.get('next_stage')
        if ret is not None:
            if isinstance(ret, str):
                ret = [ret]
        return ret

    @property
    def previous_stage_name(self) -> Union[str, None]:
        # set only for cascade configurations
        return self.configuration.get('previous_stage')
class PlansManager(object):
    """Convenience wrapper around an nnU-Net plans dict (or plans file)."""

    def __init__(self, plans_file_or_dict: Union[str, dict]):
        """
        Why do we need this?
        1) resolve inheritance in configurations
        2) expose otherwise annoying stuff like getting the label manager or IO class from a string
        3) clearly expose the things that are in the plans instead of hiding them in a dict
        4) cache results of expensive lookups

        This class does not prevent you from going wild. You can still use the plans directly if you prefer
        (PlansHandler.plans['key'])
        """
        self.plans = plans_file_or_dict if isinstance(plans_file_or_dict, dict) else load_json(plans_file_or_dict)

    def __repr__(self):
        return self.plans.__repr__()

    def _internal_resolve_configuration_inheritance(self, configuration_name: str,
                                                    visited: Tuple[str, ...] = None) -> dict:
        # Recursively merges a configuration with its 'inherits_from' ancestors.
        # 'visited' tracks the inheritance chain to detect cycles.
        if configuration_name not in self.plans['configurations'].keys():
            raise ValueError(f'The configuration {configuration_name} does not exist in the plans I have. Valid '
                             f'configuration names are {list(self.plans["configurations"].keys())}.')
        configuration = deepcopy(self.plans['configurations'][configuration_name])
        if 'inherits_from' in configuration:
            parent_config_name = configuration['inherits_from']

            if visited is None:
                visited = (configuration_name,)
            else:
                if parent_config_name in visited:
                    raise RuntimeError(f"Circular dependency detected. The following configurations were visited "
                                       f"while solving inheritance (in that order!): {visited}. "
                                       f"Current configuration: {configuration_name}. Its parent configuration "
                                       f"is {parent_config_name}.")
                visited = (*visited, configuration_name)

            # child keys override the parent's keys
            base_config = self._internal_resolve_configuration_inheritance(parent_config_name, visited)
            base_config.update(configuration)
            configuration = base_config
        return configuration

    # NOTE(review): lru_cache on an instance method keys on self and keeps up to 10
    # (self, name) results alive for the lifetime of the process — intentional here
    # since PlansManager instances are few and long-lived.
    @lru_cache(maxsize=10)
    def get_configuration(self, configuration_name: str):
        if configuration_name not in self.plans['configurations'].keys():
            raise RuntimeError(f"Requested configuration {configuration_name} not found in plans. "
                               f"Available configurations: {list(self.plans['configurations'].keys())}")

        configuration_dict = self._internal_resolve_configuration_inheritance(configuration_name)
        return ConfigurationManager(configuration_dict)

    @property
    def dataset_name(self) -> str:
        return self.plans['dataset_name']

    @property
    def plans_name(self) -> str:
        return self.plans['plans_name']

    @property
    def original_median_spacing_after_transp(self) -> List[float]:
        return self.plans['original_median_spacing_after_transp']

    @property
    def original_median_shape_after_transp(self) -> List[float]:
        return self.plans['original_median_shape_after_transp']

    @property
    @lru_cache(maxsize=1)
    def image_reader_writer_class(self) -> Type[BaseReaderWriter]:
        # resolved by name lookup; cached because the lookup scans modules
        return recursive_find_reader_writer_by_name(self.plans['image_reader_writer'])

    @property
    def transpose_forward(self) -> List[int]:
        return self.plans['transpose_forward']

    @property
    def transpose_backward(self) -> List[int]:
        return self.plans['transpose_backward']

    @property
    def available_configurations(self) -> List[str]:
        return list(self.plans['configurations'].keys())

    @property
    @lru_cache(maxsize=1)
    def experiment_planner_class(self) -> Type[ExperimentPlanner]:
        # resolved by name lookup within nnunetv2.experiment_planning; cached
        planner_name = self.experiment_planner_name
        experiment_planner = recursive_find_python_class(join(nnunetv2.__path__[0], "experiment_planning"),
                                                         planner_name,
                                                         current_module="nnunetv2.experiment_planning")
        return experiment_planner

    @property
    def experiment_planner_name(self) -> str:
        return self.plans['experiment_planner_used']

    @property
    @lru_cache(maxsize=1)
    def label_manager_class(self) -> Type[LabelManager]:
        return get_labelmanager_class_from_plans(self.plans)

    def get_label_manager(self, dataset_json: dict, **kwargs) -> LabelManager:
        """Instantiate the label manager for the given dataset.json (labels + optional region order)."""
        return self.label_manager_class(label_dict=dataset_json['labels'],
                                        regions_class_order=dataset_json.get('regions_class_order'),
                                        **kwargs)

    @property
    def foreground_intensity_properties_per_channel(self) -> dict:
        # backwards compatibility: old plans files used 'by_modality' instead of 'per_channel'
        if 'foreground_intensity_properties_per_channel' not in self.plans.keys():
            if 'foreground_intensity_properties_by_modality' in self.plans.keys():
                return self.plans['foreground_intensity_properties_by_modality']
        return self.plans['foreground_intensity_properties_per_channel']
if __name__ == '__main__':
    # small demo of configuration inheritance; requires nnUNet_preprocessed to be
    # set up and dataset 3 to have been planned already
    from nnunetv2.paths import nnUNet_preprocessed
    from nnunetv2.utilities.dataset_name_id_conversion import maybe_convert_to_dataset_name

    plans = load_json(join(nnUNet_preprocessed, maybe_convert_to_dataset_name(3), 'nnUNetPlans.json'))
    # build new configuration that inherits from 3d_fullres
    plans['configurations']['3d_fullres_bs4'] = {
        'batch_size': 4,
        'inherits_from': '3d_fullres'
    }
    # now get plans and configuration managers
    plans_manager = PlansManager(plans)
    configuration_manager = plans_manager.get_configuration('3d_fullres_bs4')
    print(configuration_manager)  # look for batch size 4
================================================
FILE: Finetune/nnUNet/nnunetv2/utilities/utils.py
================================================
# Copyright 2021 HIP Applied Computer Vision Lab, Division of Medical Image Computing, German Cancer Research Center
# (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
from functools import lru_cache
from typing import Union
from batchgenerators.utilities.file_and_folder_operations import *
import numpy as np
import re
from nnunetv2.paths import nnUNet_raw
def get_identifiers_from_splitted_dataset_folder(folder: str, file_ending: str):
    """Return the sorted unique case identifiers found in a folder of channel-split files."""
    # every file name carries a 4 digit channel index before the ending, e.g. 'case_0000.nii.gz',
    # so identifier = name minus ('_XXXX' + file_ending)
    suffix_length = len(file_ending) + 5
    identifiers = [f[:-suffix_length] for f in subfiles(folder, suffix=file_ending, join=False)]
    # collapse the per-channel duplicates
    return np.unique(identifiers)
def create_lists_from_splitted_dataset_folder(folder: str, file_ending: str, identifiers: List[str] = None) -> List[
    List[str]]:
    """
    does not rely on dataset.json
    """
    if identifiers is None:
        identifiers = get_identifiers_from_splitted_dataset_folder(folder, file_ending)
    candidates = subfiles(folder, suffix=file_ending, join=False, sort=True)
    # one list of full paths per identifier, matching '<identifier>_XXXX<file_ending>'
    patterns = [re.compile(re.escape(identifier) + r"_\d\d\d\d" + re.escape(file_ending))
                for identifier in identifiers]
    return [[join(folder, name) for name in candidates if pattern.fullmatch(name)]
            for pattern in patterns]
def get_filenames_of_train_images_and_targets(raw_dataset_folder: str, dataset_json: dict = None):
    """Map case identifier -> {'images': [...], 'label': ...} for a raw dataset folder.

    If dataset.json contains an explicit 'dataset' listing, its (possibly
    relative) paths are resolved against raw_dataset_folder; otherwise the
    mapping is inferred from the imagesTr/labelsTr folder layout.

    Returns a newly built structure. The original implementation rewrote the
    paths inside the caller's ``dataset_json['dataset']`` dict in place, which
    silently mutated the caller's data.
    """
    if dataset_json is None:
        dataset_json = load_json(join(raw_dataset_folder, 'dataset.json'))

    if 'dataset' in dataset_json.keys():
        dataset = {}
        for k, entry in dataset_json['dataset'].items():
            # shallow-copy the entry so extra keys survive but the input stays untouched
            new_entry = dict(entry)
            label = new_entry['label']
            if not os.path.isabs(label):
                label = os.path.abspath(join(raw_dataset_folder, label))
            new_entry['label'] = label
            new_entry['images'] = [i if os.path.isabs(i) else os.path.abspath(join(raw_dataset_folder, i))
                                   for i in new_entry['images']]
            dataset[k] = new_entry
    else:
        # no explicit listing -> derive it from the standard folder layout
        identifiers = get_identifiers_from_splitted_dataset_folder(join(raw_dataset_folder, 'imagesTr'), dataset_json['file_ending'])
        images = create_lists_from_splitted_dataset_folder(join(raw_dataset_folder, 'imagesTr'), dataset_json['file_ending'], identifiers)
        segs = [join(raw_dataset_folder, 'labelsTr', i + dataset_json['file_ending']) for i in identifiers]
        dataset = {i: {'images': im, 'label': se} for i, im, se in zip(identifiers, images, segs)}
    return dataset
if __name__ == '__main__':
    # smoke test: requires nnUNet_raw to be configured and Dataset002_Heart to exist locally
    print(get_filenames_of_train_images_and_targets(join(nnUNet_raw, 'Dataset002_Heart')))
================================================
FILE: Finetune/nnUNet/nnunetv2.egg-info/PKG-INFO
================================================
Metadata-Version: 2.1
Name: nnunetv2
Version: 2.2.1
Summary: nnU-Net is a framework for out-of-the box image segmentation.
Author: Helmholtz Imaging Applied Computer Vision Lab
Author-email: Fabian Isensee
License: Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [2019] [Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Project-URL: homepage, https://github.com/MIC-DKFZ/nnUNet
Project-URL: repository, https://github.com/MIC-DKFZ/nnUNet
Keywords: deep learning,image segmentation,semantic segmentation,medical image analysis,medical image segmentation,nnU-Net,nnunet
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Science/Research
Classifier: Intended Audience :: Healthcare Industry
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
Classifier: Topic :: Scientific/Engineering :: Image Recognition
Classifier: Topic :: Scientific/Engineering :: Medical Science Apps.
Requires-Python: >=3.9
Description-Content-Type: text/markdown
License-File: LICENSE
Requires-Dist: torch>=2.0.0
Requires-Dist: acvl-utils>=0.2
Requires-Dist: dynamic-network-architectures>=0.2
Requires-Dist: tqdm
Requires-Dist: dicom2nifti
Requires-Dist: scipy
Requires-Dist: batchgenerators>=0.25
Requires-Dist: numpy
Requires-Dist: scikit-learn
Requires-Dist: scikit-image>=0.19.3
Requires-Dist: SimpleITK>=2.2.1
Requires-Dist: pandas
Requires-Dist: graphviz
Requires-Dist: tifffile
Requires-Dist: requests
Requires-Dist: nibabel
Requires-Dist: matplotlib
Requires-Dist: seaborn
Requires-Dist: imagecodecs
Requires-Dist: yacs
Provides-Extra: dev
Requires-Dist: black; extra == "dev"
Requires-Dist: ruff; extra == "dev"
Requires-Dist: pre-commit; extra == "dev"
# Welcome to the new nnU-Net!
Click [here](https://github.com/MIC-DKFZ/nnUNet/tree/nnunetv1) if you were looking for the old one instead.
Coming from V1? Check out the [TLDR Migration Guide](documentation/tldr_migration_guide_from_v1.md). Reading the rest of the documentation is still strongly recommended ;-)
# What is nnU-Net?
Image datasets are enormously diverse: image dimensionality (2D, 3D), modalities/input channels (RGB image, CT, MRI, microscopy, ...),
image sizes, voxel sizes, class ratio, target structure properties and more change substantially between datasets.
Traditionally, given a new problem, a tailored solution needs to be manually designed and optimized - a process that
is prone to errors, not scalable and where success is overwhelmingly determined by the skill of the experimenter. Even
for experts, this process is anything but simple: there are not only many design choices and data properties that need to
be considered, but they are also tightly interconnected, rendering reliable manual pipeline optimization all but impossible!

**nnU-Net is a semantic segmentation method that automatically adapts to a given dataset. It will analyze the provided
training cases and automatically configure a matching U-Net-based segmentation pipeline. No expertise required on your
end! You can simply train the models and use them for your application**.
Upon release, nnU-Net was evaluated on 23 datasets belonging to competitions from the biomedical domain. Despite competing
with handcrafted solutions for each respective dataset, nnU-Net's fully automated pipeline scored several first places on
open leaderboards! Since then nnU-Net has stood the test of time: it continues to be used as a baseline and method
development framework ([9 out of 10 challenge winners at MICCAI 2020](https://arxiv.org/abs/2101.00232) and 5 out of 7
in MICCAI 2021 built their methods on top of nnU-Net,
[we won AMOS2022 with nnU-Net](https://amos22.grand-challenge.org/final-ranking/))!
Please cite the [following paper](https://www.google.com/url?q=https://www.nature.com/articles/s41592-020-01008-z&sa=D&source=docs&ust=1677235958581755&usg=AOvVaw3dWL0SrITLhCJUBiNIHCQO) when using nnU-Net:
Isensee, F., Jaeger, P. F., Kohl, S. A., Petersen, J., & Maier-Hein, K. H. (2021). nnU-Net: a self-configuring
method for deep learning-based biomedical image segmentation. Nature methods, 18(2), 203-211.
## What can nnU-Net do for you?
If you are a **domain scientist** (biologist, radiologist, ...) looking to analyze your own images, nnU-Net provides
an out-of-the-box solution that is all but guaranteed to provide excellent results on your individual dataset. Simply
convert your dataset into the nnU-Net format and enjoy the power of AI - no expertise required!
If you are an **AI researcher** developing segmentation methods, nnU-Net:
- offers a fantastic out-of-the-box applicable baseline algorithm to compete against
- can act as a method development framework to test your contribution on a large number of datasets without having to
tune individual pipelines (for example evaluating a new loss function)
- provides a strong starting point for further dataset-specific optimizations. This is particularly used when competing
in segmentation challenges
- provides a new perspective on the design of segmentation methods: maybe you can find better connections between
dataset properties and best-fitting segmentation pipelines?
## What is the scope of nnU-Net?
nnU-Net is built for semantic segmentation. It can handle 2D and 3D images with arbitrary
input modalities/channels. It can understand voxel spacings, anisotropies and is robust even when classes are highly
imbalanced.
nnU-Net relies on supervised learning, which means that you need to provide training cases for your application. The number of
required training cases varies heavily depending on the complexity of the segmentation problem. No
one-fits-all number can be provided here! nnU-Net does not require more training cases than other solutions - maybe
even fewer due to our extensive use of data augmentation.
nnU-Net expects to be able to process entire images at once during preprocessing and postprocessing, so it cannot
handle enormous images. As a reference: we tested images from 40x40x40 pixels all the way up to 1500x1500x1500 in 3D
and 40x40 up to ~30000x30000 in 2D! If your RAM allows it, larger is always possible.
## How does nnU-Net work?
Given a new dataset, nnU-Net will systematically analyze the provided training cases and create a 'dataset fingerprint'.
nnU-Net then creates several U-Net configurations for each dataset:
- `2d`: a 2D U-Net (for 2D and 3D datasets)
- `3d_fullres`: a 3D U-Net that operates on a high image resolution (for 3D datasets only)
- `3d_lowres` → `3d_cascade_fullres`: a 3D U-Net cascade where first a 3D U-Net operates on low resolution images and
then a second high-resolution 3D U-Net refines the predictions of the former (for 3D datasets with large image sizes only)
**Note that not all U-Net configurations are created for all datasets. In datasets with small image sizes, the
U-Net cascade (and with it the 3d_lowres configuration) is omitted because the patch size of the full
resolution U-Net already covers a large part of the input images.**
nnU-Net configures its segmentation pipelines based on a three-step recipe:
- **Fixed parameters** are not adapted. During development of nnU-Net we identified a robust configuration (that is, certain architecture and training properties) that can
simply be used all the time. This includes, for example, nnU-Net's loss function, (most of the) data augmentation strategy and learning rate.
- **Rule-based parameters** use the dataset fingerprint to adapt certain segmentation pipeline properties by following
hard-coded heuristic rules. For example, the network topology (pooling behavior and depth of the network architecture)
are adapted to the patch size; the patch size, network topology and batch size are optimized jointly given some GPU
memory constraint.
- **Empirical parameters** are essentially trial-and-error. For example the selection of the best U-net configuration
for the given dataset (2D, 3D full resolution, 3D low resolution, 3D cascade) and the optimization of the postprocessing strategy.
## How to get started?
Read these:
- [Installation instructions](documentation/installation_instructions.md)
- [Dataset conversion](documentation/dataset_format.md)
- [Usage instructions](documentation/how_to_use_nnunet.md)
Additional information:
- [Region-based training](documentation/region_based_training.md)
- [Manual data splits](documentation/manual_data_splits.md)
- [Pretraining and finetuning](documentation/pretraining_and_finetuning.md)
- [Intensity Normalization in nnU-Net](documentation/explanation_normalization.md)
- [Manually editing nnU-Net configurations](documentation/explanation_plans_files.md)
- [Extending nnU-Net](documentation/extending_nnunet.md)
- [What is different in V2?](documentation/changelog.md)
Competitions:
- [AutoPET II](documentation/competitions/AutoPETII.md)
[//]: # (- [Ignore label](documentation/ignore_label.md))
## Where does nnU-Net perform well and where does it not perform?
nnU-Net excels in segmentation problems that need to be solved by training from scratch,
for example: research applications that feature non-standard image modalities and input channels,
challenge datasets from the biomedical domain, the majority of 3D segmentation problems, etc. We have yet to find a
dataset for which nnU-Net's working principle fails!
Note: On standard segmentation
problems, such as 2D RGB images in ADE20k and Cityscapes, fine-tuning a foundation model (that was pretrained on a large corpus of
similar images, e.g. Imagenet 22k, JFT-300M) will provide better performance than nnU-Net! That is simply because these
models allow much better initialization. Foundation models are not supported by nnU-Net as
they 1) are not useful for segmentation problems that deviate from the standard setting (see above mentioned
datasets), 2) would typically only support 2D architectures and 3) conflict with our core design principle of carefully adapting
the network topology for each dataset (if the topology is changed one can no longer transfer pretrained weights!)
## What happened to the old nnU-Net?
The core of the old nnU-Net was hacked together in a short time period while participating in the Medical Segmentation
Decathlon challenge in 2018. Consequently, code structure and quality were not the best. Many features
were added later on and didn't quite fit into the nnU-Net design principles. Overall quite messy, really. And annoying to work with.
nnU-Net V2 is a complete overhaul. The "delete everything and start again" kind. So everything is better
(in the author's opinion haha). While the segmentation performance [remains the same](https://docs.google.com/spreadsheets/d/13gqjIKEMPFPyMMMwA1EML57IyoBjfC3-QCTn4zRN_Mg/edit?usp=sharing), a lot of cool stuff has been added.
It is now also much easier to use it as a development framework and to manually fine-tune its configuration to new
datasets. A big driver for the reimplementation was also the emergence of [Helmholtz Imaging](http://helmholtz-imaging.de),
prompting us to extend nnU-Net to more image formats and domains. Take a look [here](documentation/changelog.md) for some highlights.
# Acknowledgements
nnU-Net is developed and maintained by the Applied Computer Vision Lab (ACVL) of [Helmholtz Imaging](http://helmholtz-imaging.de)
and the [Division of Medical Image Computing](https://www.dkfz.de/en/mic/index.php) at the
[German Cancer Research Center (DKFZ)](https://www.dkfz.de/en/index.html).
================================================
FILE: Finetune/nnUNet/nnunetv2.egg-info/SOURCES.txt
================================================
LICENSE
pyproject.toml
readme.md
setup.py
nnunetv2/__init__.py
nnunetv2/configuration.py
nnunetv2/paths.py
nnunetv2.egg-info/PKG-INFO
nnunetv2.egg-info/SOURCES.txt
nnunetv2.egg-info/dependency_links.txt
nnunetv2.egg-info/entry_points.txt
nnunetv2.egg-info/requires.txt
nnunetv2.egg-info/top_level.txt
nnunetv2/batch_running/__init__.py
nnunetv2/batch_running/collect_results_custom_Decathlon.py
nnunetv2/batch_running/collect_results_custom_Decathlon_2d.py
nnunetv2/batch_running/generate_lsf_runs_customDecathlon.py
nnunetv2/batch_running/benchmarking/__init__.py
nnunetv2/batch_running/benchmarking/generate_benchmarking_commands.py
nnunetv2/batch_running/benchmarking/summarize_benchmark_results.py
nnunetv2/batch_running/release_trainings/__init__.py
nnunetv2/batch_running/release_trainings/nnunetv2_v1/__init__.py
nnunetv2/batch_running/release_trainings/nnunetv2_v1/collect_results.py
nnunetv2/batch_running/release_trainings/nnunetv2_v1/generate_lsf_commands.py
nnunetv2/dataset_conversion/Dataset027_ACDC.py
nnunetv2/dataset_conversion/Dataset073_Fluo_C3DH_A549_SIM.py
nnunetv2/dataset_conversion/Dataset114_MNMs.py
nnunetv2/dataset_conversion/Dataset115_EMIDEC.py
nnunetv2/dataset_conversion/Dataset120_RoadSegmentation.py
nnunetv2/dataset_conversion/Dataset137_BraTS21.py
nnunetv2/dataset_conversion/Dataset218_Amos2022_task1.py
nnunetv2/dataset_conversion/Dataset219_Amos2022_task2.py
nnunetv2/dataset_conversion/Dataset220_KiTS2023.py
nnunetv2/dataset_conversion/Dataset221_AutoPETII_2023.py
nnunetv2/dataset_conversion/Dataset988_dummyDataset4.py
nnunetv2/dataset_conversion/__init__.py
nnunetv2/dataset_conversion/convert_MSD_dataset.py
nnunetv2/dataset_conversion/convert_raw_dataset_from_old_nnunet_format.py
nnunetv2/dataset_conversion/generate_dataset_json.py
nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset996_IntegrationTest_Hippocampus_regions_ignore.py
nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset997_IntegrationTest_Hippocampus_regions.py
nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset998_IntegrationTest_Hippocampus_ignore.py
nnunetv2/dataset_conversion/datasets_for_integration_tests/Dataset999_IntegrationTest_Hippocampus.py
nnunetv2/dataset_conversion/datasets_for_integration_tests/__init__.py
nnunetv2/ensembling/__init__.py
nnunetv2/ensembling/ensemble.py
nnunetv2/evaluation/__init__.py
nnunetv2/evaluation/accumulate_cv_results.py
nnunetv2/evaluation/evaluate_predictions.py
nnunetv2/evaluation/find_best_configuration.py
nnunetv2/experiment_planning/__init__.py
nnunetv2/experiment_planning/plan_and_preprocess_api.py
nnunetv2/experiment_planning/plan_and_preprocess_entrypoints.py
nnunetv2/experiment_planning/verify_dataset_integrity.py
nnunetv2/experiment_planning/dataset_fingerprint/__init__.py
nnunetv2/experiment_planning/dataset_fingerprint/fingerprint_extractor.py
nnunetv2/experiment_planning/experiment_planners/__init__.py
nnunetv2/experiment_planning/experiment_planners/default_experiment_planner.py
nnunetv2/experiment_planning/experiment_planners/network_topology.py
nnunetv2/experiment_planning/experiment_planners/resencUNet_planner.py
nnunetv2/experiment_planning/plans_for_pretraining/__init__.py
nnunetv2/experiment_planning/plans_for_pretraining/move_plans_between_datasets.py
nnunetv2/imageio/__init__.py
nnunetv2/imageio/base_reader_writer.py
nnunetv2/imageio/natural_image_reader_writer.py
nnunetv2/imageio/nibabel_reader_writer.py
nnunetv2/imageio/reader_writer_registry.py
nnunetv2/imageio/simpleitk_reader_writer.py
nnunetv2/imageio/tif_reader_writer.py
nnunetv2/inference/__init__.py
nnunetv2/inference/data_iterators.py
nnunetv2/inference/examples.py
nnunetv2/inference/export_prediction.py
nnunetv2/inference/predict_from_raw_data.py
nnunetv2/inference/sliding_window_prediction.py
nnunetv2/model_sharing/__init__.py
nnunetv2/model_sharing/entry_points.py
nnunetv2/model_sharing/model_download.py
nnunetv2/model_sharing/model_export.py
nnunetv2/model_sharing/model_import.py
nnunetv2/postprocessing/__init__.py
nnunetv2/postprocessing/remove_connected_components.py
nnunetv2/preprocessing/__init__.py
nnunetv2/preprocessing/cropping/__init__.py
nnunetv2/preprocessing/cropping/cropping.py
nnunetv2/preprocessing/normalization/__init__.py
nnunetv2/preprocessing/normalization/default_normalization_schemes.py
nnunetv2/preprocessing/normalization/map_channel_name_to_normalization.py
nnunetv2/preprocessing/preprocessors/__init__.py
nnunetv2/preprocessing/preprocessors/default_preprocessor.py
nnunetv2/preprocessing/resampling/__init__.py
nnunetv2/preprocessing/resampling/default_resampling.py
nnunetv2/preprocessing/resampling/utils.py
nnunetv2/run/__init__.py
nnunetv2/run/load_pretrained_weights.py
nnunetv2/run/run_training.py
nnunetv2/tests/__init__.py
nnunetv2/tests/integration_tests/__init__.py
nnunetv2/tests/integration_tests/add_lowres_and_cascade.py
nnunetv2/tests/integration_tests/cleanup_integration_test.py
nnunetv2/tests/integration_tests/run_integration_test_bestconfig_inference.py
nnunetv2/training/__init__.py
nnunetv2/training/data_augmentation/__init__.py
nnunetv2/training/data_augmentation/compute_initial_patch_size.py
nnunetv2/training/data_augmentation/custom_transforms/__init__.py
nnunetv2/training/data_augmentation/custom_transforms/cascade_transforms.py
nnunetv2/training/data_augmentation/custom_transforms/deep_supervision_donwsampling.py
nnunetv2/training/data_augmentation/custom_transforms/limited_length_multithreaded_augmenter.py
nnunetv2/training/data_augmentation/custom_transforms/manipulating_data_dict.py
nnunetv2/training/data_augmentation/custom_transforms/masking.py
nnunetv2/training/data_augmentation/custom_transforms/region_based_training.py
nnunetv2/training/data_augmentation/custom_transforms/transforms_for_dummy_2d.py
nnunetv2/training/dataloading/__init__.py
nnunetv2/training/dataloading/base_data_loader.py
nnunetv2/training/dataloading/data_loader_2d.py
nnunetv2/training/dataloading/data_loader_3d.py
nnunetv2/training/dataloading/nnunet_dataset.py
nnunetv2/training/dataloading/utils.py
nnunetv2/training/logging/__init__.py
nnunetv2/training/logging/nnunet_logger.py
nnunetv2/training/loss/__init__.py
nnunetv2/training/loss/compound_losses.py
nnunetv2/training/loss/deep_supervision.py
nnunetv2/training/loss/dice.py
nnunetv2/training/loss/robust_ce_loss.py
nnunetv2/training/lr_scheduler/__init__.py
nnunetv2/training/lr_scheduler/polylr.py
nnunetv2/training/nnUNetTrainer/__init__.py
nnunetv2/training/nnUNetTrainer/nnUNetTrainer.py
nnunetv2/training/nnUNetTrainer/variants/__init__.py
nnunetv2/training/nnUNetTrainer/variants/benchmarking/__init__.py
nnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs.py
nnunetv2/training/nnUNetTrainer/variants/benchmarking/nnUNetTrainerBenchmark_5epochs_noDataLoading.py
nnunetv2/training/nnUNetTrainer/variants/data_augmentation/__init__.py
nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDA5.py
nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerDAOrd0.py
nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoDA.py
nnunetv2/training/nnUNetTrainer/variants/data_augmentation/nnUNetTrainerNoMirroring.py
nnunetv2/training/nnUNetTrainer/variants/loss/__init__.py
nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerCELoss.py
nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerDiceLoss.py
nnunetv2/training/nnUNetTrainer/variants/loss/nnUNetTrainerTopkLoss.py
nnunetv2/training/nnUNetTrainer/variants/lr_schedule/__init__.py
nnunetv2/training/nnUNetTrainer/variants/lr_schedule/nnUNetTrainerCosAnneal.py
nnunetv2/training/nnUNetTrainer/variants/network_architecture/__init__.py
nnunetv2/training/nnUNetTrainer/variants/network_architecture/nnUNetTrainerBN.py
nnunetv2/training/nnUNetTrainer/variants/network_architecture/nnUNetTrainerNoDeepSupervision.py
nnunetv2/training/nnUNetTrainer/variants/optimizer/__init__.py
nnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdam.py
nnunetv2/training/nnUNetTrainer/variants/optimizer/nnUNetTrainerAdan.py
nnunetv2/training/nnUNetTrainer/variants/sampling/__init__.py
nnunetv2/training/nnUNetTrainer/variants/sampling/nnUNetTrainer_probabilisticOversampling.py
nnunetv2/training/nnUNetTrainer/variants/training_length/__init__.py
nnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs.py
nnunetv2/training/nnUNetTrainer/variants/training_length/nnUNetTrainer_Xepochs_NoMirroring.py
nnunetv2/utilities/__init__.py
nnunetv2/utilities/collate_outputs.py
nnunetv2/utilities/dataset_name_id_conversion.py
nnunetv2/utilities/ddp_allgather.py
nnunetv2/utilities/default_n_proc_DA.py
nnunetv2/utilities/file_path_utilities.py
nnunetv2/utilities/find_class_by_name.py
nnunetv2/utilities/get_network_from_plans.py
nnunetv2/utilities/helpers.py
nnunetv2/utilities/json_export.py
nnunetv2/utilities/network_initialization.py
nnunetv2/utilities/overlay_plots.py
nnunetv2/utilities/utils.py
nnunetv2/utilities/label_handling/__init__.py
nnunetv2/utilities/label_handling/label_handling.py
nnunetv2/utilities/plans_handling/__init__.py
nnunetv2/utilities/plans_handling/plans_handler.py
================================================
FILE: Finetune/nnUNet/nnunetv2.egg-info/dependency_links.txt
================================================
================================================
FILE: Finetune/nnUNet/nnunetv2.egg-info/entry_points.txt
================================================
[console_scripts]
nnUNetv2_accumulate_crossval_results = nnunetv2.evaluation.find_best_configuration:accumulate_crossval_results_entry_point
nnUNetv2_apply_postprocessing = nnunetv2.postprocessing.remove_connected_components:entry_point_apply_postprocessing
nnUNetv2_convert_MSD_dataset = nnunetv2.dataset_conversion.convert_MSD_dataset:entry_point
nnUNetv2_convert_old_nnUNet_dataset = nnunetv2.dataset_conversion.convert_raw_dataset_from_old_nnunet_format:convert_entry_point
nnUNetv2_determine_postprocessing = nnunetv2.postprocessing.remove_connected_components:entry_point_determine_postprocessing_folder
nnUNetv2_download_pretrained_model_by_url = nnunetv2.model_sharing.entry_points:download_by_url
nnUNetv2_ensemble = nnunetv2.ensembling.ensemble:entry_point_ensemble_folders
nnUNetv2_evaluate_folder = nnunetv2.evaluation.evaluate_predictions:evaluate_folder_entry_point
nnUNetv2_evaluate_simple = nnunetv2.evaluation.evaluate_predictions:evaluate_simple_entry_point
nnUNetv2_export_model_to_zip = nnunetv2.model_sharing.entry_points:export_pretrained_model_entry
nnUNetv2_extract_fingerprint = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:extract_fingerprint_entry
nnUNetv2_find_best_configuration = nnunetv2.evaluation.find_best_configuration:find_best_configuration_entry_point
nnUNetv2_install_pretrained_model_from_zip = nnunetv2.model_sharing.entry_points:install_from_zip_entry_point
nnUNetv2_move_plans_between_datasets = nnunetv2.experiment_planning.plans_for_pretraining.move_plans_between_datasets:entry_point_move_plans_between_datasets
nnUNetv2_plan_and_preprocess = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_and_preprocess_entry
nnUNetv2_plan_experiment = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_experiment_entry
nnUNetv2_plot_overlay_pngs = nnunetv2.utilities.overlay_plots:entry_point_generate_overlay
nnUNetv2_predict = nnunetv2.inference.predict_from_raw_data:predict_entry_point
nnUNetv2_predict_from_modelfolder = nnunetv2.inference.predict_from_raw_data:predict_entry_point_modelfolder
nnUNetv2_preprocess = nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:preprocess_entry
nnUNetv2_train = nnunetv2.run.run_training:run_training_entry
================================================
FILE: Finetune/nnUNet/nnunetv2.egg-info/requires.txt
================================================
torch>=2.0.0
acvl-utils>=0.2
dynamic-network-architectures>=0.2
tqdm
dicom2nifti
scipy
batchgenerators>=0.25
numpy
scikit-learn
scikit-image>=0.19.3
SimpleITK>=2.2.1
pandas
graphviz
tifffile
requests
nibabel
matplotlib
seaborn
imagecodecs
yacs
[dev]
black
ruff
pre-commit
================================================
FILE: Finetune/nnUNet/nnunetv2.egg-info/top_level.txt
================================================
nnunetv2
================================================
FILE: Finetune/nnUNet/pyproject.toml
================================================
[project]
name = "nnunetv2"
version = "2.2.1"
requires-python = ">=3.9"
description = "nnU-Net is a framework for out-of-the box image segmentation."
readme = "readme.md"
license = { file = "LICENSE" }
authors = [
{ name = "Fabian Isensee", email = "f.isensee@dkfz-heidelberg.de"},
{ name = "Helmholtz Imaging Applied Computer Vision Lab" }
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Intended Audience :: Healthcare Industry",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Image Recognition",
"Topic :: Scientific/Engineering :: Medical Science Apps.",
]
keywords = [
'deep learning',
'image segmentation',
'semantic segmentation',
'medical image analysis',
'medical image segmentation',
'nnU-Net',
'nnunet'
]
dependencies = [
"torch>=2.0.0",
"acvl-utils>=0.2",
"dynamic-network-architectures>=0.2",
"tqdm",
"dicom2nifti",
"scipy",
"batchgenerators>=0.25",
"numpy",
"scikit-learn",
"scikit-image>=0.19.3",
"SimpleITK>=2.2.1",
"pandas",
"graphviz",
'tifffile',
'requests',
"nibabel",
"matplotlib",
"seaborn",
"imagecodecs",
"yacs"
]
[project.urls]
homepage = "https://github.com/MIC-DKFZ/nnUNet"
repository = "https://github.com/MIC-DKFZ/nnUNet"
[project.scripts]
nnUNetv2_plan_and_preprocess = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_and_preprocess_entry"
nnUNetv2_extract_fingerprint = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:extract_fingerprint_entry"
nnUNetv2_plan_experiment = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:plan_experiment_entry"
nnUNetv2_preprocess = "nnunetv2.experiment_planning.plan_and_preprocess_entrypoints:preprocess_entry"
nnUNetv2_train = "nnunetv2.run.run_training:run_training_entry"
nnUNetv2_predict_from_modelfolder = "nnunetv2.inference.predict_from_raw_data:predict_entry_point_modelfolder"
nnUNetv2_predict = "nnunetv2.inference.predict_from_raw_data:predict_entry_point"
nnUNetv2_convert_old_nnUNet_dataset = "nnunetv2.dataset_conversion.convert_raw_dataset_from_old_nnunet_format:convert_entry_point"
nnUNetv2_find_best_configuration = "nnunetv2.evaluation.find_best_configuration:find_best_configuration_entry_point"
nnUNetv2_determine_postprocessing = "nnunetv2.postprocessing.remove_connected_components:entry_point_determine_postprocessing_folder"
nnUNetv2_apply_postprocessing = "nnunetv2.postprocessing.remove_connected_components:entry_point_apply_postprocessing"
nnUNetv2_ensemble = "nnunetv2.ensembling.ensemble:entry_point_ensemble_folders"
nnUNetv2_accumulate_crossval_results = "nnunetv2.evaluation.find_best_configuration:accumulate_crossval_results_entry_point"
nnUNetv2_plot_overlay_pngs = "nnunetv2.utilities.overlay_plots:entry_point_generate_overlay"
nnUNetv2_download_pretrained_model_by_url = "nnunetv2.model_sharing.entry_points:download_by_url"
nnUNetv2_install_pretrained_model_from_zip = "nnunetv2.model_sharing.entry_points:install_from_zip_entry_point"
nnUNetv2_export_model_to_zip = "nnunetv2.model_sharing.entry_points:export_pretrained_model_entry"
nnUNetv2_move_plans_between_datasets = "nnunetv2.experiment_planning.plans_for_pretraining.move_plans_between_datasets:entry_point_move_plans_between_datasets"
nnUNetv2_evaluate_folder = "nnunetv2.evaluation.evaluate_predictions:evaluate_folder_entry_point"
nnUNetv2_evaluate_simple = "nnunetv2.evaluation.evaluate_predictions:evaluate_simple_entry_point"
nnUNetv2_convert_MSD_dataset = "nnunetv2.dataset_conversion.convert_MSD_dataset:entry_point"
[project.optional-dependencies]
dev = [
"black",
"ruff",
"pre-commit"
]
[tool.codespell]
skip = '.git,*.pdf,*.svg'
#
# ignore-words-list = ''
================================================
FILE: Finetune/nnUNet/setup.py
================================================
# Minimal setup.py shim: setup() is called with no arguments, so all project
# metadata (name, version, dependencies, console entry points) is presumably
# read from the adjacent pyproject.toml / setup.cfg — this file exists only
# so legacy tooling and editable installs keep working.
import setuptools
if __name__ == "__main__":
    setuptools.setup()
================================================
FILE: LICENSE
================================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
================================================
FILE: README.md
================================================
# VoCo
Code for CVPR 2024 paper, [**"VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis"**](https://arxiv.org/abs/2402.17300)
Authors: Linshan Wu, Jiaxin Zhuang, and Hao Chen
This work presents VoCo, a simple-yet-effective contrastive learning framework for pre-training large-scale 3D medical images. Our **10k CT images pre-training** model is available. Our **160k CT images pre-training** models are available!
**Our TPAMI version is available at [**Large-Scale-Medical**](https://github.com/Luffy03/Large-Scale-Medical), which provides stronger models, larger-scale datasets, various training recipes, and more downstream tasks!!!**
## Abstract
Self-Supervised Learning (SSL) has demonstrated promising results in 3D medical image analysis. However, the lack of high-level semantics in pre-training still heavily hinders the performance of downstream tasks. We observe that 3D medical images contain relatively consistent contextual position information, i.e., consistent geometric relations between different organs, which leads to a potential way for us to learn consistent semantic representations in pre-training. In this paper, we propose a simple-yet-effective **Vo**lume **Co**ntrast (**VoCo**) framework to leverage the contextual position priors for pre-training. Specifically, we first generate a group of base crops from different regions while enforcing feature discrepancy among them, where we employ them as class assignments of different regions. Then, we randomly crop sub-volumes and predict them belonging to which class (located at which region) by contrasting their similarity to different base crops, which can be seen as predicting contextual positions of different sub-volumes. Through this pretext task, VoCo implicitly encodes the contextual position priors into model representations without the guidance of annotations, enabling us to effectively improve the performance of downstream tasks that require high-level semantics. Extensive experimental results on six downstream tasks demonstrate the superior effectiveness of VoCo.

## Usage
### Load Pre-trained weight
```
import torch
import argparse
from monai.networks.nets import SwinUNETR
parser = argparse.ArgumentParser(description="Swin UNETR")
parser.add_argument("--roi_x", default=96, type=int, help="roi size in x direction")
parser.add_argument("--roi_y", default=96, type=int, help="roi size in y direction")
parser.add_argument("--roi_z", default=96, type=int, help="roi size in z direction")
parser.add_argument("--feature_size", default=48, type=int, help="feature size")
parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
parser.add_argument("--out_channels", default=14, type=int, help="number of output channels")
parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
parser.add_argument("--pretrained_checkpoint", default="./voco_checkpoint.pt", type=str, help="path to the pre-trained VoCo checkpoint")
args = parser.parse_args()
model = SwinUNETR(
img_size=(args.roi_x, args.roi_y, args.roi_z),
in_channels=args.in_channels,
out_channels=args.out_channels,
feature_size=args.feature_size,
use_checkpoint=args.use_checkpoint,
use_v2=True)
model_dict = torch.load(args.pretrained_checkpoint, map_location=torch.device('cpu'))
state_dict = model_dict
if "module." in list(state_dict.keys())[0]:
print("Tag 'module.' found in state dict - fixing!")
for key in list(state_dict.keys()):
state_dict[key.replace("module.", "")] = state_dict.pop(key)
if "swin_vit" in list(state_dict.keys())[0]:
print("Tag 'swin_vit' found in state dict - fixing!")
for key in list(state_dict.keys()):
state_dict[key.replace("swin_vit", "swinViT")] = state_dict.pop(key)
model.load_state_dict(state_dict, strict=False)
print("Using pretrained voco ema self-supervised Swin UNETR backbone weights !")
```
### Prepare Dataset
First, you need to download the pre-training dataset. The 10k datasets are all open-source, so you can download them yourself. Alternatively, you can download them from our [hugging face repo](https://huggingface.co/datasets/Luffy503/VoCo-10k/tree/main).

Note: 10k dataset is collected by Dr. Jiaxin Zhuang
```
├── data
├── BTCV
├── TCIAcovid19
├── Luna16-jx
├── stoic21
├── Totalsegmentator_dataset
├── Flare23
├── LiDC
└── HNSCC_convert_v1
```
### Pre-Training
(1) Note that this repo presents the version used for our 10k pre-training, so some details may differ from those in our paper.
(2) To accelerate training, we use MONAI's "PersistentDataset" to pre-cache the dataset, which requires extra storage but is important for the efficiency of our code. If you don't have enough storage, you can switch it back in "utils/data_utils.py".
To pre-train:
```bash
sh train.sh
```
### Finetune
Our finetune codes will soon be available, or you can directly use the codes in [MONAI](https://github.com/Project-MONAI/research-contributions).
More finetune implementations are in preparation!
## Acknowledgement
We thank [MONAI](https://github.com/Project-MONAI/research-contributions) for part of their codes.
## Citation ✏️ 📄
If you find this repo useful for your research, please consider citing the paper as follows:
```
@article{voco,
title={Large-Scale 3D Medical Image Pre-training with Geometric Context Priors},
author={Wu, Linshan and Zhuang, Jiaxin and Chen, Hao},
journal={IEEE Transactions on Pattern Analysis and Machine Intelligence},
year={2025},
publisher={IEEE}
}
@InProceedings{voco,
author = {Wu, Linshan and Zhuang, Jiaxin and Chen, Hao},
title = {VoCo: A Simple-yet-Effective Volume Contrastive Learning Framework for 3D Medical Image Analysis},
booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},
month = {June},
year = {2024},
pages = {22873-22882}
}
```
================================================
FILE: jsons/HNSCC.json
================================================
{"training": [{"image": "00001.nii.gz"}, {"image": "00002.nii.gz"}, {"image": "00003.nii.gz"}, {"image": "00005.nii.gz"}, {"image": "00006.nii.gz"}, {"image": "00007.nii.gz"}, {"image": "00008.nii.gz"}, {"image": "00009.nii.gz"}, {"image": "00010.nii.gz"}, {"image": "00011.nii.gz"}, {"image": "00013.nii.gz"}, {"image": "00014.nii.gz"}, {"image": "00015.nii.gz"}, {"image": "00016.nii.gz"}, {"image": "00017.nii.gz"}, {"image": "00018.nii.gz"}, {"image": "00019.nii.gz"}, {"image": "00020.nii.gz"}, {"image": "00021.nii.gz"}, {"image": "00022.nii.gz"}, {"image": "00023.nii.gz"}, {"image": "00025.nii.gz"}, {"image": "00026.nii.gz"}, {"image": "00028.nii.gz"}, {"image": "00030.nii.gz"}, {"image": "00031.nii.gz"}, {"image": "00032.nii.gz"}, {"image": "00033.nii.gz"}, {"image": "00034.nii.gz"}, {"image": "00035.nii.gz"}, {"image": "00036.nii.gz"}, {"image": "00037.nii.gz"}, {"image": "00038.nii.gz"}, {"image": "00040.nii.gz"}, {"image": "00041.nii.gz"}, {"image": "00042.nii.gz"}, {"image": "00043.nii.gz"}, {"image": "00044.nii.gz"}, {"image": "00045.nii.gz"}, {"image": "00046.nii.gz"}, {"image": "00047.nii.gz"}, {"image": "00048.nii.gz"}, {"image": "00049.nii.gz"}, {"image": "00050.nii.gz"}, {"image": "00051.nii.gz"}, {"image": "00052.nii.gz"}, {"image": "00053.nii.gz"}, {"image": "00054.nii.gz"}, {"image": "00055.nii.gz"}, {"image": "00056.nii.gz"}, {"image": "00058.nii.gz"}, {"image": "00059.nii.gz"}, {"image": "00061.nii.gz"}, {"image": "00062.nii.gz"}, {"image": "00063.nii.gz"}, {"image": "00066.nii.gz"}, {"image": "00067.nii.gz"}, {"image": "00068.nii.gz"}, {"image": "00069.nii.gz"}, {"image": "00070.nii.gz"}, {"image": "00071.nii.gz"}, {"image": "00073.nii.gz"}, {"image": "00074.nii.gz"}, {"image": "00075.nii.gz"}, {"image": "00077.nii.gz"}, {"image": "00078.nii.gz"}, {"image": "00079.nii.gz"}, {"image": "00080.nii.gz"}, {"image": "00081.nii.gz"}, {"image": "00082.nii.gz"}, {"image": "00083.nii.gz"}, {"image": "00084.nii.gz"}, {"image": "00085.nii.gz"}, {"image": 
"00086.nii.gz"}, {"image": "00087.nii.gz"}, {"image": "00089.nii.gz"}, {"image": "00090.nii.gz"}, {"image": "00091.nii.gz"}, {"image": "00092.nii.gz"}, {"image": "00093.nii.gz"}, {"image": "00094.nii.gz"}, {"image": "00096.nii.gz"}, {"image": "00097.nii.gz"}, {"image": "00099.nii.gz"}, {"image": "00100.nii.gz"}, {"image": "00101.nii.gz"}, {"image": "00102.nii.gz"}, {"image": "00103.nii.gz"}, {"image": "00104.nii.gz"}, {"image": "00105.nii.gz"}, {"image": "00106.nii.gz"}, {"image": "00107.nii.gz"}, {"image": "00108.nii.gz"}, {"image": "00109.nii.gz"}, {"image": "00110.nii.gz"}, {"image": "00111.nii.gz"}, {"image": "00112.nii.gz"}, {"image": "00117.nii.gz"}, {"image": "00118.nii.gz"}, {"image": "00119.nii.gz"}, {"image": "00120.nii.gz"}, {"image": "00121.nii.gz"}, {"image": "00122.nii.gz"}, {"image": "00126.nii.gz"}, {"image": "00127.nii.gz"}, {"image": "00128.nii.gz"}, {"image": "00129.nii.gz"}, {"image": "00130.nii.gz"}, {"image": "00131.nii.gz"}, {"image": "00132.nii.gz"}, {"image": "00133.nii.gz"}, {"image": "00134.nii.gz"}, {"image": "00135.nii.gz"}, {"image": "00136.nii.gz"}, {"image": "00137.nii.gz"}, {"image": "00138.nii.gz"}, {"image": "00139.nii.gz"}, {"image": "00140.nii.gz"}, {"image": "00141.nii.gz"}, {"image": "00142.nii.gz"}, {"image": "00143.nii.gz"}, {"image": "00144.nii.gz"}, {"image": "00145.nii.gz"}, {"image": "00146.nii.gz"}, {"image": "00147.nii.gz"}, {"image": "00148.nii.gz"}, {"image": "00149.nii.gz"}, {"image": "00150.nii.gz"}, {"image": "00151.nii.gz"}, {"image": "00152.nii.gz"}, {"image": "00154.nii.gz"}, {"image": "00155.nii.gz"}, {"image": "00156.nii.gz"}, {"image": "00157.nii.gz"}, {"image": "00158.nii.gz"}, {"image": "00161.nii.gz"}, {"image": "00162.nii.gz"}, {"image": "00163.nii.gz"}, {"image": "00165.nii.gz"}, {"image": "00166.nii.gz"}, {"image": "00167.nii.gz"}, {"image": "00168.nii.gz"}, {"image": "00169.nii.gz"}, {"image": "00170.nii.gz"}, {"image": "00171.nii.gz"}, {"image": "00172.nii.gz"}, {"image": "00173.nii.gz"}, {"image": 
"00174.nii.gz"}, {"image": "00175.nii.gz"}, {"image": "00176.nii.gz"}, {"image": "00177.nii.gz"}, {"image": "00178.nii.gz"}, {"image": "00179.nii.gz"}, {"image": "00180.nii.gz"}, {"image": "00181.nii.gz"}, {"image": "00182.nii.gz"}, {"image": "00183.nii.gz"}, {"image": "00184.nii.gz"}, {"image": "00185.nii.gz"}, {"image": "00186.nii.gz"}, {"image": "00187.nii.gz"}, {"image": "00188.nii.gz"}, {"image": "00189.nii.gz"}, {"image": "00190.nii.gz"}, {"image": "00191.nii.gz"}, {"image": "00193.nii.gz"}, {"image": "00196.nii.gz"}, {"image": "00197.nii.gz"}, {"image": "00198.nii.gz"}, {"image": "00199.nii.gz"}, {"image": "00200.nii.gz"}, {"image": "00201.nii.gz"}, {"image": "00202.nii.gz"}, {"image": "00203.nii.gz"}, {"image": "00205.nii.gz"}, {"image": "00206.nii.gz"}, {"image": "00207.nii.gz"}, {"image": "00208.nii.gz"}, {"image": "00209.nii.gz"}, {"image": "00210.nii.gz"}, {"image": "00211.nii.gz"}, {"image": "00213.nii.gz"}, {"image": "00216.nii.gz"}, {"image": "00217.nii.gz"}, {"image": "00218.nii.gz"}, {"image": "00219.nii.gz"}, {"image": "00220.nii.gz"}, {"image": "00221.nii.gz"}, {"image": "00222.nii.gz"}, {"image": "00223.nii.gz"}, {"image": "00225.nii.gz"}, {"image": "00226.nii.gz"}, {"image": "00227.nii.gz"}, {"image": "00228.nii.gz"}, {"image": "00229.nii.gz"}, {"image": "00230.nii.gz"}, {"image": "00231.nii.gz"}, {"image": "00232.nii.gz"}, {"image": "00233.nii.gz"}, {"image": "00234.nii.gz"}, {"image": "00235.nii.gz"}, {"image": "00236.nii.gz"}, {"image": "00237.nii.gz"}, {"image": "00238.nii.gz"}, {"image": "00239.nii.gz"}, {"image": "00240.nii.gz"}, {"image": "00241.nii.gz"}, {"image": "00242.nii.gz"}, {"image": "00243.nii.gz"}, {"image": "00244.nii.gz"}, {"image": "00245.nii.gz"}, {"image": "00246.nii.gz"}, {"image": "00251.nii.gz"}, {"image": "00252.nii.gz"}, {"image": "00253.nii.gz"}, {"image": "00254.nii.gz"}, {"image": "00255.nii.gz"}, {"image": "00256.nii.gz"}, {"image": "00257.nii.gz"}, {"image": "00258.nii.gz"}, {"image": "00259.nii.gz"}, {"image": 
"00260.nii.gz"}, {"image": "00261.nii.gz"}, {"image": "00262.nii.gz"}, {"image": "00263.nii.gz"}, {"image": "00264.nii.gz"}, {"image": "00265.nii.gz"}, {"image": "00266.nii.gz"}, {"image": "00267.nii.gz"}, {"image": "00268.nii.gz"}, {"image": "00269.nii.gz"}, {"image": "00271.nii.gz"}, {"image": "00272.nii.gz"}, {"image": "00273.nii.gz"}, {"image": "00275.nii.gz"}, {"image": "00277.nii.gz"}, {"image": "00278.nii.gz"}, {"image": "00279.nii.gz"}, {"image": "00280.nii.gz"}, {"image": "00281.nii.gz"}, {"image": "00282.nii.gz"}, {"image": "00283.nii.gz"}, {"image": "00284.nii.gz"}, {"image": "00285.nii.gz"}, {"image": "00286.nii.gz"}, {"image": "00287.nii.gz"}, {"image": "00288.nii.gz"}, {"image": "00289.nii.gz"}, {"image": "00290.nii.gz"}, {"image": "00292.nii.gz"}, {"image": "00293.nii.gz"}, {"image": "00294.nii.gz"}, {"image": "00295.nii.gz"}, {"image": "00296.nii.gz"}, {"image": "00297.nii.gz"}, {"image": "00298.nii.gz"}, {"image": "00299.nii.gz"}, {"image": "00300.nii.gz"}, {"image": "00302.nii.gz"}, {"image": "00303.nii.gz"}, {"image": "00304.nii.gz"}, {"image": "00305.nii.gz"}, {"image": "00306.nii.gz"}, {"image": "00307.nii.gz"}, {"image": "00308.nii.gz"}, {"image": "00309.nii.gz"}, {"image": "00310.nii.gz"}, {"image": "00311.nii.gz"}, {"image": "00312.nii.gz"}, {"image": "00313.nii.gz"}, {"image": "00314.nii.gz"}, {"image": "00315.nii.gz"}, {"image": "00316.nii.gz"}, {"image": "00317.nii.gz"}, {"image": "00318.nii.gz"}, {"image": "00319.nii.gz"}, {"image": "00320.nii.gz"}, {"image": "00321.nii.gz"}, {"image": "00322.nii.gz"}, {"image": "00323.nii.gz"}, {"image": "00324.nii.gz"}, {"image": "00325.nii.gz"}, {"image": "00326.nii.gz"}, {"image": "00327.nii.gz"}, {"image": "00328.nii.gz"}, {"image": "00329.nii.gz"}, {"image": "00330.nii.gz"}, {"image": "00331.nii.gz"}, {"image": "00332.nii.gz"}, {"image": "00333.nii.gz"}, {"image": "00334.nii.gz"}, {"image": "00335.nii.gz"}, {"image": "00336.nii.gz"}, {"image": "00337.nii.gz"}, {"image": "00338.nii.gz"}, {"image": 
"00339.nii.gz"}, {"image": "00340.nii.gz"}, {"image": "00341.nii.gz"}, {"image": "00342.nii.gz"}, {"image": "00343.nii.gz"}, {"image": "00345.nii.gz"}, {"image": "00348.nii.gz"}, {"image": "00349.nii.gz"}, {"image": "00350.nii.gz"}, {"image": "00351.nii.gz"}, {"image": "00352.nii.gz"}, {"image": "00353.nii.gz"}, {"image": "00354.nii.gz"}, {"image": "00355.nii.gz"}, {"image": "00356.nii.gz"}, {"image": "00357.nii.gz"}, {"image": "00358.nii.gz"}, {"image": "00359.nii.gz"}, {"image": "00362.nii.gz"}, {"image": "00363.nii.gz"}, {"image": "00364.nii.gz"}, {"image": "00365.nii.gz"}, {"image": "00366.nii.gz"}, {"image": "00367.nii.gz"}, {"image": "00368.nii.gz"}, {"image": "00369.nii.gz"}, {"image": "00370.nii.gz"}, {"image": "00371.nii.gz"}, {"image": "00372.nii.gz"}, {"image": "00373.nii.gz"}, {"image": "00374.nii.gz"}, {"image": "00375.nii.gz"}, {"image": "00376.nii.gz"}, {"image": "00377.nii.gz"}, {"image": "00378.nii.gz"}, {"image": "00379.nii.gz"}, {"image": "00380.nii.gz"}, {"image": "00381.nii.gz"}, {"image": "00382.nii.gz"}, {"image": "00383.nii.gz"}, {"image": "00384.nii.gz"}, {"image": "00385.nii.gz"}, {"image": "00386.nii.gz"}, {"image": "00387.nii.gz"}, {"image": "00389.nii.gz"}, {"image": "00390.nii.gz"}, {"image": "00391.nii.gz"}, {"image": "00392.nii.gz"}, {"image": "00393.nii.gz"}, {"image": "00394.nii.gz"}, {"image": "00395.nii.gz"}, {"image": "00396.nii.gz"}, {"image": "00397.nii.gz"}, {"image": "00398.nii.gz"}, {"image": "00399.nii.gz"}, {"image": "00400.nii.gz"}, {"image": "00401.nii.gz"}, {"image": "00402.nii.gz"}, {"image": "00403.nii.gz"}, {"image": "00404.nii.gz"}, {"image": "00405.nii.gz"}, {"image": "00406.nii.gz"}, {"image": "00407.nii.gz"}, {"image": "00408.nii.gz"}, {"image": "00409.nii.gz"}, {"image": "00410.nii.gz"}, {"image": "00411.nii.gz"}, {"image": "00412.nii.gz"}, {"image": "00413.nii.gz"}, {"image": "00414.nii.gz"}, {"image": "00415.nii.gz"}, {"image": "00416.nii.gz"}, {"image": "00419.nii.gz"}, {"image": "00421.nii.gz"}, {"image": 
"00422.nii.gz"}, {"image": "00423.nii.gz"}, {"image": "00424.nii.gz"}, {"image": "00425.nii.gz"}, {"image": "00426.nii.gz"}, {"image": "00427.nii.gz"}, {"image": "00428.nii.gz"}, {"image": "00429.nii.gz"}, {"image": "00430.nii.gz"}, {"image": "00431.nii.gz"}, {"image": "00432.nii.gz"}, {"image": "00433.nii.gz"}, {"image": "00435.nii.gz"}, {"image": "00436.nii.gz"}, {"image": "00437.nii.gz"}, {"image": "00438.nii.gz"}, {"image": "00439.nii.gz"}, {"image": "00440.nii.gz"}, {"image": "00441.nii.gz"}, {"image": "00442.nii.gz"}, {"image": "00443.nii.gz"}, {"image": "00444.nii.gz"}, {"image": "00445.nii.gz"}, {"image": "00446.nii.gz"}, {"image": "00447.nii.gz"}, {"image": "00448.nii.gz"}, {"image": "00449.nii.gz"}, {"image": "00450.nii.gz"}, {"image": "00451.nii.gz"}, {"image": "00452.nii.gz"}, {"image": "00454.nii.gz"}, {"image": "00455.nii.gz"}, {"image": "00456.nii.gz"}, {"image": "00457.nii.gz"}, {"image": "00458.nii.gz"}, {"image": "00459.nii.gz"}, {"image": "00460.nii.gz"}, {"image": "00461.nii.gz"}, {"image": "00462.nii.gz"}, {"image": "00463.nii.gz"}, {"image": "00464.nii.gz"}, {"image": "00465.nii.gz"}, {"image": "00466.nii.gz"}, {"image": "00467.nii.gz"}, {"image": "00468.nii.gz"}, {"image": "00469.nii.gz"}, {"image": "00473.nii.gz"}, {"image": "00474.nii.gz"}, {"image": "00476.nii.gz"}, {"image": "00477.nii.gz"}, {"image": "00478.nii.gz"}, {"image": "00479.nii.gz"}, {"image": "00480.nii.gz"}, {"image": "00481.nii.gz"}, {"image": "00483.nii.gz"}, {"image": "00484.nii.gz"}, {"image": "00485.nii.gz"}, {"image": "00486.nii.gz"}, {"image": "00487.nii.gz"}, {"image": "00488.nii.gz"}, {"image": "00489.nii.gz"}, {"image": "00490.nii.gz"}, {"image": "00491.nii.gz"}, {"image": "00492.nii.gz"}, {"image": "00494.nii.gz"}, {"image": "00497.nii.gz"}, {"image": "00498.nii.gz"}, {"image": "00499.nii.gz"}, {"image": "00500.nii.gz"}, {"image": "00501.nii.gz"}, {"image": "00502.nii.gz"}, {"image": "00504.nii.gz"}, {"image": "00505.nii.gz"}, {"image": "00506.nii.gz"}, {"image": 
"00507.nii.gz"}, {"image": "00508.nii.gz"}, {"image": "00509.nii.gz"}, {"image": "00510.nii.gz"}, {"image": "00511.nii.gz"}, {"image": "00512.nii.gz"}, {"image": "00513.nii.gz"}, {"image": "00514.nii.gz"}, {"image": "00515.nii.gz"}, {"image": "00516.nii.gz"}, {"image": "00518.nii.gz"}, {"image": "00519.nii.gz"}, {"image": "00520.nii.gz"}, {"image": "00521.nii.gz"}, {"image": "00522.nii.gz"}, {"image": "00523.nii.gz"}, {"image": "00524.nii.gz"}, {"image": "00525.nii.gz"}, {"image": "00526.nii.gz"}, {"image": "00527.nii.gz"}, {"image": "00528.nii.gz"}, {"image": "00529.nii.gz"}, {"image": "00530.nii.gz"}, {"image": "00531.nii.gz"}, {"image": "00532.nii.gz"}, {"image": "00533.nii.gz"}, {"image": "00534.nii.gz"}, {"image": "00535.nii.gz"}, {"image": "00536.nii.gz"}, {"image": "00537.nii.gz"}, {"image": "00538.nii.gz"}, {"image": "00539.nii.gz"}, {"image": "00540.nii.gz"}, {"image": "00541.nii.gz"}, {"image": "00542.nii.gz"}, {"image": "00543.nii.gz"}, {"image": "00544.nii.gz"}, {"image": "00545.nii.gz"}, {"image": "00546.nii.gz"}, {"image": "00547.nii.gz"}, {"image": "00548.nii.gz"}, {"image": "00549.nii.gz"}, {"image": "00550.nii.gz"}, {"image": "00551.nii.gz"}, {"image": "00552.nii.gz"}, {"image": "00554.nii.gz"}, {"image": "00555.nii.gz"}, {"image": "00556.nii.gz"}, {"image": "00557.nii.gz"}, {"image": "00558.nii.gz"}, {"image": "00559.nii.gz"}, {"image": "00560.nii.gz"}, {"image": "00561.nii.gz"}, {"image": "00562.nii.gz"}, {"image": "00564.nii.gz"}, {"image": "00566.nii.gz"}, {"image": "00567.nii.gz"}, {"image": "00568.nii.gz"}, {"image": "00569.nii.gz"}, {"image": "00570.nii.gz"}, {"image": "00571.nii.gz"}, {"image": "00572.nii.gz"}, {"image": "00573.nii.gz"}, {"image": "00574.nii.gz"}, {"image": "00576.nii.gz"}, {"image": "00577.nii.gz"}, {"image": "00578.nii.gz"}, {"image": "00579.nii.gz"}, {"image": "00580.nii.gz"}, {"image": "00581.nii.gz"}, {"image": "00582.nii.gz"}, {"image": "00583.nii.gz"}, {"image": "00584.nii.gz"}, {"image": "00585.nii.gz"}, {"image": 
"00586.nii.gz"}, {"image": "00587.nii.gz"}, {"image": "00588.nii.gz"}, {"image": "00589.nii.gz"}, {"image": "00590.nii.gz"}, {"image": "00591.nii.gz"}, {"image": "00592.nii.gz"}, {"image": "00593.nii.gz"}, {"image": "00594.nii.gz"}, {"image": "00595.nii.gz"}, {"image": "00596.nii.gz"}, {"image": "00597.nii.gz"}, {"image": "00598.nii.gz"}, {"image": "00599.nii.gz"}, {"image": "00600.nii.gz"}, {"image": "00601.nii.gz"}, {"image": "00602.nii.gz"}, {"image": "00603.nii.gz"}, {"image": "00604.nii.gz"}, {"image": "00606.nii.gz"}, {"image": "00607.nii.gz"}, {"image": "00608.nii.gz"}, {"image": "00610.nii.gz"}, {"image": "00611.nii.gz"}, {"image": "00612.nii.gz"}, {"image": "00613.nii.gz"}, {"image": "00614.nii.gz"}, {"image": "00615.nii.gz"}, {"image": "00616.nii.gz"}, {"image": "00617.nii.gz"}, {"image": "00618.nii.gz"}, {"image": "00619.nii.gz"}, {"image": "00620.nii.gz"}, {"image": "00621.nii.gz"}, {"image": "00622.nii.gz"}, {"image": "00626.nii.gz"}, {"image": "00628.nii.gz"}, {"image": "00629.nii.gz"}, {"image": "00630.nii.gz"}, {"image": "00631.nii.gz"}, {"image": "00632.nii.gz"}, {"image": "00633.nii.gz"}, {"image": "00634.nii.gz"}, {"image": "00635.nii.gz"}, {"image": "00636.nii.gz"}, {"image": "00637.nii.gz"}, {"image": "00638.nii.gz"}, {"image": "00639.nii.gz"}, {"image": "00640.nii.gz"}, {"image": "00641.nii.gz"}, {"image": "00643.nii.gz"}, {"image": "00644.nii.gz"}, {"image": "00645.nii.gz"}, {"image": "00646.nii.gz"}, {"image": "00647.nii.gz"}, {"image": "00648.nii.gz"}, {"image": "00649.nii.gz"}, {"image": "00650.nii.gz"}, {"image": "00651.nii.gz"}, {"image": "00653.nii.gz"}, {"image": "00654.nii.gz"}, {"image": "00655.nii.gz"}, {"image": "00656.nii.gz"}, {"image": "00657.nii.gz"}, {"image": "00659.nii.gz"}, {"image": "00660.nii.gz"}, {"image": "00661.nii.gz"}, {"image": "00662.nii.gz"}, {"image": "00663.nii.gz"}, {"image": "00664.nii.gz"}, {"image": "00665.nii.gz"}, {"image": "00666.nii.gz"}, {"image": "00667.nii.gz"}, {"image": "00668.nii.gz"}, {"image": 
"00669.nii.gz"}, {"image": "00670.nii.gz"}, {"image": "00671.nii.gz"}, {"image": "00673.nii.gz"}, {"image": "00674.nii.gz"}, {"image": "00675.nii.gz"}, {"image": "00676.nii.gz"}, {"image": "00677.nii.gz"}, {"image": "00678.nii.gz"}, {"image": "00679.nii.gz"}, {"image": "00681.nii.gz"}, {"image": "00683.nii.gz"}, {"image": "00684.nii.gz"}, {"image": "00685.nii.gz"}, {"image": "00686.nii.gz"}, {"image": "00687.nii.gz"}, {"image": "00688.nii.gz"}, {"image": "00689.nii.gz"}, {"image": "00690.nii.gz"}, {"image": "00692.nii.gz"}, {"image": "00693.nii.gz"}, {"image": "00694.nii.gz"}, {"image": "00695.nii.gz"}, {"image": "00696.nii.gz"}, {"image": "00697.nii.gz"}, {"image": "00698.nii.gz"}, {"image": "00699.nii.gz"}, {"image": "00700.nii.gz"}, {"image": "00701.nii.gz"}, {"image": "00702.nii.gz"}, {"image": "00703.nii.gz"}, {"image": "00704.nii.gz"}, {"image": "00705.nii.gz"}, {"image": "00706.nii.gz"}, {"image": "00707.nii.gz"}, {"image": "00708.nii.gz"}, {"image": "00709.nii.gz"}, {"image": "00710.nii.gz"}, {"image": "00711.nii.gz"}, {"image": "00712.nii.gz"}, {"image": "00713.nii.gz"}, {"image": "00714.nii.gz"}, {"image": "00716.nii.gz"}, {"image": "00717.nii.gz"}, {"image": "00718.nii.gz"}, {"image": "00719.nii.gz"}, {"image": "00720.nii.gz"}, {"image": "00722.nii.gz"}, {"image": "00723.nii.gz"}, {"image": "00724.nii.gz"}, {"image": "00725.nii.gz"}, {"image": "00726.nii.gz"}, {"image": "00727.nii.gz"}, {"image": "00728.nii.gz"}, {"image": "00731.nii.gz"}, {"image": "00732.nii.gz"}, {"image": "00733.nii.gz"}, {"image": "00734.nii.gz"}, {"image": "00735.nii.gz"}, {"image": "00736.nii.gz"}, {"image": "00737.nii.gz"}, {"image": "00738.nii.gz"}, {"image": "00739.nii.gz"}, {"image": "00741.nii.gz"}, {"image": "00743.nii.gz"}, {"image": "00744.nii.gz"}, {"image": "00745.nii.gz"}, {"image": "00746.nii.gz"}, {"image": "00747.nii.gz"}, {"image": "00748.nii.gz"}, {"image": "00749.nii.gz"}, {"image": "00750.nii.gz"}, {"image": "00751.nii.gz"}, {"image": "00752.nii.gz"}, {"image": 
"00755.nii.gz"}, {"image": "00757.nii.gz"}, {"image": "00758.nii.gz"}, {"image": "00760.nii.gz"}, {"image": "00762.nii.gz"}, {"image": "00763.nii.gz"}, {"image": "00764.nii.gz"}, {"image": "00767.nii.gz"}, {"image": "00768.nii.gz"}, {"image": "00769.nii.gz"}, {"image": "00770.nii.gz"}, {"image": "00771.nii.gz"}, {"image": "00772.nii.gz"}, {"image": "00774.nii.gz"}, {"image": "00775.nii.gz"}, {"image": "00777.nii.gz"}, {"image": "00779.nii.gz"}, {"image": "00780.nii.gz"}, {"image": "00781.nii.gz"}, {"image": "00782.nii.gz"}, {"image": "00783.nii.gz"}, {"image": "00784.nii.gz"}, {"image": "00786.nii.gz"}, {"image": "00787.nii.gz"}, {"image": "00788.nii.gz"}, {"image": "00789.nii.gz"}, {"image": "00790.nii.gz"}, {"image": "00791.nii.gz"}, {"image": "00792.nii.gz"}, {"image": "00793.nii.gz"}, {"image": "00794.nii.gz"}, {"image": "00795.nii.gz"}, {"image": "00796.nii.gz"}, {"image": "00797.nii.gz"}, {"image": "00798.nii.gz"}, {"image": "00799.nii.gz"}, {"image": "00800.nii.gz"}, {"image": "00801.nii.gz"}, {"image": "00804.nii.gz"}, {"image": "00805.nii.gz"}, {"image": "00807.nii.gz"}, {"image": "00808.nii.gz"}, {"image": "00809.nii.gz"}, {"image": "00811.nii.gz"}, {"image": "00813.nii.gz"}, {"image": "00814.nii.gz"}, {"image": "00815.nii.gz"}, {"image": "00816.nii.gz"}, {"image": "00817.nii.gz"}, {"image": "00818.nii.gz"}, {"image": "00819.nii.gz"}, {"image": "00820.nii.gz"}, {"image": "00821.nii.gz"}, {"image": "00822.nii.gz"}, {"image": "00828.nii.gz"}, {"image": "00829.nii.gz"}, {"image": "00830.nii.gz"}, {"image": "00831.nii.gz"}, {"image": "00832.nii.gz"}, {"image": "00833.nii.gz"}, {"image": "00835.nii.gz"}, {"image": "00838.nii.gz"}, {"image": "00839.nii.gz"}, {"image": "00840.nii.gz"}, {"image": "00841.nii.gz"}, {"image": "00842.nii.gz"}, {"image": "00844.nii.gz"}, {"image": "00845.nii.gz"}, {"image": "00846.nii.gz"}, {"image": "00848.nii.gz"}, {"image": "00849.nii.gz"}, {"image": "00850.nii.gz"}, {"image": "00851.nii.gz"}, {"image": "00852.nii.gz"}, {"image": 
"00853.nii.gz"}, {"image": "00854.nii.gz"}, {"image": "00855.nii.gz"}, {"image": "00857.nii.gz"}, {"image": "00858.nii.gz"}, {"image": "00859.nii.gz"}, {"image": "00860.nii.gz"}, {"image": "00861.nii.gz"}, {"image": "00862.nii.gz"}, {"image": "00863.nii.gz"}, {"image": "00864.nii.gz"}, {"image": "00865.nii.gz"}, {"image": "00867.nii.gz"}, {"image": "00868.nii.gz"}, {"image": "00869.nii.gz"}, {"image": "00870.nii.gz"}, {"image": "00871.nii.gz"}, {"image": "00872.nii.gz"}, {"image": "00875.nii.gz"}, {"image": "00876.nii.gz"}, {"image": "00877.nii.gz"}, {"image": "00879.nii.gz"}, {"image": "00880.nii.gz"}, {"image": "00881.nii.gz"}, {"image": "00882.nii.gz"}, {"image": "00883.nii.gz"}, {"image": "00884.nii.gz"}, {"image": "00885.nii.gz"}, {"image": "00886.nii.gz"}, {"image": "00887.nii.gz"}, {"image": "00888.nii.gz"}, {"image": "00889.nii.gz"}, {"image": "00890.nii.gz"}, {"image": "00891.nii.gz"}, {"image": "00892.nii.gz"}, {"image": "00893.nii.gz"}, {"image": "00894.nii.gz"}, {"image": "00895.nii.gz"}, {"image": "00896.nii.gz"}, {"image": "00897.nii.gz"}, {"image": "00898.nii.gz"}, {"image": "00899.nii.gz"}, {"image": "00900.nii.gz"}, {"image": "00901.nii.gz"}, {"image": "00902.nii.gz"}, {"image": "00903.nii.gz"}, {"image": "00905.nii.gz"}, {"image": "00906.nii.gz"}, {"image": "00907.nii.gz"}, {"image": "00908.nii.gz"}, {"image": "00909.nii.gz"}, {"image": "00910.nii.gz"}, {"image": "00911.nii.gz"}, {"image": "00913.nii.gz"}, {"image": "00914.nii.gz"}, {"image": "00916.nii.gz"}, {"image": "00917.nii.gz"}, {"image": "00918.nii.gz"}, {"image": "00919.nii.gz"}, {"image": "00920.nii.gz"}, {"image": "00921.nii.gz"}, {"image": "00922.nii.gz"}, {"image": "00923.nii.gz"}, {"image": "00924.nii.gz"}, {"image": "00925.nii.gz"}, {"image": "00926.nii.gz"}, {"image": "00927.nii.gz"}, {"image": "00928.nii.gz"}, {"image": "00929.nii.gz"}, {"image": "00930.nii.gz"}, {"image": "00931.nii.gz"}, {"image": "00932.nii.gz"}, {"image": "00933.nii.gz"}, {"image": "00934.nii.gz"}, {"image": 
"00935.nii.gz"}, {"image": "00936.nii.gz"}, {"image": "00937.nii.gz"}, {"image": "00938.nii.gz"}, {"image": "00939.nii.gz"}, {"image": "00940.nii.gz"}, {"image": "00941.nii.gz"}, {"image": "00942.nii.gz"}, {"image": "00943.nii.gz"}, {"image": "00944.nii.gz"}, {"image": "00945.nii.gz"}, {"image": "00946.nii.gz"}, {"image": "00947.nii.gz"}, {"image": "00948.nii.gz"}, {"image": "00949.nii.gz"}, {"image": "00950.nii.gz"}, {"image": "00951.nii.gz"}, {"image": "00952.nii.gz"}, {"image": "00954.nii.gz"}, {"image": "00955.nii.gz"}, {"image": "00956.nii.gz"}, {"image": "00957.nii.gz"}, {"image": "00958.nii.gz"}, {"image": "00959.nii.gz"}, {"image": "00960.nii.gz"}, {"image": "00961.nii.gz"}, {"image": "00962.nii.gz"}, {"image": "00963.nii.gz"}, {"image": "00964.nii.gz"}, {"image": "00965.nii.gz"}, {"image": "00966.nii.gz"}, {"image": "00967.nii.gz"}, {"image": "00968.nii.gz"}, {"image": "00969.nii.gz"}, {"image": "00970.nii.gz"}, {"image": "00971.nii.gz"}, {"image": "00972.nii.gz"}, {"image": "00973.nii.gz"}, {"image": "00974.nii.gz"}, {"image": "00975.nii.gz"}, {"image": "00976.nii.gz"}, {"image": "00977.nii.gz"}, {"image": "00978.nii.gz"}, {"image": "00979.nii.gz"}, {"image": "00981.nii.gz"}, {"image": "00982.nii.gz"}, {"image": "00983.nii.gz"}, {"image": "00984.nii.gz"}, {"image": "00985.nii.gz"}, {"image": "00986.nii.gz"}, {"image": "00987.nii.gz"}, {"image": "00988.nii.gz"}, {"image": "00989.nii.gz"}, {"image": "00990.nii.gz"}, {"image": "00991.nii.gz"}, {"image": "00992.nii.gz"}, {"image": "00993.nii.gz"}, {"image": "00994.nii.gz"}, {"image": "00995.nii.gz"}, {"image": "00997.nii.gz"}, {"image": "00998.nii.gz"}, {"image": "00999.nii.gz"}, {"image": "01000.nii.gz"}, {"image": "01001.nii.gz"}, {"image": "01002.nii.gz"}, {"image": "01003.nii.gz"}, {"image": "01004.nii.gz"}, {"image": "01005.nii.gz"}, {"image": "01006.nii.gz"}, {"image": "01007.nii.gz"}, {"image": "01008.nii.gz"}, {"image": "01009.nii.gz"}, {"image": "01010.nii.gz"}, {"image": "01011.nii.gz"}, {"image": 
"01012.nii.gz"}, {"image": "01013.nii.gz"}, {"image": "01014.nii.gz"}, {"image": "01015.nii.gz"}, {"image": "01016.nii.gz"}, {"image": "01017.nii.gz"}, {"image": "01019.nii.gz"}, {"image": "01020.nii.gz"}, {"image": "01021.nii.gz"}, {"image": "01022.nii.gz"}, {"image": "01023.nii.gz"}, {"image": "01024.nii.gz"}, {"image": "01025.nii.gz"}, {"image": "01026.nii.gz"}, {"image": "01027.nii.gz"}, {"image": "01028.nii.gz"}, {"image": "01030.nii.gz"}, {"image": "01031.nii.gz"}, {"image": "01032.nii.gz"}, {"image": "01033.nii.gz"}, {"image": "01034.nii.gz"}, {"image": "01035.nii.gz"}, {"image": "01036.nii.gz"}, {"image": "01037.nii.gz"}, {"image": "01038.nii.gz"}, {"image": "01040.nii.gz"}, {"image": "01041.nii.gz"}, {"image": "01042.nii.gz"}, {"image": "01043.nii.gz"}, {"image": "01044.nii.gz"}, {"image": "01045.nii.gz"}, {"image": "01046.nii.gz"}, {"image": "01047.nii.gz"}, {"image": "01048.nii.gz"}, {"image": "01049.nii.gz"}, {"image": "01051.nii.gz"}, {"image": "01052.nii.gz"}, {"image": "01054.nii.gz"}, {"image": "01055.nii.gz"}, {"image": "01056.nii.gz"}, {"image": "01057.nii.gz"}, {"image": "01058.nii.gz"}, {"image": "01059.nii.gz"}, {"image": "01060.nii.gz"}, {"image": "01061.nii.gz"}, {"image": "01062.nii.gz"}, {"image": "01063.nii.gz"}, {"image": "01064.nii.gz"}, {"image": "01065.nii.gz"}, {"image": "01066.nii.gz"}, {"image": "01067.nii.gz"}, {"image": "01068.nii.gz"}, {"image": "01069.nii.gz"}, {"image": "01070.nii.gz"}, {"image": "01071.nii.gz"}, {"image": "01072.nii.gz"}, {"image": "01073.nii.gz"}, {"image": "01075.nii.gz"}, {"image": "01077.nii.gz"}, {"image": "01078.nii.gz"}, {"image": "01080.nii.gz"}, {"image": "01081.nii.gz"}, {"image": "01082.nii.gz"}, {"image": "01083.nii.gz"}, {"image": "01084.nii.gz"}, {"image": "01086.nii.gz"}, {"image": "01087.nii.gz"}, {"image": "01088.nii.gz"}, {"image": "01090.nii.gz"}, {"image": "01091.nii.gz"}, {"image": "01092.nii.gz"}, {"image": "01093.nii.gz"}, {"image": "01094.nii.gz"}, {"image": "01095.nii.gz"}, {"image": 
"01096.nii.gz"}, {"image": "01097.nii.gz"}, {"image": "01098.nii.gz"}, {"image": "01099.nii.gz"}, {"image": "01100.nii.gz"}, {"image": "01101.nii.gz"}, {"image": "01102.nii.gz"}, {"image": "01103.nii.gz"}, {"image": "01105.nii.gz"}, {"image": "01106.nii.gz"}, {"image": "01107.nii.gz"}, {"image": "01108.nii.gz"}, {"image": "01109.nii.gz"}, {"image": "01110.nii.gz"}, {"image": "01111.nii.gz"}, {"image": "01112.nii.gz"}, {"image": "01113.nii.gz"}, {"image": "01114.nii.gz"}, {"image": "01115.nii.gz"}, {"image": "01116.nii.gz"}, {"image": "01117.nii.gz"}, {"image": "01118.nii.gz"}, {"image": "01119.nii.gz"}, {"image": "01120.nii.gz"}, {"image": "01121.nii.gz"}, {"image": "01122.nii.gz"}, {"image": "01123.nii.gz"}, {"image": "01124.nii.gz"}, {"image": "01125.nii.gz"}, {"image": "01127.nii.gz"}, {"image": "01128.nii.gz"}, {"image": "01129.nii.gz"}, {"image": "01130.nii.gz"}, {"image": "01131.nii.gz"}, {"image": "01132.nii.gz"}, {"image": "01133.nii.gz"}, {"image": "01134.nii.gz"}, {"image": "01135.nii.gz"}, {"image": "01136.nii.gz"}, {"image": "01137.nii.gz"}, {"image": "01138.nii.gz"}, {"image": "01139.nii.gz"}, {"image": "01140.nii.gz"}, {"image": "01141.nii.gz"}, {"image": "01142.nii.gz"}, {"image": "01143.nii.gz"}, {"image": "01144.nii.gz"}, {"image": "01146.nii.gz"}, {"image": "01147.nii.gz"}, {"image": "01148.nii.gz"}, {"image": "01149.nii.gz"}, {"image": "01150.nii.gz"}, {"image": "01151.nii.gz"}, {"image": "01152.nii.gz"}, {"image": "01153.nii.gz"}, {"image": "01155.nii.gz"}, {"image": "01156.nii.gz"}, {"image": "01157.nii.gz"}, {"image": "01158.nii.gz"}, {"image": "01159.nii.gz"}, {"image": "01161.nii.gz"}, {"image": "01162.nii.gz"}, {"image": "01163.nii.gz"}, {"image": "01164.nii.gz"}, {"image": "01166.nii.gz"}, {"image": "01167.nii.gz"}, {"image": "01170.nii.gz"}, {"image": "01171.nii.gz"}, {"image": "01173.nii.gz"}, {"image": "01175.nii.gz"}, {"image": "01176.nii.gz"}, {"image": "01177.nii.gz"}, {"image": "01178.nii.gz"}, {"image": "01179.nii.gz"}, {"image": 
"01180.nii.gz"}, {"image": "01181.nii.gz"}, {"image": "01182.nii.gz"}, {"image": "01183.nii.gz"}, {"image": "01184.nii.gz"}, {"image": "01186.nii.gz"}, {"image": "01187.nii.gz"}, {"image": "01188.nii.gz"}, {"image": "01189.nii.gz"}, {"image": "01190.nii.gz"}, {"image": "01192.nii.gz"}, {"image": "01193.nii.gz"}, {"image": "01194.nii.gz"}, {"image": "01195.nii.gz"}, {"image": "01196.nii.gz"}, {"image": "01197.nii.gz"}, {"image": "01198.nii.gz"}, {"image": "01201.nii.gz"}, {"image": "01202.nii.gz"}, {"image": "01203.nii.gz"}, {"image": "01204.nii.gz"}, {"image": "01206.nii.gz"}, {"image": "01207.nii.gz"}, {"image": "01208.nii.gz"}, {"image": "01209.nii.gz"}, {"image": "01210.nii.gz"}, {"image": "01212.nii.gz"}, {"image": "01213.nii.gz"}, {"image": "01214.nii.gz"}, {"image": "01216.nii.gz"}, {"image": "01217.nii.gz"}, {"image": "01218.nii.gz"}, {"image": "01219.nii.gz"}, {"image": "01220.nii.gz"}, {"image": "01222.nii.gz"}, {"image": "01223.nii.gz"}], "createDate": "2023-07-08_15-38-30"}
================================================
FILE: jsons/Totalsegmentator_dataset.json
================================================
{"training": [{"image": "s1405/ct.nii.gz"}, {"image": "s1404/ct.nii.gz"}, {"image": "s1403/ct.nii.gz"}, {"image": "s1401/ct.nii.gz"}, {"image": "s1400/ct.nii.gz"}, {"image": "s1399/ct.nii.gz"}, {"image": "s1397/ct.nii.gz"}, {"image": "s1395/ct.nii.gz"}, {"image": "s1394/ct.nii.gz"}, {"image": "s1390/ct.nii.gz"}, {"image": "s1385/ct.nii.gz"}, {"image": "s1374/ct.nii.gz"}, {"image": "s1368/ct.nii.gz"}, {"image": "s1364/ct.nii.gz"}, {"image": "s1355/ct.nii.gz"}, {"image": "s1354/ct.nii.gz"}, {"image": "s1353/ct.nii.gz"}, {"image": "s1352/ct.nii.gz"}, {"image": "s1350/ct.nii.gz"}, {"image": "s1349/ct.nii.gz"}, {"image": "s1348/ct.nii.gz"}, {"image": "s1347/ct.nii.gz"}, {"image": "s1346/ct.nii.gz"}, {"image": "s1345/ct.nii.gz"}, {"image": "s1344/ct.nii.gz"}, {"image": "s1343/ct.nii.gz"}, {"image": "s1342/ct.nii.gz"}, {"image": "s1341/ct.nii.gz"}, {"image": "s1340/ct.nii.gz"}, {"image": "s1339/ct.nii.gz"}, {"image": "s1338/ct.nii.gz"}, {"image": "s1337/ct.nii.gz"}, {"image": "s1336/ct.nii.gz"}, {"image": "s1335/ct.nii.gz"}, {"image": "s1334/ct.nii.gz"}, {"image": "s1333/ct.nii.gz"}, {"image": "s1332/ct.nii.gz"}, {"image": "s1331/ct.nii.gz"}, {"image": "s1330/ct.nii.gz"}, {"image": "s1329/ct.nii.gz"}, {"image": "s1328/ct.nii.gz"}, {"image": "s1327/ct.nii.gz"}, {"image": "s1326/ct.nii.gz"}, {"image": "s1325/ct.nii.gz"}, {"image": "s1323/ct.nii.gz"}, {"image": "s1322/ct.nii.gz"}, {"image": "s1321/ct.nii.gz"}, {"image": "s1319/ct.nii.gz"}, {"image": "s1318/ct.nii.gz"}, {"image": "s1317/ct.nii.gz"}, {"image": "s1316/ct.nii.gz"}, {"image": "s1315/ct.nii.gz"}, {"image": "s1314/ct.nii.gz"}, {"image": "s1312/ct.nii.gz"}, {"image": "s1311/ct.nii.gz"}, {"image": "s1310/ct.nii.gz"}, {"image": "s1309/ct.nii.gz"}, {"image": "s1308/ct.nii.gz"}, {"image": "s1307/ct.nii.gz"}, {"image": "s1305/ct.nii.gz"}, {"image": "s1304/ct.nii.gz"}, {"image": "s1303/ct.nii.gz"}, {"image": "s1301/ct.nii.gz"}, {"image": "s1300/ct.nii.gz"}, {"image": "s1299/ct.nii.gz"}, {"image": "s1298/ct.nii.gz"}, 
{"image": "s1297/ct.nii.gz"}, {"image": "s1296/ct.nii.gz"}, {"image": "s1295/ct.nii.gz"}, {"image": "s1294/ct.nii.gz"}, {"image": "s1293/ct.nii.gz"}, {"image": "s1292/ct.nii.gz"}, {"image": "s1291/ct.nii.gz"}, {"image": "s1290/ct.nii.gz"}, {"image": "s1289/ct.nii.gz"}, {"image": "s1288/ct.nii.gz"}, {"image": "s1287/ct.nii.gz"}, {"image": "s1286/ct.nii.gz"}, {"image": "s1285/ct.nii.gz"}, {"image": "s1283/ct.nii.gz"}, {"image": "s1281/ct.nii.gz"}, {"image": "s1280/ct.nii.gz"}, {"image": "s1279/ct.nii.gz"}, {"image": "s1278/ct.nii.gz"}, {"image": "s1277/ct.nii.gz"}, {"image": "s1276/ct.nii.gz"}, {"image": "s1275/ct.nii.gz"}, {"image": "s1274/ct.nii.gz"}, {"image": "s1273/ct.nii.gz"}, {"image": "s1272/ct.nii.gz"}, {"image": "s1271/ct.nii.gz"}, {"image": "s1270/ct.nii.gz"}, {"image": "s1269/ct.nii.gz"}, {"image": "s1268/ct.nii.gz"}, {"image": "s1267/ct.nii.gz"}, {"image": "s1264/ct.nii.gz"}, {"image": "s1262/ct.nii.gz"}, {"image": "s1261/ct.nii.gz"}, {"image": "s1260/ct.nii.gz"}, {"image": "s1259/ct.nii.gz"}, {"image": "s1258/ct.nii.gz"}, {"image": "s1257/ct.nii.gz"}, {"image": "s1256/ct.nii.gz"}, {"image": "s1255/ct.nii.gz"}, {"image": "s1254/ct.nii.gz"}, {"image": "s1252/ct.nii.gz"}, {"image": "s1251/ct.nii.gz"}, {"image": "s1250/ct.nii.gz"}, {"image": "s1249/ct.nii.gz"}, {"image": "s1248/ct.nii.gz"}, {"image": "s1247/ct.nii.gz"}, {"image": "s1246/ct.nii.gz"}, {"image": "s1245/ct.nii.gz"}, {"image": "s1244/ct.nii.gz"}, {"image": "s1243/ct.nii.gz"}, {"image": "s1242/ct.nii.gz"}, {"image": "s1241/ct.nii.gz"}, {"image": "s1240/ct.nii.gz"}, {"image": "s1239/ct.nii.gz"}, {"image": "s1238/ct.nii.gz"}, {"image": "s1237/ct.nii.gz"}, {"image": "s1236/ct.nii.gz"}, {"image": "s1235/ct.nii.gz"}, {"image": "s1234/ct.nii.gz"}, {"image": "s1228/ct.nii.gz"}, {"image": "s1215/ct.nii.gz"}, {"image": "s1210/ct.nii.gz"}, {"image": "s1206/ct.nii.gz"}, {"image": "s1197/ct.nii.gz"}, {"image": "s1191/ct.nii.gz"}, {"image": "s1190/ct.nii.gz"}, {"image": "s1189/ct.nii.gz"}, {"image": 
"s1187/ct.nii.gz"}, {"image": "s1185/ct.nii.gz"}, {"image": "s1184/ct.nii.gz"}, {"image": "s1183/ct.nii.gz"}, {"image": "s1182/ct.nii.gz"}, {"image": "s1179/ct.nii.gz"}, {"image": "s1178/ct.nii.gz"}, {"image": "s1177/ct.nii.gz"}, {"image": "s1176/ct.nii.gz"}, {"image": "s1175/ct.nii.gz"}, {"image": "s1174/ct.nii.gz"}, {"image": "s1173/ct.nii.gz"}, {"image": "s1172/ct.nii.gz"}, {"image": "s1171/ct.nii.gz"}, {"image": "s1170/ct.nii.gz"}, {"image": "s1169/ct.nii.gz"}, {"image": "s1168/ct.nii.gz"}, {"image": "s1167/ct.nii.gz"}, {"image": "s1165/ct.nii.gz"}, {"image": "s1164/ct.nii.gz"}, {"image": "s1163/ct.nii.gz"}, {"image": "s1162/ct.nii.gz"}, {"image": "s1161/ct.nii.gz"}, {"image": "s1159/ct.nii.gz"}, {"image": "s1158/ct.nii.gz"}, {"image": "s1157/ct.nii.gz"}, {"image": "s1156/ct.nii.gz"}, {"image": "s1155/ct.nii.gz"}, {"image": "s1154/ct.nii.gz"}, {"image": "s1153/ct.nii.gz"}, {"image": "s1152/ct.nii.gz"}, {"image": "s1151/ct.nii.gz"}, {"image": "s1150/ct.nii.gz"}, {"image": "s1149/ct.nii.gz"}, {"image": "s1148/ct.nii.gz"}, {"image": "s1147/ct.nii.gz"}, {"image": "s1146/ct.nii.gz"}, {"image": "s1145/ct.nii.gz"}, {"image": "s1144/ct.nii.gz"}, {"image": "s1143/ct.nii.gz"}, {"image": "s1142/ct.nii.gz"}, {"image": "s1141/ct.nii.gz"}, {"image": "s1140/ct.nii.gz"}, {"image": "s1138/ct.nii.gz"}, {"image": "s1137/ct.nii.gz"}, {"image": "s1136/ct.nii.gz"}, {"image": "s1135/ct.nii.gz"}, {"image": "s1134/ct.nii.gz"}, {"image": "s1133/ct.nii.gz"}, {"image": "s1132/ct.nii.gz"}, {"image": "s1131/ct.nii.gz"}, {"image": "s1130/ct.nii.gz"}, {"image": "s1129/ct.nii.gz"}, {"image": "s1128/ct.nii.gz"}, {"image": "s1127/ct.nii.gz"}, {"image": "s1125/ct.nii.gz"}, {"image": "s1124/ct.nii.gz"}, {"image": "s1123/ct.nii.gz"}, {"image": "s1122/ct.nii.gz"}, {"image": "s1120/ct.nii.gz"}, {"image": "s1112/ct.nii.gz"}, {"image": "s1110/ct.nii.gz"}, {"image": "s1099/ct.nii.gz"}, {"image": "s1097/ct.nii.gz"}, {"image": "s1093/ct.nii.gz"}, {"image": "s1086/ct.nii.gz"}, {"image": "s1070/ct.nii.gz"}, 
{"image": "s1069/ct.nii.gz"}, {"image": "s1067/ct.nii.gz"}, {"image": "s1065/ct.nii.gz"}, {"image": "s1063/ct.nii.gz"}, {"image": "s1060/ct.nii.gz"}, {"image": "s1056/ct.nii.gz"}, {"image": "s1049/ct.nii.gz"}, {"image": "s1048/ct.nii.gz"}, {"image": "s1040/ct.nii.gz"}, {"image": "s1022/ct.nii.gz"}, {"image": "s1017/ct.nii.gz"}, {"image": "s1015/ct.nii.gz"}, {"image": "s1010/ct.nii.gz"}, {"image": "s1005/ct.nii.gz"}, {"image": "s0994/ct.nii.gz"}, {"image": "s0980/ct.nii.gz"}, {"image": "s0972/ct.nii.gz"}, {"image": "s0962/ct.nii.gz"}, {"image": "s0956/ct.nii.gz"}, {"image": "s0955/ct.nii.gz"}, {"image": "s0949/ct.nii.gz"}, {"image": "s0945/ct.nii.gz"}, {"image": "s0923/ct.nii.gz"}, {"image": "s0913/ct.nii.gz"}, {"image": "s0908/ct.nii.gz"}, {"image": "s0907/ct.nii.gz"}, {"image": "s0891/ct.nii.gz"}, {"image": "s0880/ct.nii.gz"}, {"image": "s0870/ct.nii.gz"}, {"image": "s0868/ct.nii.gz"}, {"image": "s0866/ct.nii.gz"}, {"image": "s0865/ct.nii.gz"}, {"image": "s0862/ct.nii.gz"}, {"image": "s0860/ct.nii.gz"}, {"image": "s0852/ct.nii.gz"}, {"image": "s0851/ct.nii.gz"}, {"image": "s0818/ct.nii.gz"}, {"image": "s0813/ct.nii.gz"}, {"image": "s0809/ct.nii.gz"}, {"image": "s0789/ct.nii.gz"}, {"image": "s0788/ct.nii.gz"}, {"image": "s0784/ct.nii.gz"}, {"image": "s0782/ct.nii.gz"}, {"image": "s0777/ct.nii.gz"}, {"image": "s0773/ct.nii.gz"}, {"image": "s0768/ct.nii.gz"}, {"image": "s0766/ct.nii.gz"}, {"image": "s0746/ct.nii.gz"}, {"image": "s0733/ct.nii.gz"}, {"image": "s0718/ct.nii.gz"}, {"image": "s0714/ct.nii.gz"}, {"image": "s0710/ct.nii.gz"}, {"image": "s0708/ct.nii.gz"}, {"image": "s0682/ct.nii.gz"}, {"image": "s0681/ct.nii.gz"}, {"image": "s0674/ct.nii.gz"}, {"image": "s0672/ct.nii.gz"}, {"image": "s0666/ct.nii.gz"}, {"image": "s0660/ct.nii.gz"}, {"image": "s0655/ct.nii.gz"}, {"image": "s0650/ct.nii.gz"}, {"image": "s0622/ct.nii.gz"}, {"image": "s0618/ct.nii.gz"}, {"image": "s0612/ct.nii.gz"}, {"image": "s0611/ct.nii.gz"}, {"image": "s0600/ct.nii.gz"}, {"image": 
"s0593/ct.nii.gz"}, {"image": "s0586/ct.nii.gz"}, {"image": "s0581/ct.nii.gz"}, {"image": "s0579/ct.nii.gz"}, {"image": "s0576/ct.nii.gz"}, {"image": "s0568/ct.nii.gz"}, {"image": "s0560/ct.nii.gz"}, {"image": "s0535/ct.nii.gz"}, {"image": "s0511/ct.nii.gz"}, {"image": "s0502/ct.nii.gz"}, {"image": "s0499/ct.nii.gz"}, {"image": "s0497/ct.nii.gz"}, {"image": "s0493/ct.nii.gz"}, {"image": "s0492/ct.nii.gz"}, {"image": "s0486/ct.nii.gz"}, {"image": "s0475/ct.nii.gz"}, {"image": "s0472/ct.nii.gz"}, {"image": "s0461/ct.nii.gz"}, {"image": "s0454/ct.nii.gz"}, {"image": "s0449/ct.nii.gz"}, {"image": "s0445/ct.nii.gz"}, {"image": "s0438/ct.nii.gz"}, {"image": "s0429/ct.nii.gz"}, {"image": "s0428/ct.nii.gz"}, {"image": "s0420/ct.nii.gz"}, {"image": "s0419/ct.nii.gz"}, {"image": "s0414/ct.nii.gz"}, {"image": "s0406/ct.nii.gz"}, {"image": "s0393/ct.nii.gz"}, {"image": "s0390/ct.nii.gz"}, {"image": "s0385/ct.nii.gz"}, {"image": "s0377/ct.nii.gz"}, {"image": "s0375/ct.nii.gz"}, {"image": "s0371/ct.nii.gz"}, {"image": "s0369/ct.nii.gz"}, {"image": "s0342/ct.nii.gz"}, {"image": "s0338/ct.nii.gz"}, {"image": "s0334/ct.nii.gz"}, {"image": "s0331/ct.nii.gz"}, {"image": "s0319/ct.nii.gz"}, {"image": "s0315/ct.nii.gz"}, {"image": "s0306/ct.nii.gz"}, {"image": "s0294/ct.nii.gz"}, {"image": "s0283/ct.nii.gz"}, {"image": "s0279/ct.nii.gz"}, {"image": "s0275/ct.nii.gz"}, {"image": "s0266/ct.nii.gz"}, {"image": "s0261/ct.nii.gz"}, {"image": "s0260/ct.nii.gz"}, {"image": "s0257/ct.nii.gz"}, {"image": "s0247/ct.nii.gz"}, {"image": "s0230/ct.nii.gz"}, {"image": "s0229/ct.nii.gz"}, {"image": "s0224/ct.nii.gz"}, {"image": "s0222/ct.nii.gz"}, {"image": "s0208/ct.nii.gz"}, {"image": "s0193/ct.nii.gz"}, {"image": "s0188/ct.nii.gz"}, {"image": "s0172/ct.nii.gz"}, {"image": "s0170/ct.nii.gz"}, {"image": "s0162/ct.nii.gz"}, {"image": "s0150/ct.nii.gz"}, {"image": "s0147/ct.nii.gz"}, {"image": "s0141/ct.nii.gz"}, {"image": "s0131/ct.nii.gz"}, {"image": "s0129/ct.nii.gz"}, {"image": "s0124/ct.nii.gz"}, 
{"image": "s0122/ct.nii.gz"}, {"image": "s0112/ct.nii.gz"}, {"image": "s0111/ct.nii.gz"}, {"image": "s0105/ct.nii.gz"}, {"image": "s0100/ct.nii.gz"}, {"image": "s0088/ct.nii.gz"}, {"image": "s0084/ct.nii.gz"}, {"image": "s0083/ct.nii.gz"}, {"image": "s0065/ct.nii.gz"}, {"image": "s0059/ct.nii.gz"}, {"image": "s0056/ct.nii.gz"}, {"image": "s0045/ct.nii.gz"}, {"image": "s0043/ct.nii.gz"}, {"image": "s0042/ct.nii.gz"}, {"image": "s0028/ct.nii.gz"}, {"image": "s0024/ct.nii.gz"}, {"image": "s0022/ct.nii.gz"}, {"image": "s0000/ct.nii.gz"}, {"image": "s1391/ct.nii.gz"}, {"image": "s1389/ct.nii.gz"}, {"image": "s1388/ct.nii.gz"}, {"image": "s1387/ct.nii.gz"}, {"image": "s1386/ct.nii.gz"}, {"image": "s1384/ct.nii.gz"}, {"image": "s1383/ct.nii.gz"}, {"image": "s1382/ct.nii.gz"}, {"image": "s1380/ct.nii.gz"}, {"image": "s1379/ct.nii.gz"}, {"image": "s1378/ct.nii.gz"}, {"image": "s1377/ct.nii.gz"}, {"image": "s1375/ct.nii.gz"}, {"image": "s1373/ct.nii.gz"}, {"image": "s1372/ct.nii.gz"}, {"image": "s1371/ct.nii.gz"}, {"image": "s1370/ct.nii.gz"}, {"image": "s1369/ct.nii.gz"}, {"image": "s1367/ct.nii.gz"}, {"image": "s1366/ct.nii.gz"}, {"image": "s1365/ct.nii.gz"}, {"image": "s1363/ct.nii.gz"}, {"image": "s1362/ct.nii.gz"}, {"image": "s1361/ct.nii.gz"}, {"image": "s1359/ct.nii.gz"}, {"image": "s1358/ct.nii.gz"}, {"image": "s1357/ct.nii.gz"}, {"image": "s1233/ct.nii.gz"}, {"image": "s1231/ct.nii.gz"}, {"image": "s1230/ct.nii.gz"}, {"image": "s1227/ct.nii.gz"}, {"image": "s1226/ct.nii.gz"}, {"image": "s1225/ct.nii.gz"}, {"image": "s1224/ct.nii.gz"}, {"image": "s1223/ct.nii.gz"}, {"image": "s1222/ct.nii.gz"}, {"image": "s1221/ct.nii.gz"}, {"image": "s1220/ct.nii.gz"}, {"image": "s1218/ct.nii.gz"}, {"image": "s1216/ct.nii.gz"}, {"image": "s1212/ct.nii.gz"}, {"image": "s1209/ct.nii.gz"}, {"image": "s1208/ct.nii.gz"}, {"image": "s1207/ct.nii.gz"}, {"image": "s1205/ct.nii.gz"}, {"image": "s1203/ct.nii.gz"}, {"image": "s1201/ct.nii.gz"}, {"image": "s1199/ct.nii.gz"}, {"image": 
"s1196/ct.nii.gz"}, {"image": "s1195/ct.nii.gz"}, {"image": "s1194/ct.nii.gz"}, {"image": "s1192/ct.nii.gz"}, {"image": "s1121/ct.nii.gz"}, {"image": "s1119/ct.nii.gz"}, {"image": "s1115/ct.nii.gz"}, {"image": "s1114/ct.nii.gz"}, {"image": "s1113/ct.nii.gz"}, {"image": "s1111/ct.nii.gz"}, {"image": "s1109/ct.nii.gz"}, {"image": "s1107/ct.nii.gz"}, {"image": "s1106/ct.nii.gz"}, {"image": "s1105/ct.nii.gz"}, {"image": "s1104/ct.nii.gz"}, {"image": "s1103/ct.nii.gz"}, {"image": "s1102/ct.nii.gz"}, {"image": "s1101/ct.nii.gz"}, {"image": "s1100/ct.nii.gz"}, {"image": "s1098/ct.nii.gz"}, {"image": "s1096/ct.nii.gz"}, {"image": "s1094/ct.nii.gz"}, {"image": "s1090/ct.nii.gz"}, {"image": "s1089/ct.nii.gz"}, {"image": "s1088/ct.nii.gz"}, {"image": "s1085/ct.nii.gz"}, {"image": "s1084/ct.nii.gz"}, {"image": "s1083/ct.nii.gz"}, {"image": "s1082/ct.nii.gz"}, {"image": "s1079/ct.nii.gz"}, {"image": "s1077/ct.nii.gz"}, {"image": "s1075/ct.nii.gz"}, {"image": "s1073/ct.nii.gz"}, {"image": "s1072/ct.nii.gz"}, {"image": "s1071/ct.nii.gz"}, {"image": "s1068/ct.nii.gz"}, {"image": "s1066/ct.nii.gz"}, {"image": "s1062/ct.nii.gz"}, {"image": "s1061/ct.nii.gz"}, {"image": "s1059/ct.nii.gz"}, {"image": "s1058/ct.nii.gz"}, {"image": "s1057/ct.nii.gz"}, {"image": "s1055/ct.nii.gz"}, {"image": "s1053/ct.nii.gz"}, {"image": "s1052/ct.nii.gz"}, {"image": "s1051/ct.nii.gz"}, {"image": "s1050/ct.nii.gz"}, {"image": "s1047/ct.nii.gz"}, {"image": "s1046/ct.nii.gz"}, {"image": "s1045/ct.nii.gz"}, {"image": "s1044/ct.nii.gz"}, {"image": "s1043/ct.nii.gz"}, {"image": "s1042/ct.nii.gz"}, {"image": "s1041/ct.nii.gz"}, {"image": "s1039/ct.nii.gz"}, {"image": "s1038/ct.nii.gz"}, {"image": "s1037/ct.nii.gz"}, {"image": "s1036/ct.nii.gz"}, {"image": "s1035/ct.nii.gz"}, {"image": "s1034/ct.nii.gz"}, {"image": "s1033/ct.nii.gz"}, {"image": "s1032/ct.nii.gz"}, {"image": "s1031/ct.nii.gz"}, {"image": "s1029/ct.nii.gz"}, {"image": "s1028/ct.nii.gz"}, {"image": "s1026/ct.nii.gz"}, {"image": "s1025/ct.nii.gz"}, 
{"image": "s1024/ct.nii.gz"}, {"image": "s1023/ct.nii.gz"}, {"image": "s1021/ct.nii.gz"}, {"image": "s1020/ct.nii.gz"}, {"image": "s1018/ct.nii.gz"}, {"image": "s1016/ct.nii.gz"}, {"image": "s1014/ct.nii.gz"}, {"image": "s1013/ct.nii.gz"}, {"image": "s1012/ct.nii.gz"}, {"image": "s1011/ct.nii.gz"}, {"image": "s1009/ct.nii.gz"}, {"image": "s1008/ct.nii.gz"}, {"image": "s1007/ct.nii.gz"}, {"image": "s1006/ct.nii.gz"}, {"image": "s1004/ct.nii.gz"}, {"image": "s1003/ct.nii.gz"}, {"image": "s1002/ct.nii.gz"}, {"image": "s1001/ct.nii.gz"}, {"image": "s1000/ct.nii.gz"}, {"image": "s0999/ct.nii.gz"}, {"image": "s0997/ct.nii.gz"}, {"image": "s0996/ct.nii.gz"}, {"image": "s0995/ct.nii.gz"}, {"image": "s0993/ct.nii.gz"}, {"image": "s0992/ct.nii.gz"}, {"image": "s0991/ct.nii.gz"}, {"image": "s0989/ct.nii.gz"}, {"image": "s0988/ct.nii.gz"}, {"image": "s0987/ct.nii.gz"}, {"image": "s0986/ct.nii.gz"}, {"image": "s0985/ct.nii.gz"}, {"image": "s0984/ct.nii.gz"}, {"image": "s0983/ct.nii.gz"}, {"image": "s0982/ct.nii.gz"}, {"image": "s0981/ct.nii.gz"}, {"image": "s0979/ct.nii.gz"}, {"image": "s0978/ct.nii.gz"}, {"image": "s0977/ct.nii.gz"}, {"image": "s0976/ct.nii.gz"}, {"image": "s0975/ct.nii.gz"}, {"image": "s0974/ct.nii.gz"}, {"image": "s0973/ct.nii.gz"}, {"image": "s0971/ct.nii.gz"}, {"image": "s0970/ct.nii.gz"}, {"image": "s0968/ct.nii.gz"}, {"image": "s0965/ct.nii.gz"}, {"image": "s0963/ct.nii.gz"}, {"image": "s0961/ct.nii.gz"}, {"image": "s0960/ct.nii.gz"}, {"image": "s0959/ct.nii.gz"}, {"image": "s0958/ct.nii.gz"}, {"image": "s0957/ct.nii.gz"}, {"image": "s0954/ct.nii.gz"}, {"image": "s0953/ct.nii.gz"}, {"image": "s0952/ct.nii.gz"}, {"image": "s0951/ct.nii.gz"}, {"image": "s0950/ct.nii.gz"}, {"image": "s0947/ct.nii.gz"}, {"image": "s0946/ct.nii.gz"}, {"image": "s0944/ct.nii.gz"}, {"image": "s0943/ct.nii.gz"}, {"image": "s0941/ct.nii.gz"}, {"image": "s0940/ct.nii.gz"}, {"image": "s0939/ct.nii.gz"}, {"image": "s0938/ct.nii.gz"}, {"image": "s0937/ct.nii.gz"}, {"image": 
"s0936/ct.nii.gz"}, {"image": "s0935/ct.nii.gz"}, {"image": "s0934/ct.nii.gz"}, {"image": "s0933/ct.nii.gz"}, {"image": "s0931/ct.nii.gz"}, {"image": "s0930/ct.nii.gz"}, {"image": "s0928/ct.nii.gz"}, {"image": "s0927/ct.nii.gz"}, {"image": "s0925/ct.nii.gz"}, {"image": "s0924/ct.nii.gz"}, {"image": "s0922/ct.nii.gz"}, {"image": "s0921/ct.nii.gz"}, {"image": "s0919/ct.nii.gz"}, {"image": "s0918/ct.nii.gz"}, {"image": "s0916/ct.nii.gz"}, {"image": "s0915/ct.nii.gz"}, {"image": "s0914/ct.nii.gz"}, {"image": "s0912/ct.nii.gz"}, {"image": "s0911/ct.nii.gz"}, {"image": "s0910/ct.nii.gz"}, {"image": "s0909/ct.nii.gz"}, {"image": "s0904/ct.nii.gz"}, {"image": "s0903/ct.nii.gz"}, {"image": "s0901/ct.nii.gz"}, {"image": "s0899/ct.nii.gz"}, {"image": "s0898/ct.nii.gz"}, {"image": "s0897/ct.nii.gz"}, {"image": "s0896/ct.nii.gz"}, {"image": "s0895/ct.nii.gz"}, {"image": "s0894/ct.nii.gz"}, {"image": "s0892/ct.nii.gz"}, {"image": "s0890/ct.nii.gz"}, {"image": "s0889/ct.nii.gz"}, {"image": "s0885/ct.nii.gz"}, {"image": "s0884/ct.nii.gz"}, {"image": "s0883/ct.nii.gz"}, {"image": "s0881/ct.nii.gz"}, {"image": "s0879/ct.nii.gz"}, {"image": "s0878/ct.nii.gz"}, {"image": "s0877/ct.nii.gz"}, {"image": "s0876/ct.nii.gz"}, {"image": "s0875/ct.nii.gz"}, {"image": "s0874/ct.nii.gz"}, {"image": "s0873/ct.nii.gz"}, {"image": "s0871/ct.nii.gz"}, {"image": "s0869/ct.nii.gz"}, {"image": "s0867/ct.nii.gz"}, {"image": "s0863/ct.nii.gz"}, {"image": "s0861/ct.nii.gz"}, {"image": "s0859/ct.nii.gz"}, {"image": "s0858/ct.nii.gz"}, {"image": "s0857/ct.nii.gz"}, {"image": "s0855/ct.nii.gz"}, {"image": "s0853/ct.nii.gz"}, {"image": "s0850/ct.nii.gz"}, {"image": "s0849/ct.nii.gz"}, {"image": "s0848/ct.nii.gz"}, {"image": "s0847/ct.nii.gz"}, {"image": "s0846/ct.nii.gz"}, {"image": "s0845/ct.nii.gz"}, {"image": "s0844/ct.nii.gz"}, {"image": "s0843/ct.nii.gz"}, {"image": "s0842/ct.nii.gz"}, {"image": "s0840/ct.nii.gz"}, {"image": "s0839/ct.nii.gz"}, {"image": "s0838/ct.nii.gz"}, {"image": "s0837/ct.nii.gz"}, 
{"image": "s0836/ct.nii.gz"}, {"image": "s0835/ct.nii.gz"}, {"image": "s0834/ct.nii.gz"}, {"image": "s0833/ct.nii.gz"}, {"image": "s0832/ct.nii.gz"}, {"image": "s0831/ct.nii.gz"}, {"image": "s0830/ct.nii.gz"}, {"image": "s0829/ct.nii.gz"}, {"image": "s0826/ct.nii.gz"}, {"image": "s0825/ct.nii.gz"}, {"image": "s0824/ct.nii.gz"}, {"image": "s0822/ct.nii.gz"}, {"image": "s0821/ct.nii.gz"}, {"image": "s0820/ct.nii.gz"}, {"image": "s0819/ct.nii.gz"}, {"image": "s0816/ct.nii.gz"}, {"image": "s0815/ct.nii.gz"}, {"image": "s0814/ct.nii.gz"}, {"image": "s0812/ct.nii.gz"}, {"image": "s0811/ct.nii.gz"}, {"image": "s0810/ct.nii.gz"}, {"image": "s0808/ct.nii.gz"}, {"image": "s0807/ct.nii.gz"}, {"image": "s0806/ct.nii.gz"}, {"image": "s0805/ct.nii.gz"}, {"image": "s0804/ct.nii.gz"}, {"image": "s0802/ct.nii.gz"}, {"image": "s0801/ct.nii.gz"}, {"image": "s0800/ct.nii.gz"}, {"image": "s0798/ct.nii.gz"}, {"image": "s0797/ct.nii.gz"}, {"image": "s0796/ct.nii.gz"}, {"image": "s0795/ct.nii.gz"}, {"image": "s0794/ct.nii.gz"}, {"image": "s0793/ct.nii.gz"}, {"image": "s0792/ct.nii.gz"}, {"image": "s0791/ct.nii.gz"}, {"image": "s0790/ct.nii.gz"}, {"image": "s0787/ct.nii.gz"}, {"image": "s0786/ct.nii.gz"}, {"image": "s0785/ct.nii.gz"}, {"image": "s0783/ct.nii.gz"}, {"image": "s0781/ct.nii.gz"}, {"image": "s0780/ct.nii.gz"}, {"image": "s0778/ct.nii.gz"}, {"image": "s0776/ct.nii.gz"}, {"image": "s0775/ct.nii.gz"}, {"image": "s0774/ct.nii.gz"}, {"image": "s0772/ct.nii.gz"}, {"image": "s0771/ct.nii.gz"}, {"image": "s0770/ct.nii.gz"}, {"image": "s0769/ct.nii.gz"}, {"image": "s0767/ct.nii.gz"}, {"image": "s0765/ct.nii.gz"}, {"image": "s0764/ct.nii.gz"}, {"image": "s0763/ct.nii.gz"}, {"image": "s0762/ct.nii.gz"}, {"image": "s0760/ct.nii.gz"}, {"image": "s0759/ct.nii.gz"}, {"image": "s0756/ct.nii.gz"}, {"image": "s0754/ct.nii.gz"}, {"image": "s0753/ct.nii.gz"}, {"image": "s0752/ct.nii.gz"}, {"image": "s0751/ct.nii.gz"}, {"image": "s0750/ct.nii.gz"}, {"image": "s0749/ct.nii.gz"}, {"image": 
"s0748/ct.nii.gz"}, {"image": "s0747/ct.nii.gz"}, {"image": "s0744/ct.nii.gz"}, {"image": "s0743/ct.nii.gz"}, {"image": "s0741/ct.nii.gz"}, {"image": "s0740/ct.nii.gz"}, {"image": "s0739/ct.nii.gz"}, {"image": "s0738/ct.nii.gz"}, {"image": "s0737/ct.nii.gz"}, {"image": "s0736/ct.nii.gz"}, {"image": "s0735/ct.nii.gz"}, {"image": "s0734/ct.nii.gz"}, {"image": "s0732/ct.nii.gz"}, {"image": "s0731/ct.nii.gz"}, {"image": "s0730/ct.nii.gz"}, {"image": "s0729/ct.nii.gz"}, {"image": "s0728/ct.nii.gz"}, {"image": "s0727/ct.nii.gz"}, {"image": "s0726/ct.nii.gz"}, {"image": "s0724/ct.nii.gz"}, {"image": "s0723/ct.nii.gz"}, {"image": "s0721/ct.nii.gz"}, {"image": "s0720/ct.nii.gz"}, {"image": "s0719/ct.nii.gz"}, {"image": "s0717/ct.nii.gz"}, {"image": "s0716/ct.nii.gz"}, {"image": "s0715/ct.nii.gz"}, {"image": "s0713/ct.nii.gz"}, {"image": "s0712/ct.nii.gz"}, {"image": "s0711/ct.nii.gz"}, {"image": "s0709/ct.nii.gz"}, {"image": "s0707/ct.nii.gz"}, {"image": "s0706/ct.nii.gz"}, {"image": "s0705/ct.nii.gz"}, {"image": "s0704/ct.nii.gz"}, {"image": "s0703/ct.nii.gz"}, {"image": "s0702/ct.nii.gz"}, {"image": "s0700/ct.nii.gz"}, {"image": "s0699/ct.nii.gz"}, {"image": "s0698/ct.nii.gz"}, {"image": "s0697/ct.nii.gz"}, {"image": "s0696/ct.nii.gz"}, {"image": "s0695/ct.nii.gz"}, {"image": "s0694/ct.nii.gz"}, {"image": "s0693/ct.nii.gz"}, {"image": "s0692/ct.nii.gz"}, {"image": "s0691/ct.nii.gz"}, {"image": "s0690/ct.nii.gz"}, {"image": "s0688/ct.nii.gz"}, {"image": "s0687/ct.nii.gz"}, {"image": "s0686/ct.nii.gz"}, {"image": "s0685/ct.nii.gz"}, {"image": "s0684/ct.nii.gz"}, {"image": "s0683/ct.nii.gz"}, {"image": "s0680/ct.nii.gz"}, {"image": "s0679/ct.nii.gz"}, {"image": "s0677/ct.nii.gz"}, {"image": "s0676/ct.nii.gz"}, {"image": "s0675/ct.nii.gz"}, {"image": "s0673/ct.nii.gz"}, {"image": "s0671/ct.nii.gz"}, {"image": "s0670/ct.nii.gz"}, {"image": "s0669/ct.nii.gz"}, {"image": "s0668/ct.nii.gz"}, {"image": "s0667/ct.nii.gz"}, {"image": "s0665/ct.nii.gz"}, {"image": "s0664/ct.nii.gz"}, 
{"image": "s0663/ct.nii.gz"}, {"image": "s0662/ct.nii.gz"}, {"image": "s0661/ct.nii.gz"}, {"image": "s0659/ct.nii.gz"}, {"image": "s0658/ct.nii.gz"}, {"image": "s0657/ct.nii.gz"}, {"image": "s0656/ct.nii.gz"}, {"image": "s0654/ct.nii.gz"}, {"image": "s0653/ct.nii.gz"}, {"image": "s0652/ct.nii.gz"}, {"image": "s0651/ct.nii.gz"}, {"image": "s0649/ct.nii.gz"}, {"image": "s0648/ct.nii.gz"}, {"image": "s0647/ct.nii.gz"}, {"image": "s0646/ct.nii.gz"}, {"image": "s0645/ct.nii.gz"}, {"image": "s0644/ct.nii.gz"}, {"image": "s0643/ct.nii.gz"}, {"image": "s0642/ct.nii.gz"}, {"image": "s0641/ct.nii.gz"}, {"image": "s0640/ct.nii.gz"}, {"image": "s0639/ct.nii.gz"}, {"image": "s0638/ct.nii.gz"}, {"image": "s0637/ct.nii.gz"}, {"image": "s0636/ct.nii.gz"}, {"image": "s0635/ct.nii.gz"}, {"image": "s0633/ct.nii.gz"}, {"image": "s0632/ct.nii.gz"}, {"image": "s0629/ct.nii.gz"}, {"image": "s0628/ct.nii.gz"}, {"image": "s0627/ct.nii.gz"}, {"image": "s0626/ct.nii.gz"}, {"image": "s0625/ct.nii.gz"}, {"image": "s0624/ct.nii.gz"}, {"image": "s0623/ct.nii.gz"}, {"image": "s0621/ct.nii.gz"}, {"image": "s0620/ct.nii.gz"}, {"image": "s0619/ct.nii.gz"}, {"image": "s0617/ct.nii.gz"}, {"image": "s0616/ct.nii.gz"}, {"image": "s0615/ct.nii.gz"}, {"image": "s0614/ct.nii.gz"}, {"image": "s0613/ct.nii.gz"}, {"image": "s0610/ct.nii.gz"}, {"image": "s0608/ct.nii.gz"}, {"image": "s0607/ct.nii.gz"}, {"image": "s0606/ct.nii.gz"}, {"image": "s0605/ct.nii.gz"}, {"image": "s0604/ct.nii.gz"}, {"image": "s0603/ct.nii.gz"}, {"image": "s0602/ct.nii.gz"}, {"image": "s0601/ct.nii.gz"}, {"image": "s0598/ct.nii.gz"}, {"image": "s0597/ct.nii.gz"}, {"image": "s0595/ct.nii.gz"}, {"image": "s0594/ct.nii.gz"}, {"image": "s0592/ct.nii.gz"}, {"image": "s0591/ct.nii.gz"}, {"image": "s0590/ct.nii.gz"}, {"image": "s0589/ct.nii.gz"}, {"image": "s0588/ct.nii.gz"}, {"image": "s0587/ct.nii.gz"}, {"image": "s0585/ct.nii.gz"}, {"image": "s0584/ct.nii.gz"}, {"image": "s0583/ct.nii.gz"}, {"image": "s0582/ct.nii.gz"}, {"image": 
"s0580/ct.nii.gz"}, {"image": "s0578/ct.nii.gz"}, {"image": "s0577/ct.nii.gz"}, {"image": "s0575/ct.nii.gz"}, {"image": "s0574/ct.nii.gz"}, {"image": "s0573/ct.nii.gz"}, {"image": "s0572/ct.nii.gz"}, {"image": "s0571/ct.nii.gz"}, {"image": "s0567/ct.nii.gz"}, {"image": "s0566/ct.nii.gz"}, {"image": "s0565/ct.nii.gz"}, {"image": "s0564/ct.nii.gz"}, {"image": "s0563/ct.nii.gz"}, {"image": "s0561/ct.nii.gz"}, {"image": "s0559/ct.nii.gz"}, {"image": "s0557/ct.nii.gz"}, {"image": "s0556/ct.nii.gz"}, {"image": "s0555/ct.nii.gz"}, {"image": "s0553/ct.nii.gz"}, {"image": "s0552/ct.nii.gz"}, {"image": "s0551/ct.nii.gz"}, {"image": "s0550/ct.nii.gz"}, {"image": "s0549/ct.nii.gz"}, {"image": "s0548/ct.nii.gz"}, {"image": "s0546/ct.nii.gz"}, {"image": "s0545/ct.nii.gz"}, {"image": "s0544/ct.nii.gz"}, {"image": "s0543/ct.nii.gz"}, {"image": "s0542/ct.nii.gz"}, {"image": "s0541/ct.nii.gz"}, {"image": "s0539/ct.nii.gz"}, {"image": "s0537/ct.nii.gz"}, {"image": "s0536/ct.nii.gz"}, {"image": "s0532/ct.nii.gz"}, {"image": "s0531/ct.nii.gz"}, {"image": "s0530/ct.nii.gz"}, {"image": "s0529/ct.nii.gz"}, {"image": "s0528/ct.nii.gz"}, {"image": "s0527/ct.nii.gz"}, {"image": "s0526/ct.nii.gz"}, {"image": "s0525/ct.nii.gz"}, {"image": "s0523/ct.nii.gz"}, {"image": "s0522/ct.nii.gz"}, {"image": "s0521/ct.nii.gz"}, {"image": "s0520/ct.nii.gz"}, {"image": "s0519/ct.nii.gz"}, {"image": "s0518/ct.nii.gz"}, {"image": "s0517/ct.nii.gz"}, {"image": "s0516/ct.nii.gz"}, {"image": "s0515/ct.nii.gz"}, {"image": "s0514/ct.nii.gz"}, {"image": "s0513/ct.nii.gz"}, {"image": "s0510/ct.nii.gz"}, {"image": "s0509/ct.nii.gz"}, {"image": "s0508/ct.nii.gz"}, {"image": "s0507/ct.nii.gz"}, {"image": "s0506/ct.nii.gz"}, {"image": "s0505/ct.nii.gz"}, {"image": "s0504/ct.nii.gz"}, {"image": "s0503/ct.nii.gz"}, {"image": "s0501/ct.nii.gz"}, {"image": "s0500/ct.nii.gz"}, {"image": "s0498/ct.nii.gz"}, {"image": "s0495/ct.nii.gz"}, {"image": "s0494/ct.nii.gz"}, {"image": "s0491/ct.nii.gz"}, {"image": "s0490/ct.nii.gz"}, 
{"image": "s0488/ct.nii.gz"}, {"image": "s0487/ct.nii.gz"}, {"image": "s0485/ct.nii.gz"}, {"image": "s0484/ct.nii.gz"}, {"image": "s0483/ct.nii.gz"}, {"image": "s0482/ct.nii.gz"}, {"image": "s0481/ct.nii.gz"}, {"image": "s0480/ct.nii.gz"}, {"image": "s0478/ct.nii.gz"}, {"image": "s0477/ct.nii.gz"}, {"image": "s0476/ct.nii.gz"}, {"image": "s0474/ct.nii.gz"}, {"image": "s0473/ct.nii.gz"}, {"image": "s0471/ct.nii.gz"}, {"image": "s0470/ct.nii.gz"}, {"image": "s0469/ct.nii.gz"}, {"image": "s0468/ct.nii.gz"}, {"image": "s0467/ct.nii.gz"}, {"image": "s0466/ct.nii.gz"}, {"image": "s0465/ct.nii.gz"}, {"image": "s0463/ct.nii.gz"}, {"image": "s0462/ct.nii.gz"}, {"image": "s0460/ct.nii.gz"}, {"image": "s0459/ct.nii.gz"}, {"image": "s0458/ct.nii.gz"}, {"image": "s0457/ct.nii.gz"}, {"image": "s0456/ct.nii.gz"}, {"image": "s0455/ct.nii.gz"}, {"image": "s0453/ct.nii.gz"}, {"image": "s0452/ct.nii.gz"}, {"image": "s0450/ct.nii.gz"}, {"image": "s0447/ct.nii.gz"}, {"image": "s0446/ct.nii.gz"}, {"image": "s0444/ct.nii.gz"}, {"image": "s0443/ct.nii.gz"}, {"image": "s0442/ct.nii.gz"}, {"image": "s0441/ct.nii.gz"}, {"image": "s0440/ct.nii.gz"}, {"image": "s0439/ct.nii.gz"}, {"image": "s0437/ct.nii.gz"}, {"image": "s0436/ct.nii.gz"}, {"image": "s0435/ct.nii.gz"}, {"image": "s0433/ct.nii.gz"}, {"image": "s0431/ct.nii.gz"}, {"image": "s0430/ct.nii.gz"}, {"image": "s0426/ct.nii.gz"}, {"image": "s0425/ct.nii.gz"}, {"image": "s0424/ct.nii.gz"}, {"image": "s0423/ct.nii.gz"}, {"image": "s0422/ct.nii.gz"}, {"image": "s0421/ct.nii.gz"}, {"image": "s0418/ct.nii.gz"}, {"image": "s0417/ct.nii.gz"}, {"image": "s0416/ct.nii.gz"}, {"image": "s0413/ct.nii.gz"}, {"image": "s0412/ct.nii.gz"}, {"image": "s0411/ct.nii.gz"}, {"image": "s0410/ct.nii.gz"}, {"image": "s0408/ct.nii.gz"}, {"image": "s0407/ct.nii.gz"}, {"image": "s0405/ct.nii.gz"}, {"image": "s0403/ct.nii.gz"}, {"image": "s0402/ct.nii.gz"}, {"image": "s0401/ct.nii.gz"}, {"image": "s0400/ct.nii.gz"}, {"image": "s0399/ct.nii.gz"}, {"image": 
"s0398/ct.nii.gz"}, {"image": "s0396/ct.nii.gz"}, {"image": "s0395/ct.nii.gz"}, {"image": "s0394/ct.nii.gz"}, {"image": "s0392/ct.nii.gz"}, {"image": "s0391/ct.nii.gz"}, {"image": "s0389/ct.nii.gz"}, {"image": "s0388/ct.nii.gz"}, {"image": "s0386/ct.nii.gz"}, {"image": "s0383/ct.nii.gz"}, {"image": "s0382/ct.nii.gz"}, {"image": "s0381/ct.nii.gz"}, {"image": "s0380/ct.nii.gz"}, {"image": "s0379/ct.nii.gz"}, {"image": "s0378/ct.nii.gz"}, {"image": "s0376/ct.nii.gz"}, {"image": "s0374/ct.nii.gz"}, {"image": "s0373/ct.nii.gz"}, {"image": "s0372/ct.nii.gz"}, {"image": "s0370/ct.nii.gz"}, {"image": "s0368/ct.nii.gz"}, {"image": "s0367/ct.nii.gz"}, {"image": "s0366/ct.nii.gz"}, {"image": "s0365/ct.nii.gz"}, {"image": "s0364/ct.nii.gz"}, {"image": "s0363/ct.nii.gz"}, {"image": "s0362/ct.nii.gz"}, {"image": "s0361/ct.nii.gz"}, {"image": "s0360/ct.nii.gz"}, {"image": "s0359/ct.nii.gz"}, {"image": "s0358/ct.nii.gz"}, {"image": "s0357/ct.nii.gz"}, {"image": "s0356/ct.nii.gz"}, {"image": "s0355/ct.nii.gz"}, {"image": "s0354/ct.nii.gz"}, {"image": "s0353/ct.nii.gz"}, {"image": "s0352/ct.nii.gz"}, {"image": "s0350/ct.nii.gz"}, {"image": "s0349/ct.nii.gz"}, {"image": "s0347/ct.nii.gz"}, {"image": "s0346/ct.nii.gz"}, {"image": "s0345/ct.nii.gz"}, {"image": "s0344/ct.nii.gz"}, {"image": "s0343/ct.nii.gz"}, {"image": "s0341/ct.nii.gz"}, {"image": "s0340/ct.nii.gz"}, {"image": "s0339/ct.nii.gz"}, {"image": "s0336/ct.nii.gz"}, {"image": "s0335/ct.nii.gz"}, {"image": "s0333/ct.nii.gz"}, {"image": "s0332/ct.nii.gz"}, {"image": "s0330/ct.nii.gz"}, {"image": "s0329/ct.nii.gz"}, {"image": "s0328/ct.nii.gz"}, {"image": "s0327/ct.nii.gz"}, {"image": "s0326/ct.nii.gz"}, {"image": "s0325/ct.nii.gz"}, {"image": "s0324/ct.nii.gz"}, {"image": "s0322/ct.nii.gz"}, {"image": "s0321/ct.nii.gz"}, {"image": "s0320/ct.nii.gz"}, {"image": "s0316/ct.nii.gz"}, {"image": "s0314/ct.nii.gz"}, {"image": "s0313/ct.nii.gz"}, {"image": "s0312/ct.nii.gz"}, {"image": "s0311/ct.nii.gz"}, {"image": "s0310/ct.nii.gz"}, 
{"image": "s0308/ct.nii.gz"}, {"image": "s0307/ct.nii.gz"}, {"image": "s0305/ct.nii.gz"}, {"image": "s0304/ct.nii.gz"}, {"image": "s0303/ct.nii.gz"}, {"image": "s0301/ct.nii.gz"}, {"image": "s0300/ct.nii.gz"}, {"image": "s0299/ct.nii.gz"}, {"image": "s0298/ct.nii.gz"}, {"image": "s0296/ct.nii.gz"}, {"image": "s0295/ct.nii.gz"}, {"image": "s0293/ct.nii.gz"}, {"image": "s0292/ct.nii.gz"}, {"image": "s0291/ct.nii.gz"}, {"image": "s0290/ct.nii.gz"}, {"image": "s0289/ct.nii.gz"}, {"image": "s0288/ct.nii.gz"}, {"image": "s0287/ct.nii.gz"}, {"image": "s0286/ct.nii.gz"}, {"image": "s0285/ct.nii.gz"}, {"image": "s0282/ct.nii.gz"}, {"image": "s0281/ct.nii.gz"}, {"image": "s0278/ct.nii.gz"}, {"image": "s0277/ct.nii.gz"}, {"image": "s0271/ct.nii.gz"}, {"image": "s0270/ct.nii.gz"}, {"image": "s0265/ct.nii.gz"}, {"image": "s0264/ct.nii.gz"}, {"image": "s0263/ct.nii.gz"}, {"image": "s0262/ct.nii.gz"}, {"image": "s0259/ct.nii.gz"}, {"image": "s0258/ct.nii.gz"}, {"image": "s0256/ct.nii.gz"}, {"image": "s0255/ct.nii.gz"}, {"image": "s0254/ct.nii.gz"}, {"image": "s0253/ct.nii.gz"}, {"image": "s0252/ct.nii.gz"}, {"image": "s0250/ct.nii.gz"}, {"image": "s0249/ct.nii.gz"}, {"image": "s0248/ct.nii.gz"}, {"image": "s0246/ct.nii.gz"}, {"image": "s0245/ct.nii.gz"}, {"image": "s0244/ct.nii.gz"}, {"image": "s0243/ct.nii.gz"}, {"image": "s0242/ct.nii.gz"}, {"image": "s0241/ct.nii.gz"}, {"image": "s0240/ct.nii.gz"}, {"image": "s0239/ct.nii.gz"}, {"image": "s0238/ct.nii.gz"}, {"image": "s0237/ct.nii.gz"}, {"image": "s0236/ct.nii.gz"}, {"image": "s0235/ct.nii.gz"}, {"image": "s0234/ct.nii.gz"}, {"image": "s0233/ct.nii.gz"}, {"image": "s0232/ct.nii.gz"}, {"image": "s0231/ct.nii.gz"}, {"image": "s0228/ct.nii.gz"}, {"image": "s0227/ct.nii.gz"}, {"image": "s0226/ct.nii.gz"}, {"image": "s0223/ct.nii.gz"}, {"image": "s0221/ct.nii.gz"}, {"image": "s0220/ct.nii.gz"}, {"image": "s0219/ct.nii.gz"}, {"image": "s0218/ct.nii.gz"}, {"image": "s0217/ct.nii.gz"}, {"image": "s0216/ct.nii.gz"}, {"image": 
"s0215/ct.nii.gz"}, {"image": "s0213/ct.nii.gz"}, {"image": "s0212/ct.nii.gz"}, {"image": "s0211/ct.nii.gz"}, {"image": "s0210/ct.nii.gz"}, {"image": "s0209/ct.nii.gz"}, {"image": "s0206/ct.nii.gz"}, {"image": "s0204/ct.nii.gz"}, {"image": "s0201/ct.nii.gz"}, {"image": "s0199/ct.nii.gz"}, {"image": "s0197/ct.nii.gz"}, {"image": "s0196/ct.nii.gz"}, {"image": "s0194/ct.nii.gz"}, {"image": "s0192/ct.nii.gz"}, {"image": "s0191/ct.nii.gz"}, {"image": "s0190/ct.nii.gz"}, {"image": "s0189/ct.nii.gz"}, {"image": "s0187/ct.nii.gz"}, {"image": "s0185/ct.nii.gz"}, {"image": "s0184/ct.nii.gz"}, {"image": "s0183/ct.nii.gz"}, {"image": "s0182/ct.nii.gz"}, {"image": "s0181/ct.nii.gz"}, {"image": "s0179/ct.nii.gz"}, {"image": "s0178/ct.nii.gz"}, {"image": "s0175/ct.nii.gz"}, {"image": "s0174/ct.nii.gz"}, {"image": "s0171/ct.nii.gz"}, {"image": "s0169/ct.nii.gz"}, {"image": "s0168/ct.nii.gz"}, {"image": "s0167/ct.nii.gz"}, {"image": "s0166/ct.nii.gz"}, {"image": "s0165/ct.nii.gz"}, {"image": "s0163/ct.nii.gz"}, {"image": "s0161/ct.nii.gz"}, {"image": "s0160/ct.nii.gz"}, {"image": "s0159/ct.nii.gz"}, {"image": "s0158/ct.nii.gz"}, {"image": "s0157/ct.nii.gz"}, {"image": "s0156/ct.nii.gz"}, {"image": "s0154/ct.nii.gz"}, {"image": "s0153/ct.nii.gz"}, {"image": "s0152/ct.nii.gz"}, {"image": "s0151/ct.nii.gz"}, {"image": "s0149/ct.nii.gz"}, {"image": "s0146/ct.nii.gz"}, {"image": "s0145/ct.nii.gz"}, {"image": "s0143/ct.nii.gz"}, {"image": "s0140/ct.nii.gz"}, {"image": "s0139/ct.nii.gz"}, {"image": "s0138/ct.nii.gz"}, {"image": "s0137/ct.nii.gz"}, {"image": "s0136/ct.nii.gz"}, {"image": "s0135/ct.nii.gz"}, {"image": "s0133/ct.nii.gz"}, {"image": "s0132/ct.nii.gz"}, {"image": "s0130/ct.nii.gz"}, {"image": "s0128/ct.nii.gz"}, {"image": "s0123/ct.nii.gz"}, {"image": "s0120/ct.nii.gz"}, {"image": "s0119/ct.nii.gz"}, {"image": "s0117/ct.nii.gz"}, {"image": "s0115/ct.nii.gz"}, {"image": "s0114/ct.nii.gz"}, {"image": "s0110/ct.nii.gz"}, {"image": "s0109/ct.nii.gz"}, {"image": "s0108/ct.nii.gz"}, 
{"image": "s0107/ct.nii.gz"}, {"image": "s0106/ct.nii.gz"}, {"image": "s0104/ct.nii.gz"}, {"image": "s0103/ct.nii.gz"}, {"image": "s0102/ct.nii.gz"}, {"image": "s0101/ct.nii.gz"}, {"image": "s0099/ct.nii.gz"}, {"image": "s0098/ct.nii.gz"}, {"image": "s0096/ct.nii.gz"}, {"image": "s0095/ct.nii.gz"}, {"image": "s0092/ct.nii.gz"}, {"image": "s0091/ct.nii.gz"}, {"image": "s0090/ct.nii.gz"}, {"image": "s0089/ct.nii.gz"}, {"image": "s0086/ct.nii.gz"}, {"image": "s0085/ct.nii.gz"}, {"image": "s0082/ct.nii.gz"}, {"image": "s0081/ct.nii.gz"}, {"image": "s0080/ct.nii.gz"}, {"image": "s0079/ct.nii.gz"}, {"image": "s0078/ct.nii.gz"}, {"image": "s0077/ct.nii.gz"}, {"image": "s0076/ct.nii.gz"}, {"image": "s0075/ct.nii.gz"}, {"image": "s0074/ct.nii.gz"}, {"image": "s0073/ct.nii.gz"}, {"image": "s0072/ct.nii.gz"}, {"image": "s0071/ct.nii.gz"}, {"image": "s0070/ct.nii.gz"}, {"image": "s0069/ct.nii.gz"}, {"image": "s0068/ct.nii.gz"}, {"image": "s0067/ct.nii.gz"}, {"image": "s0066/ct.nii.gz"}, {"image": "s0063/ct.nii.gz"}, {"image": "s0062/ct.nii.gz"}, {"image": "s0061/ct.nii.gz"}, {"image": "s0058/ct.nii.gz"}, {"image": "s0057/ct.nii.gz"}, {"image": "s0054/ct.nii.gz"}, {"image": "s0053/ct.nii.gz"}, {"image": "s0052/ct.nii.gz"}, {"image": "s0050/ct.nii.gz"}, {"image": "s0049/ct.nii.gz"}, {"image": "s0048/ct.nii.gz"}, {"image": "s0046/ct.nii.gz"}, {"image": "s0044/ct.nii.gz"}, {"image": "s0040/ct.nii.gz"}, {"image": "s0039/ct.nii.gz"}, {"image": "s0038/ct.nii.gz"}, {"image": "s0037/ct.nii.gz"}, {"image": "s0036/ct.nii.gz"}, {"image": "s0035/ct.nii.gz"}, {"image": "s0034/ct.nii.gz"}, {"image": "s0032/ct.nii.gz"}, {"image": "s0031/ct.nii.gz"}, {"image": "s0030/ct.nii.gz"}, {"image": "s0029/ct.nii.gz"}, {"image": "s0025/ct.nii.gz"}, {"image": "s0021/ct.nii.gz"}, {"image": "s0019/ct.nii.gz"}, {"image": "s0016/ct.nii.gz"}, {"image": "s0015/ct.nii.gz"}, {"image": "s0014/ct.nii.gz"}, {"image": "s0013/ct.nii.gz"}, {"image": "s0012/ct.nii.gz"}, {"image": "s0011/ct.nii.gz"}, {"image": 
"s0010/ct.nii.gz"}, {"image": "s0009/ct.nii.gz"}, {"image": "s0006/ct.nii.gz"}, {"image": "s0004/ct.nii.gz"}, {"image": "s0003/ct.nii.gz"}, {"image": "s0002/ct.nii.gz"}, {"image": "s0001/ct.nii.gz"}]}
================================================
FILE: jsons/__init__.py
================================================
================================================
FILE: jsons/btcv.json
================================================
{"training": [{"image": "imagesTr/img0001.nii.gz"}, {"image": "imagesTr/img0002.nii.gz"}, {"image": "imagesTr/img0003.nii.gz"}, {"image": "imagesTr/img0004.nii.gz"}, {"image": "imagesTr/img0005.nii.gz"}, {"image": "imagesTr/img0006.nii.gz"}, {"image": "imagesTr/img0007.nii.gz"}, {"image": "imagesTr/img0008.nii.gz"}, {"image": "imagesTr/img0009.nii.gz"}, {"image": "imagesTr/img0010.nii.gz"}, {"image": "imagesTr/img0021.nii.gz"}, {"image": "imagesTr/img0022.nii.gz"}, {"image": "imagesTr/img0023.nii.gz"}, {"image": "imagesTr/img0024.nii.gz"}, {"image": "imagesTr/img0025.nii.gz"}, {"image": "imagesTr/img0026.nii.gz"}, {"image": "imagesTr/img0027.nii.gz"}, {"image": "imagesTr/img0028.nii.gz"}, {"image": "imagesTr/img0029.nii.gz"}, {"image": "imagesTr/img0030.nii.gz"}, {"image": "imagesTr/img0031.nii.gz"}, {"image": "imagesTr/img0032.nii.gz"}, {"image": "imagesTr/img0033.nii.gz"}, {"image": "imagesTr/img0034.nii.gz"}], "validation": [{"image": "imagesTr/img0035.nii.gz"}, {"image": "imagesTr/img0036.nii.gz"}, {"image": "imagesTr/img0037.nii.gz"}, {"image": "imagesTr/img0038.nii.gz"}, {"image": "imagesTr/img0039.nii.gz"}, {"image": "imagesTr/img0040.nii.gz"}]}
================================================
FILE: jsons/dataset_LUNA16_0.json
================================================
{"training": [{"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.979083010707182900091062408058.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.979083010707182900091062408058.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.154677396354641150280013275227.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.154677396354641150280013275227.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.752756872840730509471096155114.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.752756872840730509471096155114.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.621916089407825046337959219998.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.621916089407825046337959219998.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.124154461048929153767743874565.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124154461048929153767743874565.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.397062004302272014259317520874.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.397062004302272014259317520874.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.805925269324902055566754756843.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.805925269324902055566754756843.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.295298571102631191572192562523.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.295298571102631191572192562523.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.450501966058662668272378865145.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.450501966058662668272378865145.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.534083630500464995109143618896.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.534083630500464995109143618896.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.130438550890816550994739120843.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.130438550890816550994739120843.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.534006575256943390479252771547.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.534006575256943390479252771547.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.231645134739451754302647733304.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.231645134739451754302647733304.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.334517907433161353885866806005.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334517907433161353885866806005.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.311981398931043315779172047718.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.311981398931043315779172047718.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.868211851413924881662621747734.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.868211851413924881662621747734.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.238522526736091851696274044574.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238522526736091851696274044574.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.305858704835252413616501469037.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.305858704835252413616501469037.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.417815314896088956784723476543.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.417815314896088956784723476543.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.332453873575389860371315979768.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.332453873575389860371315979768.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.333145094436144085379032922488.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.333145094436144085379032922488.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.194440094986948071643661798326.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194440094986948071643661798326.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.404364125369979066736354549484.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.404364125369979066736354549484.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.122763913896761494371822656720.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.122763913896761494371822656720.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.317087518531899043292346860596.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.317087518531899043292346860596.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.194465340552956447447896167830.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194465340552956447447896167830.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.832260670372728970918746541371.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.832260670372728970918746541371.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.716498695101447665580610403574.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.716498695101447665580610403574.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.281489753704424911132261151767.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.281489753704424911132261151767.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.395623571499047043765181005112.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.395623571499047043765181005112.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.141069661700670042960678408762.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141069661700670042960678408762.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.295420274214095686326263147663.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.295420274214095686326263147663.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.216882370221919561230873289517.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216882370221919561230873289517.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.323859712968543712594665815359.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323859712968543712594665815359.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.280972147860943609388015648430.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.280972147860943609388015648430.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.293757615532132808762625441831.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.293757615532132808762625441831.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.210837812047373739447725050963.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.210837812047373739447725050963.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.137763212752154081977261297097.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.137763212752154081977261297097.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.430109407146633213496148200410.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.430109407146633213496148200410.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.139258777898746693365877042411.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139258777898746693365877042411.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.278660284797073139172446973682.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.278660284797073139172446973682.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.303421828981831854739626597495.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.303421828981831854739626597495.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.134996872583497382954024478441.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134996872583497382954024478441.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.213140617640021803112060161074.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.213140617640021803112060161074.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.286647622786041008124419915089.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286647622786041008124419915089.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.315214756157389122376518747372.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315214756157389122376518747372.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.193808128386712859512130599234.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193808128386712859512130599234.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.169128136262002764211589185953.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.169128136262002764211589185953.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.616033753016904899083676284739.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.616033753016904899083676284739.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.183184435049555024219115904825.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183184435049555024219115904825.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.503980049263254396021509831276.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.503980049263254396021509831276.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.179162671133894061547290922949.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179162671133894061547290922949.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.336894364358709782463716339027.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336894364358709782463716339027.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.139595277234735528205899724196.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139595277234735528205899724196.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.300271604576987336866436407488.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300271604576987336866436407488.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.275766318636944297772360944907.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275766318636944297772360944907.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.892375496445736188832556446335.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.892375496445736188832556446335.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.162901839201654862079549658100.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162901839201654862079549658100.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.140527383975300992150799777603.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.140527383975300992150799777603.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.325164338773720548739146851679.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.325164338773720548739146851679.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.222087811960706096424718056430.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.222087811960706096424718056430.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.250397690690072950000431855143.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250397690690072950000431855143.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.479402560265137632920333093071.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.479402560265137632920333093071.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.756684168227383088294595834066.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.756684168227383088294595834066.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.168605638657404145360275453085.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168605638657404145360275453085.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.173106154739244262091404659845.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173106154739244262091404659845.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.104562737760173137525888934217.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.104562737760173137525888934217.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.128881800399702510818644205032.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.128881800399702510818644205032.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.243094273518213382155770295147.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.243094273518213382155770295147.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.121824995088859376862458155637.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121824995088859376862458155637.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.114218724025049818743426522343.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114218724025049818743426522343.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.111017101339429664883879536171.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111017101339429664883879536171.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.134370886216012873213579659366.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134370886216012873213579659366.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.168037818448885856452592057286.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168037818448885856452592057286.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.259543921154154401875872845498.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259543921154154401875872845498.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.314789075871001236641548593165.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.314789075871001236641548593165.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.184019785706727365023450012318.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.184019785706727365023450012318.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.272961322147784625028175033640.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272961322147784625028175033640.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.171919524048654494439256263785.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171919524048654494439256263785.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.113697708991260454310623082679.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.113697708991260454310623082679.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.458525794434429386945463560826.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.458525794434429386945463560826.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.674809958213117379592437424616.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.674809958213117379592437424616.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.206539885154775002929031534291.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.206539885154775002929031534291.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.108231420525711026834210228428.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.108231420525711026834210228428.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.309672797925724868457151381131.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309672797925724868457151381131.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.161073793312426102774780216551.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161073793312426102774780216551.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.690929968028676628605553365896.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.690929968028676628605553365896.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.208737629504245244513001631764.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.208737629504245244513001631764.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.216652640878960522552873394709.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216652640878960522552873394709.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.335866409407244673864352309754.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.335866409407244673864352309754.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.282512043257574309474415322775.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.282512043257574309474415322775.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.270390050141765094612147226290.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270390050141765094612147226290.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.146603910507557786636779705509.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.146603910507557786636779705509.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.561458563853929400124470098603.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.561458563853929400124470098603.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.162207236104936931957809623059.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162207236104936931957809623059.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.768276876111112560631432843476.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.768276876111112560631432843476.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.247769845138587733933485039556.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.247769845138587733933485039556.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.179049373636438705059720603192.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179049373636438705059720603192.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.145759169833745025756371695397.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145759169833745025756371695397.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.952265563663939823135367733681.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.952265563663939823135367733681.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.655242448149322898770987310561.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.655242448149322898770987310561.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.186021279664749879526003668137.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.186021279664749879526003668137.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.331211682377519763144559212009.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.331211682377519763144559212009.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.308183340111270052562662456038.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.308183340111270052562662456038.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.663019255629770796363333877035.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.663019255629770796363333877035.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.310395752124284049604069960014.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.310395752124284049604069960014.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.161002239822118346732951898613.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161002239822118346732951898613.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.231002159523969307155990628066.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.231002159523969307155990628066.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.193408384740507320589857096592.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193408384740507320589857096592.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.226456162308124493341905600418.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226456162308124493341905600418.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.326057189095429101398977448288.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.326057189095429101398977448288.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.861997885565255340442123234170.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.861997885565255340442123234170.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.231834776365874788440767645596.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.231834776365874788440767645596.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.100684836163890911914061745866.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100684836163890911914061745866.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.128059192202504367870633619224.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.128059192202504367870633619224.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.888291896309937415860209787179.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.888291896309937415860209787179.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.152684536713461901635595118048.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.152684536713461901635595118048.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.183843376225716802567192412456.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183843376225716802567192412456.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.287966244644280690737019247886.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.287966244644280690737019247886.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.910607280658963002048724648683.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.910607280658963002048724648683.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.163994693532965040247348251579.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163994693532965040247348251579.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.935683764293840351008008793409.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.935683764293840351008008793409.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.197987940182806628828566429132.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.197987940182806628828566429132.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.802595762867498341201607992711.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.802595762867498341201607992711.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.259018373683540453277752706262.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259018373683540453277752706262.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.144943344795414353192059796098.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.144943344795414353192059796098.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.801945620899034889998809817499.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.801945620899034889998809817499.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.200558451375970945040979397866.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200558451375970945040979397866.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.334105754605642100456249422350.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334105754605642100456249422350.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.162718361851587451505896742103.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162718361851587451505896742103.nii.gz"}, {"image": 
"subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.970264865033574190975654369557.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.970264865033574190975654369557.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.306948744223170422945185006551.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306948744223170422945185006551.nii.gz"}, {"image": "subset_1/1.3.6.1.4.1.14519.5.2.1.6279.6001.106719103982792863757268101375.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106719103982792863757268101375.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.443400977949406454649939526179.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.443400977949406454649939526179.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.212346425055214308006918165305.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.212346425055214308006918165305.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.504324996863016748259361352296.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.504324996863016748259361352296.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.232071262560365924176679652948.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.232071262560365924176679652948.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.159996104466052855396410079250.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.159996104466052855396410079250.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.299767339686526858593516834230.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.299767339686526858593516834230.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.283733738239331719775105586296.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.283733738239331719775105586296.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.184412674007117333405073397832.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.184412674007117333405073397832.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.922852847124879997825997808179.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.922852847124879997825997808179.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.939152384493874708850321969356.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.939152384493874708850321969356.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.240969450540588211676803094518.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.240969450540588211676803094518.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.461155505515403114280165935891.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.461155505515403114280165935891.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.126704785377921920210612476953.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126704785377921920210612476953.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.281967919138248195763602360723.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.281967919138248195763602360723.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.306788423710427765311352901943.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306788423710427765311352901943.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.199171741859530285887752432478.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199171741859530285887752432478.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.144883090372691745980459537053.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.144883090372691745980459537053.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.100621383016233746780170740405.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100621383016233746780170740405.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.147250707071097813243473865421.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.147250707071097813243473865421.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.143412474064515942785157561636.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143412474064515942785157561636.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.776429308535398795601496131524.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.776429308535398795601496131524.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.176362912420491262783064585333.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176362912420491262783064585333.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.117383608379722740629083782428.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.117383608379722740629083782428.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.943403138251347598519939390311.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.943403138251347598519939390311.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.199975006921901879512837687266.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199975006921901879512837687266.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.156322145453198768801776721493.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156322145453198768801776721493.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.227796349777753378641347819780.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227796349777753378641347819780.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.226383054119800793308721198594.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226383054119800793308721198594.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.311236942972970815890902714604.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.311236942972970815890902714604.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.270152671889301412052226973069.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270152671889301412052226973069.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.121993590721161347818774929286.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121993590721161347818774929286.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.235364978775280910367690540811.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.235364978775280910367690540811.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.280072876841890439628529365478.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.280072876841890439628529365478.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.225154811831720426832024114593.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.225154811831720426832024114593.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.102133688497886810253331438797.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.102133688497886810253331438797.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.139444426690868429919252698606.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139444426690868429919252698606.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.191301539558980174217770205256.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.191301539558980174217770205256.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.124663713663969377020085460568.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124663713663969377020085460568.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.183924380327950237519832859527.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183924380327950237519832859527.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.159521777966998275980367008904.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.159521777966998275980367008904.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.885292267869246639232975687131.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.885292267869246639232975687131.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.153536305742006952753134773630.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153536305742006952753134773630.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.259227883564429312164962953756.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259227883564429312164962953756.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.267519732763035023633235877753.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.267519732763035023633235877753.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.137375498893536422914241295628.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.137375498893536422914241295628.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.466284753932369813717081722101.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.466284753932369813717081722101.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.557875302364105947813979213632.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.557875302364105947813979213632.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.265133389948279331857097127422.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265133389948279331857097127422.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.743969234977916254223533321294.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.743969234977916254223533321294.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.323753921818102744511069914832.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323753921818102744511069914832.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.192256506776434538421891524301.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.192256506776434538421891524301.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.302557165094691896097534021075.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.302557165094691896097534021075.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.213022585153512920098588556742.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.213022585153512920098588556742.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.253283426904813468115158375647.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.253283426904813468115158375647.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.283569726884265181140892667131.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.283569726884265181140892667131.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.470912100568074901744259213968.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.470912100568074901744259213968.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.223098610241551815995595311693.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.223098610241551815995595311693.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.199220738144407033276946096708.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199220738144407033276946096708.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.296863826932699509516219450076.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.296863826932699509516219450076.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.163901773171373940247829492387.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163901773171373940247829492387.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.669518152156802508672627785405.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.669518152156802508672627785405.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.221945191226273284587353530424.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.221945191226273284587353530424.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.113586291551175790743673929831.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.113586291551175790743673929831.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.267957701183569638795986183786.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.267957701183569638795986183786.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.220205300714852483483213840572.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.220205300714852483483213840572.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.116492508532884962903000261147.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.116492508532884962903000261147.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.133378195429627807109985347209.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.133378195429627807109985347209.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.325580698241281352835338693869.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.325580698241281352835338693869.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.216526102138308489357443843021.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216526102138308489357443843021.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.217589936421986638139451480826.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217589936421986638139451480826.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.252634638822000832774167856951.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252634638822000832774167856951.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.217697417596902141600884006982.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217697417596902141600884006982.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.253322967203074795232627653819.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.253322967203074795232627653819.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.803808126682275425758092691689.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.803808126682275425758092691689.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.964952370561266624992539111877.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.964952370561266624992539111877.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.187108608022306504546286626125.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187108608022306504546286626125.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.624425075947752229712087113746.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.624425075947752229712087113746.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.227885601428639043345478571594.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227885601428639043345478571594.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.190298296009658115773239776160.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.190298296009658115773239776160.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.172845185165807139298420209778.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.172845185165807139298420209778.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.142485715518010940961688015191.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.142485715518010940961688015191.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.707218743153927597786179232739.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.707218743153927597786179232739.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.156579001330474859527530187095.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156579001330474859527530187095.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.217955041973656886482758642958.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217955041973656886482758642958.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.692598144815688523679745963696.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.692598144815688523679745963696.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.230078008964732806419498631442.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230078008964732806419498631442.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.265453131727473342790950829556.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265453131727473342790950829556.nii.gz"}, {"image": 
"subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.272259794130271010519952623746.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272259794130271010519952623746.nii.gz"}, {"image": "subset_2/1.3.6.1.4.1.14519.5.2.1.6279.6001.187966156856911682643615997798.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187966156856911682643615997798.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.194488534645348916700259325236.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194488534645348916700259325236.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.237428977311365557972720635401.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.237428977311365557972720635401.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.172573195301625265149778785969.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.172573195301625265149778785969.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.100953483028192176989979435275.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100953483028192176989979435275.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.268838889380981659524993261082.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268838889380981659524993261082.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.203741923654363010377298352671.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.203741923654363010377298352671.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.404768898286087278137462774930.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.404768898286087278137462774930.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.278010349511857248000260557753.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.278010349511857248000260557753.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.191617711875409989053242965150.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.191617711875409989053242965150.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.191266041369462391833537519639.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.191266041369462391833537519639.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.323541312620128092852212458228.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323541312620128092852212458228.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.334022941831199910030220864961.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334022941831199910030220864961.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.276556509002726404418399209377.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.276556509002726404418399209377.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.487745546557477250336016826588.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.487745546557477250336016826588.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.199069398344356765037879821616.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199069398344356765037879821616.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.339142594937666268384335506819.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339142594937666268384335506819.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.319009811633846643966578282371.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.319009811633846643966578282371.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.268589491017129166376960414534.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268589491017129166376960414534.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.225227615446398900698431118292.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.225227615446398900698431118292.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.135657246677982059395844827629.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.135657246677982059395844827629.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.205852555362702089950453265567.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205852555362702089950453265567.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.125356649712550043958727288500.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.125356649712550043958727288500.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.244204120220889433826451158706.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244204120220889433826451158706.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.148447286464082095534651426689.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.148447286464082095534651426689.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.313283554967554803238484128406.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313283554967554803238484128406.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.254254303842550572473665729969.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254254303842550572473665729969.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.324567010179873305471925391582.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.324567010179873305471925391582.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.614147706162329660656328811671.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.614147706162329660656328811671.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.215086589927307766627151367533.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215086589927307766627151367533.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.202187810895588720702176009630.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202187810895588720702176009630.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.177785764461425908755977367558.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177785764461425908755977367558.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.244447966386688625240438849169.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244447966386688625240438849169.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.398955972049286139436103068984.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.398955972049286139436103068984.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.324290109423920971676288828329.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.324290109423920971676288828329.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.822128649427327893802314908658.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.822128649427327893802314908658.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.199670099218798685977406484591.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199670099218798685977406484591.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.204303454658845815034433453512.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204303454658845815034433453512.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.272190966764020277652079081128.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272190966764020277652079081128.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.961063442349005937536597225349.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.961063442349005937536597225349.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.277662902666135640561346462196.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.277662902666135640561346462196.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.314519596680450457855054746285.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.314519596680450457855054746285.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.202476538079060560282495099956.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202476538079060560282495099956.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.975426625618184773401026809852.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.975426625618184773401026809852.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.252697338970999211181671881792.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252697338970999211181671881792.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.603166427542096384265514998412.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.603166427542096384265514998412.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.106164978370116976238911317774.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106164978370116976238911317774.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.229664630348267553620068691756.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229664630348267553620068691756.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.292057261351416339496913597985.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292057261351416339496913597985.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.219349715895470349269596532320.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219349715895470349269596532320.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.160216916075817913953530562493.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.160216916075817913953530562493.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.269075535958871753309238331179.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.269075535958871753309238331179.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.167919147233131417984739058859.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167919147233131417984739058859.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.210426531621179400035178209430.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.210426531621179400035178209430.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.154703816225841204080664115280.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.154703816225841204080664115280.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.208511362832825683639135205368.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.208511362832825683639135205368.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.969607480572818589276327766720.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.969607480572818589276327766720.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.145474881373882284343459153872.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145474881373882284343459153872.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.275007193025729362844652516689.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275007193025729362844652516689.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.179730018513720561213088132029.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179730018513720561213088132029.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.244681063194071446501270815660.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244681063194071446501270815660.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.367204840301639918160517361062.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.367204840301639918160517361062.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.300246184547502297539521283806.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300246184547502297539521283806.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.336225579776978874775723463327.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336225579776978874775723463327.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.710845873679853791427022019413.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.710845873679853791427022019413.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.171667800241622018839592854574.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171667800241622018839592854574.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.970428941353693253759289796610.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.970428941353693253759289796610.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.202464973819273687476049035824.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202464973819273687476049035824.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.293593766328917170359373773080.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.293593766328917170359373773080.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.264090899378396711987322794314.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.264090899378396711987322794314.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.965620538050807352935663552285.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.965620538050807352935663552285.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.162845309248822193437735868939.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162845309248822193437735868939.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.100620385482151095585000946543.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100620385482151095585000946543.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.127965161564033605177803085629.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.127965161564033605177803085629.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.619372068417051974713149104919.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.619372068417051974713149104919.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.625270601160880745954773142570.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.625270601160880745954773142570.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.274052674198758621258447180130.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.274052674198758621258447180130.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.321465552859463184018938648244.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.321465552859463184018938648244.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.126631670596873065041988320084.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126631670596873065041988320084.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.319066480138812986026181758474.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.319066480138812986026181758474.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.197063290812663596858124411210.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.197063290812663596858124411210.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.121391737347333465796214915391.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121391737347333465796214915391.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.177985905159808659201278495182.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177985905159808659201278495182.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.149463915556499304732434215056.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.149463915556499304732434215056.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.192419869605596446455526220766.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.192419869605596446455526220766.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.177685820605315926524514718990.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177685820605315926524514718990.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.481278873893653517789960724156.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.481278873893653517789960724156.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.842317928015463083368074520378.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.842317928015463083368074520378.nii.gz"}, {"image": "subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.123697637451437522065941162930.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.123697637451437522065941162930.nii.gz"}, {"image": 
"subset_3/1.3.6.1.4.1.14519.5.2.1.6279.6001.850739282072340578344345230132.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.850739282072340578344345230132.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.265775376735520890308424143898.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265775376735520890308424143898.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.161855583909753609742728521805.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161855583909753609742728521805.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.177252583002664900748714851615.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177252583002664900748714851615.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.100530488926682752765845212286.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100530488926682752765845212286.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.333319057944372470283038483725.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.333319057944372470283038483725.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.185226274332527104841463955058.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.185226274332527104841463955058.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.307835307280028057486413359377.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.307835307280028057486413359377.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.141430002307216644912805017227.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141430002307216644912805017227.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.141511313712034597336182402384.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141511313712034597336182402384.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.339546614783708685476232944897.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339546614783708685476232944897.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.259123825760999546551970425757.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259123825760999546551970425757.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.148229375703208214308676934766.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.148229375703208214308676934766.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.312704771348460502013249647868.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.312704771348460502013249647868.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.885168397833922082085837240429.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.885168397833922082085837240429.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.218476624578721885561483687176.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.218476624578721885561483687176.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.304676828064484590312919543151.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.304676828064484590312919543151.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.300146276266881736689307479986.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300146276266881736689307479986.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.146987333806092287055399155268.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.146987333806092287055399155268.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.780558315515979171413904604168.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.780558315515979171413904604168.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.164988920331211858091402361989.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.164988920331211858091402361989.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.216252660192313507027754194207.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.216252660192313507027754194207.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.897684031374557757145405000951.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.897684031374557757145405000951.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.316393351033132458296975008261.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.316393351033132458296975008261.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.779493719385047675154892222907.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.779493719385047675154892222907.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.232011770495640253949434620907.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.232011770495640253949434620907.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.463214953282361219537913355115.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.463214953282361219537913355115.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.103115201714075993579787468219.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.103115201714075993579787468219.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.390009458146468860187238398197.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.390009458146468860187238398197.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.248357157975955379661896491341.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.248357157975955379661896491341.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.631047517458234322522264161877.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.631047517458234322522264161877.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.265960756233787099041040311282.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265960756233787099041040311282.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.811825890493256320617655474043.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.811825890493256320617655474043.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.178680586845223339579041794709.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.178680586845223339579041794709.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.428038562098395445838061018440.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.428038562098395445838061018440.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.797637294244261543517154417124.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.797637294244261543517154417124.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.670107649586205629860363487713.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.670107649586205629860363487713.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.244590453955380448651329424024.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244590453955380448651329424024.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.230416590143922549745658357505.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230416590143922549745658357505.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.385151742584074711135621089321.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.385151742584074711135621089321.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.211051626197585058967163339846.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.211051626197585058967163339846.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.156821379677057223126714881626.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156821379677057223126714881626.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.200837896655745926888305239398.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200837896655745926888305239398.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.584871944187559733312703328980.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.584871944187559733312703328980.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.390513733720659266816639651938.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.390513733720659266816639651938.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.741709061958490690246385302477.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.741709061958490690246385302477.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.330425234131526435132846006585.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330425234131526435132846006585.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.303865116731361029078599241306.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.303865116731361029078599241306.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.214800939017429618305208626314.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.214800939017429618305208626314.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.209269973797560820442292189762.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.209269973797560820442292189762.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.320967206808467952819309001585.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.320967206808467952819309001585.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.114195693932194925962391697338.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114195693932194925962391697338.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.119806527488108718706404165837.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119806527488108718706404165837.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.438308540025607517017949816111.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.438308540025607517017949816111.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.276710697414087561012670296643.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.276710697414087561012670296643.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.826829446346820089862659555750.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.826829446346820089862659555750.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.173101104804533997398137418032.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173101104804533997398137418032.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.211071908915618528829547301883.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.211071908915618528829547301883.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.799582546798528864710752164515.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.799582546798528864710752164515.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.268992195564407418480563388746.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268992195564407418480563388746.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.203179378754043776171267611064.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.203179378754043776171267611064.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.401389720232123950202941034290.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.401389720232123950202941034290.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.189483585244687808087477024767.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.189483585244687808087477024767.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.107351566259572521472765997306.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.107351566259572521472765997306.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.270215889102603268207599305185.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270215889102603268207599305185.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.242761658169703141430370511586.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.242761658169703141430370511586.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.163931625580639955914619627409.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163931625580639955914619627409.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.287560874054243719452635194040.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.287560874054243719452635194040.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.252358625003143649770119512644.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252358625003143649770119512644.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.125067060506283419853742462394.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.125067060506283419853742462394.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.205993750485568250373835565680.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205993750485568250373835565680.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.419601611032172899567156073142.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.419601611032172899567156073142.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.115386642382564804180764325545.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.115386642382564804180764325545.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.104780906131535625872840889059.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.104780906131535625872840889059.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.202643836890896697853521610450.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202643836890896697853521610450.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.329326052298830421573852261436.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.329326052298830421573852261436.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.228511122591230092662900221600.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.228511122591230092662900221600.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.320111824803959660037459294083.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.320111824803959660037459294083.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.204802250386343794613980417281.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204802250386343794613980417281.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.232058316950007760548968840196.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.232058316950007760548968840196.nii.gz"}, {"image": 
"subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.198698492013538481395497694975.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.198698492013538481395497694975.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.122914038048856168343065566972.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.122914038048856168343065566972.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.449254134266555649028108149727.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.449254134266555649028108149727.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.337845202462615014431060697507.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.337845202462615014431060697507.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.304700823314998198591652152637.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.304700823314998198591652152637.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.261678072503577216586082745513.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.261678072503577216586082745513.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.910435939545691201820711078950.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.910435939545691201820711078950.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.211956804948320236390242845468.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.211956804948320236390242845468.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.145283812746259413053188838096.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145283812746259413053188838096.nii.gz"}, {"image": "subset_4/1.3.6.1.4.1.14519.5.2.1.6279.6001.142154819868944114554521645782.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.142154819868944114554521645782.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.338114620394879648539943280992.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338114620394879648539943280992.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.245349763807614756148761326488.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245349763807614756148761326488.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955814083231537823157605135.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955814083231537823157605135.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.308655308958459380153492314021.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.308655308958459380153492314021.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.188484197846284733942365679565.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188484197846284733942365679565.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.174935793360491516757154875981.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174935793360491516757154875981.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.339882192295517122002429068974.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339882192295517122002429068974.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.285926554490515269336267972830.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.285926554490515269336267972830.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.205523326998654833765855998037.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205523326998654833765855998037.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.246589849815292078281051154201.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246589849815292078281051154201.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.234400932423244218697302970157.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.234400932423244218697302970157.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.133132722052053001903031735878.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.133132722052053001903031735878.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.101228986346984399347858840086.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.101228986346984399347858840086.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.160124400349792614505500125883.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.160124400349792614505500125883.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.258220324170977900491673635112.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.258220324170977900491673635112.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.504845428620607044098514803031.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.504845428620607044098514803031.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.132817748896065918417924920957.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.132817748896065918417924920957.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.201890795870532056891161597218.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.201890795870532056891161597218.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.160586340600816116143631200450.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.160586340600816116143631200450.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.750792629100457382099842515038.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.750792629100457382099842515038.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.219428004988664846407984058588.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219428004988664846407984058588.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.138894439026794145866157853158.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138894439026794145866157853158.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.244442540088515471945035689377.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.244442540088515471945035689377.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.229171189693734694696158152904.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229171189693734694696158152904.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.627998298349675613581885874395.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.627998298349675613581885874395.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.439153572396640163898529626096.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.439153572396640163898529626096.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.255999614855292116767517149228.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.255999614855292116767517149228.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.275986221854423197884953496664.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275986221854423197884953496664.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.273525289046256012743471155680.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.273525289046256012743471155680.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.328695385904874796172316226975.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.328695385904874796172316226975.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.334184846571549530235084187602.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334184846571549530235084187602.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.176638348958425792989125209419.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176638348958425792989125209419.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.143782059748737055784173697516.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143782059748737055784173697516.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.110678335949765929063942738609.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.110678335949765929063942738609.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.100332161840553388986847034053.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100332161840553388986847034053.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.106419850406056634877579573537.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106419850406056634877579573537.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.178391668569567816549737454720.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.178391668569567816549737454720.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.112740418331256326754121315800.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.112740418331256326754121315800.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.980362852713685276785310240144.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.980362852713685276785310240144.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.323408652979949774528873200770.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323408652979949774528873200770.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.323899724653546164058849558431.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323899724653546164058849558431.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.138904664700896606480369521124.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138904664700896606480369521124.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.100398138793540579077826395208.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100398138793540579077826395208.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.219281726101239572270900838145.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219281726101239572270900838145.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.120196332569034738680965284519.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.120196332569034738680965284519.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.245391706475696258069508046497.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245391706475696258069508046497.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.866845763956586959109892274084.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.866845763956586959109892274084.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.200841000324240313648595016964.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200841000324240313648595016964.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.290135156874098366424871975734.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.290135156874098366424871975734.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.129567032250534530765928856531.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129567032250534530765928856531.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.111780708132595903430640048766.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111780708132595903430640048766.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.338875090785618956575597613546.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338875090785618956575597613546.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.188265424231150847356515802868.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188265424231150847356515802868.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.553241901808946577644850294647.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.553241901808946577644850294647.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.174907798609768549012640380786.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174907798609768549012640380786.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.246225645401227472829175288633.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246225645401227472829175288633.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.394470743585708729682444806008.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.394470743585708729682444806008.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.299476369290630280560355838785.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.299476369290630280560355838785.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.238855414831158993232534884296.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238855414831158993232534884296.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.745109871503276594185453478952.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.745109871503276594185453478952.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.725023183844147505748475581290.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.725023183844147505748475581290.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.910757789941076242457816491305.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.910757789941076242457816491305.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.262736997975960398949912434623.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.262736997975960398949912434623.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.257840703452266097926250569223.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.257840703452266097926250569223.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.693480911433291675609148051914.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.693480911433291675609148051914.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.296738183013079390785739615169.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.296738183013079390785739615169.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.154837327827713479309898027966.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.154837327827713479309898027966.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.596908385953413160131451426904.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.596908385953413160131451426904.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.224465398054769500989828256685.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.224465398054769500989828256685.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.143410010885830403003179808334.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143410010885830403003179808334.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.275755514659958628040305922764.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275755514659958628040305922764.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.162351539386551708034407968929.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.162351539386551708034407968929.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.986011151772797848993829243183.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.986011151772797848993829243183.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.118140393257625250121502185026.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.118140393257625250121502185026.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.134638281277099121660656324702.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134638281277099121660656324702.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.842980983137518332429408284002.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.842980983137518332429408284002.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.291539125579672469833850180824.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.291539125579672469833850180824.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.183056151780567460322586876100.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183056151780567460322586876100.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.190144948425835566841437565646.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.190144948425835566841437565646.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.194246472548954252250399902051.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194246472548954252250399902051.nii.gz"}, {"image": 
"subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.140239815496047437552471323962.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.140239815496047437552471323962.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.338104567770715523699587505022.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338104567770715523699587505022.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.413896555982844732694353377538.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.413896555982844732694353377538.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.301582691063019848479942618641.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.301582691063019848479942618641.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.205615524269596458818376243313.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.205615524269596458818376243313.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.238042459915048190592571019348.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238042459915048190592571019348.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.152706273988004688708784163325.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.152706273988004688708784163325.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.397202838387416555106806022938.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.397202838387416555106806022938.nii.gz"}, {"image": "subset_5/1.3.6.1.4.1.14519.5.2.1.6279.6001.111258527162678142285870245028.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111258527162678142285870245028.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.297251044869095073091780740645.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297251044869095073091780740645.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.306558074682524259000586270818.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306558074682524259000586270818.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.249450003033735700817635168066.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249450003033735700817635168066.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.803987517543436570820681016103.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.803987517543436570820681016103.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.266009527139315622265711325223.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.266009527139315622265711325223.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.167237290696350215427953159586.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167237290696350215427953159586.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.240630002689062442926543993263.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.240630002689062442926543993263.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955999522338651429118207446.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309955999522338651429118207446.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.168737928729363683423228050295.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168737928729363683423228050295.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.161067514225109999586362698069.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161067514225109999586362698069.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.321935195060268166151738328001.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.321935195060268166151738328001.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.275849601663847251574860892603.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.275849601663847251574860892603.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.475325201787910087416720919680.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.475325201787910087416720919680.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.338447145504282422142824032832.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.338447145504282422142824032832.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.254138388912084634057282064266.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254138388912084634057282064266.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.119304665257760307862874140576.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119304665257760307862874140576.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.282779922503707013097174625409.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.282779922503707013097174625409.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.123654356399290048011621921476.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.123654356399290048011621921476.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.235217371152464582553341729176.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.235217371152464582553341729176.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.226564372605239604660221582288.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226564372605239604660221582288.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.877026508860018521147620598474.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.877026508860018521147620598474.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.658611160253017715059194304729.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.658611160253017715059194304729.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.251215764736737018371915284679.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.251215764736737018371915284679.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.171682845383273105440297561095.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171682845383273105440297561095.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.307921770358136677021532761235.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.307921770358136677021532761235.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.173556680294801532247454313511.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173556680294801532247454313511.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.513023675145166449943177283490.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.513023675145166449943177283490.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.245248446973732759194067808002.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245248446973732759194067808002.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.725236073737175770730904408416.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.725236073737175770730904408416.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.130765375502800983459674173881.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.130765375502800983459674173881.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.955688628308192728558382581802.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.955688628308192728558382581802.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.465032801496479029639448332481.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.465032801496479029639448332481.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.669435869708883155232318480131.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.669435869708883155232318480131.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.149893110752986700464921264055.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.149893110752986700464921264055.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.316911475886263032009840828684.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.316911475886263032009840828684.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.176030616406569931557298712518.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176030616406569931557298712518.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.308153138776443962077214577161.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.308153138776443962077214577161.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.463588161905537526756964393219.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.463588161905537526756964393219.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.170921541362033046216100409521.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170921541362033046216100409521.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.241717018262666382493757419144.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.241717018262666382493757419144.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.329404588567903628160652715124.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.329404588567903628160652715124.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.170052181746004939527661217512.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170052181746004939527661217512.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.252814707117018427472206147014.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252814707117018427472206147014.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.948414623428298219623354433437.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.948414623428298219623354433437.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.404457313935200882843898832756.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.404457313935200882843898832756.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.129007566048223160327836686225.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129007566048223160327836686225.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.315770913282450940389971401304.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315770913282450940389971401304.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.652347820272212119124022644822.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.652347820272212119124022644822.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.221017801605543296514746423389.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.221017801605543296514746423389.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.119515474430718803379832249911.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119515474430718803379832249911.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.182798854785392200340436516930.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.182798854785392200340436516930.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.618434772073433276874225174904.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.618434772073433276874225174904.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.330544495001617450666819906758.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330544495001617450666819906758.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.164790817284381538042494285101.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.164790817284381538042494285101.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.120842785645314664964010792308.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.120842785645314664964010792308.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.414288023902112119945238126594.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.414288023902112119945238126594.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.292194861362266467652267941663.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292194861362266467652267941663.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.147325126373007278009743173696.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.147325126373007278009743173696.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.329624439086643515259182406526.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.329624439086643515259182406526.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.313756547848086902190878548835.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313756547848086902190878548835.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.270951128717816232360812849541.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270951128717816232360812849541.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.277452631455527999380186898011.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.277452631455527999380186898011.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.247816269490470394602288565775.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.247816269490470394602288565775.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.168985655485163461062675655739.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168985655485163461062675655739.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.233652865358649579816568545171.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.233652865358649579816568545171.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.150684298696437181894923266019.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.150684298696437181894923266019.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.117040183261056772902616195387.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.117040183261056772902616195387.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.190937805243443708408459490152.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.190937805243443708408459490152.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.569096986145782511000054443951.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.569096986145782511000054443951.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.316900421002460665752357657094.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.316900421002460665752357657094.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.200725988589959521302320481687.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200725988589959521302320481687.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.286217539434358186648717203667.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286217539434358186648717203667.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.153646219551578201092527860224.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153646219551578201092527860224.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.315187221221054114974341475212.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315187221221054114974341475212.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.156016499715048493339281864474.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.156016499715048493339281864474.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.151764021165118974848436095034.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.151764021165118974848436095034.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.106630482085576298661469304872.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106630482085576298661469304872.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.561423049201987049884663740668.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.561423049201987049884663740668.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.107109359065300889765026303943.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.107109359065300889765026303943.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.341557859428950960906150406596.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.341557859428950960906150406596.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.254473943359963613733707320244.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254473943359963613733707320244.nii.gz"}, {"image": 
"subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.119209873306155771318545953948.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.119209873306155771318545953948.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.233433352108264931671753343044.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.233433352108264931671753343044.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.174168737938619557573021395302.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174168737938619557573021395302.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.136830368929967292376608088362.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.136830368929967292376608088362.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.255409701134762680010928250229.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.255409701134762680010928250229.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.290410217650314119074833254861.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.290410217650314119074833254861.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.416701701108520592702405866796.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.416701701108520592702405866796.nii.gz"}, {"image": "subset_6/1.3.6.1.4.1.14519.5.2.1.6279.6001.177888806135892723698313903329.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177888806135892723698313903329.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.116097642684124305074876564522.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.116097642684124305074876564522.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.900182736599353600185270496549.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.900182736599353600185270496549.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.908250781706513856628130123235.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.908250781706513856628130123235.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.270788655216695628640355888562.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.270788655216695628640355888562.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.223650122819238796121876338881.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.223650122819238796121876338881.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.122621219961396951727742490470.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.122621219961396951727742490470.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.143813757344903170810482790787.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143813757344903170810482790787.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.248425363469507808613979846863.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.248425363469507808613979846863.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.328789598898469177563438457842.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.328789598898469177563438457842.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.105495028985881418176186711228.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.105495028985881418176186711228.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.550599855064600241623943717588.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.550599855064600241623943717588.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.994459772950022352718462251777.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.994459772950022352718462251777.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.187803155574314810830688534991.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187803155574314810830688534991.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.238019241099704094018548301753.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.238019241099704094018548301753.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.111496024928645603833332252962.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111496024928645603833332252962.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.116703382344406837243058680403.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.116703382344406837243058680403.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.227707494413800460340110762069.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227707494413800460340110762069.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.219618492426142913407827034169.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219618492426142913407827034169.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.447468612991222399440694673357.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.447468612991222399440694673357.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.163217526257871051722166468085.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.163217526257871051722166468085.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.230675342744370103160629638194.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230675342744370103160629638194.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.297433269262659217151107535012.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297433269262659217151107535012.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.139889514693390832525232698200.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139889514693390832525232698200.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.249404938669582150398726875826.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249404938669582150398726875826.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.141149610914910880857802344415.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141149610914910880857802344415.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.106379658920626694402549886949.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.106379658920626694402549886949.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.125124219978170516876304987559.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.125124219978170516876304987559.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.113679818447732724990336702075.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.113679818447732724990336702075.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.295462530340364058116953738925.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.295462530340364058116953738925.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.130036599816889919308975074972.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.130036599816889919308975074972.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.271307051432838466826189754230.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.271307051432838466826189754230.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.276351267409869539593937734609.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.276351267409869539593937734609.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.199282854229880908602362094937.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199282854229880908602362094937.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.307946352302138765071461362398.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.307946352302138765071461362398.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.242624386080831911167122628616.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.242624386080831911167122628616.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.613212850444255764524630781782.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.613212850444255764524630781782.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.233001470265230594739708503198.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.233001470265230594739708503198.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.944888107209008719031293531091.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.944888107209008719031293531091.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.114249388265341701207347458535.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114249388265341701207347458535.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.483655032093002252444764787700.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.483655032093002252444764787700.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.897279226481700053115245043064.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.897279226481700053115245043064.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.362762275895885013176610377950.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.362762275895885013176610377950.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.339039410276356623209709113755.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339039410276356623209709113755.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.266581250778073944645044950856.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.266581250778073944645044950856.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.264251211689085893915477907261.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.264251211689085893915477907261.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.957384617596077920906744920611.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.957384617596077920906744920611.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.214252223927572015414741039150.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.214252223927572015414741039150.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.664989286137882319237192185951.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.664989286137882319237192185951.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.179209990684978588019929720099.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179209990684978588019929720099.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.332829333783605240302521201463.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.332829333783605240302521201463.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.686193079844756926365065559979.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.686193079844756926365065559979.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.323535944958374186208096541480.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323535944958374186208096541480.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.171177995014336749670107905732.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.171177995014336749670107905732.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.336198008634390022174744544656.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336198008634390022174744544656.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.294120933998772507043263238704.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.294120933998772507043263238704.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.252709517998555732486024866345.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.252709517998555732486024866345.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.215640837032688688030770057224.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215640837032688688030770057224.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.837810280808122125183730411210.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.837810280808122125183730411210.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.170825539570536865106681134236.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170825539570536865106681134236.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.261700367741314729940340271960.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.261700367741314729940340271960.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.749871569713868632259874663577.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.749871569713868632259874663577.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.315918264676377418120578391325.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.315918264676377418120578391325.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.167500254299688235071950909530.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167500254299688235071950909530.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.280125803152924778388346920341.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.280125803152924778388346920341.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.256542095129414948017808425649.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.256542095129414948017808425649.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.272348349298439120568330857680.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272348349298439120568330857680.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.129982010889624423230394257528.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129982010889624423230394257528.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.314836406260772370397541392345.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.314836406260772370397541392345.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.195913706607582347421429908613.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.195913706607582347421429908613.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.228934821089041845791238006047.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.228934821089041845791238006047.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.279300249795483097365868125932.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.279300249795483097365868125932.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.664409965623578819357819577077.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.664409965623578819357819577077.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.185154482385982570363528682299.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.185154482385982570363528682299.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.159665703190517688573100822213.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.159665703190517688573100822213.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.248360766706804179966476685510.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.248360766706804179966476685510.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.771831598853841017505646275338.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.771831598853841017505646275338.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.140253591510022414496468423138.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.140253591510022414496468423138.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.592821488053137951302246128864.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.592821488053137951302246128864.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.151669338315069779994664893123.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.151669338315069779994664893123.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.246758220302211646532176593724.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246758220302211646532176593724.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.131150737314367975651717513386.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.131150737314367975651717513386.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.245546033414728092794968890929.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.245546033414728092794968890929.nii.gz"}, {"image": 
"subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.174692377730646477496286081479.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174692377730646477496286081479.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.279953669991076107785464313394.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.279953669991076107785464313394.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.249032660919473722154870746474.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249032660919473722154870746474.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.306112617218006614029386065035.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306112617218006614029386065035.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.226889213794065160713547677129.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226889213794065160713547677129.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.303066851236267189733420290986.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.303066851236267189733420290986.nii.gz"}, {"image": "subset_7/1.3.6.1.4.1.14519.5.2.1.6279.6001.323426705628838942177546503237.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323426705628838942177546503237.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.339484970190920330170416228517.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.339484970190920330170416228517.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.776800177074349870648765614630.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.776800177074349870648765614630.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.257515388956260258681136624817.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.257515388956260258681136624817.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.246178337114401749164850220976.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.246178337114401749164850220976.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.724562063158320418413995627171.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.724562063158320418413995627171.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.219254430927834326484477690403.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219254430927834326484477690403.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.225515255547637437801620523312.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.225515255547637437801620523312.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.226152078193253087875725735761.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.226152078193253087875725735761.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.175318131822744218104175746898.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.175318131822744218104175746898.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.283878926524838648426928238498.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.283878926524838648426928238498.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.292049618819567427252971059233.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292049618819567427252971059233.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.206028343897359374907954580114.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.206028343897359374907954580114.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.397522780537301776672854630421.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.397522780537301776672854630421.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.882070241245008756731854510592.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.882070241245008756731854510592.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.300136985030081433029390459071.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300136985030081433029390459071.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.286422846896797433168187085942.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286422846896797433168187085942.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.272123398257168239653655006815.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272123398257168239653655006815.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.675543413149938600000570588203.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.675543413149938600000570588203.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.296066944953051278419805374238.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.296066944953051278419805374238.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.204287915902811325371247860532.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204287915902811325371247860532.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.792381786708289670758399079830.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.792381786708289670758399079830.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.206097113343059612247503064658.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.206097113343059612247503064658.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.254957696184671649675053562027.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254957696184671649675053562027.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.301462380687644451483231621986.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.301462380687644451483231621986.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.204566802718283633558802774757.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.204566802718283633558802774757.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.108193664222196923321844991231.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.108193664222196923321844991231.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.486999111981013268988489262668.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.486999111981013268988489262668.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.249314567767437206995861966896.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249314567767437206995861966896.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.487268565754493433372433148666.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.487268565754493433372433148666.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.236698827306171960683086245994.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.236698827306171960683086245994.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.153181766344026020914478182395.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153181766344026020914478182395.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.203425588524695836343069893813.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.203425588524695836343069893813.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.194766721609772924944646251928.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194766721609772924944646251928.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.198016798894102791158686961192.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.198016798894102791158686961192.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.213854687290736562463866711534.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.213854687290736562463866711534.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.305887072264491016857673607285.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.305887072264491016857673607285.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.671278273674156798801285503514.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.671278273674156798801285503514.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.168833925301530155818375859047.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.168833925301530155818375859047.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.306520140119968755187868602181.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306520140119968755187868602181.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.188059920088313909273628445208.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188059920088313909273628445208.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.701514276942509393419164159551.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.701514276942509393419164159551.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.297964221542942838344351735414.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297964221542942838344351735414.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.939216568327879462530496768794.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.939216568327879462530496768794.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.622243923620914676263059698181.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.622243923620914676263059698181.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.299806338046301317870803017534.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.299806338046301317870803017534.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.373433682859788429397781158572.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.373433682859788429397781158572.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.766881513533845439335142582269.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.766881513533845439335142582269.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.297988578825170426663869669862.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.297988578825170426663869669862.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.137773550852881583165286615668.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.137773550852881583165286615668.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.518487185634324801733841260431.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.518487185634324801733841260431.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.286627485198831346082954437212.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286627485198831346082954437212.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.143622857676008763729469324839.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.143622857676008763729469324839.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.997611074084993415992563148335.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.997611074084993415992563148335.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.317613170669207528926259976488.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.317613170669207528926259976488.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.200513183558872708878454294671.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.200513183558872708878454294671.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.239358021703233250639913775427.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.239358021703233250639913775427.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.161633200801003804714818844696.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161633200801003804714818844696.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.257383535269991165447822992959.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.257383535269991165447822992959.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.100225287222365663678666836860.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.100225287222365663678666836860.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.259124675432205040899951626253.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259124675432205040899951626253.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.131939324905446238286154504249.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.131939324905446238286154504249.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.336102335330125765000317290445.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.336102335330125765000317290445.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.241083615484551649610616348856.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.241083615484551649610616348856.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.183982839679953938397312236359.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.183982839679953938397312236359.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.149041668385192796520281592139.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.149041668385192796520281592139.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.608029415915051219877530734559.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.608029415915051219877530734559.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.288701997968615460794642979503.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.288701997968615460794642979503.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.271220641987745483198036913951.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.271220641987745483198036913951.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.931383239747372227838946053237.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.931383239747372227838946053237.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.478062284228419671253422844986.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.478062284228419671253422844986.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.346115813056769250958550383763.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.346115813056769250958550383763.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.302403227435841351528721627052.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.302403227435841351528721627052.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.774060103415303828812229821954.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.774060103415303828812229821954.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.324649110927013926557500550446.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.324649110927013926557500550446.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.888615810685807330497715730842.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.888615810685807330497715730842.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.207341668080525761926965850679.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.207341668080525761926965850679.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.177086402277715068525592995222.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.177086402277715068525592995222.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.262873069163227096134627700599.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.262873069163227096134627700599.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.897161587681142256575045076919.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.897161587681142256575045076919.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.161821150841552408667852639317.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.161821150841552408667852639317.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.247060297988514823071467295949.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.247060297988514823071467295949.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.454273545863197752384437758130.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.454273545863197752384437758130.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.202283133206014258077705539227.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202283133206014258077705539227.nii.gz"}, {"image": 
"subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.253317247142837717905329340520.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.253317247142837717905329340520.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.153732973534937692357111055819.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153732973534937692357111055819.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.179943248049071805421192715219.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179943248049071805421192715219.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.172243743899615313644757844726.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.172243743899615313644757844726.nii.gz"}, {"image": "subset_8/1.3.6.1.4.1.14519.5.2.1.6279.6001.814122498113547115932318256859.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.814122498113547115932318256859.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.215785045378334625097907422785.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215785045378334625097907422785.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.195557219224169985110295082004.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.195557219224169985110295082004.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.199261544234308780356714831537.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.199261544234308780356714831537.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.187694838527128312070807533473.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187694838527128312070807533473.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.387954549120924524005910602207.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.387954549120924524005910602207.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.124822907934319930841506266464.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124822907934319930841506266464.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.176869045992276345870480098568.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.176869045992276345870480098568.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.292994770358625142596171316474.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292994770358625142596171316474.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.765459236550358748053283544075.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.765459236550358748053283544075.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.771741891125176943862272696845.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.771741891125176943862272696845.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.217754016294471278921686508169.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.217754016294471278921686508169.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.182192086929819295877506541021.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.182192086929819295877506541021.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.392861216720727557882279374324.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.392861216720727557882279374324.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.193721075067404532739943086458.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193721075067404532739943086458.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.250481236093201801255751845296.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250481236093201801255751845296.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.174449669706458092793093760291.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.174449669706458092793093760291.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.237915456403882324748189195892.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.237915456403882324748189195892.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.309901913847714156367981722205.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.309901913847714156367981722205.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.330043769832606379655473292782.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330043769832606379655473292782.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.229860476925100292554329427970.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229860476925100292554329427970.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.272344603176687884771013620823.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272344603176687884771013620823.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.334166493392278943610545989413.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.334166493392278943610545989413.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.121108220866971173712229588402.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121108220866971173712229588402.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.268030488196493755113553009785.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.268030488196493755113553009785.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.436403998650924660479049012235.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.436403998650924660479049012235.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.311476128731958142981941696518.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.311476128731958142981941696518.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.212608679077007918190529579976.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.212608679077007918190529579976.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.300392272203629213913702120739.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300392272203629213913702120739.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.254929810944557499537650429296.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.254929810944557499537650429296.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.194632613233275988184244485809.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.194632613233275988184244485809.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.222052723822248889877676736332.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.222052723822248889877676736332.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.153985109349433321657655488650.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.153985109349433321657655488650.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.558286136379689377915919180358.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.558286136379689377915919180358.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.139577698050713461261415990027.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139577698050713461261415990027.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.229960820686439513664996214638.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229960820686439513664996214638.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.124656777236468248920498636247.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.124656777236468248920498636247.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.229096941293122177107846044795.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.229096941293122177107846044795.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.765930210026773090100532964804.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.765930210026773090100532964804.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.145510611155363050427743946446.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.145510611155363050427743946446.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.170706757615202213033480003264.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.170706757615202213033480003264.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.927394449308471452920270961822.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.927394449308471452920270961822.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.114914167428485563471327801935.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.114914167428485563471327801935.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.337005960787660957389988207064.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.337005960787660957389988207064.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.291156498203266896953765649282.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.291156498203266896953765649282.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.440226700369921575481834344455.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.440226700369921575481834344455.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.167661207884826429102690781600.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.167661207884826429102690781600.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.237215747217294006286437405216.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.237215747217294006286437405216.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.265780642925621389994857727416.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265780642925621389994857727416.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.712472578497712558367294720243.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.712472578497712558367294720243.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.340158437895922179455019686521.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.340158437895922179455019686521.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.109882169963817627559804568094.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.109882169963817627559804568094.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.328944769569002417592093467626.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.328944769569002417592093467626.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.141345499716190654505508410197.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.141345499716190654505508410197.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.286061375572911414226912429210.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.286061375572911414226912429210.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.603126300703296693942875967838.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.603126300703296693942875967838.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.148935306123327835217659769212.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.148935306123327835217659769212.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.112767175295249119452142211437.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.112767175295249119452142211437.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.300693623747082239407271583452.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300693623747082239407271583452.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.340012777775661021262977442176.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.340012777775661021262977442176.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.196251645377731223510086726530.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.196251645377731223510086726530.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.259453428008507791234730686014.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.259453428008507791234730686014.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.220596530836092324070084384692.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.220596530836092324070084384692.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.855232435861303786204450738044.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.855232435861303786204450738044.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.230491296081537726468075344411.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.230491296081537726468075344411.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.215104063467523905369326175410.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.215104063467523905369326175410.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.946129570505893110165820050204.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.946129570505893110165820050204.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.747803439040091794717626507402.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.747803439040091794717626507402.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.150097650621090951325113116280.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.150097650621090951325113116280.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.129650136453746261130135157590.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129650136453746261130135157590.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.733642690503782454656013446707.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.733642690503782454656013446707.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.292576688635952269497781991202.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.292576688635952269497781991202.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.193964947698259739624715468431.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.193964947698259739624715468431.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.188619674701053082195613114069.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188619674701053082195613114069.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.134519406153127654901640638633.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.134519406153127654901640638633.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.173931884906244951746140865701.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.173931884906244951746140865701.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.121805476976020513950614465787.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.121805476976020513950614465787.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.330643702676971528301859647742.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.330643702676971528301859647742.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.306140003699110313373771452136.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.306140003699110313373771452136.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.472487466001405705666001578363.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.472487466001405705666001578363.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.188385286346390202873004762827.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188385286346390202873004762827.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.227968442353440630355230778531.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227968442353440630355230778531.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.138674679709964033277400089532.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138674679709964033277400089532.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.312127933722985204808706697221.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.312127933722985204808706697221.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.138813197521718693188313387015.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138813197521718693188313387015.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.179683407589764683292800449011.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.179683407589764683292800449011.nii.gz"}, {"image": 
"subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.300270516469599170290456821227.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.300270516469599170290456821227.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.265570697208310960298668720669.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.265570697208310960298668720669.nii.gz"}, {"image": "subset_9/1.3.6.1.4.1.14519.5.2.1.6279.6001.102681962408431413578140925249.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.102681962408431413578140925249.nii.gz"}], "validation": [{"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.227962600322799211676960828223.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.227962600322799211676960828223.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.905371958588660410240398317235.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.905371958588660410240398317235.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.564534197011295112247542153557.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.564534197011295112247542153557.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.129055977637338639741695800950.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.129055977637338639741695800950.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.219087313261026510628926082729.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219087313261026510628926082729.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.313334055029671473836954456733.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313334055029671473836954456733.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.109002525524522225658609808059.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.109002525524522225658609808059.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.219909753224298157409438012179.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.219909753224298157409438012179.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.128023902651233986592378348912.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.128023902651233986592378348912.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.188376349804761988217597754952.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188376349804761988217597754952.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.640729228179368154416184318668.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.640729228179368154416184318668.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.323302986710576400812869264321.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.323302986710576400812869264321.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.241570579760883349458693655367.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.241570579760883349458693655367.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.310626494937915759224334597176.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.310626494937915759224334597176.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.277445975068759205899107114231.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.277445975068759205899107114231.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.250863365157630276148828903732.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250863365157630276148828903732.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.525937963993475482158828421281.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.525937963993475482158828421281.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.202811684116768680758082619196.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.202811684116768680758082619196.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.313835996725364342034830119490.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313835996725364342034830119490.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.294188507421106424248264912111.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.294188507421106424248264912111.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.187451715205085403623595258748.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.187451715205085403623595258748.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.126264578931778258890371755354.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126264578931778258890371755354.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.511347030803753100045216493273.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.511347030803753100045216493273.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.108197895896446896160048741492.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.108197895896446896160048741492.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.250438451287314206124484591986.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.250438451287314206124484591986.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.313605260055394498989743099991.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.313605260055394498989743099991.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.272042302501586336192628818865.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.272042302501586336192628818865.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.302134342469412607966016057827.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.302134342469412607966016057827.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.724251104254976962355686318345.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.724251104254976962355686318345.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.826812708000318290301835871780.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.826812708000318290301835871780.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.249530219848512542668813996730.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.249530219848512542668813996730.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.975254950136384517744116790879.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.975254950136384517744116790879.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.310548927038333190233889983845.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.310548927038333190233889983845.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.126121460017257137098781143514.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.126121460017257137098781143514.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.144438612068946916340281098509.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.144438612068946916340281098509.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.566816709786169715745131047975.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.566816709786169715745131047975.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.657775098760536289051744981056.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.657775098760536289051744981056.nii.gz"}, {"image": 
"subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.111172165674661221381920536987.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.111172165674661221381920536987.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.269689294231892620436462818860.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.269689294231892620436462818860.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.138080888843357047811238713686.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.138080888843357047811238713686.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.105756658031515062000744821260.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.105756658031515062000744821260.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.898642529028521482602829374444.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.898642529028521482602829374444.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.146429221666426688999739595820.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.146429221666426688999739595820.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.188209889686363159853715266493.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.188209889686363159853715266493.nii.gz"}, {"image": "subset_0/1.3.6.1.4.1.14519.5.2.1.6279.6001.139713436241461669335487719526.nii.gz", "label": "seg-lungs-LUNA16/1.3.6.1.4.1.14519.5.2.1.6279.6001.139713436241461669335487719526.nii.gz"}]}
================================================
FILE: jsons/dataset_TCIAcovid19_0.json
================================================
{"training": [{"image": "data/volume-covid19-A-0269.nii.gz"}, {"image": "data/volume-covid19-A-0012.nii.gz"}, {"image": "data/volume-covid19-A-0721_day008.nii.gz"}, {"image": "data/volume-covid19-A-0154.nii.gz"}, {"image": "data/volume-covid19-A-0358.nii.gz"}, {"image": "data/volume-covid19-A-0734_day002.nii.gz"}, {"image": "data/volume-covid19-A-0304.nii.gz"}, {"image": "data/volume-covid19-A-0640.nii.gz"}, {"image": "data/volume-covid19-A-0263.nii.gz"}, {"image": "data/volume-covid19-A-0737_day000.nii.gz"}, {"image": "data/volume-covid19-A-0253.nii.gz"}, {"image": "data/volume-covid19-A-0388.nii.gz"}, {"image": "data/volume-covid19-A-0708_day049.nii.gz"}, {"image": "data/volume-covid19-A-0007.nii.gz"}, {"image": "data/volume-covid19-A-0175.nii.gz"}, {"image": "data/volume-covid19-A-0523.nii.gz"}, {"image": "data/volume-covid19-A-0138.nii.gz"}, {"image": "data/volume-covid19-A-0675.nii.gz"}, {"image": "data/volume-covid19-A-0308.nii.gz"}, {"image": "data/volume-covid19-A-0025.nii.gz"}, {"image": "data/volume-covid19-A-0318_1.nii.gz"}, {"image": "data/volume-covid19-A-0366.nii.gz"}, {"image": "data/volume-covid19-A-0282.nii.gz"}, {"image": "data/volume-covid19-A-0733_day069.nii.gz"}, {"image": "data/volume-covid19-A-0261_0.nii.gz"}, {"image": "data/volume-covid19-A-0463.nii.gz"}, {"image": "data/volume-covid19-A-0180_0.nii.gz"}, {"image": "data/volume-covid19-A-0220.nii.gz"}, {"image": "data/volume-covid19-A-0298.nii.gz"}, {"image": "data/volume-covid19-A-0573.nii.gz"}, {"image": "data/volume-covid19-A-0140.nii.gz"}, {"image": "data/volume-covid19-A-0702_day008.nii.gz"}, {"image": "data/volume-covid19-A-0648.nii.gz"}, {"image": "data/volume-covid19-A-0600.nii.gz"}, {"image": "data/volume-covid19-A-0111.nii.gz"}, {"image": "data/volume-covid19-A-0418.nii.gz"}, {"image": "data/volume-covid19-A-0084.nii.gz"}, {"image": "data/volume-covid19-A-0221.nii.gz"}, {"image": "data/volume-covid19-A-0569.nii.gz"}, {"image": "data/volume-covid19-A-0598.nii.gz"}, {"image": 
"data/volume-covid19-A-0421.nii.gz"}, {"image": "data/volume-covid19-A-0703_day000.nii.gz"}, {"image": "data/volume-covid19-A-0074_0.nii.gz"}, {"image": "data/volume-covid19-A-0683.nii.gz"}, {"image": "data/volume-covid19-A-0369_0.nii.gz"}, {"image": "data/volume-covid19-A-0329.nii.gz"}, {"image": "data/volume-covid19-A-0455.nii.gz"}, {"image": "data/volume-covid19-A-0720_day002.nii.gz"}, {"image": "data/volume-covid19-A-0021.nii.gz"}, {"image": "data/volume-covid19-A-0352.nii.gz"}, {"image": "data/volume-covid19-A-0454.nii.gz"}, {"image": "data/volume-covid19-A-0032.nii.gz"}, {"image": "data/volume-covid19-A-0315.nii.gz"}, {"image": "data/volume-covid19-A-0017.nii.gz"}, {"image": "data/volume-covid19-A-0123.nii.gz"}, {"image": "data/volume-covid19-A-0734_day008.nii.gz"}, {"image": "data/volume-covid19-A-0394.nii.gz"}, {"image": "data/volume-covid19-A-0125.nii.gz"}, {"image": "data/volume-covid19-A-0056.nii.gz"}, {"image": "data/volume-covid19-A-0213.nii.gz"}, {"image": "data/volume-covid19-A-0229.nii.gz"}, {"image": "data/volume-covid19-A-0737_day013.nii.gz"}, {"image": "data/volume-covid19-A-0515.nii.gz"}, {"image": "data/volume-covid19-A-0420.nii.gz"}, {"image": "data/volume-covid19-A-0417.nii.gz"}, {"image": "data/volume-covid19-A-0426.nii.gz"}, {"image": "data/volume-covid19-A-0107.nii.gz"}, {"image": "data/volume-covid19-A-0710_day002.nii.gz"}, {"image": "data/volume-covid19-A-0722_day018.nii.gz"}, {"image": "data/volume-covid19-A-0535.nii.gz"}, {"image": "data/volume-covid19-A-0005.nii.gz"}, {"image": "data/volume-covid19-A-0022.nii.gz"}, {"image": "data/volume-covid19-A-0079.nii.gz"}, {"image": "data/volume-covid19-A-0416.nii.gz"}, {"image": "data/volume-covid19-A-0458.nii.gz"}, {"image": "data/volume-covid19-A-0560.nii.gz"}, {"image": "data/volume-covid19-A-0449.nii.gz"}, {"image": "data/volume-covid19-A-0580.nii.gz"}, {"image": "data/volume-covid19-A-0737_day003.nii.gz"}, {"image": "data/volume-covid19-A-0721_day020.nii.gz"}, {"image": 
"data/volume-covid19-A-0545.nii.gz"}, {"image": "data/volume-covid19-A-0733_day004.nii.gz"}, {"image": "data/volume-covid19-A-0047_0.nii.gz"}, {"image": "data/volume-covid19-A-0106.nii.gz"}, {"image": "data/volume-covid19-A-0593.nii.gz"}, {"image": "data/volume-covid19-A-0715_day019.nii.gz"}, {"image": "data/volume-covid19-A-0655.nii.gz"}, {"image": "data/volume-covid19-A-0687.nii.gz"}, {"image": "data/volume-covid19-A-0692.nii.gz"}, {"image": "data/volume-covid19-A-0373.nii.gz"}, {"image": "data/volume-covid19-A-0647.nii.gz"}, {"image": "data/volume-covid19-A-0161.nii.gz"}, {"image": "data/volume-covid19-A-0002.nii.gz"}, {"image": "data/volume-covid19-A-0685.nii.gz"}, {"image": "data/volume-covid19-A-0083.nii.gz"}, {"image": "data/volume-covid19-A-0680.nii.gz"}, {"image": "data/volume-covid19-A-0349.nii.gz"}, {"image": "data/volume-covid19-A-0390.nii.gz"}, {"image": "data/volume-covid19-A-0603.nii.gz"}, {"image": "data/volume-covid19-A-0716_day008.nii.gz"}, {"image": "data/volume-covid19-A-0372.nii.gz"}, {"image": "data/volume-covid19-A-0703_day006.nii.gz"}, {"image": "data/volume-covid19-A-0433.nii.gz"}, {"image": "data/volume-covid19-A-0691.nii.gz"}, {"image": "data/volume-covid19-A-0443.nii.gz"}, {"image": "data/volume-covid19-A-0261_1.nii.gz"}, {"image": "data/volume-covid19-A-0509.nii.gz"}, {"image": "data/volume-covid19-A-0704_day006.nii.gz"}, {"image": "data/volume-covid19-A-0231.nii.gz"}, {"image": "data/volume-covid19-A-0542.nii.gz"}, {"image": "data/volume-covid19-A-0492.nii.gz"}, {"image": "data/volume-covid19-A-0717_day052.nii.gz"}, {"image": "data/volume-covid19-A-0597.nii.gz"}, {"image": "data/volume-covid19-A-0067.nii.gz"}, {"image": "data/volume-covid19-A-0666.nii.gz"}, {"image": "data/volume-covid19-A-0723_day043.nii.gz"}, {"image": "data/volume-covid19-A-0239.nii.gz"}, {"image": "data/volume-covid19-A-0048.nii.gz"}, {"image": "data/volume-covid19-A-0481.nii.gz"}, {"image": "data/volume-covid19-A-0673.nii.gz"}, {"image": 
"data/volume-covid19-A-0716_day000.nii.gz"}, {"image": "data/volume-covid19-A-0053.nii.gz"}, {"image": "data/volume-covid19-A-0178.nii.gz"}, {"image": "data/volume-covid19-A-0198.nii.gz"}, {"image": "data/volume-covid19-A-0428.nii.gz"}, {"image": "data/volume-covid19-A-0035.nii.gz"}, {"image": "data/volume-covid19-A-0189_0.nii.gz"}, {"image": "data/volume-covid19-A-0602.nii.gz"}, {"image": "data/volume-covid19-A-0029.nii.gz"}, {"image": "data/volume-covid19-A-0577.nii.gz"}, {"image": "data/volume-covid19-A-0638.nii.gz"}, {"image": "data/volume-covid19-A-0008.nii.gz"}, {"image": "data/volume-covid19-A-0710_day024.nii.gz"}, {"image": "data/volume-covid19-A-0391.nii.gz"}, {"image": "data/volume-covid19-A-0562.nii.gz"}, {"image": "data/volume-covid19-A-0383_0.nii.gz"}, {"image": "data/volume-covid19-A-0723_day000.nii.gz"}, {"image": "data/volume-covid19-A-0092.nii.gz"}, {"image": "data/volume-covid19-A-0196_0.nii.gz"}, {"image": "data/volume-covid19-A-0671.nii.gz"}, {"image": "data/volume-covid19-A-0697.nii.gz"}, {"image": "data/volume-covid19-A-0033.nii.gz"}, {"image": "data/volume-covid19-A-0584.nii.gz"}, {"image": "data/volume-covid19-A-0131.nii.gz"}, {"image": "data/volume-covid19-A-0468.nii.gz"}, {"image": "data/volume-covid19-A-0527.nii.gz"}, {"image": "data/volume-covid19-A-0703_day010.nii.gz"}, {"image": "data/volume-covid19-A-0558.nii.gz"}, {"image": "data/volume-covid19-A-0241.nii.gz"}, {"image": "data/volume-covid19-A-0259.nii.gz"}, {"image": "data/volume-covid19-A-0216.nii.gz"}, {"image": "data/volume-covid19-A-0612.nii.gz"}, {"image": "data/volume-covid19-A-0736_day000.nii.gz"}, {"image": "data/volume-covid19-A-0009.nii.gz"}, {"image": "data/volume-covid19-A-0710_day007.nii.gz"}, {"image": "data/volume-covid19-A-0088.nii.gz"}, {"image": "data/volume-covid19-A-0581.nii.gz"}, {"image": "data/volume-covid19-A-0393.nii.gz"}, {"image": "data/volume-covid19-A-0554.nii.gz"}, {"image": "data/volume-covid19-A-0736_day010.nii.gz"}, {"image": 
"data/volume-covid19-A-0335.nii.gz"}, {"image": "data/volume-covid19-A-0483.nii.gz"}, {"image": "data/volume-covid19-A-0473.nii.gz"}, {"image": "data/volume-covid19-A-0401.nii.gz"}, {"image": "data/volume-covid19-A-0502.nii.gz"}, {"image": "data/volume-covid19-A-0714_day006.nii.gz"}, {"image": "data/volume-covid19-A-0467.nii.gz"}, {"image": "data/volume-covid19-A-0361.nii.gz"}, {"image": "data/volume-covid19-A-0028_1.nii.gz"}, {"image": "data/volume-covid19-A-0232.nii.gz"}, {"image": "data/volume-covid19-A-0202_1.nii.gz"}, {"image": "data/volume-covid19-A-0363.nii.gz"}, {"image": "data/volume-covid19-A-0500.nii.gz"}, {"image": "data/volume-covid19-A-0112.nii.gz"}, {"image": "data/volume-covid19-A-0145.nii.gz"}, {"image": "data/volume-covid19-A-0271.nii.gz"}, {"image": "data/volume-covid19-A-0332.nii.gz"}, {"image": "data/volume-covid19-A-0281.nii.gz"}, {"image": "data/volume-covid19-A-0171_0.nii.gz"}, {"image": "data/volume-covid19-A-0336.nii.gz"}, {"image": "data/volume-covid19-A-0531.nii.gz"}, {"image": "data/volume-covid19-A-0579.nii.gz"}, {"image": "data/volume-covid19-A-0713_day017.nii.gz"}, {"image": "data/volume-covid19-A-0695.nii.gz"}, {"image": "data/volume-covid19-A-0708_day018.nii.gz"}, {"image": "data/volume-covid19-A-0639.nii.gz"}, {"image": "data/volume-covid19-A-0607.nii.gz"}, {"image": "data/volume-covid19-A-0082.nii.gz"}, {"image": "data/volume-covid19-A-0174.nii.gz"}, {"image": "data/volume-covid19-A-0257.nii.gz"}, {"image": "data/volume-covid19-A-0260.nii.gz"}, {"image": "data/volume-covid19-A-0240.nii.gz"}, {"image": "data/volume-covid19-A-0126.nii.gz"}, {"image": "data/volume-covid19-A-0661.nii.gz"}, {"image": "data/volume-covid19-A-0036.nii.gz"}, {"image": "data/volume-covid19-A-0410.nii.gz"}, {"image": "data/volume-covid19-A-0398.nii.gz"}, {"image": "data/volume-covid19-A-0665.nii.gz"}, {"image": "data/volume-covid19-A-0493.nii.gz"}, {"image": "data/volume-covid19-A-0706_day015.nii.gz"}, {"image": "data/volume-covid19-A-0619.nii.gz"}, 
{"image": "data/volume-covid19-A-0708_day000.nii.gz"}, {"image": "data/volume-covid19-A-0440.nii.gz"}, {"image": "data/volume-covid19-A-0326.nii.gz"}, {"image": "data/volume-covid19-A-0556.nii.gz"}, {"image": "data/volume-covid19-A-0644.nii.gz"}, {"image": "data/volume-covid19-A-0080.nii.gz"}, {"image": "data/volume-covid19-A-0375.nii.gz"}, {"image": "data/volume-covid19-A-0503.nii.gz"}, {"image": "data/volume-covid19-A-0662.nii.gz"}, {"image": "data/volume-covid19-A-0299.nii.gz"}, {"image": "data/volume-covid19-A-0234.nii.gz"}, {"image": "data/volume-covid19-A-0546.nii.gz"}, {"image": "data/volume-covid19-A-0344.nii.gz"}, {"image": "data/volume-covid19-A-0149.nii.gz"}, {"image": "data/volume-covid19-A-0738_day052.nii.gz"}, {"image": "data/volume-covid19-A-0645.nii.gz"}, {"image": "data/volume-covid19-A-0272.nii.gz"}, {"image": "data/volume-covid19-A-0177.nii.gz"}, {"image": "data/volume-covid19-A-0738_day010.nii.gz"}, {"image": "data/volume-covid19-A-0631.nii.gz"}, {"image": "data/volume-covid19-A-0570.nii.gz"}, {"image": "data/volume-covid19-A-0737_day006.nii.gz"}, {"image": "data/volume-covid19-A-0709_day005.nii.gz"}, {"image": "data/volume-covid19-A-0287_1.nii.gz"}, {"image": "data/volume-covid19-A-0262.nii.gz"}, {"image": "data/volume-covid19-A-0223.nii.gz"}, {"image": "data/volume-covid19-A-0444.nii.gz"}, {"image": "data/volume-covid19-A-0723_day004.nii.gz"}, {"image": "data/volume-covid19-A-0702_day050.nii.gz"}, {"image": "data/volume-covid19-A-0703_day016.nii.gz"}, {"image": "data/volume-covid19-A-0715_day012.nii.gz"}, {"image": "data/volume-covid19-A-0115.nii.gz"}, {"image": "data/volume-covid19-A-0643.nii.gz"}, {"image": "data/volume-covid19-A-0448.nii.gz"}, {"image": "data/volume-covid19-A-0575.nii.gz"}, {"image": "data/volume-covid19-A-0110.nii.gz"}, {"image": "data/volume-covid19-A-0290.nii.gz"}, {"image": "data/volume-covid19-A-0676.nii.gz"}, {"image": "data/volume-covid19-A-0287_0.nii.gz"}, {"image": "data/volume-covid19-A-0381_1.nii.gz"}, {"image": 
"data/volume-covid19-A-0594.nii.gz"}, {"image": "data/volume-covid19-A-0702_day021.nii.gz"}, {"image": "data/volume-covid19-A-0700_day015.nii.gz"}, {"image": "data/volume-covid19-A-0658.nii.gz"}, {"image": "data/volume-covid19-A-0735_day021.nii.gz"}, {"image": "data/volume-covid19-A-0702_day014.nii.gz"}, {"image": "data/volume-covid19-A-0038.nii.gz"}, {"image": "data/volume-covid19-A-0169.nii.gz"}, {"image": "data/volume-covid19-A-0651.nii.gz"}, {"image": "data/volume-covid19-A-0561.nii.gz"}, {"image": "data/volume-covid19-A-0284.nii.gz"}, {"image": "data/volume-covid19-A-0701_day000.nii.gz"}, {"image": "data/volume-covid19-A-0637.nii.gz"}, {"image": "data/volume-covid19-A-0074_1.nii.gz"}, {"image": "data/volume-covid19-A-0237.nii.gz"}, {"image": "data/volume-covid19-A-0128.nii.gz"}, {"image": "data/volume-covid19-A-0707_day000.nii.gz"}, {"image": "data/volume-covid19-A-0212_0.nii.gz"}, {"image": "data/volume-covid19-A-0437.nii.gz"}, {"image": "data/volume-covid19-A-0707_day060.nii.gz"}, {"image": "data/volume-covid19-A-0374.nii.gz"}, {"image": "data/volume-covid19-A-0170.nii.gz"}, {"image": "data/volume-covid19-A-0327.nii.gz"}, {"image": "data/volume-covid19-A-0185.nii.gz"}, {"image": "data/volume-covid19-A-0517.nii.gz"}, {"image": "data/volume-covid19-A-0324.nii.gz"}, {"image": "data/volume-covid19-A-0117.nii.gz"}, {"image": "data/volume-covid19-A-0736_day014.nii.gz"}, {"image": "data/volume-covid19-A-0310.nii.gz"}, {"image": "data/volume-covid19-A-0078.nii.gz"}, {"image": "data/volume-covid19-A-0081.nii.gz"}, {"image": "data/volume-covid19-A-0547.nii.gz"}, {"image": "data/volume-covid19-A-0738_day000.nii.gz"}, {"image": "data/volume-covid19-A-0085.nii.gz"}, {"image": "data/volume-covid19-A-0267.nii.gz"}, {"image": "data/volume-covid19-A-0582.nii.gz"}, {"image": "data/volume-covid19-A-0474.nii.gz"}, {"image": "data/volume-covid19-A-0718_day002.nii.gz"}, {"image": "data/volume-covid19-A-0227.nii.gz"}, {"image": "data/volume-covid19-A-0432.nii.gz"}, {"image": 
"data/volume-covid19-A-0150.nii.gz"}, {"image": "data/volume-covid19-A-0345.nii.gz"}, {"image": "data/volume-covid19-A-0629.nii.gz"}, {"image": "data/volume-covid19-A-0385.nii.gz"}, {"image": "data/volume-covid19-A-0165.nii.gz"}, {"image": "data/volume-covid19-A-0700_day056.nii.gz"}, {"image": "data/volume-covid19-A-0530.nii.gz"}, {"image": "data/volume-covid19-A-0507.nii.gz"}, {"image": "data/volume-covid19-A-0709_day000.nii.gz"}, {"image": "data/volume-covid19-A-0674.nii.gz"}, {"image": "data/volume-covid19-A-0293.nii.gz"}, {"image": "data/volume-covid19-A-0485.nii.gz"}, {"image": "data/volume-covid19-A-0722_day008.nii.gz"}, {"image": "data/volume-covid19-A-0586.nii.gz"}, {"image": "data/volume-covid19-A-0244.nii.gz"}, {"image": "data/volume-covid19-A-0713_day050.nii.gz"}, {"image": "data/volume-covid19-A-0608.nii.gz"}, {"image": "data/volume-covid19-A-0615.nii.gz"}, {"image": "data/volume-covid19-A-0100.nii.gz"}, {"image": "data/volume-covid19-A-0054.nii.gz"}, {"image": "data/volume-covid19-A-0498.nii.gz"}, {"image": "data/volume-covid19-A-0070.nii.gz"}, {"image": "data/volume-covid19-A-0479.nii.gz"}, {"image": "data/volume-covid19-A-0031.nii.gz"}, {"image": "data/volume-covid19-A-0425.nii.gz"}, {"image": "data/volume-covid19-A-0252.nii.gz"}, {"image": "data/volume-covid19-A-0663.nii.gz"}, {"image": "data/volume-covid19-A-0387.nii.gz"}, {"image": "data/volume-covid19-A-0203.nii.gz"}, {"image": "data/volume-covid19-A-0708_day003.nii.gz"}, {"image": "data/volume-covid19-A-0153.nii.gz"}, {"image": "data/volume-covid19-A-0472.nii.gz"}, {"image": "data/volume-covid19-A-0490.nii.gz"}, {"image": "data/volume-covid19-A-0397.nii.gz"}, {"image": "data/volume-covid19-A-0505.nii.gz"}, {"image": "data/volume-covid19-A-0320.nii.gz"}, {"image": "data/volume-covid19-A-0616.nii.gz"}, {"image": "data/volume-covid19-A-0510.nii.gz"}, {"image": "data/volume-covid19-A-0694.nii.gz"}, {"image": "data/volume-covid19-A-0550.nii.gz"}, {"image": "data/volume-covid19-A-0291_0.nii.gz"}, 
{"image": "data/volume-covid19-A-0116.nii.gz"}, {"image": "data/volume-covid19-A-0279.nii.gz"}, {"image": "data/volume-covid19-A-0734_day005.nii.gz"}, {"image": "data/volume-covid19-A-0590.nii.gz"}, {"image": "data/volume-covid19-A-0690.nii.gz"}, {"image": "data/volume-covid19-A-0578.nii.gz"}, {"image": "data/volume-covid19-A-0277.nii.gz"}, {"image": "data/volume-covid19-A-0195.nii.gz"}, {"image": "data/volume-covid19-A-0617.nii.gz"}, {"image": "data/volume-covid19-A-0325.nii.gz"}, {"image": "data/volume-covid19-A-0610.nii.gz"}, {"image": "data/volume-covid19-A-0011.nii.gz"}, {"image": "data/volume-covid19-A-0714_day000.nii.gz"}, {"image": "data/volume-covid19-A-0351.nii.gz"}, {"image": "data/volume-covid19-A-0353.nii.gz"}, {"image": "data/volume-covid19-A-0636.nii.gz"}, {"image": "data/volume-covid19-A-0670.nii.gz"}, {"image": "data/volume-covid19-A-0713_day012.nii.gz"}, {"image": "data/volume-covid19-A-0156.nii.gz"}, {"image": "data/volume-covid19-A-0682.nii.gz"}, {"image": "data/volume-covid19-A-0157.nii.gz"}, {"image": "data/volume-covid19-A-0548.nii.gz"}, {"image": "data/volume-covid19-A-0303_0.nii.gz"}, {"image": "data/volume-covid19-A-0212_1.nii.gz"}, {"image": "data/volume-covid19-A-0275.nii.gz"}, {"image": "data/volume-covid19-A-0717_day005.nii.gz"}, {"image": "data/volume-covid19-A-0707_day002.nii.gz"}, {"image": "data/volume-covid19-A-0430.nii.gz"}, {"image": "data/volume-covid19-A-0004.nii.gz"}, {"image": "data/volume-covid19-A-0099.nii.gz"}, {"image": "data/volume-covid19-A-0027_1.nii.gz"}, {"image": "data/volume-covid19-A-0529.nii.gz"}, {"image": "data/volume-covid19-A-0190.nii.gz"}, {"image": "data/volume-covid19-A-0075.nii.gz"}, {"image": "data/volume-covid19-A-0606.nii.gz"}, {"image": "data/volume-covid19-A-0064.nii.gz"}, {"image": "data/volume-covid19-A-0718_day000.nii.gz"}, {"image": "data/volume-covid19-A-0735_day010.nii.gz"}, {"image": "data/volume-covid19-A-0042.nii.gz"}, {"image": "data/volume-covid19-A-0522.nii.gz"}, {"image": 
"data/volume-covid19-A-0249.nii.gz"}, {"image": "data/volume-covid19-A-0718_day054.nii.gz"}, {"image": "data/volume-covid19-A-0342.nii.gz"}, {"image": "data/volume-covid19-A-0094.nii.gz"}, {"image": "data/volume-covid19-A-0016.nii.gz"}, {"image": "data/volume-covid19-A-0047_1.nii.gz"}, {"image": "data/volume-covid19-A-0439.nii.gz"}, {"image": "data/volume-covid19-A-0635.nii.gz"}, {"image": "data/volume-covid19-A-0370.nii.gz"}, {"image": "data/volume-covid19-A-0006.nii.gz"}, {"image": "data/volume-covid19-A-0219.nii.gz"}, {"image": "data/volume-covid19-A-0091.nii.gz"}, {"image": "data/volume-covid19-A-0355.nii.gz"}, {"image": "data/volume-covid19-A-0109.nii.gz"}, {"image": "data/volume-covid19-A-0069.nii.gz"}, {"image": "data/volume-covid19-A-0377.nii.gz"}, {"image": "data/volume-covid19-A-0405.nii.gz"}, {"image": "data/volume-covid19-A-0343.nii.gz"}, {"image": "data/volume-covid19-A-0071.nii.gz"}, {"image": "data/volume-covid19-A-0604.nii.gz"}, {"image": "data/volume-covid19-A-0371.nii.gz"}, {"image": "data/volume-covid19-A-0722_day003.nii.gz"}, {"image": "data/volume-covid19-A-0362.nii.gz"}, {"image": "data/volume-covid19-A-0596.nii.gz"}, {"image": "data/volume-covid19-A-0576.nii.gz"}, {"image": "data/volume-covid19-A-0357.nii.gz"}, {"image": "data/volume-covid19-A-0681.nii.gz"}, {"image": "data/volume-covid19-A-0480.nii.gz"}, {"image": "data/volume-covid19-A-0184_1.nii.gz"}, {"image": "data/volume-covid19-A-0139.nii.gz"}, {"image": "data/volume-covid19-A-0559.nii.gz"}, {"image": "data/volume-covid19-A-0423.nii.gz"}, {"image": "data/volume-covid19-A-0013.nii.gz"}, {"image": "data/volume-covid19-A-0181.nii.gz"}, {"image": "data/volume-covid19-A-0659.nii.gz"}, {"image": "data/volume-covid19-A-0499.nii.gz"}, {"image": "data/volume-covid19-A-0464.nii.gz"}, {"image": "data/volume-covid19-A-0331.nii.gz"}, {"image": "data/volume-covid19-A-0735_day004.nii.gz"}, {"image": "data/volume-covid19-A-0118.nii.gz"}, {"image": "data/volume-covid19-A-0180_1.nii.gz"}, {"image": 
"data/volume-covid19-A-0166.nii.gz"}, {"image": "data/volume-covid19-A-0066.nii.gz"}, {"image": "data/volume-covid19-A-0536.nii.gz"}, {"image": "data/volume-covid19-A-0096.nii.gz"}, {"image": "data/volume-covid19-A-0512.nii.gz"}, {"image": "data/volume-covid19-A-0713_day008.nii.gz"}, {"image": "data/volume-covid19-A-0571.nii.gz"}, {"image": "data/volume-covid19-A-0346.nii.gz"}, {"image": "data/volume-covid19-A-0129.nii.gz"}, {"image": "data/volume-covid19-A-0222.nii.gz"}, {"image": "data/volume-covid19-A-0184_0.nii.gz"}, {"image": "data/volume-covid19-A-0737_day044.nii.gz"}, {"image": "data/volume-covid19-A-0289.nii.gz"}, {"image": "data/volume-covid19-A-0609.nii.gz"}, {"image": "data/volume-covid19-A-0356.nii.gz"}, {"image": "data/volume-covid19-A-0669.nii.gz"}, {"image": "data/volume-covid19-A-0186.nii.gz"}, {"image": "data/volume-covid19-A-0189_1.nii.gz"}, {"image": "data/volume-covid19-A-0553.nii.gz"}, {"image": "data/volume-covid19-A-0625.nii.gz"}, {"image": "data/volume-covid19-A-0641.nii.gz"}, {"image": "data/volume-covid19-A-0010.nii.gz"}, {"image": "data/volume-covid19-A-0041.nii.gz"}, {"image": "data/volume-covid19-A-0657.nii.gz"}, {"image": "data/volume-covid19-A-0703_day021.nii.gz"}, {"image": "data/volume-covid19-A-0314.nii.gz"}, {"image": "data/volume-covid19-A-0504.nii.gz"}, {"image": "data/volume-covid19-A-0413.nii.gz"}, {"image": "data/volume-covid19-A-0019.nii.gz"}, {"image": "data/volume-covid19-A-0202_0.nii.gz"}, {"image": "data/volume-covid19-A-0422.nii.gz"}, {"image": "data/volume-covid19-A-0233.nii.gz"}, {"image": "data/volume-covid19-A-0585.nii.gz"}, {"image": "data/volume-covid19-A-0328.nii.gz"}, {"image": "data/volume-covid19-A-0487.nii.gz"}, {"image": "data/volume-covid19-A-0296.nii.gz"}, {"image": "data/volume-covid19-A-0414.nii.gz"}, {"image": "data/volume-covid19-A-0630.nii.gz"}, {"image": "data/volume-covid19-A-0323.nii.gz"}, {"image": "data/volume-covid19-A-0679.nii.gz"}, {"image": "data/volume-covid19-A-0164.nii.gz"}, {"image": 
"data/volume-covid19-A-0702_day011.nii.gz"}, {"image": "data/volume-covid19-A-0392.nii.gz"}, {"image": "data/volume-covid19-A-0710_day000.nii.gz"}, {"image": "data/volume-covid19-A-0330.nii.gz"}, {"image": "data/volume-covid19-A-0046.nii.gz"}, {"image": "data/volume-covid19-A-0270.nii.gz"}, {"image": "data/volume-covid19-A-0678.nii.gz"}, {"image": "data/volume-covid19-A-0633.nii.gz"}, {"image": "data/volume-covid19-A-0734_day012.nii.gz"}, {"image": "data/volume-covid19-A-0144.nii.gz"}, {"image": "data/volume-covid19-A-0715_day052.nii.gz"}, {"image": "data/volume-covid19-A-0402.nii.gz"}, {"image": "data/volume-covid19-A-0519.nii.gz"}, {"image": "data/volume-covid19-A-0382.nii.gz"}, {"image": "data/volume-covid19-A-0183.nii.gz"}, {"image": "data/volume-covid19-A-0540.nii.gz"}, {"image": "data/volume-covid19-A-0668.nii.gz"}, {"image": "data/volume-covid19-A-0475.nii.gz"}, {"image": "data/volume-covid19-A-0722_day000.nii.gz"}, {"image": "data/volume-covid19-A-0001.nii.gz"}, {"image": "data/volume-covid19-A-0484.nii.gz"}, {"image": "data/volume-covid19-A-0201.nii.gz"}, {"image": "data/volume-covid19-A-0722_day011.nii.gz"}, {"image": "data/volume-covid19-A-0710_day013.nii.gz"}, {"image": "data/volume-covid19-A-0660.nii.gz"}, {"image": "data/volume-covid19-A-0516.nii.gz"}, {"image": "data/volume-covid19-A-0721_day000.nii.gz"}, {"image": "data/volume-covid19-A-0686.nii.gz"}, {"image": "data/volume-covid19-A-0288.nii.gz"}, {"image": "data/volume-covid19-A-0256_0.nii.gz"}, {"image": "data/volume-covid19-A-0614.nii.gz"}, {"image": "data/volume-covid19-A-0039.nii.gz"}, {"image": "data/volume-covid19-A-0708_day011.nii.gz"}, {"image": "data/volume-covid19-A-0258.nii.gz"}, {"image": "data/volume-covid19-A-0242.nii.gz"}, {"image": "data/volume-covid19-A-0098.nii.gz"}, {"image": "data/volume-covid19-A-0412.nii.gz"}, {"image": "data/volume-covid19-A-0549.nii.gz"}, {"image": "data/volume-covid19-A-0235.nii.gz"}, {"image": "data/volume-covid19-A-0407.nii.gz"}, {"image": 
"data/volume-covid19-A-0461.nii.gz"}, {"image": "data/volume-covid19-A-0618.nii.gz"}, {"image": "data/volume-covid19-A-0719_day000.nii.gz"}, {"image": "data/volume-covid19-A-0403.nii.gz"}, {"image": "data/volume-covid19-A-0442.nii.gz"}, {"image": "data/volume-covid19-A-0478.nii.gz"}, {"image": "data/volume-covid19-A-0706_day011.nii.gz"}, {"image": "data/volume-covid19-A-0450.nii.gz"}, {"image": "data/volume-covid19-A-0142.nii.gz"}, {"image": "data/volume-covid19-A-0136.nii.gz"}, {"image": "data/volume-covid19-A-0506.nii.gz"}, {"image": "data/volume-covid19-A-0360.nii.gz"}, {"image": "data/volume-covid19-A-0488.nii.gz"}, {"image": "data/volume-covid19-A-0713_day000.nii.gz"}, {"image": "data/volume-covid19-A-0340.nii.gz"}, {"image": "data/volume-covid19-A-0457.nii.gz"}, {"image": "data/volume-covid19-A-0557.nii.gz"}, {"image": "data/volume-covid19-A-0622.nii.gz"}, {"image": "data/volume-covid19-A-0134.nii.gz"}, {"image": "data/volume-covid19-A-0020.nii.gz"}, {"image": "data/volume-covid19-A-0102.nii.gz"}, {"image": "data/volume-covid19-A-0255.nii.gz"}, {"image": "data/volume-covid19-A-0268.nii.gz"}, {"image": "data/volume-covid19-A-0406.nii.gz"}, {"image": "data/volume-covid19-A-0438.nii.gz"}, {"image": "data/volume-covid19-A-0632.nii.gz"}, {"image": "data/volume-covid19-A-0254.nii.gz"}, {"image": "data/volume-covid19-A-0624.nii.gz"}, {"image": "data/volume-covid19-A-0524.nii.gz"}, {"image": "data/volume-covid19-A-0305.nii.gz"}, {"image": "data/volume-covid19-A-0589.nii.gz"}, {"image": "data/volume-covid19-A-0247.nii.gz"}, {"image": "data/volume-covid19-A-0369_1.nii.gz"}, {"image": "data/volume-covid19-A-0445.nii.gz"}, {"image": "data/volume-covid19-A-0000.nii.gz"}, {"image": "data/volume-covid19-A-0130.nii.gz"}, {"image": "data/volume-covid19-A-0037.nii.gz"}, {"image": "data/volume-covid19-A-0204.nii.gz"}, {"image": "data/volume-covid19-A-0319.nii.gz"}, {"image": "data/volume-covid19-A-0030.nii.gz"}, {"image": "data/volume-covid19-A-0073.nii.gz"}, {"image": 
"data/volume-covid19-A-0034.nii.gz"}, {"image": "data/volume-covid19-A-0588.nii.gz"}, {"image": "data/volume-covid19-A-0316.nii.gz"}, {"image": "data/volume-covid19-A-0404.nii.gz"}, {"image": "data/volume-covid19-A-0626.nii.gz"}, {"image": "data/volume-covid19-A-0068.nii.gz"}, {"image": "data/volume-covid19-A-0179.nii.gz"}, {"image": "data/volume-covid19-A-0720_day000.nii.gz"}, {"image": "data/volume-covid19-A-0435.nii.gz"}, {"image": "data/volume-covid19-A-0710_day057.nii.gz"}, {"image": "data/volume-covid19-A-0246.nii.gz"}, {"image": "data/volume-covid19-A-0228.nii.gz"}, {"image": "data/volume-covid19-A-0627.nii.gz"}, {"image": "data/volume-covid19-A-0303_1.nii.gz"}, {"image": "data/volume-covid19-A-0057.nii.gz"}, {"image": "data/volume-covid19-A-0133.nii.gz"}, {"image": "data/volume-covid19-A-0698.nii.gz"}, {"image": "data/volume-covid19-A-0567.nii.gz"}, {"image": "data/volume-covid19-A-0533.nii.gz"}, {"image": "data/volume-covid19-A-0620.nii.gz"}, {"image": "data/volume-covid19-A-0537.nii.gz"}, {"image": "data/volume-covid19-A-0147.nii.gz"}, {"image": "data/volume-covid19-A-0114.nii.gz"}, {"image": "data/volume-covid19-A-0347.nii.gz"}, {"image": "data/volume-covid19-A-0256_1.nii.gz"}, {"image": "data/volume-covid19-A-0605.nii.gz"}, {"image": "data/volume-covid19-A-0735_day052.nii.gz"}, {"image": "data/volume-covid19-A-0501.nii.gz"}, {"image": "data/volume-covid19-A-0592.nii.gz"}, {"image": "data/volume-covid19-A-0089.nii.gz"}, {"image": "data/volume-covid19-A-0735_day000.nii.gz"}, {"image": "data/volume-covid19-A-0408.nii.gz"}, {"image": "data/volume-covid19-A-0318_0.nii.gz"}, {"image": "data/volume-covid19-A-0135.nii.gz"}, {"image": "data/volume-covid19-A-0477.nii.gz"}, {"image": "data/volume-covid19-A-0653.nii.gz"}, {"image": "data/volume-covid19-A-0424.nii.gz"}, {"image": "data/volume-covid19-A-0552.nii.gz"}, {"image": "data/volume-covid19-A-0090.nii.gz"}, {"image": "data/volume-covid19-A-0028_0.nii.gz"}, {"image": "data/volume-covid19-A-0137.nii.gz"}, 
{"image": "data/volume-covid19-A-0199.nii.gz"}, {"image": "data/volume-covid19-A-0538.nii.gz"}, {"image": "data/volume-covid19-A-0664.nii.gz"}, {"image": "data/volume-covid19-A-0295.nii.gz"}, {"image": "data/volume-covid19-A-0359.nii.gz"}, {"image": "data/volume-covid19-A-0532.nii.gz"}, {"image": "data/volume-covid19-A-0736_day050.nii.gz"}, {"image": "data/volume-covid19-A-0708_day008.nii.gz"}, {"image": "data/volume-covid19-A-0714_day051.nii.gz"}, {"image": "data/volume-covid19-A-0706_day049.nii.gz"}, {"image": "data/volume-covid19-A-0705_day000.nii.gz"}, {"image": "data/volume-covid19-A-0285.nii.gz"}, {"image": "data/volume-covid19-A-0566.nii.gz"}, {"image": "data/volume-covid19-A-0667.nii.gz"}, {"image": "data/volume-covid19-A-0301.nii.gz"}, {"image": "data/volume-covid19-A-0378.nii.gz"}, {"image": "data/volume-covid19-A-0101.nii.gz"}, {"image": "data/volume-covid19-A-0734_day000.nii.gz"}, {"image": "data/volume-covid19-A-0446.nii.gz"}, {"image": "data/volume-covid19-A-0196_1.nii.gz"}, {"image": "data/volume-covid19-A-0543.nii.gz"}, {"image": "data/volume-covid19-A-0706_day006.nii.gz"}, {"image": "data/volume-covid19-A-0451.nii.gz"}, {"image": "data/volume-covid19-A-0595.nii.gz"}, {"image": "data/volume-covid19-A-0427.nii.gz"}, {"image": "data/volume-covid19-A-0384.nii.gz"}, {"image": "data/volume-covid19-A-0302.nii.gz"}, {"image": "data/volume-covid19-A-0146.nii.gz"}, {"image": "data/volume-covid19-A-0341.nii.gz"}, {"image": "data/volume-covid19-A-0564.nii.gz"}, {"image": "data/volume-covid19-A-0591.nii.gz"}, {"image": "data/volume-covid19-A-0194.nii.gz"}, {"image": "data/volume-covid19-A-0712_day022.nii.gz"}, {"image": "data/volume-covid19-A-0076.nii.gz"}, {"image": "data/volume-covid19-A-0555.nii.gz"}, {"image": "data/volume-covid19-A-0273.nii.gz"}, {"image": "data/volume-covid19-A-0447.nii.gz"}, {"image": "data/volume-covid19-A-0339.nii.gz"}, {"image": "data/volume-covid19-A-0309.nii.gz"}, {"image": "data/volume-covid19-A-0023.nii.gz"}, {"image": 
"data/volume-covid19-A-0672.nii.gz"}, {"image": "data/volume-covid19-A-0043.nii.gz"}, {"image": "data/volume-covid19-A-0469.nii.gz"}, {"image": "data/volume-covid19-A-0311.nii.gz"}, {"image": "data/volume-covid19-A-0654.nii.gz"}, {"image": "data/volume-covid19-A-0286.nii.gz"}, {"image": "data/volume-covid19-A-0688.nii.gz"}, {"image": "data/volume-covid19-A-0621.nii.gz"}, {"image": "data/volume-covid19-A-0719_day060.nii.gz"}, {"image": "data/volume-covid19-A-0226.nii.gz"}, {"image": "data/volume-covid19-A-0095.nii.gz"}, {"image": "data/volume-covid19-A-0306.nii.gz"}, {"image": "data/volume-covid19-A-0108.nii.gz"}, {"image": "data/volume-covid19-A-0051.nii.gz"}, {"image": "data/volume-covid19-A-0077.nii.gz"}, {"image": "data/volume-covid19-A-0225.nii.gz"}, {"image": "data/volume-covid19-A-0705_day004.nii.gz"}, {"image": "data/volume-covid19-A-0689.nii.gz"}, {"image": "data/volume-covid19-A-0124.nii.gz"}, {"image": "data/volume-covid19-A-0072.nii.gz"}, {"image": "data/volume-covid19-A-0040.nii.gz"}, {"image": "data/volume-covid19-A-0702_day062.nii.gz"}, {"image": "data/volume-covid19-A-0491.nii.gz"}, {"image": "data/volume-covid19-A-0539.nii.gz"}, {"image": "data/volume-covid19-A-0350.nii.gz"}, {"image": "data/volume-covid19-A-0702_day025.nii.gz"}, {"image": "data/volume-covid19-A-0337.nii.gz"}, {"image": "data/volume-covid19-A-0158.nii.gz"}, {"image": "data/volume-covid19-A-0470.nii.gz"}, {"image": "data/volume-covid19-A-0396.nii.gz"}, {"image": "data/volume-covid19-A-0121.nii.gz"}, {"image": "data/volume-covid19-A-0574.nii.gz"}, {"image": "data/volume-covid19-A-0526.nii.gz"}, {"image": "data/volume-covid19-A-0436.nii.gz"}, {"image": "data/volume-covid19-A-0494.nii.gz"}, {"image": "data/volume-covid19-A-0014.nii.gz"}, {"image": "data/volume-covid19-A-0465.nii.gz"}, {"image": "data/volume-covid19-A-0702_day000.nii.gz"}, {"image": "data/volume-covid19-A-0173.nii.gz"}, {"image": "data/volume-covid19-A-0706_day000.nii.gz"}, {"image": "data/volume-covid19-A-0684.nii.gz"}, 
{"image": "data/volume-covid19-A-0193.nii.gz"}, {"image": "data/volume-covid19-A-0541.nii.gz"}, {"image": "data/volume-covid19-A-0551.nii.gz"}, {"image": "data/volume-covid19-A-0044.nii.gz"}, {"image": "data/volume-covid19-A-0206.nii.gz"}, {"image": "data/volume-covid19-A-0365.nii.gz"}, {"image": "data/volume-covid19-A-0214.nii.gz"}, {"image": "data/volume-covid19-A-0188.nii.gz"}, {"image": "data/volume-covid19-A-0736_day007.nii.gz"}, {"image": "data/volume-covid19-A-0143.nii.gz"}, {"image": "data/volume-covid19-A-0601.nii.gz"}, {"image": "data/volume-covid19-A-0063.nii.gz"}, {"image": "data/volume-covid19-A-0313.nii.gz"}, {"image": "data/volume-covid19-A-0415.nii.gz"}, {"image": "data/volume-covid19-A-0703_day003.nii.gz"}, {"image": "data/volume-covid19-A-0528.nii.gz"}, {"image": "data/volume-covid19-A-0476.nii.gz"}, {"image": "data/volume-covid19-A-0563.nii.gz"}, {"image": "data/volume-covid19-A-0568.nii.gz"}, {"image": "data/volume-covid19-A-0354.nii.gz"}, {"image": "data/volume-covid19-A-0704_day000.nii.gz"}, {"image": "data/volume-covid19-A-0104.nii.gz"}, {"image": "data/volume-covid19-A-0613.nii.gz"}, {"image": "data/volume-covid19-A-0652.nii.gz"}, {"image": "data/volume-covid19-A-0738_day002.nii.gz"}, {"image": "data/volume-covid19-A-0245.nii.gz"}, {"image": "data/volume-covid19-A-0700_day007.nii.gz"}, {"image": "data/volume-covid19-A-0291_1.nii.gz"}, {"image": "data/volume-covid19-A-0383_1.nii.gz"}, {"image": "data/volume-covid19-A-0276.nii.gz"}, {"image": "data/volume-covid19-A-0187.nii.gz"}, {"image": "data/volume-covid19-A-0338.nii.gz"}, {"image": "data/volume-covid19-A-0650.nii.gz"}, {"image": "data/volume-covid19-A-0452.nii.gz"}, {"image": "data/volume-covid19-A-0419.nii.gz"}, {"image": "data/volume-covid19-A-0453.nii.gz"}, {"image": "data/volume-covid19-A-0719_day002.nii.gz"}, {"image": "data/volume-covid19-A-0701_day006.nii.gz"}, {"image": "data/volume-covid19-A-0718_day017.nii.gz"}, {"image": "data/volume-covid19-A-0348.nii.gz"}, {"image": 
"data/volume-covid19-A-0086.nii.gz"}, {"image": "data/volume-covid19-A-0055.nii.gz"}, {"image": "data/volume-covid19-A-0274.nii.gz"}, {"image": "data/volume-covid19-A-0482.nii.gz"}, {"image": "data/volume-covid19-A-0587.nii.gz"}, {"image": "data/volume-covid19-A-0599.nii.gz"}, {"image": "data/volume-covid19-A-0431.nii.gz"}, {"image": "data/volume-covid19-A-0456.nii.gz"}, {"image": "data/volume-covid19-A-0294.nii.gz"}, {"image": "data/volume-covid19-A-0218.nii.gz"}, {"image": "data/volume-covid19-A-0712_day068.nii.gz"}, {"image": "data/volume-covid19-A-0381_0.nii.gz"}, {"image": "data/volume-covid19-A-0696.nii.gz"}, {"image": "data/volume-covid19-A-0623.nii.gz"}, {"image": "data/volume-covid19-A-0514.nii.gz"}, {"image": "data/volume-covid19-A-0441.nii.gz"}, {"image": "data/volume-covid19-A-0266.nii.gz"}, {"image": "data/volume-covid19-A-0699.nii.gz"}, {"image": "data/volume-covid19-A-0518.nii.gz"}, {"image": "data/volume-covid19-A-0333.nii.gz"}, {"image": "data/volume-covid19-A-0712_day005.nii.gz"}, {"image": "data/volume-covid19-A-0045.nii.gz"}, {"image": "data/volume-covid19-A-0460.nii.gz"}, {"image": "data/volume-covid19-A-0386.nii.gz"}, {"image": "data/volume-covid19-A-0152.nii.gz"}, {"image": "data/volume-covid19-A-0734_day020.nii.gz"}, {"image": "data/volume-covid19-A-0525.nii.gz"}, {"image": "data/volume-covid19-A-0544.nii.gz"}, {"image": "data/volume-covid19-A-0376.nii.gz"}, {"image": "data/volume-covid19-A-0251.nii.gz"}, {"image": "data/volume-covid19-A-0462.nii.gz"}, {"image": "data/volume-covid19-A-0733_day000.nii.gz"}, {"image": "data/volume-covid19-A-0151.nii.gz"}, {"image": "data/volume-covid19-A-0715_day007.nii.gz"}, {"image": "data/volume-covid19-A-0486.nii.gz"}], "validation": [{"image": "data/volume-covid19-A-0656.nii.gz"}, {"image": "data/volume-covid19-A-0495.nii.gz"}, {"image": "data/volume-covid19-A-0705_day043.nii.gz"}, {"image": "data/volume-covid19-A-0264.nii.gz"}, {"image": "data/volume-covid19-A-0300.nii.gz"}, {"image": 
"data/volume-covid19-A-0722_day047.nii.gz"}, {"image": "data/volume-covid19-A-0434.nii.gz"}, {"image": "data/volume-covid19-A-0015.nii.gz"}, {"image": "data/volume-covid19-A-0700_day010.nii.gz"}, {"image": "data/volume-covid19-A-0411.nii.gz"}, {"image": "data/volume-covid19-A-0572.nii.gz"}, {"image": "data/volume-covid19-A-0003.nii.gz"}, {"image": "data/volume-covid19-A-0693.nii.gz"}, {"image": "data/volume-covid19-A-0715_day000.nii.gz"}, {"image": "data/volume-covid19-A-0307.nii.gz"}, {"image": "data/volume-covid19-A-0489.nii.gz"}, {"image": "data/volume-covid19-A-0521.nii.gz"}, {"image": "data/volume-covid19-A-0215.nii.gz"}, {"image": "data/volume-covid19-A-0400.nii.gz"}, {"image": "data/volume-covid19-A-0280.nii.gz"}, {"image": "data/volume-covid19-A-0236.nii.gz"}, {"image": "data/volume-covid19-A-0167_1.nii.gz"}, {"image": "data/volume-covid19-A-0733_day007.nii.gz"}, {"image": "data/volume-covid19-A-0701_day055.nii.gz"}, {"image": "data/volume-covid19-A-0513.nii.gz"}, {"image": "data/volume-covid19-A-0712_day000.nii.gz"}, {"image": "data/volume-covid19-A-0182.nii.gz"}, {"image": "data/volume-covid19-A-0511.nii.gz"}, {"image": "data/volume-covid19-A-0321.nii.gz"}, {"image": "data/volume-covid19-A-0120.nii.gz"}, {"image": "data/volume-covid19-A-0367.nii.gz"}, {"image": "data/volume-covid19-A-0534.nii.gz"}, {"image": "data/volume-covid19-A-0700_day000.nii.gz"}, {"image": "data/volume-covid19-A-0168.nii.gz"}, {"image": "data/volume-covid19-A-0649.nii.gz"}, {"image": "data/volume-covid19-A-0409.nii.gz"}, {"image": "data/volume-covid19-A-0018.nii.gz"}, {"image": "data/volume-covid19-A-0191.nii.gz"}, {"image": "data/volume-covid19-A-0297.nii.gz"}, {"image": "data/volume-covid19-A-0429.nii.gz"}, {"image": "data/volume-covid19-A-0238.nii.gz"}, {"image": "data/volume-covid19-A-0717_day000.nii.gz"}, {"image": "data/volume-covid19-A-0380.nii.gz"}, {"image": "data/volume-covid19-A-0646.nii.gz"}, {"image": "data/volume-covid19-A-0026.nii.gz"}, {"image": 
"data/volume-covid19-A-0176.nii.gz"}, {"image": "data/volume-covid19-A-0611.nii.gz"}, {"image": "data/volume-covid19-A-0224.nii.gz"}, {"image": "data/volume-covid19-A-0628.nii.gz"}]}
================================================
FILE: jsons/flare23.json
================================================
{"training": [{"image": "imagesTr2200/1-500/FLARE23_0001_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0002_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0003_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0004_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0005_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0006_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0007_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0008_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0009_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0010_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0011_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0012_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0013_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0014_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0015_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0016_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0017_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0018_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0019_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0020_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0021_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0022_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0023_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0024_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0025_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0026_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0027_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0028_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0029_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0030_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0031_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0032_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0033_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0034_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0035_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0036_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0037_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0038_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0039_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0040_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0041_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0042_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0043_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0044_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0045_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0046_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0047_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0048_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0049_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0050_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0051_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0052_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0053_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0054_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0055_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0056_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0057_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0058_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0059_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0060_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0061_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0062_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0063_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0064_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0065_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0066_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0067_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0068_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0069_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0070_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0071_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0072_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0073_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0074_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0075_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0076_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0077_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0078_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0079_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0080_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0081_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0082_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0083_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0084_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0085_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0086_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0087_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0088_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0089_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0090_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0091_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0092_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0093_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0094_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0095_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0096_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0097_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0098_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0099_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0100_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0101_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0102_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0103_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0104_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0105_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0106_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0107_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0108_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0109_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0110_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0111_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0112_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0113_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0114_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0115_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0116_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0117_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0118_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0119_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0120_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0121_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0122_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0123_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0124_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0125_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0126_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0127_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0128_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0129_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0130_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0131_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0132_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0133_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0134_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0135_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0136_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0137_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0138_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0139_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0140_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0141_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0142_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0143_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0144_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0145_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0146_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0147_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0148_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0149_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0150_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0151_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0152_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0153_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0154_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0155_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0156_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0157_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0158_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0159_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0160_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0161_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0162_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0163_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0164_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0165_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0166_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0167_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0168_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0169_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0170_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0171_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0172_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0173_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0174_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0175_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0176_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0177_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0178_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0179_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0180_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0181_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0182_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0183_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0184_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0185_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0186_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0187_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0188_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0189_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0190_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0191_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0192_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0193_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0194_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0195_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0196_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0197_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0198_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0199_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0200_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0201_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0202_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0203_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0204_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0205_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0206_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0207_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0208_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0209_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0210_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0211_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0212_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0213_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0214_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0215_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0216_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0217_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0218_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0219_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0220_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0221_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0222_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0223_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0224_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0225_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0226_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0227_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0228_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0229_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0230_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0231_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0232_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0233_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0234_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0235_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0236_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0237_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0238_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0239_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0240_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0241_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0242_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0243_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0244_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0245_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0246_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0247_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0248_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0249_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0250_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0251_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0252_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0253_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0254_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0255_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0256_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0257_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0258_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0259_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0260_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0261_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0262_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0263_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0264_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0265_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0266_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0267_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0268_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0269_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0270_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0271_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0272_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0273_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0274_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0275_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0276_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0277_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0278_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0279_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0280_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0281_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0282_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0283_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0284_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0285_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0286_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0287_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0288_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0289_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0290_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0291_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0292_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0293_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0294_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0295_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0296_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0297_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0298_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0299_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0300_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0301_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0302_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0303_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0304_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0305_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0306_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0307_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0308_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0309_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0310_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0311_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0312_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0313_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0314_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0315_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0316_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0317_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0318_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0319_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0320_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0321_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0322_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0323_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0324_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0325_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0326_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0327_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0328_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0329_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0330_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0331_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0332_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0333_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0334_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0335_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0336_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0337_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0338_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0339_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0340_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0341_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0342_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0343_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0344_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0345_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0346_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0347_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0348_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0349_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0350_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0351_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0352_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0353_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0354_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0355_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0356_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0357_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0358_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0359_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0360_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0361_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0362_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0363_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0364_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0365_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0366_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0367_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0368_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0369_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0370_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0371_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0372_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0373_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0374_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0375_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0376_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0377_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0378_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0379_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0380_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0381_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0382_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0383_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0384_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0385_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0386_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0387_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0388_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0389_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0390_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0391_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0392_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0393_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0394_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0395_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0396_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0397_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0398_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0399_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0400_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0401_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0402_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0403_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0404_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0405_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0406_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0407_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0408_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0409_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0410_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0411_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0412_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0413_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0414_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0415_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0416_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0417_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0418_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0419_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0420_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0421_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0422_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0423_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0424_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0425_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0426_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0427_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0428_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0429_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0430_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0431_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0432_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0433_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0434_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0435_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0436_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0437_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0438_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0439_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0440_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0441_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0442_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0443_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0444_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0445_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0446_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0447_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0448_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0449_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0450_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0451_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0452_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0453_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0454_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0455_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0456_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0457_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0458_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0459_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0460_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0461_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0462_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0463_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0464_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0465_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0466_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0467_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0468_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0469_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0470_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0471_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0472_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0473_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0474_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0475_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0476_0000.nii.gz"}, {"image": 
"imagesTr2200/1-500/FLARE23_0477_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0478_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0479_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0480_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0481_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0482_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0483_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0484_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0485_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0486_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0487_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0488_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0489_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0490_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0491_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0492_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0493_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0494_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0495_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0496_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0497_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0498_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0499_0000.nii.gz"}, {"image": "imagesTr2200/1-500/FLARE23_0500_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1001_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1002_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1003_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1004_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1005_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1006_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1007_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1008_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1009_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1010_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1011_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1012_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1013_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1014_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1015_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1016_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1017_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1018_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1019_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1020_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1021_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1022_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1023_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1024_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1025_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1026_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1027_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1028_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1029_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1030_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1031_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1032_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1033_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1034_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1035_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1036_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1037_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1038_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1039_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1040_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1041_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1042_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1043_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1044_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1045_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1046_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1047_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1048_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1049_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1050_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1051_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1052_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1053_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1054_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1055_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1056_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1057_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1058_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1059_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1060_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1061_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1062_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1063_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1064_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1065_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1066_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1067_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1068_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1069_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1070_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1071_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1072_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1073_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1074_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1075_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1076_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1077_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1078_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1079_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1080_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1081_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1082_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1083_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1084_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1085_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1086_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1087_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1088_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1089_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1090_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1091_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1092_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1093_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1094_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1095_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1096_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1097_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1098_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1099_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1100_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1101_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1102_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1103_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1104_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1105_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1106_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1107_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1108_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1109_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1110_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1111_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1112_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1113_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1114_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1115_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1116_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1117_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1118_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1119_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1120_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1121_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1122_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1123_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1124_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1125_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1126_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1127_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1128_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1129_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1130_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1131_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1132_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1133_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1134_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1135_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1136_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1137_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1138_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1139_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1140_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1141_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1142_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1143_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1144_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1145_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1146_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1147_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1148_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1149_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1150_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1151_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1152_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1153_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1154_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1155_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1156_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1157_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1158_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1159_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1160_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1161_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1162_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1163_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1164_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1165_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1166_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1167_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1168_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1169_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1170_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1171_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1172_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1173_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1174_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1175_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1176_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1177_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1178_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1179_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1180_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1181_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1182_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1183_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1184_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1185_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1186_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1187_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1188_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1189_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1190_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1191_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1192_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1193_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1194_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1195_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1196_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1197_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1198_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1199_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1200_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1201_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1202_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1203_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1204_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1205_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1206_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1207_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1208_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1209_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1210_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1211_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1212_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1213_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1214_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1215_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1216_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1217_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1218_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1219_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1220_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1221_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1222_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1223_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1224_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1225_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1226_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1227_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1228_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1229_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1230_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1231_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1232_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1233_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1234_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1235_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1236_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1237_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1238_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1239_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1240_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1241_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1242_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1243_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1244_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1245_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1246_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1247_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1248_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1249_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1250_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1251_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1252_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1253_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1254_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1255_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1256_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1257_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1258_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1259_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1260_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1261_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1262_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1263_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1264_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1265_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1266_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1267_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1268_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1269_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1270_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1271_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1272_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1273_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1274_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1275_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1276_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1277_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1278_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1279_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1280_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1281_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1282_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1283_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1284_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1285_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1286_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1287_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1288_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1289_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1290_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1291_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1292_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1293_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1294_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1295_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1296_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1297_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1298_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1299_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1300_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1301_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1302_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1303_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1304_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1305_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1306_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1307_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1308_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1309_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1310_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1311_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1312_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1313_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1314_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1315_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1316_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1317_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1318_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1319_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1320_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1321_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1322_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1323_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1324_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1325_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1326_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1327_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1328_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1329_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1330_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1331_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1332_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1333_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1334_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1335_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1336_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1337_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1338_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1339_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1340_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1341_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1342_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1343_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1344_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1345_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1346_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1347_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1348_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1349_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1350_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1351_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1352_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1353_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1354_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1355_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1356_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1357_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1358_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1359_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1360_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1361_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1362_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1363_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1364_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1365_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1366_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1367_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1368_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1369_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1370_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1371_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1372_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1373_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1374_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1375_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1376_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1377_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1378_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1379_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1380_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1381_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1382_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1383_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1384_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1385_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1386_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1387_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1388_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1389_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1390_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1391_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1392_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1393_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1394_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1395_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1396_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1397_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1398_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1399_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1400_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1401_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1402_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1403_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1404_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1405_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1406_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1407_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1408_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1409_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1410_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1411_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1412_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1413_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1414_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1415_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1416_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1417_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1418_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1419_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1420_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1421_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1422_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1423_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1424_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1425_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1426_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1427_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1428_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1429_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1430_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1431_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1432_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1433_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1434_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1435_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1436_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1437_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1438_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1439_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1440_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1441_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1442_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1443_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1444_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1445_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1446_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1447_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1448_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1449_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1450_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1451_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1452_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1453_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1454_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1455_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1456_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1457_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1458_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1459_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1460_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1461_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1462_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1463_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1464_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1465_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1466_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1467_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1468_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1469_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1470_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1471_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1472_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1473_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1474_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1475_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1476_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1477_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1478_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1479_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1480_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1481_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1482_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1483_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1484_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1485_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1486_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1487_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1488_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1489_0000.nii.gz"}, {"image": 
"imagesTr2200/1001-1500/FLARE23_1490_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1491_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1492_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1493_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1494_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1495_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1496_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1497_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1498_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1499_0000.nii.gz"}, {"image": "imagesTr2200/1001-1500/FLARE23_1500_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1501_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1502_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1503_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1504_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1505_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1506_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1507_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1508_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1509_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1510_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1511_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1512_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1513_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1514_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1515_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1516_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1517_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1518_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1519_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1520_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1521_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1522_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1523_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1524_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1525_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1526_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1527_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1528_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1529_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1530_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1531_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1532_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1533_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1534_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1535_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1536_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1537_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1538_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1539_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1540_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1541_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1542_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1543_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1544_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1545_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1546_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1547_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1548_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1549_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1550_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1551_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1552_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1553_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1554_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1555_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1556_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1557_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1558_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1559_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1560_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1561_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1562_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1563_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1564_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1565_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1566_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1567_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1568_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1569_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1570_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1571_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1572_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1573_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1574_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1575_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1576_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1577_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1578_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1579_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1580_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1581_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1582_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1583_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1584_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1585_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1586_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1587_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1588_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1589_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1590_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1591_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1592_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1593_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1594_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1595_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1596_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1597_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1598_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1599_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1600_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1601_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1602_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1603_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1604_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1605_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1606_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1607_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1608_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1609_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1610_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1611_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1612_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1613_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1614_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1615_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1616_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1617_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1618_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1619_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1620_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1621_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1622_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1623_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1624_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1625_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1626_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1627_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1628_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1629_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1630_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1631_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1632_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1633_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1634_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1635_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1636_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1637_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1638_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1639_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1640_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1641_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1642_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1643_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1644_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1645_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1646_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1647_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1648_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1649_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1650_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1651_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1652_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1653_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1654_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1655_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1656_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1657_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1658_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1659_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1660_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1661_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1662_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1663_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1664_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1665_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1666_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1667_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1668_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1669_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1670_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1671_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1672_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1673_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1674_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1675_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1676_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1677_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1678_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1679_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1680_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1681_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1682_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1683_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1684_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1685_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1686_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1687_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1688_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1689_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1690_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1691_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1692_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1693_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1694_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1695_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1696_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1697_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1698_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1699_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1700_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1701_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1702_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1703_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1704_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1705_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1706_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1707_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1708_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1709_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1710_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1711_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1712_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1713_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1714_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1715_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1716_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1717_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1718_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1719_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1720_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1721_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1722_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1723_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1724_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1725_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1726_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1727_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1728_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1729_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1730_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1731_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1732_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1733_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1734_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1735_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1736_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1737_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1738_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1739_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1740_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1741_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1742_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1743_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1744_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1745_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1746_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1747_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1748_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1749_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1750_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1751_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1752_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1753_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1754_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1755_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1756_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1757_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1758_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1759_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1760_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1761_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1762_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1763_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1764_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1765_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1766_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1767_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1768_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1769_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1770_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1771_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1772_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1773_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1774_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1775_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1776_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1777_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1778_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1779_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1780_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1781_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1782_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1783_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1784_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1785_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1786_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1787_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1788_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1789_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1790_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1791_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1792_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1793_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1794_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1795_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1796_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1797_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1798_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1799_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1800_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1801_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1802_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1803_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1804_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1805_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1806_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1807_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1808_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1809_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1810_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1811_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1812_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1813_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1814_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1815_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1816_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1817_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1818_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1819_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1820_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1821_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1822_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1823_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1824_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1825_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1826_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1827_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1828_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1829_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1830_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1831_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1832_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1833_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1834_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1835_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1836_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1837_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1838_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1839_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1840_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1841_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1842_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1843_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1844_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1845_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1846_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1847_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1848_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1849_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1850_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1851_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1852_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1853_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1854_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1855_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1856_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1857_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1858_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1859_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1860_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1861_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1862_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1863_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1864_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1865_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1866_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1867_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1868_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1869_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1870_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1871_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1872_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1873_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1874_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1875_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1876_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1877_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1878_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1879_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1880_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1881_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1882_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1883_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1884_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1885_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1886_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1887_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1888_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1889_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1890_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1891_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1892_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1893_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1894_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1895_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1896_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1897_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1898_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1899_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1900_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1901_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1902_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1903_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1904_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1905_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1906_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1907_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1908_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1909_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1910_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1911_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1912_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1913_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1914_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1915_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1916_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1917_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1918_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1919_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1920_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1921_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1922_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1923_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1924_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1925_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1926_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1927_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1928_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1929_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1930_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1931_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1932_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1933_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1934_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1935_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1936_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1937_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1938_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1939_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1940_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1941_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1942_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1943_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1944_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1945_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1946_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1947_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1948_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1949_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1950_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1951_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1952_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1953_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1954_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1955_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1956_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1957_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1958_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1959_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1960_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1961_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1962_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1963_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1964_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1965_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1966_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1967_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1968_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1969_0000.nii.gz"}, {"image": 
"imagesTr2200/1501-2000/FLARE23_1970_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1971_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1972_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1973_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1974_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1975_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1976_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1977_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1978_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1979_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1980_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1981_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1982_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1983_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1984_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1985_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1986_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1987_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1988_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1989_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1990_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1991_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1992_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1993_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1994_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1995_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1996_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1997_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1998_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_1999_0000.nii.gz"}, {"image": "imagesTr2200/1501-2000/FLARE23_2000_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2001_0000.nii.gz"}, {"image": 
"imagesTr2200/2001-2200/FLARE23_2002_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2003_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2004_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2005_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2006_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2007_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2008_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2009_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2010_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2011_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2012_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2013_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2014_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2015_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2016_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2017_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2018_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2019_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2020_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2021_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2022_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2023_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2024_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2025_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2026_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2027_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2028_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2029_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2030_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2031_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2032_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2033_0000.nii.gz"}, {"image": 
"imagesTr2200/2001-2200/FLARE23_2034_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2035_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2036_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2037_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2038_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2039_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2040_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2041_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2042_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2043_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2044_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2045_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2046_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2047_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2048_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2049_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2050_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2051_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2052_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2053_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2054_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2055_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2056_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2057_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2058_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2059_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2060_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2061_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2062_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2063_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2064_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2065_0000.nii.gz"}, {"image": 
"imagesTr2200/2001-2200/FLARE23_2066_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2067_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2068_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2069_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2070_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2071_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2072_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2073_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2074_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2075_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2076_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2077_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2078_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2079_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2080_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2081_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2082_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2083_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2084_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2085_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2086_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2087_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2088_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2089_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2090_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2091_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2092_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2093_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2094_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2095_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2096_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2097_0000.nii.gz"}, {"image": 
"imagesTr2200/2001-2200/FLARE23_2098_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2099_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2100_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2101_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2102_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2103_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2104_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2105_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2106_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2107_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2108_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2109_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2110_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2111_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2112_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2113_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2114_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2115_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2116_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2117_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2118_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2119_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2120_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2121_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2122_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2123_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2124_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2125_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2126_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2127_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2128_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2129_0000.nii.gz"}, {"image": 
"imagesTr2200/2001-2200/FLARE23_2130_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2131_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2132_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2133_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2134_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2135_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2136_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2137_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2138_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2139_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2140_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2141_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2142_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2143_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2144_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2145_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2146_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2147_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2148_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2149_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2150_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2151_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2152_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2153_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2154_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2155_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2156_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2157_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2158_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2159_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2160_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2161_0000.nii.gz"}, {"image": 
"imagesTr2200/2001-2200/FLARE23_2162_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2163_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2164_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2165_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2166_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2167_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2168_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2169_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2170_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2171_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2172_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2173_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2174_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2175_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2176_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2177_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2178_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2179_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2180_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2181_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2182_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2183_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2184_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2185_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2186_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2187_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2188_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2189_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2190_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2191_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2192_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2193_0000.nii.gz"}, {"image": 
"imagesTr2200/2001-2200/FLARE23_2194_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2195_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2196_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2197_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2198_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2199_0000.nii.gz"}, {"image": "imagesTr2200/2001-2200/FLARE23_2200_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0501_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0502_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0503_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0504_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0505_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0506_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0507_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0508_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0509_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0510_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0511_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0512_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0513_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0514_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0515_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0516_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0517_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0518_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0519_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0520_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0521_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0522_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0523_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0524_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0525_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0526_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0527_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0528_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0529_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0530_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0531_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0532_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0533_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0534_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0535_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0536_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0537_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0538_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0539_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0540_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0541_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0542_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0543_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0544_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0545_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0546_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0547_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0548_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0549_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0550_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0551_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0552_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0553_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0554_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0555_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0556_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0557_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0558_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0559_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0560_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0561_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0562_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0563_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0564_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0565_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0566_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0567_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0568_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0569_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0570_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0571_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0572_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0573_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0574_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0575_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0576_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0577_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0578_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0579_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0580_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0581_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0582_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0583_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0584_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0585_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0586_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0587_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0588_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0589_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0590_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0591_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0592_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0593_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0594_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0595_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0596_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0597_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0598_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0599_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0600_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0601_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0602_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0603_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0604_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0605_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0606_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0607_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0608_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0609_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0610_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0611_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0612_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0613_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0614_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0615_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0616_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0617_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0618_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0619_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0620_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0621_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0622_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0623_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0624_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0625_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0626_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0627_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0628_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0629_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0630_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0631_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0632_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0633_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0634_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0635_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0636_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0637_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0638_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0639_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0640_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0641_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0642_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0643_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0644_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0645_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0646_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0647_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0648_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0649_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0650_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0651_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0652_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0653_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0654_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0655_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0656_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0657_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0658_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0659_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0660_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0661_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0662_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0663_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0664_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0665_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0666_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0667_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0668_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0669_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0670_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0671_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0672_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0673_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0674_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0675_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0676_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0677_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0678_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0679_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0680_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0681_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0682_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0683_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0684_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0685_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0686_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0687_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0688_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0689_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0690_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0691_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0692_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0693_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0694_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0695_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0696_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0697_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0698_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0699_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0700_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0701_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0702_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0703_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0704_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0705_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0706_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0707_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0708_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0709_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0710_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0711_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0712_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0713_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0714_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0715_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0716_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0717_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0718_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0719_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0720_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0721_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0722_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0723_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0724_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0725_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0726_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0727_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0728_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0729_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0730_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0731_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0732_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0733_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0734_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0735_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0736_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0737_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0738_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0739_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0740_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0741_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0742_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0743_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0744_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0745_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0746_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0747_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0748_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0749_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0750_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0751_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0752_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0753_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0754_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0755_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0756_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0757_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0758_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0759_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0760_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0761_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0762_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0763_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0764_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0765_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0766_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0767_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0768_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0769_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0770_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0771_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0772_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0773_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0774_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0775_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0776_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0777_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0778_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0779_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0780_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0781_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0782_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0783_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0784_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0785_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0786_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0787_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0788_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0789_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0790_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0791_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0792_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0793_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0794_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0795_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0796_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0797_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0798_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0799_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0800_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0801_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0802_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0803_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0804_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0805_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0806_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0807_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0808_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0809_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0810_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0811_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0812_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0813_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0814_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0815_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0816_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0817_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0818_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0819_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0820_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0821_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0822_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0823_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0824_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0825_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0826_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0827_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0828_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0829_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0830_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0831_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0832_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0833_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0834_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0835_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0836_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0837_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0838_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0839_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0840_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0841_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0842_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0843_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0844_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0845_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0846_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0847_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0848_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0849_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0850_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0851_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0852_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0853_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0854_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0855_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0856_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0857_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0858_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0859_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0860_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0861_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0862_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0863_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0864_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0865_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0866_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0867_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0868_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0869_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0870_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0871_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0872_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0873_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0874_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0875_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0876_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0877_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0878_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0879_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0880_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0881_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0882_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0883_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0884_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0885_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0886_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0887_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0888_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0889_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0890_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0891_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0892_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0893_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0894_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0895_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0896_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0897_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0898_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0899_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0900_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0901_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0902_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0903_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0904_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0905_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0906_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0907_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0908_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0909_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0910_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0911_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0912_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0913_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0914_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0915_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0916_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0917_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0918_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0919_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0920_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0921_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0922_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0923_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0924_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0925_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0926_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0927_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0928_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0929_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0930_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0931_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0932_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0933_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0934_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0935_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0936_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0937_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0938_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0939_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0940_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0941_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0942_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0943_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0944_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0945_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0946_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0947_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0948_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0949_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0950_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0951_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0952_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0953_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0954_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0955_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0956_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0957_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0958_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0959_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0960_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0961_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0962_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0963_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0964_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0965_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0966_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0967_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0968_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0969_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0970_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0971_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0972_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0973_0000.nii.gz"}, {"image": 
"imagesTr2200/501-1000/FLARE23_0974_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0975_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0976_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0977_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0978_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0979_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0980_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0981_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0982_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0983_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0984_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0985_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0986_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0987_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0988_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0989_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0990_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0991_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0992_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0993_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0994_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0995_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0996_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0997_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0998_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_0999_0000.nii.gz"}, {"image": "imagesTr2200/501-1000/FLARE23_1000_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3101_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3102_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3103_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3104_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3105_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3106_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3107_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3108_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3109_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3110_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3111_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3112_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3113_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3114_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3115_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3116_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3117_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3118_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3119_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3120_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3121_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3122_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3123_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3124_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3125_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3126_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3127_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3128_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3129_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3130_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3131_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3132_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3133_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3134_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3135_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3136_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3137_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3138_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3139_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3140_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3141_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3142_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3143_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3144_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3145_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3146_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3147_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3148_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3149_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3150_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3151_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3152_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3153_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3154_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3155_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3156_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3157_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3158_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3159_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3160_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3161_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3162_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3163_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3164_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3165_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3166_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3167_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3168_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3169_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3170_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3171_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3172_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3173_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3174_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3175_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3176_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3177_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3178_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3179_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3180_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3181_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3182_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3183_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3184_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3185_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3186_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3187_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3188_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3189_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3190_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3191_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3192_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3193_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3194_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3195_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3196_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3197_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3198_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3199_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3200_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3201_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3202_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3203_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3204_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3205_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3206_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3207_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3208_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3209_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3210_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3211_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3212_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3213_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3214_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3215_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3216_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3217_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3218_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3219_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3220_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3221_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3222_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3223_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3224_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3225_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3226_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3227_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3228_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3229_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3230_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3231_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3232_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3233_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3234_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3235_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3236_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3237_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3238_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3239_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3240_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3241_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3242_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3243_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3244_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3245_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3246_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3247_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3248_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3249_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3250_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3251_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3252_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3253_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3254_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3255_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3256_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3257_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3258_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3259_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3260_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3261_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3262_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3263_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3264_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3265_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3266_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3267_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3268_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3269_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3270_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3271_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3272_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3273_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3274_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3275_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3276_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3277_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3278_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3279_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3280_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3281_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3282_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3283_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3284_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3285_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3286_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3287_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3288_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3289_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3290_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3291_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3292_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3293_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3294_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3295_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3296_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3297_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3298_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3299_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3300_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3301_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3302_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3303_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3304_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3305_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3306_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3307_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3308_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3309_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3310_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3311_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3312_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3313_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3314_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3315_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3316_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3317_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3318_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3319_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3320_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3321_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3322_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3323_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3324_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3325_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3326_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3327_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3328_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3329_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3330_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3331_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3332_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3333_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3334_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3335_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3336_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3337_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3338_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3339_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3340_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3341_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3342_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3343_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3344_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3345_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3346_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3347_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3348_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3349_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3350_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3351_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3352_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3353_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3354_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3355_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3356_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3357_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3358_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3359_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3360_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3361_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3362_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3363_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3364_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3365_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3366_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3367_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3368_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3369_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3370_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3371_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3372_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3373_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3374_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3375_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3376_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3377_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3378_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3379_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3380_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3381_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3382_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3383_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3384_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3385_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3386_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3387_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3388_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3389_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3390_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3391_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3392_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3393_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3394_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3395_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3396_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3397_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3398_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3399_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3400_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3401_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3402_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3403_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3404_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3405_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3406_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3407_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3408_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3409_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3410_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3411_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3412_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3413_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3414_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3415_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3416_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3417_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3418_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3419_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3420_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3421_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3422_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3423_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3424_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3425_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3426_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3427_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3428_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3429_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3430_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3431_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3432_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3433_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3434_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3435_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3436_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3437_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3438_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3439_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3440_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3441_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3442_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3443_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3444_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3445_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3446_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3447_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3448_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3449_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3450_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3451_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3452_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3453_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3454_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3455_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3456_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3457_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3458_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3459_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3460_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3461_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3462_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3463_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3464_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3465_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3466_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3467_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3468_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3469_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3470_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3471_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3472_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3473_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3474_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3475_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3476_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3477_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3478_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3479_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3480_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3481_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3482_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3483_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3484_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3485_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3486_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3487_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3488_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3489_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3490_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3491_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3492_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3493_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3494_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3495_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3496_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3497_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3498_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3499_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3500_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3501_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3502_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3503_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3504_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3505_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3506_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3507_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3508_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3509_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3510_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3511_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3512_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3513_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3514_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3515_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3516_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3517_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3518_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3519_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3520_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3521_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3522_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3523_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3524_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3525_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3526_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3527_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3528_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3529_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3530_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3531_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3532_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3533_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3534_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3535_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3536_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3537_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3538_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3539_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3540_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3541_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3542_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3543_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3544_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3545_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3546_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3547_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3548_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3549_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3550_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3551_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3552_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3553_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3554_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3555_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3556_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3557_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3558_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3559_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3560_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3561_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3562_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3563_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3564_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3565_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3566_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3567_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3568_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3569_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3570_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3571_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3572_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3573_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3574_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3575_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3576_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3577_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3578_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3579_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3580_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3581_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3582_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3583_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3584_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3585_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3586_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3587_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3588_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3589_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3590_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3591_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3592_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3593_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3594_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3595_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3596_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3597_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3598_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3599_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3600_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3601_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3602_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3603_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3604_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3605_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3606_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3607_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3608_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3609_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3610_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3611_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3612_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3613_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3614_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3615_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3616_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3617_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3618_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3619_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3620_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3621_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3622_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3623_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3624_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3625_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3626_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3627_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3628_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3629_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3630_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3631_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3632_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3633_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3634_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3635_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3636_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3637_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3638_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3639_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3640_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3641_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3642_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3643_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3644_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3645_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3646_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3647_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3648_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3649_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3650_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3651_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3652_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3653_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3654_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3655_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3656_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3657_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3658_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3659_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3660_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3661_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3662_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3663_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3664_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3665_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3666_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3667_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3668_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3669_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3670_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3671_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3672_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3673_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3674_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3675_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3676_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3677_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3678_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3679_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3680_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3681_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3682_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3683_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3684_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3685_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3686_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3687_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3688_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3689_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3690_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3691_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3692_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3693_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3694_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3695_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3696_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3697_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3698_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3699_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3700_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3701_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3702_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3703_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3704_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3705_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3706_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3707_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3708_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3709_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3710_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3711_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3712_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3713_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3714_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3715_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3716_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3717_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3718_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3719_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3720_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3721_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3722_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3723_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3724_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3725_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3726_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3727_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3728_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3729_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3730_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3731_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3732_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3733_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3734_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3735_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3736_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3737_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3738_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3739_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3740_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3741_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3742_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3743_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3744_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3745_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3746_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3747_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3748_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3749_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3750_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3751_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3752_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3753_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3754_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3755_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3756_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3757_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3758_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3759_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3760_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3761_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3762_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3763_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3764_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3765_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3766_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3767_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3768_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3769_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3770_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3771_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3772_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3773_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3774_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3775_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3776_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3777_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3778_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3779_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3780_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3781_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3782_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3783_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3784_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3785_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3786_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3787_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3788_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3789_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3790_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3791_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3792_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3793_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3794_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3795_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3796_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3797_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3798_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3799_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3800_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3801_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3802_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3803_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3804_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3805_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3806_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3807_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3808_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3809_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3810_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3811_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3812_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3813_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3814_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3815_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3816_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3817_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3818_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3819_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3820_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3821_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3822_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3823_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3824_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3825_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3826_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3827_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3828_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3829_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3830_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3831_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3832_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3833_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3834_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3835_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3836_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3837_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3838_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3839_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3840_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3841_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3842_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3843_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3844_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3845_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3846_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3847_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3848_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3849_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3850_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3851_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3852_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3853_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3854_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3855_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3856_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3857_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3858_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3859_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3860_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3861_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3862_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3863_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3864_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3865_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3866_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3867_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3868_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3869_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3870_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3871_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3872_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3873_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3874_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3875_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3876_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3877_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3878_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3879_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3880_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3881_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3882_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3883_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3884_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3885_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3886_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3887_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3888_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3889_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3890_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3891_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3892_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3893_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3894_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3895_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3896_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3897_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3898_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3899_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3900_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3901_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3902_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3903_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3904_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3905_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3906_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3907_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3908_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3909_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3910_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3911_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3912_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3913_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3914_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3915_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3916_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3917_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3918_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3919_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3920_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3921_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3922_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3923_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3924_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3925_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3926_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3927_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3928_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3929_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3930_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3931_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3932_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3933_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3934_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3935_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3936_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3937_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3938_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3939_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3940_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3941_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3942_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3943_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3944_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3945_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3946_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3947_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3948_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3949_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3950_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3951_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3952_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3953_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3954_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3955_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3956_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3957_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3958_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3959_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3960_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3961_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3962_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3963_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3964_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3965_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3966_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3967_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3968_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3969_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3970_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3971_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3972_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3973_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabel3101-4000/FLARE23_3974_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3975_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3976_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3977_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3978_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3979_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3980_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3981_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3982_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3983_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3984_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3985_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3986_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3987_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3988_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3989_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3990_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3991_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3992_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3993_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3994_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3995_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3996_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3997_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3998_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_3999_0000.nii.gz"}, {"image": "unlabelTr1800/unlabel3101-4000/FLARE23_4000_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2201_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2202_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2203_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2204_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2205_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2206_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2207_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2208_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2209_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2210_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2211_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2212_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2213_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2214_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2215_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2216_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2217_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2218_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2219_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2220_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2221_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2222_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2223_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2224_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2225_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2226_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2227_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2228_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2229_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2230_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2231_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2232_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2233_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2234_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2235_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2236_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2237_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2238_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2239_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2240_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2241_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2242_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2243_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2244_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2245_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2246_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2247_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2248_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2249_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2250_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2251_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2252_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2253_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2254_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2255_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2256_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2257_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2258_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2259_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2260_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2261_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2262_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2263_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2264_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2265_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2266_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2267_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2268_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2269_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2270_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2271_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2272_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2273_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2274_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2275_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2276_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2277_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2278_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2279_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2280_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2281_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2282_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2283_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2284_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2285_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2286_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2287_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2288_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2289_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2290_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2291_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2292_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2293_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2294_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2295_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2296_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2297_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2298_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2299_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2300_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2301_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2302_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2303_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2304_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2305_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2306_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2307_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2308_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2309_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2310_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2311_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2312_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2313_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2314_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2315_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2316_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2317_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2318_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2319_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2320_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2321_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2322_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2323_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2324_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2325_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2326_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2327_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2328_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2329_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2330_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2331_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2332_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2333_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2334_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2335_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2336_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2337_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2338_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2339_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2340_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2341_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2342_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2343_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2344_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2345_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2346_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2347_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2348_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2349_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2350_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2351_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2352_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2353_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2354_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2355_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2356_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2357_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2358_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2359_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2360_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2361_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2362_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2363_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2364_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2365_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2366_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2367_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2368_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2369_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2370_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2371_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2372_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2373_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2374_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2375_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2376_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2377_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2378_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2379_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2380_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2381_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2382_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2383_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2384_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2385_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2386_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2387_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2388_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2389_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2390_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2391_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2392_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2393_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2394_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2395_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2396_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2397_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2398_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2399_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2400_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2401_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2402_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2403_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2404_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2405_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2406_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2407_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2408_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2409_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2410_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2411_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2412_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2413_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2414_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2415_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2416_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2417_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2418_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2419_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2420_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2421_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2422_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2423_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2424_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2425_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2426_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2427_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2428_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2429_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2430_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2431_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2432_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2433_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2434_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2435_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2436_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2437_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2438_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2439_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2440_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2441_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2442_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2443_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2444_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2445_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2446_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2447_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2448_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2449_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2450_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2451_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2452_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2453_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2454_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2455_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2456_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2457_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2458_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2459_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2460_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2461_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2462_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2463_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2464_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2465_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2466_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2467_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2468_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2469_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2470_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2471_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2472_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2473_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2474_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2475_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2476_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2477_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2478_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2479_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2480_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2481_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2482_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2483_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2484_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2485_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2486_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2487_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2488_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2489_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2490_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2491_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2492_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2493_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2494_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2495_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2496_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2497_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2498_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2499_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2500_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2501_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2502_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2503_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2504_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2505_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2506_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2507_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2508_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2509_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2510_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2511_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2512_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2513_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2514_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2515_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2516_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2517_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2518_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2519_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2520_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2521_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2522_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2523_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2524_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2525_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2526_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2527_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2528_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2529_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2530_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2531_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2532_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2533_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2534_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2535_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2536_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2537_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2538_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2539_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2540_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2541_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2542_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2543_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2544_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2545_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2546_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2547_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2548_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2549_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2550_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2551_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2552_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2553_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2554_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2555_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2556_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2557_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2558_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2559_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2560_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2561_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2562_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2563_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2564_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2565_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2566_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2567_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2568_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2569_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2570_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2571_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2572_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2573_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2574_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2575_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2576_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2577_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2578_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2579_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2580_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2581_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2582_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2583_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2584_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2585_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2586_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2587_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2588_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2589_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2590_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2591_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2592_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2593_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2594_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2595_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2596_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2597_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2598_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2599_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2600_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2601_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2602_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2603_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2604_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2605_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2606_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2607_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2608_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2609_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2610_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2611_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2612_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2613_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2614_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2615_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2616_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2617_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2618_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2619_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2620_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2621_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2622_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2623_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2624_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2625_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2626_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2627_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2628_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2629_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2630_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2631_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2632_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2633_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2634_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2635_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2636_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2637_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2638_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2639_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2640_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2641_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2642_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2643_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2644_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2645_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2646_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2647_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2648_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2649_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2650_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2651_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2652_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2653_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2654_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2655_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2656_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2657_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2658_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2659_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2660_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2661_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2662_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2663_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2664_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2665_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2666_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2667_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2668_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2669_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2670_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2671_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2672_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2673_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2674_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2675_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2676_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2677_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2678_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2679_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2680_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2681_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2682_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2683_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2684_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2685_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2686_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2687_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2688_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2689_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2690_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2691_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2692_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2693_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2694_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2695_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2696_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2697_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2698_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2699_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2700_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2701_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2702_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2703_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2704_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2705_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2706_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2707_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2708_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2709_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2710_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2711_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2712_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2713_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2714_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2715_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2716_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2717_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2718_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2719_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2720_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2721_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2722_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2723_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2724_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2725_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2726_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2727_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2728_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2729_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2730_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2731_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2732_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2733_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2734_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2735_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2736_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2737_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2738_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2739_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2740_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2741_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2742_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2743_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2744_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2745_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2746_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2747_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2748_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2749_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2750_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2751_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2752_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2753_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2754_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2755_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2756_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2757_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2758_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2759_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2760_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2761_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2762_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2763_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2764_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2765_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2766_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2767_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2768_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2769_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2770_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2771_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2772_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2773_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2774_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2775_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2776_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2777_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2778_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2779_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2780_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2781_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2782_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2783_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2784_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2785_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2786_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2787_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2788_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2789_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2790_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2791_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2792_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2793_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2794_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2795_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2796_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2797_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2798_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2799_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2800_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2801_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2802_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2803_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2804_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2805_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2806_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2807_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2808_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2809_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2810_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2811_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2812_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2813_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2814_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2815_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2816_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2817_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2818_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2819_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2820_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2821_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2822_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2823_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2824_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2825_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2826_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2827_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2828_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2829_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2830_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2831_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2832_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2833_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2834_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2835_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2836_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2837_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2838_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2839_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2840_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2841_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2842_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2843_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2844_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2845_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2846_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2847_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2848_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2849_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2850_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2851_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2852_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2853_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2854_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2855_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2856_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2857_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2858_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2859_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2860_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2861_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2862_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2863_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2864_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2865_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2866_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2867_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2868_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2869_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2870_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2871_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2872_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2873_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2874_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2875_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2876_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2877_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2878_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2879_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2880_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2881_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2882_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2883_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2884_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2885_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2886_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2887_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2888_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2889_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2890_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2891_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2892_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2893_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2894_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2895_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2896_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2897_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2898_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2899_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2900_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2901_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2902_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2903_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2904_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2905_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2906_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2907_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2908_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2909_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2910_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2911_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2912_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2913_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2914_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2915_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2916_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2917_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2918_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2919_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2920_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2921_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2922_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2923_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2924_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2925_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2926_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2927_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2928_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2929_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2930_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2931_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2932_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2933_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2934_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2935_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2936_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2937_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2938_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2939_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2940_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2941_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2942_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2943_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2944_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2945_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2946_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2947_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2948_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2949_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2950_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2951_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2952_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2953_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2954_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2955_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2956_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2957_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2958_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2959_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2960_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2961_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2962_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2963_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2964_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2965_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2966_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2967_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2968_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2969_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2970_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2971_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2972_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2973_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2974_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2975_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2976_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2977_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2978_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2979_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2980_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2981_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2982_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2983_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2984_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_2985_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2986_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2987_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2988_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2989_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2990_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2991_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2992_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2993_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2994_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2995_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2996_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2997_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2998_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_2999_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3000_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3001_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3002_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3003_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3004_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3005_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3006_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3007_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3008_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3009_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3010_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3011_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_3012_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3013_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3014_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3015_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3016_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3017_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3018_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3019_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3020_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3021_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3022_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3023_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3024_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3025_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3026_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3027_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3028_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3029_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3030_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3031_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3032_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3033_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3034_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3035_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3036_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3037_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3038_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_3039_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3040_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3041_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3042_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3043_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3044_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3045_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3046_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3047_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3048_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3049_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3050_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3051_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3052_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3053_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3054_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3055_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3056_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3057_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3058_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3059_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3060_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3061_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3062_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3063_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3064_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3065_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_3066_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3067_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3068_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3069_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3070_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3071_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3072_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3073_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3074_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3075_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3076_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3077_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3078_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3079_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3080_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3081_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3082_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3083_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3084_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3085_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3086_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3087_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3088_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3089_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3090_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3091_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3092_0000.nii.gz"}, {"image": 
"unlabelTr1800/unlabelTr2201-3100/FLARE23_3093_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3094_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3095_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3096_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3097_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3098_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3099_0000.nii.gz"}, {"image": "unlabelTr1800/unlabelTr2201-3100/FLARE23_3100_0000.nii.gz"}]}
================================================
FILE: jsons/stoic21.json
================================================
{"training": [{"image": "nii_gz/9994.nii.gz"}, {"image": "nii_gz/9978.nii.gz"}, {"image": "nii_gz/9974.nii.gz"}, {"image": "nii_gz/9968.nii.gz"}, {"image": "nii_gz/9966.nii.gz"}, {"image": "nii_gz/996.nii.gz"}, {"image": "nii_gz/9957.nii.gz"}, {"image": "nii_gz/9948.nii.gz"}, {"image": "nii_gz/9947.nii.gz"}, {"image": "nii_gz/9937.nii.gz"}, {"image": "nii_gz/9934.nii.gz"}, {"image": "nii_gz/9933.nii.gz"}, {"image": "nii_gz/9929.nii.gz"}, {"image": "nii_gz/9922.nii.gz"}, {"image": "nii_gz/9921.nii.gz"}, {"image": "nii_gz/9920.nii.gz"}, {"image": "nii_gz/9918.nii.gz"}, {"image": "nii_gz/9909.nii.gz"}, {"image": "nii_gz/9899.nii.gz"}, {"image": "nii_gz/9893.nii.gz"}, {"image": "nii_gz/9888.nii.gz"}, {"image": "nii_gz/9887.nii.gz"}, {"image": "nii_gz/987.nii.gz"}, {"image": "nii_gz/9862.nii.gz"}, {"image": "nii_gz/9857.nii.gz"}, {"image": "nii_gz/9853.nii.gz"}, {"image": "nii_gz/9849.nii.gz"}, {"image": "nii_gz/9844.nii.gz"}, {"image": "nii_gz/984.nii.gz"}, {"image": "nii_gz/9837.nii.gz"}, {"image": "nii_gz/9832.nii.gz"}, {"image": "nii_gz/9818.nii.gz"}, {"image": "nii_gz/9810.nii.gz"}, {"image": "nii_gz/9801.nii.gz"}, {"image": "nii_gz/9789.nii.gz"}, {"image": "nii_gz/9788.nii.gz"}, {"image": "nii_gz/9783.nii.gz"}, {"image": "nii_gz/9776.nii.gz"}, {"image": "nii_gz/9773.nii.gz"}, {"image": "nii_gz/9770.nii.gz"}, {"image": "nii_gz/9766.nii.gz"}, {"image": "nii_gz/976.nii.gz"}, {"image": "nii_gz/9759.nii.gz"}, {"image": "nii_gz/9758.nii.gz"}, {"image": "nii_gz/9755.nii.gz"}, {"image": "nii_gz/9754.nii.gz"}, {"image": "nii_gz/9752.nii.gz"}, {"image": "nii_gz/9749.nii.gz"}, {"image": "nii_gz/9741.nii.gz"}, {"image": "nii_gz/9735.nii.gz"}, {"image": "nii_gz/9733.nii.gz"}, {"image": "nii_gz/9732.nii.gz"}, {"image": "nii_gz/9728.nii.gz"}, {"image": "nii_gz/9724.nii.gz"}, {"image": "nii_gz/9723.nii.gz"}, {"image": "nii_gz/9720.nii.gz"}, {"image": "nii_gz/9716.nii.gz"}, {"image": "nii_gz/9711.nii.gz"}, {"image": "nii_gz/9709.nii.gz"}, {"image": "nii_gz/9708.nii.gz"}, {"image": 
"nii_gz/9706.nii.gz"}, {"image": "nii_gz/97.nii.gz"}, {"image": "nii_gz/9696.nii.gz"}, {"image": "nii_gz/9692.nii.gz"}, {"image": "nii_gz/9678.nii.gz"}, {"image": "nii_gz/9676.nii.gz"}, {"image": "nii_gz/9673.nii.gz"}, {"image": "nii_gz/9670.nii.gz"}, {"image": "nii_gz/9661.nii.gz"}, {"image": "nii_gz/9658.nii.gz"}, {"image": "nii_gz/9650.nii.gz"}, {"image": "nii_gz/9647.nii.gz"}, {"image": "nii_gz/9645.nii.gz"}, {"image": "nii_gz/9644.nii.gz"}, {"image": "nii_gz/9641.nii.gz"}, {"image": "nii_gz/9630.nii.gz"}, {"image": "nii_gz/9626.nii.gz"}, {"image": "nii_gz/9621.nii.gz"}, {"image": "nii_gz/9620.nii.gz"}, {"image": "nii_gz/959.nii.gz"}, {"image": "nii_gz/9586.nii.gz"}, {"image": "nii_gz/9585.nii.gz"}, {"image": "nii_gz/9584.nii.gz"}, {"image": "nii_gz/9582.nii.gz"}, {"image": "nii_gz/9577.nii.gz"}, {"image": "nii_gz/9572.nii.gz"}, {"image": "nii_gz/9562.nii.gz"}, {"image": "nii_gz/9561.nii.gz"}, {"image": "nii_gz/955.nii.gz"}, {"image": "nii_gz/9546.nii.gz"}, {"image": "nii_gz/9543.nii.gz"}, {"image": "nii_gz/9533.nii.gz"}, {"image": "nii_gz/9531.nii.gz"}, {"image": "nii_gz/9520.nii.gz"}, {"image": "nii_gz/952.nii.gz"}, {"image": "nii_gz/9515.nii.gz"}, {"image": "nii_gz/9498.nii.gz"}, {"image": "nii_gz/9497.nii.gz"}, {"image": "nii_gz/9490.nii.gz"}, {"image": "nii_gz/9489.nii.gz"}, {"image": "nii_gz/9487.nii.gz"}, {"image": "nii_gz/9480.nii.gz"}, {"image": "nii_gz/9476.nii.gz"}, {"image": "nii_gz/9473.nii.gz"}, {"image": "nii_gz/947.nii.gz"}, {"image": "nii_gz/9459.nii.gz"}, {"image": "nii_gz/9450.nii.gz"}, {"image": "nii_gz/9448.nii.gz"}, {"image": "nii_gz/9445.nii.gz"}, {"image": "nii_gz/9441.nii.gz"}, {"image": "nii_gz/944.nii.gz"}, {"image": "nii_gz/9433.nii.gz"}, {"image": "nii_gz/9427.nii.gz"}, {"image": "nii_gz/9421.nii.gz"}, {"image": "nii_gz/9420.nii.gz"}, {"image": "nii_gz/9410.nii.gz"}, {"image": "nii_gz/9403.nii.gz"}, {"image": "nii_gz/9395.nii.gz"}, {"image": "nii_gz/9387.nii.gz"}, {"image": "nii_gz/9383.nii.gz"}, {"image": "nii_gz/9379.nii.gz"}, 
{"image": "nii_gz/9372.nii.gz"}, {"image": "nii_gz/9366.nii.gz"}, {"image": "nii_gz/9360.nii.gz"}, {"image": "nii_gz/9359.nii.gz"}, {"image": "nii_gz/9354.nii.gz"}, {"image": "nii_gz/9348.nii.gz"}, {"image": "nii_gz/9341.nii.gz"}, {"image": "nii_gz/9337.nii.gz"}, {"image": "nii_gz/9329.nii.gz"}, {"image": "nii_gz/9325.nii.gz"}, {"image": "nii_gz/9320.nii.gz"}, {"image": "nii_gz/9318.nii.gz"}, {"image": "nii_gz/9313.nii.gz"}, {"image": "nii_gz/9311.nii.gz"}, {"image": "nii_gz/9304.nii.gz"}, {"image": "nii_gz/930.nii.gz"}, {"image": "nii_gz/9285.nii.gz"}, {"image": "nii_gz/9272.nii.gz"}, {"image": "nii_gz/927.nii.gz"}, {"image": "nii_gz/9269.nii.gz"}, {"image": "nii_gz/9267.nii.gz"}, {"image": "nii_gz/9252.nii.gz"}, {"image": "nii_gz/925.nii.gz"}, {"image": "nii_gz/9237.nii.gz"}, {"image": "nii_gz/9234.nii.gz"}, {"image": "nii_gz/9228.nii.gz"}, {"image": "nii_gz/9223.nii.gz"}, {"image": "nii_gz/9220.nii.gz"}, {"image": "nii_gz/9218.nii.gz"}, {"image": "nii_gz/9211.nii.gz"}, {"image": "nii_gz/9210.nii.gz"}, {"image": "nii_gz/9207.nii.gz"}, {"image": "nii_gz/9205.nii.gz"}, {"image": "nii_gz/9201.nii.gz"}, {"image": "nii_gz/92.nii.gz"}, {"image": "nii_gz/9195.nii.gz"}, {"image": "nii_gz/9189.nii.gz"}, {"image": "nii_gz/9180.nii.gz"}, {"image": "nii_gz/9176.nii.gz"}, {"image": "nii_gz/9169.nii.gz"}, {"image": "nii_gz/9168.nii.gz"}, {"image": "nii_gz/9165.nii.gz"}, {"image": "nii_gz/916.nii.gz"}, {"image": "nii_gz/9157.nii.gz"}, {"image": "nii_gz/9154.nii.gz"}, {"image": "nii_gz/9153.nii.gz"}, {"image": "nii_gz/9141.nii.gz"}, {"image": "nii_gz/9128.nii.gz"}, {"image": "nii_gz/9116.nii.gz"}, {"image": "nii_gz/9113.nii.gz"}, {"image": "nii_gz/9111.nii.gz"}, {"image": "nii_gz/9104.nii.gz"}, {"image": "nii_gz/9102.nii.gz"}, {"image": "nii_gz/9098.nii.gz"}, {"image": "nii_gz/9096.nii.gz"}, {"image": "nii_gz/9075.nii.gz"}, {"image": "nii_gz/9072.nii.gz"}, {"image": "nii_gz/9071.nii.gz"}, {"image": "nii_gz/9058.nii.gz"}, {"image": "nii_gz/9056.nii.gz"}, {"image": 
"nii_gz/9044.nii.gz"}, {"image": "nii_gz/9043.nii.gz"}, {"image": "nii_gz/9032.nii.gz"}, {"image": "nii_gz/9023.nii.gz"}, {"image": "nii_gz/902.nii.gz"}, {"image": "nii_gz/901.nii.gz"}, {"image": "nii_gz/9004.nii.gz"}, {"image": "nii_gz/9003.nii.gz"}, {"image": "nii_gz/9002.nii.gz"}, {"image": "nii_gz/9001.nii.gz"}, {"image": "nii_gz/8999.nii.gz"}, {"image": "nii_gz/8998.nii.gz"}, {"image": "nii_gz/8996.nii.gz"}, {"image": "nii_gz/8977.nii.gz"}, {"image": "nii_gz/8974.nii.gz"}, {"image": "nii_gz/8972.nii.gz"}, {"image": "nii_gz/8970.nii.gz"}, {"image": "nii_gz/8958.nii.gz"}, {"image": "nii_gz/8952.nii.gz"}, {"image": "nii_gz/8950.nii.gz"}, {"image": "nii_gz/8949.nii.gz"}, {"image": "nii_gz/8948.nii.gz"}, {"image": "nii_gz/8945.nii.gz"}, {"image": "nii_gz/8932.nii.gz"}, {"image": "nii_gz/8930.nii.gz"}, {"image": "nii_gz/8928.nii.gz"}, {"image": "nii_gz/8926.nii.gz"}, {"image": "nii_gz/892.nii.gz"}, {"image": "nii_gz/8917.nii.gz"}, {"image": "nii_gz/8910.nii.gz"}, {"image": "nii_gz/891.nii.gz"}, {"image": "nii_gz/8907.nii.gz"}, {"image": "nii_gz/8906.nii.gz"}, {"image": "nii_gz/8892.nii.gz"}, {"image": "nii_gz/8889.nii.gz"}, {"image": "nii_gz/8880.nii.gz"}, {"image": "nii_gz/8868.nii.gz"}, {"image": "nii_gz/886.nii.gz"}, {"image": "nii_gz/8853.nii.gz"}, {"image": "nii_gz/8845.nii.gz"}, {"image": "nii_gz/8839.nii.gz"}, {"image": "nii_gz/8838.nii.gz"}, {"image": "nii_gz/8837.nii.gz"}, {"image": "nii_gz/8833.nii.gz"}, {"image": "nii_gz/883.nii.gz"}, {"image": "nii_gz/8828.nii.gz"}, {"image": "nii_gz/8823.nii.gz"}, {"image": "nii_gz/8813.nii.gz"}, {"image": "nii_gz/8807.nii.gz"}, {"image": "nii_gz/8804.nii.gz"}, {"image": "nii_gz/88.nii.gz"}, {"image": "nii_gz/8792.nii.gz"}, {"image": "nii_gz/8790.nii.gz"}, {"image": "nii_gz/8788.nii.gz"}, {"image": "nii_gz/8784.nii.gz"}, {"image": "nii_gz/8782.nii.gz"}, {"image": "nii_gz/8780.nii.gz"}, {"image": "nii_gz/8776.nii.gz"}, {"image": "nii_gz/8766.nii.gz"}, {"image": "nii_gz/8765.nii.gz"}, {"image": "nii_gz/8758.nii.gz"}, 
{"image": "nii_gz/8757.nii.gz"}, {"image": "nii_gz/8752.nii.gz"}, {"image": "nii_gz/8751.nii.gz"}, {"image": "nii_gz/8746.nii.gz"}, {"image": "nii_gz/8736.nii.gz"}, {"image": "nii_gz/8734.nii.gz"}, {"image": "nii_gz/8726.nii.gz"}, {"image": "nii_gz/8724.nii.gz"}, {"image": "nii_gz/8708.nii.gz"}, {"image": "nii_gz/8703.nii.gz"}, {"image": "nii_gz/8699.nii.gz"}, {"image": "nii_gz/8694.nii.gz"}, {"image": "nii_gz/8692.nii.gz"}, {"image": "nii_gz/8690.nii.gz"}, {"image": "nii_gz/8682.nii.gz"}, {"image": "nii_gz/868.nii.gz"}, {"image": "nii_gz/8675.nii.gz"}, {"image": "nii_gz/8673.nii.gz"}, {"image": "nii_gz/8670.nii.gz"}, {"image": "nii_gz/8656.nii.gz"}, {"image": "nii_gz/8644.nii.gz"}, {"image": "nii_gz/8631.nii.gz"}, {"image": "nii_gz/8626.nii.gz"}, {"image": "nii_gz/8625.nii.gz"}, {"image": "nii_gz/8622.nii.gz"}, {"image": "nii_gz/8617.nii.gz"}, {"image": "nii_gz/8613.nii.gz"}, {"image": "nii_gz/861.nii.gz"}, {"image": "nii_gz/8609.nii.gz"}, {"image": "nii_gz/8607.nii.gz"}, {"image": "nii_gz/8603.nii.gz"}, {"image": "nii_gz/8597.nii.gz"}, {"image": "nii_gz/8585.nii.gz"}, {"image": "nii_gz/8582.nii.gz"}, {"image": "nii_gz/8577.nii.gz"}, {"image": "nii_gz/8576.nii.gz"}, {"image": "nii_gz/8568.nii.gz"}, {"image": "nii_gz/8563.nii.gz"}, {"image": "nii_gz/8560.nii.gz"}, {"image": "nii_gz/8557.nii.gz"}, {"image": "nii_gz/8551.nii.gz"}, {"image": "nii_gz/8541.nii.gz"}, {"image": "nii_gz/8540.nii.gz"}, {"image": "nii_gz/8537.nii.gz"}, {"image": "nii_gz/8534.nii.gz"}, {"image": "nii_gz/8530.nii.gz"}, {"image": "nii_gz/8529.nii.gz"}, {"image": "nii_gz/8521.nii.gz"}, {"image": "nii_gz/8519.nii.gz"}, {"image": "nii_gz/8511.nii.gz"}, {"image": "nii_gz/8509.nii.gz"}, {"image": "nii_gz/8504.nii.gz"}, {"image": "nii_gz/8500.nii.gz"}, {"image": "nii_gz/85.nii.gz"}, {"image": "nii_gz/8495.nii.gz"}, {"image": "nii_gz/8489.nii.gz"}, {"image": "nii_gz/8488.nii.gz"}, {"image": "nii_gz/8486.nii.gz"}, {"image": "nii_gz/8483.nii.gz"}, {"image": "nii_gz/848.nii.gz"}, {"image": 
"nii_gz/8479.nii.gz"}, {"image": "nii_gz/8478.nii.gz"}, {"image": "nii_gz/8470.nii.gz"}, {"image": "nii_gz/8469.nii.gz"}, {"image": "nii_gz/8467.nii.gz"}, {"image": "nii_gz/8462.nii.gz"}, {"image": "nii_gz/8457.nii.gz"}, {"image": "nii_gz/8453.nii.gz"}, {"image": "nii_gz/8449.nii.gz"}, {"image": "nii_gz/8442.nii.gz"}, {"image": "nii_gz/8440.nii.gz"}, {"image": "nii_gz/8439.nii.gz"}, {"image": "nii_gz/8432.nii.gz"}, {"image": "nii_gz/8429.nii.gz"}, {"image": "nii_gz/8426.nii.gz"}, {"image": "nii_gz/842.nii.gz"}, {"image": "nii_gz/8417.nii.gz"}, {"image": "nii_gz/8416.nii.gz"}, {"image": "nii_gz/8413.nii.gz"}, {"image": "nii_gz/8410.nii.gz"}, {"image": "nii_gz/8407.nii.gz"}, {"image": "nii_gz/8401.nii.gz"}, {"image": "nii_gz/839.nii.gz"}, {"image": "nii_gz/8389.nii.gz"}, {"image": "nii_gz/8382.nii.gz"}, {"image": "nii_gz/838.nii.gz"}, {"image": "nii_gz/8366.nii.gz"}, {"image": "nii_gz/8364.nii.gz"}, {"image": "nii_gz/836.nii.gz"}, {"image": "nii_gz/8359.nii.gz"}, {"image": "nii_gz/8350.nii.gz"}, {"image": "nii_gz/8348.nii.gz"}, {"image": "nii_gz/8338.nii.gz"}, {"image": "nii_gz/8337.nii.gz"}, {"image": "nii_gz/8325.nii.gz"}, {"image": "nii_gz/8322.nii.gz"}, {"image": "nii_gz/8319.nii.gz"}, {"image": "nii_gz/8318.nii.gz"}, {"image": "nii_gz/831.nii.gz"}, {"image": "nii_gz/8303.nii.gz"}, {"image": "nii_gz/83.nii.gz"}, {"image": "nii_gz/8290.nii.gz"}, {"image": "nii_gz/8281.nii.gz"}, {"image": "nii_gz/8278.nii.gz"}, {"image": "nii_gz/827.nii.gz"}, {"image": "nii_gz/8268.nii.gz"}, {"image": "nii_gz/8265.nii.gz"}, {"image": "nii_gz/8263.nii.gz"}, {"image": "nii_gz/8260.nii.gz"}, {"image": "nii_gz/8256.nii.gz"}, {"image": "nii_gz/8237.nii.gz"}, {"image": "nii_gz/8229.nii.gz"}, {"image": "nii_gz/8228.nii.gz"}, {"image": "nii_gz/8227.nii.gz"}, {"image": "nii_gz/8222.nii.gz"}, {"image": "nii_gz/8221.nii.gz"}, {"image": "nii_gz/822.nii.gz"}, {"image": "nii_gz/8209.nii.gz"}, {"image": "nii_gz/8202.nii.gz"}, {"image": "nii_gz/8198.nii.gz"}, {"image": "nii_gz/8194.nii.gz"}, 
{"image": "nii_gz/8190.nii.gz"}, {"image": "nii_gz/8189.nii.gz"}, {"image": "nii_gz/8188.nii.gz"}, {"image": "nii_gz/8187.nii.gz"}, {"image": "nii_gz/8180.nii.gz"}, {"image": "nii_gz/8170.nii.gz"}, {"image": "nii_gz/8164.nii.gz"}, {"image": "nii_gz/8158.nii.gz"}, {"image": "nii_gz/8156.nii.gz"}, {"image": "nii_gz/8152.nii.gz"}, {"image": "nii_gz/8147.nii.gz"}, {"image": "nii_gz/8145.nii.gz"}, {"image": "nii_gz/8144.nii.gz"}, {"image": "nii_gz/8142.nii.gz"}, {"image": "nii_gz/814.nii.gz"}, {"image": "nii_gz/8137.nii.gz"}, {"image": "nii_gz/8133.nii.gz"}, {"image": "nii_gz/8130.nii.gz"}, {"image": "nii_gz/8113.nii.gz"}, {"image": "nii_gz/8112.nii.gz"}, {"image": "nii_gz/8109.nii.gz"}, {"image": "nii_gz/8105.nii.gz"}, {"image": "nii_gz/8103.nii.gz"}, {"image": "nii_gz/8102.nii.gz"}, {"image": "nii_gz/8094.nii.gz"}, {"image": "nii_gz/8093.nii.gz"}, {"image": "nii_gz/8090.nii.gz"}, {"image": "nii_gz/8085.nii.gz"}, {"image": "nii_gz/8081.nii.gz"}, {"image": "nii_gz/8072.nii.gz"}, {"image": "nii_gz/8060.nii.gz"}, {"image": "nii_gz/8056.nii.gz"}, {"image": "nii_gz/8028.nii.gz"}, {"image": "nii_gz/8027.nii.gz"}, {"image": "nii_gz/802.nii.gz"}, {"image": "nii_gz/8018.nii.gz"}, {"image": "nii_gz/8015.nii.gz"}, {"image": "nii_gz/7995.nii.gz"}, {"image": "nii_gz/7982.nii.gz"}, {"image": "nii_gz/7981.nii.gz"}, {"image": "nii_gz/7977.nii.gz"}, {"image": "nii_gz/7973.nii.gz"}, {"image": "nii_gz/7959.nii.gz"}, {"image": "nii_gz/7947.nii.gz"}, {"image": "nii_gz/7940.nii.gz"}, {"image": "nii_gz/7924.nii.gz"}, {"image": "nii_gz/792.nii.gz"}, {"image": "nii_gz/7918.nii.gz"}, {"image": "nii_gz/7915.nii.gz"}, {"image": "nii_gz/7906.nii.gz"}, {"image": "nii_gz/7904.nii.gz"}, {"image": "nii_gz/7903.nii.gz"}, {"image": "nii_gz/7901.nii.gz"}, {"image": "nii_gz/7893.nii.gz"}, {"image": "nii_gz/789.nii.gz"}, {"image": "nii_gz/7888.nii.gz"}, {"image": "nii_gz/7886.nii.gz"}, {"image": "nii_gz/7885.nii.gz"}, {"image": "nii_gz/7871.nii.gz"}, {"image": "nii_gz/7867.nii.gz"}, {"image": 
"nii_gz/7866.nii.gz"}, {"image": "nii_gz/785.nii.gz"}, {"image": "nii_gz/7843.nii.gz"}, {"image": "nii_gz/7842.nii.gz"}, {"image": "nii_gz/7840.nii.gz"}, {"image": "nii_gz/7831.nii.gz"}, {"image": "nii_gz/7828.nii.gz"}, {"image": "nii_gz/7818.nii.gz"}, {"image": "nii_gz/7814.nii.gz"}, {"image": "nii_gz/781.nii.gz"}, {"image": "nii_gz/7808.nii.gz"}, {"image": "nii_gz/7806.nii.gz"}, {"image": "nii_gz/7805.nii.gz"}, {"image": "nii_gz/7803.nii.gz"}, {"image": "nii_gz/7802.nii.gz"}, {"image": "nii_gz/78.nii.gz"}, {"image": "nii_gz/7798.nii.gz"}, {"image": "nii_gz/7792.nii.gz"}, {"image": "nii_gz/7788.nii.gz"}, {"image": "nii_gz/7782.nii.gz"}, {"image": "nii_gz/7777.nii.gz"}, {"image": "nii_gz/7763.nii.gz"}, {"image": "nii_gz/7758.nii.gz"}, {"image": "nii_gz/7752.nii.gz"}, {"image": "nii_gz/7750.nii.gz"}, {"image": "nii_gz/7749.nii.gz"}, {"image": "nii_gz/7744.nii.gz"}, {"image": "nii_gz/7741.nii.gz"}, {"image": "nii_gz/7735.nii.gz"}, {"image": "nii_gz/7734.nii.gz"}, {"image": "nii_gz/7733.nii.gz"}, {"image": "nii_gz/7730.nii.gz"}, {"image": "nii_gz/7728.nii.gz"}, {"image": "nii_gz/7718.nii.gz"}, {"image": "nii_gz/7716.nii.gz"}, {"image": "nii_gz/771.nii.gz"}, {"image": "nii_gz/7708.nii.gz"}, {"image": "nii_gz/7703.nii.gz"}, {"image": "nii_gz/7702.nii.gz"}, {"image": "nii_gz/7696.nii.gz"}, {"image": "nii_gz/7694.nii.gz"}, {"image": "nii_gz/7692.nii.gz"}, {"image": "nii_gz/7689.nii.gz"}, {"image": "nii_gz/7688.nii.gz"}, {"image": "nii_gz/7684.nii.gz"}, {"image": "nii_gz/768.nii.gz"}, {"image": "nii_gz/7673.nii.gz"}, {"image": "nii_gz/7669.nii.gz"}, {"image": "nii_gz/7663.nii.gz"}, {"image": "nii_gz/7656.nii.gz"}, {"image": "nii_gz/7655.nii.gz"}, {"image": "nii_gz/7653.nii.gz"}, {"image": "nii_gz/7652.nii.gz"}, {"image": "nii_gz/7651.nii.gz"}, {"image": "nii_gz/7648.nii.gz"}, {"image": "nii_gz/7638.nii.gz"}, {"image": "nii_gz/7634.nii.gz"}, {"image": "nii_gz/7624.nii.gz"}, {"image": "nii_gz/7619.nii.gz"}, {"image": "nii_gz/7615.nii.gz"}, {"image": "nii_gz/7614.nii.gz"}, 
{"image": "nii_gz/7613.nii.gz"}, {"image": "nii_gz/7611.nii.gz"}, {"image": "nii_gz/7609.nii.gz"}, {"image": "nii_gz/7582.nii.gz"}, {"image": "nii_gz/7572.nii.gz"}, {"image": "nii_gz/757.nii.gz"}, {"image": "nii_gz/7563.nii.gz"}, {"image": "nii_gz/755.nii.gz"}, {"image": "nii_gz/7537.nii.gz"}, {"image": "nii_gz/7531.nii.gz"}, {"image": "nii_gz/753.nii.gz"}, {"image": "nii_gz/7529.nii.gz"}, {"image": "nii_gz/7517.nii.gz"}, {"image": "nii_gz/7516.nii.gz"}, {"image": "nii_gz/7503.nii.gz"}, {"image": "nii_gz/7501.nii.gz"}, {"image": "nii_gz/7500.nii.gz"}, {"image": "nii_gz/750.nii.gz"}, {"image": "nii_gz/7499.nii.gz"}, {"image": "nii_gz/749.nii.gz"}, {"image": "nii_gz/7488.nii.gz"}, {"image": "nii_gz/7487.nii.gz"}, {"image": "nii_gz/7484.nii.gz"}, {"image": "nii_gz/7480.nii.gz"}, {"image": "nii_gz/7470.nii.gz"}, {"image": "nii_gz/747.nii.gz"}, {"image": "nii_gz/7464.nii.gz"}, {"image": "nii_gz/7459.nii.gz"}, {"image": "nii_gz/7448.nii.gz"}, {"image": "nii_gz/7447.nii.gz"}, {"image": "nii_gz/7443.nii.gz"}, {"image": "nii_gz/7434.nii.gz"}, {"image": "nii_gz/7433.nii.gz"}, {"image": "nii_gz/7432.nii.gz"}, {"image": "nii_gz/7430.nii.gz"}, {"image": "nii_gz/7429.nii.gz"}, {"image": "nii_gz/7428.nii.gz"}, {"image": "nii_gz/7421.nii.gz"}, {"image": "nii_gz/7420.nii.gz"}, {"image": "nii_gz/7412.nii.gz"}, {"image": "nii_gz/7410.nii.gz"}, {"image": "nii_gz/7408.nii.gz"}, {"image": "nii_gz/7407.nii.gz"}, {"image": "nii_gz/74.nii.gz"}, {"image": "nii_gz/7382.nii.gz"}, {"image": "nii_gz/7378.nii.gz"}, {"image": "nii_gz/7377.nii.gz"}, {"image": "nii_gz/7376.nii.gz"}, {"image": "nii_gz/7349.nii.gz"}, {"image": "nii_gz/7342.nii.gz"}, {"image": "nii_gz/7333.nii.gz"}, {"image": "nii_gz/7331.nii.gz"}, {"image": "nii_gz/7324.nii.gz"}, {"image": "nii_gz/7320.nii.gz"}, {"image": "nii_gz/7310.nii.gz"}, {"image": "nii_gz/7309.nii.gz"}, {"image": "nii_gz/7306.nii.gz"}, {"image": "nii_gz/7305.nii.gz"}, {"image": "nii_gz/7301.nii.gz"}, {"image": "nii_gz/7300.nii.gz"}, {"image": 
"nii_gz/7299.nii.gz"}, {"image": "nii_gz/7295.nii.gz"}, {"image": "nii_gz/7293.nii.gz"}, {"image": "nii_gz/7285.nii.gz"}, {"image": "nii_gz/7279.nii.gz"}, {"image": "nii_gz/7273.nii.gz"}, {"image": "nii_gz/7269.nii.gz"}, {"image": "nii_gz/7260.nii.gz"}, {"image": "nii_gz/7257.nii.gz"}, {"image": "nii_gz/7255.nii.gz"}, {"image": "nii_gz/724.nii.gz"}, {"image": "nii_gz/7238.nii.gz"}, {"image": "nii_gz/7221.nii.gz"}, {"image": "nii_gz/7218.nii.gz"}, {"image": "nii_gz/7217.nii.gz"}, {"image": "nii_gz/7212.nii.gz"}, {"image": "nii_gz/7198.nii.gz"}, {"image": "nii_gz/7197.nii.gz"}, {"image": "nii_gz/7194.nii.gz"}, {"image": "nii_gz/7192.nii.gz"}, {"image": "nii_gz/7187.nii.gz"}, {"image": "nii_gz/7178.nii.gz"}, {"image": "nii_gz/7174.nii.gz"}, {"image": "nii_gz/7169.nii.gz"}, {"image": "nii_gz/7165.nii.gz"}, {"image": "nii_gz/7160.nii.gz"}, {"image": "nii_gz/7158.nii.gz"}, {"image": "nii_gz/7141.nii.gz"}, {"image": "nii_gz/7136.nii.gz"}, {"image": "nii_gz/7135.nii.gz"}, {"image": "nii_gz/7121.nii.gz"}, {"image": "nii_gz/7117.nii.gz"}, {"image": "nii_gz/7103.nii.gz"}, {"image": "nii_gz/7090.nii.gz"}, {"image": "nii_gz/7086.nii.gz"}, {"image": "nii_gz/7081.nii.gz"}, {"image": "nii_gz/7077.nii.gz"}, {"image": "nii_gz/7068.nii.gz"}, {"image": "nii_gz/7066.nii.gz"}, {"image": "nii_gz/7065.nii.gz"}, {"image": "nii_gz/7060.nii.gz"}, {"image": "nii_gz/7053.nii.gz"}, {"image": "nii_gz/705.nii.gz"}, {"image": "nii_gz/7041.nii.gz"}, {"image": "nii_gz/7035.nii.gz"}, {"image": "nii_gz/7026.nii.gz"}, {"image": "nii_gz/7024.nii.gz"}, {"image": "nii_gz/7022.nii.gz"}, {"image": "nii_gz/7017.nii.gz"}, {"image": "nii_gz/7012.nii.gz"}, {"image": "nii_gz/701.nii.gz"}, {"image": "nii_gz/7006.nii.gz"}, {"image": "nii_gz/7004.nii.gz"}, {"image": "nii_gz/7002.nii.gz"}, {"image": "nii_gz/70.nii.gz"}, {"image": "nii_gz/6999.nii.gz"}, {"image": "nii_gz/6990.nii.gz"}, {"image": "nii_gz/6986.nii.gz"}, {"image": "nii_gz/6981.nii.gz"}, {"image": "nii_gz/6979.nii.gz"}, {"image": "nii_gz/6966.nii.gz"}, 
{"image": "nii_gz/6965.nii.gz"}, {"image": "nii_gz/6964.nii.gz"}, {"image": "nii_gz/6959.nii.gz"}, {"image": "nii_gz/6958.nii.gz"}, {"image": "nii_gz/695.nii.gz"}, {"image": "nii_gz/6947.nii.gz"}, {"image": "nii_gz/6941.nii.gz"}, {"image": "nii_gz/6930.nii.gz"}, {"image": "nii_gz/693.nii.gz"}, {"image": "nii_gz/6923.nii.gz"}, {"image": "nii_gz/692.nii.gz"}, {"image": "nii_gz/6918.nii.gz"}, {"image": "nii_gz/6913.nii.gz"}, {"image": "nii_gz/6909.nii.gz"}, {"image": "nii_gz/6902.nii.gz"}, {"image": "nii_gz/6901.nii.gz"}, {"image": "nii_gz/6899.nii.gz"}, {"image": "nii_gz/689.nii.gz"}, {"image": "nii_gz/6889.nii.gz"}, {"image": "nii_gz/6882.nii.gz"}, {"image": "nii_gz/6878.nii.gz"}, {"image": "nii_gz/6871.nii.gz"}, {"image": "nii_gz/6859.nii.gz"}, {"image": "nii_gz/6853.nii.gz"}, {"image": "nii_gz/6851.nii.gz"}, {"image": "nii_gz/685.nii.gz"}, {"image": "nii_gz/6844.nii.gz"}, {"image": "nii_gz/6843.nii.gz"}, {"image": "nii_gz/6842.nii.gz"}, {"image": "nii_gz/684.nii.gz"}, {"image": "nii_gz/6833.nii.gz"}, {"image": "nii_gz/6814.nii.gz"}, {"image": "nii_gz/6809.nii.gz"}, {"image": "nii_gz/6804.nii.gz"}, {"image": "nii_gz/6802.nii.gz"}, {"image": "nii_gz/68.nii.gz"}, {"image": "nii_gz/6792.nii.gz"}, {"image": "nii_gz/6781.nii.gz"}, {"image": "nii_gz/6775.nii.gz"}, {"image": "nii_gz/6774.nii.gz"}, {"image": "nii_gz/6773.nii.gz"}, {"image": "nii_gz/6766.nii.gz"}, {"image": "nii_gz/6765.nii.gz"}, {"image": "nii_gz/676.nii.gz"}, {"image": "nii_gz/6756.nii.gz"}, {"image": "nii_gz/6755.nii.gz"}, {"image": "nii_gz/6749.nii.gz"}, {"image": "nii_gz/6745.nii.gz"}, {"image": "nii_gz/6744.nii.gz"}, {"image": "nii_gz/674.nii.gz"}, {"image": "nii_gz/6730.nii.gz"}, {"image": "nii_gz/6729.nii.gz"}, {"image": "nii_gz/6728.nii.gz"}, {"image": "nii_gz/6715.nii.gz"}, {"image": "nii_gz/6713.nii.gz"}, {"image": "nii_gz/6706.nii.gz"}, {"image": "nii_gz/6697.nii.gz"}, {"image": "nii_gz/6688.nii.gz"}, {"image": "nii_gz/6684.nii.gz"}, {"image": "nii_gz/6679.nii.gz"}, {"image": 
"nii_gz/6671.nii.gz"}, {"image": "nii_gz/6670.nii.gz"}, {"image": "nii_gz/6665.nii.gz"}, {"image": "nii_gz/6662.nii.gz"}, {"image": "nii_gz/6660.nii.gz"}, {"image": "nii_gz/6653.nii.gz"}, {"image": "nii_gz/6651.nii.gz"}, {"image": "nii_gz/665.nii.gz"}, {"image": "nii_gz/6646.nii.gz"}, {"image": "nii_gz/6636.nii.gz"}, {"image": "nii_gz/6635.nii.gz"}, {"image": "nii_gz/6631.nii.gz"}, {"image": "nii_gz/6624.nii.gz"}, {"image": "nii_gz/6619.nii.gz"}, {"image": "nii_gz/6605.nii.gz"}, {"image": "nii_gz/6601.nii.gz"}, {"image": "nii_gz/6598.nii.gz"}, {"image": "nii_gz/6589.nii.gz"}, {"image": "nii_gz/658.nii.gz"}, {"image": "nii_gz/6575.nii.gz"}, {"image": "nii_gz/657.nii.gz"}, {"image": "nii_gz/6566.nii.gz"}, {"image": "nii_gz/6554.nii.gz"}, {"image": "nii_gz/6533.nii.gz"}, {"image": "nii_gz/6531.nii.gz"}, {"image": "nii_gz/6517.nii.gz"}, {"image": "nii_gz/6516.nii.gz"}, {"image": "nii_gz/6496.nii.gz"}, {"image": "nii_gz/6484.nii.gz"}, {"image": "nii_gz/6472.nii.gz"}, {"image": "nii_gz/6469.nii.gz"}, {"image": "nii_gz/6466.nii.gz"}, {"image": "nii_gz/6452.nii.gz"}, {"image": "nii_gz/6446.nii.gz"}, {"image": "nii_gz/6444.nii.gz"}, {"image": "nii_gz/6443.nii.gz"}, {"image": "nii_gz/6435.nii.gz"}, {"image": "nii_gz/6433.nii.gz"}, {"image": "nii_gz/643.nii.gz"}, {"image": "nii_gz/6429.nii.gz"}, {"image": "nii_gz/6426.nii.gz"}, {"image": "nii_gz/6425.nii.gz"}, {"image": "nii_gz/6419.nii.gz"}, {"image": "nii_gz/6416.nii.gz"}, {"image": "nii_gz/6395.nii.gz"}, {"image": "nii_gz/6387.nii.gz"}, {"image": "nii_gz/6385.nii.gz"}, {"image": "nii_gz/638.nii.gz"}, {"image": "nii_gz/6379.nii.gz"}, {"image": "nii_gz/6374.nii.gz"}, {"image": "nii_gz/6366.nii.gz"}, {"image": "nii_gz/6362.nii.gz"}, {"image": "nii_gz/636.nii.gz"}, {"image": "nii_gz/6353.nii.gz"}, {"image": "nii_gz/6349.nii.gz"}, {"image": "nii_gz/6348.nii.gz"}, {"image": "nii_gz/6346.nii.gz"}, {"image": "nii_gz/6345.nii.gz"}, {"image": "nii_gz/6344.nii.gz"}, {"image": "nii_gz/6340.nii.gz"}, {"image": "nii_gz/634.nii.gz"}, 
{"image": "nii_gz/633.nii.gz"}, {"image": "nii_gz/6329.nii.gz"}, {"image": "nii_gz/6328.nii.gz"}, {"image": "nii_gz/6324.nii.gz"}, {"image": "nii_gz/631.nii.gz"}, {"image": "nii_gz/6308.nii.gz"}, {"image": "nii_gz/6306.nii.gz"}, {"image": "nii_gz/6303.nii.gz"}, {"image": "nii_gz/6297.nii.gz"}, {"image": "nii_gz/6293.nii.gz"}, {"image": "nii_gz/6291.nii.gz"}, {"image": "nii_gz/6283.nii.gz"}, {"image": "nii_gz/628.nii.gz"}, {"image": "nii_gz/6273.nii.gz"}, {"image": "nii_gz/6272.nii.gz"}, {"image": "nii_gz/6267.nii.gz"}, {"image": "nii_gz/6266.nii.gz"}, {"image": "nii_gz/6257.nii.gz"}, {"image": "nii_gz/6254.nii.gz"}, {"image": "nii_gz/6250.nii.gz"}, {"image": "nii_gz/6236.nii.gz"}, {"image": "nii_gz/6232.nii.gz"}, {"image": "nii_gz/623.nii.gz"}, {"image": "nii_gz/6229.nii.gz"}, {"image": "nii_gz/6227.nii.gz"}, {"image": "nii_gz/6213.nii.gz"}, {"image": "nii_gz/6206.nii.gz"}, {"image": "nii_gz/6201.nii.gz"}, {"image": "nii_gz/620.nii.gz"}, {"image": "nii_gz/6187.nii.gz"}, {"image": "nii_gz/6185.nii.gz"}, {"image": "nii_gz/6184.nii.gz"}, {"image": "nii_gz/6178.nii.gz"}, {"image": "nii_gz/6167.nii.gz"}, {"image": "nii_gz/616.nii.gz"}, {"image": "nii_gz/6159.nii.gz"}, {"image": "nii_gz/6148.nii.gz"}, {"image": "nii_gz/6145.nii.gz"}, {"image": "nii_gz/6143.nii.gz"}, {"image": "nii_gz/6126.nii.gz"}, {"image": "nii_gz/6124.nii.gz"}, {"image": "nii_gz/6112.nii.gz"}, {"image": "nii_gz/6111.nii.gz"}, {"image": "nii_gz/6090.nii.gz"}, {"image": "nii_gz/6084.nii.gz"}, {"image": "nii_gz/6082.nii.gz"}, {"image": "nii_gz/6080.nii.gz"}, {"image": "nii_gz/608.nii.gz"}, {"image": "nii_gz/6077.nii.gz"}, {"image": "nii_gz/6071.nii.gz"}, {"image": "nii_gz/6066.nii.gz"}, {"image": "nii_gz/6058.nii.gz"}, {"image": "nii_gz/6050.nii.gz"}, {"image": "nii_gz/6041.nii.gz"}, {"image": "nii_gz/6035.nii.gz"}, {"image": "nii_gz/6033.nii.gz"}, {"image": "nii_gz/6023.nii.gz"}, {"image": "nii_gz/6019.nii.gz"}, {"image": "nii_gz/6016.nii.gz"}, {"image": "nii_gz/6013.nii.gz"}, {"image": 
"nii_gz/6.nii.gz"}, {"image": "nii_gz/5993.nii.gz"}, {"image": "nii_gz/5992.nii.gz"}, {"image": "nii_gz/5991.nii.gz"}, {"image": "nii_gz/5977.nii.gz"}, {"image": "nii_gz/5974.nii.gz"}, {"image": "nii_gz/597.nii.gz"}, {"image": "nii_gz/5959.nii.gz"}, {"image": "nii_gz/5958.nii.gz"}, {"image": "nii_gz/5955.nii.gz"}, {"image": "nii_gz/5954.nii.gz"}, {"image": "nii_gz/5947.nii.gz"}, {"image": "nii_gz/5945.nii.gz"}, {"image": "nii_gz/5944.nii.gz"}, {"image": "nii_gz/5927.nii.gz"}, {"image": "nii_gz/5924.nii.gz"}, {"image": "nii_gz/5918.nii.gz"}, {"image": "nii_gz/5910.nii.gz"}, {"image": "nii_gz/589.nii.gz"}, {"image": "nii_gz/5888.nii.gz"}, {"image": "nii_gz/588.nii.gz"}, {"image": "nii_gz/5874.nii.gz"}, {"image": "nii_gz/5870.nii.gz"}, {"image": "nii_gz/587.nii.gz"}, {"image": "nii_gz/5867.nii.gz"}, {"image": "nii_gz/5862.nii.gz"}, {"image": "nii_gz/5860.nii.gz"}, {"image": "nii_gz/5848.nii.gz"}, {"image": "nii_gz/5841.nii.gz"}, {"image": "nii_gz/5836.nii.gz"}, {"image": "nii_gz/5822.nii.gz"}, {"image": "nii_gz/5820.nii.gz"}, {"image": "nii_gz/582.nii.gz"}, {"image": "nii_gz/5818.nii.gz"}, {"image": "nii_gz/5812.nii.gz"}, {"image": "nii_gz/5808.nii.gz"}, {"image": "nii_gz/5806.nii.gz"}, {"image": "nii_gz/5804.nii.gz"}, {"image": "nii_gz/5793.nii.gz"}, {"image": "nii_gz/5791.nii.gz"}, {"image": "nii_gz/5786.nii.gz"}, {"image": "nii_gz/5785.nii.gz"}, {"image": "nii_gz/5783.nii.gz"}, {"image": "nii_gz/5779.nii.gz"}, {"image": "nii_gz/5765.nii.gz"}, {"image": "nii_gz/5762.nii.gz"}, {"image": "nii_gz/576.nii.gz"}, {"image": "nii_gz/5759.nii.gz"}, {"image": "nii_gz/5755.nii.gz"}, {"image": "nii_gz/5751.nii.gz"}, {"image": "nii_gz/5746.nii.gz"}, {"image": "nii_gz/5745.nii.gz"}, {"image": "nii_gz/5739.nii.gz"}, {"image": "nii_gz/5734.nii.gz"}, {"image": "nii_gz/5731.nii.gz"}, {"image": "nii_gz/5725.nii.gz"}, {"image": "nii_gz/5724.nii.gz"}, {"image": "nii_gz/5716.nii.gz"}, {"image": "nii_gz/5709.nii.gz"}, {"image": "nii_gz/5689.nii.gz"}, {"image": "nii_gz/568.nii.gz"}, 
{"image": "nii_gz/5676.nii.gz"}, {"image": "nii_gz/5672.nii.gz"}, {"image": "nii_gz/5670.nii.gz"}, {"image": "nii_gz/5665.nii.gz"}, {"image": "nii_gz/5662.nii.gz"}, {"image": "nii_gz/5658.nii.gz"}, {"image": "nii_gz/5650.nii.gz"}, {"image": "nii_gz/565.nii.gz"}, {"image": "nii_gz/5648.nii.gz"}, {"image": "nii_gz/5647.nii.gz"}, {"image": "nii_gz/5640.nii.gz"}, {"image": "nii_gz/564.nii.gz"}, {"image": "nii_gz/5631.nii.gz"}, {"image": "nii_gz/5630.nii.gz"}, {"image": "nii_gz/563.nii.gz"}, {"image": "nii_gz/5628.nii.gz"}, {"image": "nii_gz/5623.nii.gz"}, {"image": "nii_gz/5617.nii.gz"}, {"image": "nii_gz/5613.nii.gz"}, {"image": "nii_gz/5612.nii.gz"}, {"image": "nii_gz/5611.nii.gz"}, {"image": "nii_gz/5610.nii.gz"}, {"image": "nii_gz/5609.nii.gz"}, {"image": "nii_gz/5601.nii.gz"}, {"image": "nii_gz/5600.nii.gz"}, {"image": "nii_gz/560.nii.gz"}, {"image": "nii_gz/5598.nii.gz"}, {"image": "nii_gz/5595.nii.gz"}, {"image": "nii_gz/5591.nii.gz"}, {"image": "nii_gz/5579.nii.gz"}, {"image": "nii_gz/557.nii.gz"}, {"image": "nii_gz/5569.nii.gz"}, {"image": "nii_gz/5564.nii.gz"}, {"image": "nii_gz/5556.nii.gz"}, {"image": "nii_gz/555.nii.gz"}, {"image": "nii_gz/5542.nii.gz"}, {"image": "nii_gz/5541.nii.gz"}, {"image": "nii_gz/5537.nii.gz"}, {"image": "nii_gz/5535.nii.gz"}, {"image": "nii_gz/5513.nii.gz"}, {"image": "nii_gz/5511.nii.gz"}, {"image": "nii_gz/5509.nii.gz"}, {"image": "nii_gz/5507.nii.gz"}, {"image": "nii_gz/5503.nii.gz"}, {"image": "nii_gz/550.nii.gz"}, {"image": "nii_gz/55.nii.gz"}, {"image": "nii_gz/5496.nii.gz"}, {"image": "nii_gz/5491.nii.gz"}, {"image": "nii_gz/5490.nii.gz"}, {"image": "nii_gz/5486.nii.gz"}, {"image": "nii_gz/5480.nii.gz"}, {"image": "nii_gz/5478.nii.gz"}, {"image": "nii_gz/547.nii.gz"}, {"image": "nii_gz/5459.nii.gz"}, {"image": "nii_gz/5457.nii.gz"}, {"image": "nii_gz/5455.nii.gz"}, {"image": "nii_gz/5454.nii.gz"}, {"image": "nii_gz/5450.nii.gz"}, {"image": "nii_gz/5449.nii.gz"}, {"image": "nii_gz/5445.nii.gz"}, {"image": 
"nii_gz/5440.nii.gz"}, {"image": "nii_gz/544.nii.gz"}, {"image": "nii_gz/5436.nii.gz"}, {"image": "nii_gz/5434.nii.gz"}, {"image": "nii_gz/5433.nii.gz"}, {"image": "nii_gz/5432.nii.gz"}, {"image": "nii_gz/5431.nii.gz"}, {"image": "nii_gz/543.nii.gz"}, {"image": "nii_gz/5427.nii.gz"}, {"image": "nii_gz/5421.nii.gz"}, {"image": "nii_gz/5419.nii.gz"}, {"image": "nii_gz/5409.nii.gz"}, {"image": "nii_gz/5407.nii.gz"}, {"image": "nii_gz/5403.nii.gz"}, {"image": "nii_gz/54.nii.gz"}, {"image": "nii_gz/5397.nii.gz"}, {"image": "nii_gz/5390.nii.gz"}, {"image": "nii_gz/5385.nii.gz"}, {"image": "nii_gz/538.nii.gz"}, {"image": "nii_gz/5370.nii.gz"}, {"image": "nii_gz/5369.nii.gz"}, {"image": "nii_gz/5366.nii.gz"}, {"image": "nii_gz/5362.nii.gz"}, {"image": "nii_gz/5359.nii.gz"}, {"image": "nii_gz/5353.nii.gz"}, {"image": "nii_gz/5351.nii.gz"}, {"image": "nii_gz/5350.nii.gz"}, {"image": "nii_gz/5347.nii.gz"}, {"image": "nii_gz/5321.nii.gz"}, {"image": "nii_gz/5314.nii.gz"}, {"image": "nii_gz/5311.nii.gz"}, {"image": "nii_gz/5301.nii.gz"}, {"image": "nii_gz/5280.nii.gz"}, {"image": "nii_gz/5279.nii.gz"}, {"image": "nii_gz/5274.nii.gz"}, {"image": "nii_gz/527.nii.gz"}, {"image": "nii_gz/5260.nii.gz"}, {"image": "nii_gz/5256.nii.gz"}, {"image": "nii_gz/5255.nii.gz"}, {"image": "nii_gz/5252.nii.gz"}, {"image": "nii_gz/5251.nii.gz"}, {"image": "nii_gz/5250.nii.gz"}, {"image": "nii_gz/5239.nii.gz"}, {"image": "nii_gz/5236.nii.gz"}, {"image": "nii_gz/5234.nii.gz"}, {"image": "nii_gz/5229.nii.gz"}, {"image": "nii_gz/5220.nii.gz"}, {"image": "nii_gz/522.nii.gz"}, {"image": "nii_gz/5204.nii.gz"}, {"image": "nii_gz/52.nii.gz"}, {"image": "nii_gz/5198.nii.gz"}, {"image": "nii_gz/5192.nii.gz"}, {"image": "nii_gz/5188.nii.gz"}, {"image": "nii_gz/5187.nii.gz"}, {"image": "nii_gz/5183.nii.gz"}, {"image": "nii_gz/5174.nii.gz"}, {"image": "nii_gz/5171.nii.gz"}, {"image": "nii_gz/5169.nii.gz"}, {"image": "nii_gz/5162.nii.gz"}, {"image": "nii_gz/5160.nii.gz"}, {"image": "nii_gz/5158.nii.gz"}, 
{"image": "nii_gz/5157.nii.gz"}, {"image": "nii_gz/5151.nii.gz"}, {"image": "nii_gz/5147.nii.gz"}, {"image": "nii_gz/5146.nii.gz"}, {"image": "nii_gz/5144.nii.gz"}, {"image": "nii_gz/5142.nii.gz"}, {"image": "nii_gz/5128.nii.gz"}, {"image": "nii_gz/5118.nii.gz"}, {"image": "nii_gz/5114.nii.gz"}, {"image": "nii_gz/5108.nii.gz"}, {"image": "nii_gz/5101.nii.gz"}, {"image": "nii_gz/510.nii.gz"}, {"image": "nii_gz/51.nii.gz"}, {"image": "nii_gz/5090.nii.gz"}, {"image": "nii_gz/5087.nii.gz"}, {"image": "nii_gz/5084.nii.gz"}, {"image": "nii_gz/5072.nii.gz"}, {"image": "nii_gz/5071.nii.gz"}, {"image": "nii_gz/5067.nii.gz"}, {"image": "nii_gz/5066.nii.gz"}, {"image": "nii_gz/5063.nii.gz"}, {"image": "nii_gz/5062.nii.gz"}, {"image": "nii_gz/5053.nii.gz"}, {"image": "nii_gz/504.nii.gz"}, {"image": "nii_gz/5039.nii.gz"}, {"image": "nii_gz/5034.nii.gz"}, {"image": "nii_gz/5015.nii.gz"}, {"image": "nii_gz/5004.nii.gz"}, {"image": "nii_gz/5002.nii.gz"}, {"image": "nii_gz/4988.nii.gz"}, {"image": "nii_gz/4985.nii.gz"}, {"image": "nii_gz/4981.nii.gz"}, {"image": "nii_gz/4976.nii.gz"}, {"image": "nii_gz/4963.nii.gz"}, {"image": "nii_gz/4951.nii.gz"}, {"image": "nii_gz/4940.nii.gz"}, {"image": "nii_gz/4938.nii.gz"}, {"image": "nii_gz/4934.nii.gz"}, {"image": "nii_gz/4926.nii.gz"}, {"image": "nii_gz/492.nii.gz"}, {"image": "nii_gz/4918.nii.gz"}, {"image": "nii_gz/4914.nii.gz"}, {"image": "nii_gz/4913.nii.gz"}, {"image": "nii_gz/4910.nii.gz"}, {"image": "nii_gz/4909.nii.gz"}, {"image": "nii_gz/4907.nii.gz"}, {"image": "nii_gz/4905.nii.gz"}, {"image": "nii_gz/4903.nii.gz"}, {"image": "nii_gz/4899.nii.gz"}, {"image": "nii_gz/4894.nii.gz"}, {"image": "nii_gz/4893.nii.gz"}, {"image": "nii_gz/489.nii.gz"}, {"image": "nii_gz/4888.nii.gz"}, {"image": "nii_gz/4885.nii.gz"}, {"image": "nii_gz/4880.nii.gz"}, {"image": "nii_gz/4873.nii.gz"}, {"image": "nii_gz/4871.nii.gz"}, {"image": "nii_gz/4864.nii.gz"}, {"image": "nii_gz/4858.nii.gz"}, {"image": "nii_gz/4857.nii.gz"}, {"image": 
"nii_gz/4852.nii.gz"}, {"image": "nii_gz/4841.nii.gz"}, {"image": "nii_gz/4839.nii.gz"}, {"image": "nii_gz/4836.nii.gz"}, {"image": "nii_gz/4831.nii.gz"}, {"image": "nii_gz/4821.nii.gz"}, {"image": "nii_gz/482.nii.gz"}, {"image": "nii_gz/4812.nii.gz"}, {"image": "nii_gz/4811.nii.gz"}, {"image": "nii_gz/4807.nii.gz"}, {"image": "nii_gz/4794.nii.gz"}, {"image": "nii_gz/4789.nii.gz"}, {"image": "nii_gz/4788.nii.gz"}, {"image": "nii_gz/4779.nii.gz"}, {"image": "nii_gz/4776.nii.gz"}, {"image": "nii_gz/4772.nii.gz"}, {"image": "nii_gz/4770.nii.gz"}, {"image": "nii_gz/4767.nii.gz"}, {"image": "nii_gz/4765.nii.gz"}, {"image": "nii_gz/4760.nii.gz"}, {"image": "nii_gz/4758.nii.gz"}, {"image": "nii_gz/4756.nii.gz"}, {"image": "nii_gz/4754.nii.gz"}, {"image": "nii_gz/4749.nii.gz"}, {"image": "nii_gz/4747.nii.gz"}, {"image": "nii_gz/4743.nii.gz"}, {"image": "nii_gz/4741.nii.gz"}, {"image": "nii_gz/471.nii.gz"}, {"image": "nii_gz/4700.nii.gz"}, {"image": "nii_gz/4695.nii.gz"}, {"image": "nii_gz/4686.nii.gz"}, {"image": "nii_gz/4676.nii.gz"}, {"image": "nii_gz/4669.nii.gz"}, {"image": "nii_gz/4667.nii.gz"}, {"image": "nii_gz/4664.nii.gz"}, {"image": "nii_gz/4652.nii.gz"}, {"image": "nii_gz/4651.nii.gz"}, {"image": "nii_gz/4645.nii.gz"}, {"image": "nii_gz/4630.nii.gz"}, {"image": "nii_gz/4629.nii.gz"}, {"image": "nii_gz/4625.nii.gz"}, {"image": "nii_gz/4617.nii.gz"}, {"image": "nii_gz/4608.nii.gz"}, {"image": "nii_gz/4606.nii.gz"}, {"image": "nii_gz/4602.nii.gz"}, {"image": "nii_gz/4601.nii.gz"}, {"image": "nii_gz/4599.nii.gz"}, {"image": "nii_gz/4594.nii.gz"}, {"image": "nii_gz/4593.nii.gz"}, {"image": "nii_gz/459.nii.gz"}, {"image": "nii_gz/4586.nii.gz"}, {"image": "nii_gz/4585.nii.gz"}, {"image": "nii_gz/458.nii.gz"}, {"image": "nii_gz/4579.nii.gz"}, {"image": "nii_gz/4577.nii.gz"}, {"image": "nii_gz/457.nii.gz"}, {"image": "nii_gz/4563.nii.gz"}, {"image": "nii_gz/4556.nii.gz"}, {"image": "nii_gz/4551.nii.gz"}, {"image": "nii_gz/4545.nii.gz"}, {"image": "nii_gz/4539.nii.gz"}, 
{"image": "nii_gz/4516.nii.gz"}, {"image": "nii_gz/4515.nii.gz"}, {"image": "nii_gz/4514.nii.gz"}, {"image": "nii_gz/4513.nii.gz"}, {"image": "nii_gz/4509.nii.gz"}, {"image": "nii_gz/4501.nii.gz"}, {"image": "nii_gz/45.nii.gz"}, {"image": "nii_gz/4493.nii.gz"}, {"image": "nii_gz/4461.nii.gz"}, {"image": "nii_gz/4460.nii.gz"}, {"image": "nii_gz/4454.nii.gz"}, {"image": "nii_gz/4453.nii.gz"}, {"image": "nii_gz/4445.nii.gz"}, {"image": "nii_gz/4437.nii.gz"}, {"image": "nii_gz/4432.nii.gz"}, {"image": "nii_gz/443.nii.gz"}, {"image": "nii_gz/4428.nii.gz"}, {"image": "nii_gz/4427.nii.gz"}, {"image": "nii_gz/4425.nii.gz"}, {"image": "nii_gz/442.nii.gz"}, {"image": "nii_gz/4410.nii.gz"}, {"image": "nii_gz/441.nii.gz"}, {"image": "nii_gz/4405.nii.gz"}, {"image": "nii_gz/44.nii.gz"}, {"image": "nii_gz/4398.nii.gz"}, {"image": "nii_gz/4393.nii.gz"}, {"image": "nii_gz/4391.nii.gz"}, {"image": "nii_gz/4386.nii.gz"}, {"image": "nii_gz/4385.nii.gz"}, {"image": "nii_gz/4382.nii.gz"}, {"image": "nii_gz/438.nii.gz"}, {"image": "nii_gz/4369.nii.gz"}, {"image": "nii_gz/4367.nii.gz"}, {"image": "nii_gz/4353.nii.gz"}, {"image": "nii_gz/4351.nii.gz"}, {"image": "nii_gz/4339.nii.gz"}, {"image": "nii_gz/4333.nii.gz"}, {"image": "nii_gz/432.nii.gz"}, {"image": "nii_gz/4319.nii.gz"}, {"image": "nii_gz/4318.nii.gz"}, {"image": "nii_gz/4304.nii.gz"}, {"image": "nii_gz/4299.nii.gz"}, {"image": "nii_gz/4298.nii.gz"}, {"image": "nii_gz/4295.nii.gz"}, {"image": "nii_gz/4288.nii.gz"}, {"image": "nii_gz/4280.nii.gz"}, {"image": "nii_gz/4278.nii.gz"}, {"image": "nii_gz/4276.nii.gz"}, {"image": "nii_gz/4274.nii.gz"}, {"image": "nii_gz/4272.nii.gz"}, {"image": "nii_gz/4269.nii.gz"}, {"image": "nii_gz/4268.nii.gz"}, {"image": "nii_gz/4266.nii.gz"}, {"image": "nii_gz/4263.nii.gz"}, {"image": "nii_gz/4262.nii.gz"}, {"image": "nii_gz/4254.nii.gz"}, {"image": "nii_gz/425.nii.gz"}, {"image": "nii_gz/4247.nii.gz"}, {"image": "nii_gz/424.nii.gz"}, {"image": "nii_gz/4235.nii.gz"}, {"image": 
"nii_gz/4225.nii.gz"}, {"image": "nii_gz/4219.nii.gz"}, {"image": "nii_gz/4215.nii.gz"}, {"image": "nii_gz/4214.nii.gz"}, {"image": "nii_gz/4213.nii.gz"}, {"image": "nii_gz/4211.nii.gz"}, {"image": "nii_gz/4209.nii.gz"}, {"image": "nii_gz/4204.nii.gz"}, {"image": "nii_gz/4203.nii.gz"}, {"image": "nii_gz/4200.nii.gz"}, {"image": "nii_gz/4199.nii.gz"}, {"image": "nii_gz/4192.nii.gz"}, {"image": "nii_gz/4189.nii.gz"}, {"image": "nii_gz/4182.nii.gz"}, {"image": "nii_gz/418.nii.gz"}, {"image": "nii_gz/4173.nii.gz"}, {"image": "nii_gz/4168.nii.gz"}, {"image": "nii_gz/4155.nii.gz"}, {"image": "nii_gz/4152.nii.gz"}, {"image": "nii_gz/4145.nii.gz"}, {"image": "nii_gz/4142.nii.gz"}, {"image": "nii_gz/414.nii.gz"}, {"image": "nii_gz/4137.nii.gz"}, {"image": "nii_gz/4134.nii.gz"}, {"image": "nii_gz/4133.nii.gz"}, {"image": "nii_gz/412.nii.gz"}, {"image": "nii_gz/4111.nii.gz"}, {"image": "nii_gz/4106.nii.gz"}, {"image": "nii_gz/4103.nii.gz"}, {"image": "nii_gz/4099.nii.gz"}, {"image": "nii_gz/4094.nii.gz"}, {"image": "nii_gz/4088.nii.gz"}, {"image": "nii_gz/408.nii.gz"}, {"image": "nii_gz/4079.nii.gz"}, {"image": "nii_gz/4059.nii.gz"}, {"image": "nii_gz/4058.nii.gz"}, {"image": "nii_gz/4057.nii.gz"}, {"image": "nii_gz/4055.nii.gz"}, {"image": "nii_gz/4054.nii.gz"}, {"image": "nii_gz/4050.nii.gz"}, {"image": "nii_gz/4049.nii.gz"}, {"image": "nii_gz/4042.nii.gz"}, {"image": "nii_gz/4040.nii.gz"}, {"image": "nii_gz/4039.nii.gz"}, {"image": "nii_gz/4038.nii.gz"}, {"image": "nii_gz/4036.nii.gz"}, {"image": "nii_gz/4034.nii.gz"}, {"image": "nii_gz/403.nii.gz"}, {"image": "nii_gz/4026.nii.gz"}, {"image": "nii_gz/4023.nii.gz"}, {"image": "nii_gz/4022.nii.gz"}, {"image": "nii_gz/402.nii.gz"}, {"image": "nii_gz/4015.nii.gz"}, {"image": "nii_gz/4009.nii.gz"}, {"image": "nii_gz/4008.nii.gz"}, {"image": "nii_gz/4005.nii.gz"}, {"image": "nii_gz/3999.nii.gz"}, {"image": "nii_gz/3997.nii.gz"}, {"image": "nii_gz/3993.nii.gz"}, {"image": "nii_gz/3992.nii.gz"}, {"image": "nii_gz/3983.nii.gz"}, 
{"image": "nii_gz/3978.nii.gz"}, {"image": "nii_gz/3971.nii.gz"}, {"image": "nii_gz/3970.nii.gz"}, {"image": "nii_gz/394.nii.gz"}, {"image": "nii_gz/3925.nii.gz"}, {"image": "nii_gz/3920.nii.gz"}, {"image": "nii_gz/3919.nii.gz"}, {"image": "nii_gz/3918.nii.gz"}, {"image": "nii_gz/3917.nii.gz"}, {"image": "nii_gz/3890.nii.gz"}, {"image": "nii_gz/3884.nii.gz"}, {"image": "nii_gz/3876.nii.gz"}, {"image": "nii_gz/3871.nii.gz"}, {"image": "nii_gz/3866.nii.gz"}, {"image": "nii_gz/3862.nii.gz"}, {"image": "nii_gz/3858.nii.gz"}, {"image": "nii_gz/3842.nii.gz"}, {"image": "nii_gz/3841.nii.gz"}, {"image": "nii_gz/3835.nii.gz"}, {"image": "nii_gz/3826.nii.gz"}, {"image": "nii_gz/3819.nii.gz"}, {"image": "nii_gz/3817.nii.gz"}, {"image": "nii_gz/3816.nii.gz"}, {"image": "nii_gz/3810.nii.gz"}, {"image": "nii_gz/3801.nii.gz"}, {"image": "nii_gz/3790.nii.gz"}, {"image": "nii_gz/3788.nii.gz"}, {"image": "nii_gz/3775.nii.gz"}, {"image": "nii_gz/3772.nii.gz"}, {"image": "nii_gz/3768.nii.gz"}, {"image": "nii_gz/3765.nii.gz"}, {"image": "nii_gz/3759.nii.gz"}, {"image": "nii_gz/3758.nii.gz"}, {"image": "nii_gz/3752.nii.gz"}, {"image": "nii_gz/3751.nii.gz"}, {"image": "nii_gz/375.nii.gz"}, {"image": "nii_gz/3742.nii.gz"}, {"image": "nii_gz/374.nii.gz"}, {"image": "nii_gz/3734.nii.gz"}, {"image": "nii_gz/3732.nii.gz"}, {"image": "nii_gz/3729.nii.gz"}, {"image": "nii_gz/3728.nii.gz"}, {"image": "nii_gz/3727.nii.gz"}, {"image": "nii_gz/3719.nii.gz"}, {"image": "nii_gz/3717.nii.gz"}, {"image": "nii_gz/3705.nii.gz"}, {"image": "nii_gz/3704.nii.gz"}, {"image": "nii_gz/3702.nii.gz"}, {"image": "nii_gz/3699.nii.gz"}, {"image": "nii_gz/369.nii.gz"}, {"image": "nii_gz/3686.nii.gz"}, {"image": "nii_gz/3685.nii.gz"}, {"image": "nii_gz/368.nii.gz"}, {"image": "nii_gz/3679.nii.gz"}, {"image": "nii_gz/3677.nii.gz"}, {"image": "nii_gz/3674.nii.gz"}, {"image": "nii_gz/3660.nii.gz"}, {"image": "nii_gz/3649.nii.gz"}, {"image": "nii_gz/364.nii.gz"}, {"image": "nii_gz/3637.nii.gz"}, {"image": 
"nii_gz/3630.nii.gz"}, {"image": "nii_gz/3627.nii.gz"}, {"image": "nii_gz/3622.nii.gz"}, {"image": "nii_gz/362.nii.gz"}, {"image": "nii_gz/3617.nii.gz"}, {"image": "nii_gz/3616.nii.gz"}, {"image": "nii_gz/3613.nii.gz"}, {"image": "nii_gz/361.nii.gz"}, {"image": "nii_gz/3607.nii.gz"}, {"image": "nii_gz/3595.nii.gz"}, {"image": "nii_gz/3584.nii.gz"}, {"image": "nii_gz/3581.nii.gz"}, {"image": "nii_gz/3579.nii.gz"}, {"image": "nii_gz/3575.nii.gz"}, {"image": "nii_gz/3569.nii.gz"}, {"image": "nii_gz/3565.nii.gz"}, {"image": "nii_gz/3564.nii.gz"}, {"image": "nii_gz/3562.nii.gz"}, {"image": "nii_gz/3560.nii.gz"}, {"image": "nii_gz/3558.nii.gz"}, {"image": "nii_gz/3552.nii.gz"}, {"image": "nii_gz/3547.nii.gz"}, {"image": "nii_gz/3541.nii.gz"}, {"image": "nii_gz/3537.nii.gz"}, {"image": "nii_gz/3533.nii.gz"}, {"image": "nii_gz/3530.nii.gz"}, {"image": "nii_gz/3527.nii.gz"}, {"image": "nii_gz/3526.nii.gz"}, {"image": "nii_gz/3523.nii.gz"}, {"image": "nii_gz/352.nii.gz"}, {"image": "nii_gz/3510.nii.gz"}, {"image": "nii_gz/3507.nii.gz"}, {"image": "nii_gz/3490.nii.gz"}, {"image": "nii_gz/3477.nii.gz"}, {"image": "nii_gz/3455.nii.gz"}, {"image": "nii_gz/3450.nii.gz"}, {"image": "nii_gz/3448.nii.gz"}, {"image": "nii_gz/3443.nii.gz"}, {"image": "nii_gz/3441.nii.gz"}, {"image": "nii_gz/3434.nii.gz"}, {"image": "nii_gz/343.nii.gz"}, {"image": "nii_gz/3422.nii.gz"}, {"image": "nii_gz/3419.nii.gz"}, {"image": "nii_gz/3418.nii.gz"}, {"image": "nii_gz/3416.nii.gz"}, {"image": "nii_gz/340.nii.gz"}, {"image": "nii_gz/3399.nii.gz"}, {"image": "nii_gz/3395.nii.gz"}, {"image": "nii_gz/3386.nii.gz"}, {"image": "nii_gz/3376.nii.gz"}, {"image": "nii_gz/3375.nii.gz"}, {"image": "nii_gz/337.nii.gz"}, {"image": "nii_gz/3365.nii.gz"}, {"image": "nii_gz/3361.nii.gz"}, {"image": "nii_gz/336.nii.gz"}, {"image": "nii_gz/3327.nii.gz"}, {"image": "nii_gz/332.nii.gz"}, {"image": "nii_gz/3319.nii.gz"}, {"image": "nii_gz/3303.nii.gz"}, {"image": "nii_gz/3302.nii.gz"}, {"image": "nii_gz/3301.nii.gz"}, 
{"image": "nii_gz/3300.nii.gz"}, {"image": "nii_gz/3297.nii.gz"}, {"image": "nii_gz/3285.nii.gz"}, {"image": "nii_gz/3280.nii.gz"}, {"image": "nii_gz/3278.nii.gz"}, {"image": "nii_gz/3275.nii.gz"}, {"image": "nii_gz/3274.nii.gz"}, {"image": "nii_gz/3270.nii.gz"}, {"image": "nii_gz/3269.nii.gz"}, {"image": "nii_gz/3261.nii.gz"}, {"image": "nii_gz/3260.nii.gz"}, {"image": "nii_gz/3257.nii.gz"}, {"image": "nii_gz/3255.nii.gz"}, {"image": "nii_gz/3253.nii.gz"}, {"image": "nii_gz/3251.nii.gz"}, {"image": "nii_gz/3246.nii.gz"}, {"image": "nii_gz/3244.nii.gz"}, {"image": "nii_gz/3240.nii.gz"}, {"image": "nii_gz/3236.nii.gz"}, {"image": "nii_gz/3235.nii.gz"}, {"image": "nii_gz/3234.nii.gz"}, {"image": "nii_gz/3227.nii.gz"}, {"image": "nii_gz/3226.nii.gz"}, {"image": "nii_gz/3223.nii.gz"}, {"image": "nii_gz/3220.nii.gz"}, {"image": "nii_gz/3216.nii.gz"}, {"image": "nii_gz/3215.nii.gz"}, {"image": "nii_gz/3207.nii.gz"}, {"image": "nii_gz/3206.nii.gz"}, {"image": "nii_gz/3201.nii.gz"}, {"image": "nii_gz/3193.nii.gz"}, {"image": "nii_gz/3189.nii.gz"}, {"image": "nii_gz/3188.nii.gz"}, {"image": "nii_gz/3182.nii.gz"}, {"image": "nii_gz/3181.nii.gz"}, {"image": "nii_gz/3177.nii.gz"}, {"image": "nii_gz/3174.nii.gz"}, {"image": "nii_gz/3165.nii.gz"}, {"image": "nii_gz/3163.nii.gz"}, {"image": "nii_gz/3157.nii.gz"}, {"image": "nii_gz/3154.nii.gz"}, {"image": "nii_gz/3152.nii.gz"}, {"image": "nii_gz/3151.nii.gz"}, {"image": "nii_gz/3149.nii.gz"}, {"image": "nii_gz/3141.nii.gz"}, {"image": "nii_gz/3137.nii.gz"}, {"image": "nii_gz/3131.nii.gz"}, {"image": "nii_gz/3130.nii.gz"}, {"image": "nii_gz/3118.nii.gz"}, {"image": "nii_gz/3116.nii.gz"}, {"image": "nii_gz/3113.nii.gz"}, {"image": "nii_gz/3109.nii.gz"}, {"image": "nii_gz/3108.nii.gz"}, {"image": "nii_gz/310.nii.gz"}, {"image": "nii_gz/31.nii.gz"}, {"image": "nii_gz/3098.nii.gz"}, {"image": "nii_gz/3097.nii.gz"}, {"image": "nii_gz/3095.nii.gz"}, {"image": "nii_gz/3091.nii.gz"}, {"image": "nii_gz/3090.nii.gz"}, {"image": 
"nii_gz/3084.nii.gz"}, {"image": "nii_gz/3083.nii.gz"}, {"image": "nii_gz/3077.nii.gz"}, {"image": "nii_gz/3069.nii.gz"}, {"image": "nii_gz/3061.nii.gz"}, {"image": "nii_gz/3056.nii.gz"}, {"image": "nii_gz/3053.nii.gz"}, {"image": "nii_gz/305.nii.gz"}, {"image": "nii_gz/3046.nii.gz"}, {"image": "nii_gz/304.nii.gz"}, {"image": "nii_gz/3038.nii.gz"}, {"image": "nii_gz/3034.nii.gz"}, {"image": "nii_gz/3031.nii.gz"}, {"image": "nii_gz/3028.nii.gz"}, {"image": "nii_gz/3022.nii.gz"}, {"image": "nii_gz/3020.nii.gz"}, {"image": "nii_gz/3011.nii.gz"}, {"image": "nii_gz/3009.nii.gz"}, {"image": "nii_gz/300.nii.gz"}, {"image": "nii_gz/2996.nii.gz"}, {"image": "nii_gz/2993.nii.gz"}, {"image": "nii_gz/2992.nii.gz"}, {"image": "nii_gz/2982.nii.gz"}, {"image": "nii_gz/2973.nii.gz"}, {"image": "nii_gz/2972.nii.gz"}, {"image": "nii_gz/2968.nii.gz"}, {"image": "nii_gz/2960.nii.gz"}, {"image": "nii_gz/296.nii.gz"}, {"image": "nii_gz/2957.nii.gz"}, {"image": "nii_gz/2948.nii.gz"}, {"image": "nii_gz/2947.nii.gz"}, {"image": "nii_gz/2945.nii.gz"}, {"image": "nii_gz/2944.nii.gz"}, {"image": "nii_gz/294.nii.gz"}, {"image": "nii_gz/2938.nii.gz"}, {"image": "nii_gz/2937.nii.gz"}, {"image": "nii_gz/2934.nii.gz"}, {"image": "nii_gz/2932.nii.gz"}, {"image": "nii_gz/2925.nii.gz"}, {"image": "nii_gz/2924.nii.gz"}, {"image": "nii_gz/2922.nii.gz"}, {"image": "nii_gz/2916.nii.gz"}, {"image": "nii_gz/2915.nii.gz"}, {"image": "nii_gz/2910.nii.gz"}, {"image": "nii_gz/2909.nii.gz"}, {"image": "nii_gz/2908.nii.gz"}, {"image": "nii_gz/2907.nii.gz"}, {"image": "nii_gz/2906.nii.gz"}, {"image": "nii_gz/290.nii.gz"}, {"image": "nii_gz/2896.nii.gz"}, {"image": "nii_gz/2893.nii.gz"}, {"image": "nii_gz/2888.nii.gz"}, {"image": "nii_gz/2875.nii.gz"}, {"image": "nii_gz/2874.nii.gz"}, {"image": "nii_gz/2866.nii.gz"}, {"image": "nii_gz/2865.nii.gz"}, {"image": "nii_gz/2863.nii.gz"}, {"image": "nii_gz/2855.nii.gz"}, {"image": "nii_gz/2850.nii.gz"}, {"image": "nii_gz/2841.nii.gz"}, {"image": "nii_gz/2840.nii.gz"}, 
{"image": "nii_gz/2829.nii.gz"}, {"image": "nii_gz/282.nii.gz"}, {"image": "nii_gz/2817.nii.gz"}, {"image": "nii_gz/2815.nii.gz"}, {"image": "nii_gz/2802.nii.gz"}, {"image": "nii_gz/2800.nii.gz"}, {"image": "nii_gz/2797.nii.gz"}, {"image": "nii_gz/2793.nii.gz"}, {"image": "nii_gz/2791.nii.gz"}, {"image": "nii_gz/2780.nii.gz"}, {"image": "nii_gz/2777.nii.gz"}, {"image": "nii_gz/2774.nii.gz"}, {"image": "nii_gz/2765.nii.gz"}, {"image": "nii_gz/2761.nii.gz"}, {"image": "nii_gz/2756.nii.gz"}, {"image": "nii_gz/2740.nii.gz"}, {"image": "nii_gz/2735.nii.gz"}, {"image": "nii_gz/2710.nii.gz"}, {"image": "nii_gz/271.nii.gz"}, {"image": "nii_gz/2706.nii.gz"}, {"image": "nii_gz/2704.nii.gz"}, {"image": "nii_gz/2701.nii.gz"}, {"image": "nii_gz/2700.nii.gz"}, {"image": "nii_gz/27.nii.gz"}, {"image": "nii_gz/2699.nii.gz"}, {"image": "nii_gz/2695.nii.gz"}, {"image": "nii_gz/2685.nii.gz"}, {"image": "nii_gz/2682.nii.gz"}, {"image": "nii_gz/268.nii.gz"}, {"image": "nii_gz/2677.nii.gz"}, {"image": "nii_gz/2676.nii.gz"}, {"image": "nii_gz/2674.nii.gz"}, {"image": "nii_gz/2663.nii.gz"}, {"image": "nii_gz/2656.nii.gz"}, {"image": "nii_gz/2651.nii.gz"}, {"image": "nii_gz/2643.nii.gz"}, {"image": "nii_gz/2641.nii.gz"}, {"image": "nii_gz/264.nii.gz"}, {"image": "nii_gz/2638.nii.gz"}, {"image": "nii_gz/2632.nii.gz"}, {"image": "nii_gz/2631.nii.gz"}, {"image": "nii_gz/263.nii.gz"}, {"image": "nii_gz/2623.nii.gz"}, {"image": "nii_gz/2611.nii.gz"}, {"image": "nii_gz/2610.nii.gz"}, {"image": "nii_gz/261.nii.gz"}, {"image": "nii_gz/2607.nii.gz"}, {"image": "nii_gz/2604.nii.gz"}, {"image": "nii_gz/2587.nii.gz"}, {"image": "nii_gz/2575.nii.gz"}, {"image": "nii_gz/2574.nii.gz"}, {"image": "nii_gz/2573.nii.gz"}, {"image": "nii_gz/2570.nii.gz"}, {"image": "nii_gz/2563.nii.gz"}, {"image": "nii_gz/2558.nii.gz"}, {"image": "nii_gz/2550.nii.gz"}, {"image": "nii_gz/2548.nii.gz"}, {"image": "nii_gz/254.nii.gz"}, {"image": "nii_gz/2539.nii.gz"}, {"image": "nii_gz/2530.nii.gz"}, {"image": 
"nii_gz/2529.nii.gz"}, {"image": "nii_gz/2516.nii.gz"}, {"image": "nii_gz/2515.nii.gz"}, {"image": "nii_gz/2514.nii.gz"}, {"image": "nii_gz/2511.nii.gz"}, {"image": "nii_gz/2496.nii.gz"}, {"image": "nii_gz/2488.nii.gz"}, {"image": "nii_gz/2483.nii.gz"}, {"image": "nii_gz/2478.nii.gz"}, {"image": "nii_gz/2477.nii.gz"}, {"image": "nii_gz/2473.nii.gz"}, {"image": "nii_gz/2469.nii.gz"}, {"image": "nii_gz/2465.nii.gz"}, {"image": "nii_gz/2463.nii.gz"}, {"image": "nii_gz/2459.nii.gz"}, {"image": "nii_gz/2452.nii.gz"}, {"image": "nii_gz/2446.nii.gz"}, {"image": "nii_gz/2445.nii.gz"}, {"image": "nii_gz/244.nii.gz"}, {"image": "nii_gz/2439.nii.gz"}, {"image": "nii_gz/2438.nii.gz"}, {"image": "nii_gz/2437.nii.gz"}, {"image": "nii_gz/2429.nii.gz"}, {"image": "nii_gz/2425.nii.gz"}, {"image": "nii_gz/2418.nii.gz"}, {"image": "nii_gz/2417.nii.gz"}, {"image": "nii_gz/2413.nii.gz"}, {"image": "nii_gz/2408.nii.gz"}, {"image": "nii_gz/2406.nii.gz"}, {"image": "nii_gz/2405.nii.gz"}, {"image": "nii_gz/2396.nii.gz"}, {"image": "nii_gz/2394.nii.gz"}, {"image": "nii_gz/2393.nii.gz"}, {"image": "nii_gz/239.nii.gz"}, {"image": "nii_gz/2384.nii.gz"}, {"image": "nii_gz/2375.nii.gz"}, {"image": "nii_gz/2372.nii.gz"}, {"image": "nii_gz/2371.nii.gz"}, {"image": "nii_gz/2364.nii.gz"}, {"image": "nii_gz/2360.nii.gz"}, {"image": "nii_gz/2358.nii.gz"}, {"image": "nii_gz/2355.nii.gz"}, {"image": "nii_gz/2354.nii.gz"}, {"image": "nii_gz/2353.nii.gz"}, {"image": "nii_gz/2347.nii.gz"}, {"image": "nii_gz/234.nii.gz"}, {"image": "nii_gz/2335.nii.gz"}, {"image": "nii_gz/2325.nii.gz"}, {"image": "nii_gz/2318.nii.gz"}, {"image": "nii_gz/2315.nii.gz"}, {"image": "nii_gz/2313.nii.gz"}, {"image": "nii_gz/2311.nii.gz"}, {"image": "nii_gz/2306.nii.gz"}, {"image": "nii_gz/2304.nii.gz"}, {"image": "nii_gz/2303.nii.gz"}, {"image": "nii_gz/23.nii.gz"}, {"image": "nii_gz/2290.nii.gz"}, {"image": "nii_gz/2289.nii.gz"}, {"image": "nii_gz/2280.nii.gz"}, {"image": "nii_gz/228.nii.gz"}, {"image": "nii_gz/2276.nii.gz"}, 
{"image": "nii_gz/2274.nii.gz"}, {"image": "nii_gz/2272.nii.gz"}, {"image": "nii_gz/227.nii.gz"}, {"image": "nii_gz/2269.nii.gz"}, {"image": "nii_gz/2262.nii.gz"}, {"image": "nii_gz/2259.nii.gz"}, {"image": "nii_gz/2249.nii.gz"}, {"image": "nii_gz/2248.nii.gz"}, {"image": "nii_gz/2235.nii.gz"}, {"image": "nii_gz/2234.nii.gz"}, {"image": "nii_gz/2232.nii.gz"}, {"image": "nii_gz/2229.nii.gz"}, {"image": "nii_gz/2221.nii.gz"}, {"image": "nii_gz/222.nii.gz"}, {"image": "nii_gz/2213.nii.gz"}, {"image": "nii_gz/2208.nii.gz"}, {"image": "nii_gz/2205.nii.gz"}, {"image": "nii_gz/2203.nii.gz"}, {"image": "nii_gz/2202.nii.gz"}, {"image": "nii_gz/22.nii.gz"}, {"image": "nii_gz/2199.nii.gz"}, {"image": "nii_gz/2187.nii.gz"}, {"image": "nii_gz/2185.nii.gz"}, {"image": "nii_gz/2181.nii.gz"}, {"image": "nii_gz/2178.nii.gz"}, {"image": "nii_gz/2173.nii.gz"}, {"image": "nii_gz/217.nii.gz"}, {"image": "nii_gz/2157.nii.gz"}, {"image": "nii_gz/215.nii.gz"}, {"image": "nii_gz/2144.nii.gz"}, {"image": "nii_gz/214.nii.gz"}, {"image": "nii_gz/2139.nii.gz"}, {"image": "nii_gz/2137.nii.gz"}, {"image": "nii_gz/2133.nii.gz"}, {"image": "nii_gz/2132.nii.gz"}, {"image": "nii_gz/2130.nii.gz"}, {"image": "nii_gz/2124.nii.gz"}, {"image": "nii_gz/2121.nii.gz"}, {"image": "nii_gz/2117.nii.gz"}, {"image": "nii_gz/2110.nii.gz"}, {"image": "nii_gz/2106.nii.gz"}, {"image": "nii_gz/210.nii.gz"}, {"image": "nii_gz/2092.nii.gz"}, {"image": "nii_gz/209.nii.gz"}, {"image": "nii_gz/2088.nii.gz"}, {"image": "nii_gz/2086.nii.gz"}, {"image": "nii_gz/2079.nii.gz"}, {"image": "nii_gz/2074.nii.gz"}, {"image": "nii_gz/2071.nii.gz"}, {"image": "nii_gz/2066.nii.gz"}, {"image": "nii_gz/2063.nii.gz"}, {"image": "nii_gz/2060.nii.gz"}, {"image": "nii_gz/2054.nii.gz"}, {"image": "nii_gz/2052.nii.gz"}, {"image": "nii_gz/2042.nii.gz"}, {"image": "nii_gz/204.nii.gz"}, {"image": "nii_gz/2039.nii.gz"}, {"image": "nii_gz/2037.nii.gz"}, {"image": "nii_gz/2036.nii.gz"}, {"image": "nii_gz/2029.nii.gz"}, {"image": 
"nii_gz/2017.nii.gz"}, {"image": "nii_gz/2011.nii.gz"}, {"image": "nii_gz/2010.nii.gz"}, {"image": "nii_gz/201.nii.gz"}, {"image": "nii_gz/2002.nii.gz"}, {"image": "nii_gz/1999.nii.gz"}, {"image": "nii_gz/1995.nii.gz"}, {"image": "nii_gz/1992.nii.gz"}, {"image": "nii_gz/1990.nii.gz"}, {"image": "nii_gz/199.nii.gz"}, {"image": "nii_gz/1989.nii.gz"}, {"image": "nii_gz/1988.nii.gz"}, {"image": "nii_gz/1980.nii.gz"}, {"image": "nii_gz/198.nii.gz"}, {"image": "nii_gz/1976.nii.gz"}, {"image": "nii_gz/1967.nii.gz"}, {"image": "nii_gz/1964.nii.gz"}, {"image": "nii_gz/1961.nii.gz"}, {"image": "nii_gz/1958.nii.gz"}, {"image": "nii_gz/1952.nii.gz"}, {"image": "nii_gz/1947.nii.gz"}, {"image": "nii_gz/1945.nii.gz"}, {"image": "nii_gz/1944.nii.gz"}, {"image": "nii_gz/1943.nii.gz"}, {"image": "nii_gz/1940.nii.gz"}, {"image": "nii_gz/1929.nii.gz"}, {"image": "nii_gz/1928.nii.gz"}, {"image": "nii_gz/1912.nii.gz"}, {"image": "nii_gz/191.nii.gz"}, {"image": "nii_gz/1894.nii.gz"}, {"image": "nii_gz/1892.nii.gz"}, {"image": "nii_gz/1891.nii.gz"}, {"image": "nii_gz/1878.nii.gz"}, {"image": "nii_gz/1874.nii.gz"}, {"image": "nii_gz/1868.nii.gz"}, {"image": "nii_gz/1865.nii.gz"}, {"image": "nii_gz/1852.nii.gz"}, {"image": "nii_gz/1850.nii.gz"}, {"image": "nii_gz/1848.nii.gz"}, {"image": "nii_gz/1847.nii.gz"}, {"image": "nii_gz/1845.nii.gz"}, {"image": "nii_gz/1838.nii.gz"}, {"image": "nii_gz/1837.nii.gz"}, {"image": "nii_gz/1836.nii.gz"}, {"image": "nii_gz/183.nii.gz"}, {"image": "nii_gz/1827.nii.gz"}, {"image": "nii_gz/1825.nii.gz"}, {"image": "nii_gz/1816.nii.gz"}, {"image": "nii_gz/1814.nii.gz"}, {"image": "nii_gz/1812.nii.gz"}, {"image": "nii_gz/1810.nii.gz"}, {"image": "nii_gz/181.nii.gz"}, {"image": "nii_gz/1809.nii.gz"}, {"image": "nii_gz/1807.nii.gz"}, {"image": "nii_gz/1802.nii.gz"}, {"image": "nii_gz/1801.nii.gz"}, {"image": "nii_gz/180.nii.gz"}, {"image": "nii_gz/1799.nii.gz"}, {"image": "nii_gz/1793.nii.gz"}, {"image": "nii_gz/1790.nii.gz"}, {"image": "nii_gz/1789.nii.gz"}, 
{"image": "nii_gz/1788.nii.gz"}, {"image": "nii_gz/1787.nii.gz"}, {"image": "nii_gz/1768.nii.gz"}, {"image": "nii_gz/1766.nii.gz"}, {"image": "nii_gz/1757.nii.gz"}, {"image": "nii_gz/1754.nii.gz"}, {"image": "nii_gz/1744.nii.gz"}, {"image": "nii_gz/174.nii.gz"}, {"image": "nii_gz/1737.nii.gz"}, {"image": "nii_gz/1735.nii.gz"}, {"image": "nii_gz/1732.nii.gz"}, {"image": "nii_gz/1731.nii.gz"}, {"image": "nii_gz/1729.nii.gz"}, {"image": "nii_gz/1718.nii.gz"}, {"image": "nii_gz/1711.nii.gz"}, {"image": "nii_gz/1709.nii.gz"}, {"image": "nii_gz/1708.nii.gz"}, {"image": "nii_gz/1700.nii.gz"}, {"image": "nii_gz/17.nii.gz"}, {"image": "nii_gz/1696.nii.gz"}, {"image": "nii_gz/1683.nii.gz"}, {"image": "nii_gz/1682.nii.gz"}, {"image": "nii_gz/1680.nii.gz"}, {"image": "nii_gz/1674.nii.gz"}, {"image": "nii_gz/167.nii.gz"}, {"image": "nii_gz/1667.nii.gz"}, {"image": "nii_gz/1666.nii.gz"}, {"image": "nii_gz/1662.nii.gz"}, {"image": "nii_gz/1660.nii.gz"}, {"image": "nii_gz/166.nii.gz"}, {"image": "nii_gz/1659.nii.gz"}, {"image": "nii_gz/1656.nii.gz"}, {"image": "nii_gz/1653.nii.gz"}, {"image": "nii_gz/1641.nii.gz"}, {"image": "nii_gz/164.nii.gz"}, {"image": "nii_gz/1639.nii.gz"}, {"image": "nii_gz/1634.nii.gz"}, {"image": "nii_gz/1633.nii.gz"}, {"image": "nii_gz/1625.nii.gz"}, {"image": "nii_gz/1621.nii.gz"}, {"image": "nii_gz/1612.nii.gz"}, {"image": "nii_gz/1610.nii.gz"}, {"image": "nii_gz/1606.nii.gz"}, {"image": "nii_gz/1605.nii.gz"}, {"image": "nii_gz/1603.nii.gz"}, {"image": "nii_gz/1598.nii.gz"}, {"image": "nii_gz/1597.nii.gz"}, {"image": "nii_gz/1595.nii.gz"}, {"image": "nii_gz/1583.nii.gz"}, {"image": "nii_gz/1575.nii.gz"}, {"image": "nii_gz/1574.nii.gz"}, {"image": "nii_gz/1573.nii.gz"}, {"image": "nii_gz/1560.nii.gz"}, {"image": "nii_gz/1553.nii.gz"}, {"image": "nii_gz/1552.nii.gz"}, {"image": "nii_gz/1548.nii.gz"}, {"image": "nii_gz/1545.nii.gz"}, {"image": "nii_gz/1538.nii.gz"}, {"image": "nii_gz/1519.nii.gz"}, {"image": "nii_gz/1506.nii.gz"}, {"image": 
"nii_gz/1504.nii.gz"}, {"image": "nii_gz/1501.nii.gz"}, {"image": "nii_gz/1500.nii.gz"}, {"image": "nii_gz/150.nii.gz"}, {"image": "nii_gz/1490.nii.gz"}, {"image": "nii_gz/1486.nii.gz"}, {"image": "nii_gz/1481.nii.gz"}, {"image": "nii_gz/1480.nii.gz"}, {"image": "nii_gz/148.nii.gz"}, {"image": "nii_gz/1471.nii.gz"}, {"image": "nii_gz/1467.nii.gz"}, {"image": "nii_gz/1457.nii.gz"}, {"image": "nii_gz/1443.nii.gz"}, {"image": "nii_gz/1439.nii.gz"}, {"image": "nii_gz/1437.nii.gz"}, {"image": "nii_gz/1435.nii.gz"}, {"image": "nii_gz/1433.nii.gz"}, {"image": "nii_gz/1431.nii.gz"}, {"image": "nii_gz/1426.nii.gz"}, {"image": "nii_gz/1421.nii.gz"}, {"image": "nii_gz/1417.nii.gz"}, {"image": "nii_gz/1415.nii.gz"}, {"image": "nii_gz/1410.nii.gz"}, {"image": "nii_gz/1403.nii.gz"}, {"image": "nii_gz/1399.nii.gz"}, {"image": "nii_gz/139.nii.gz"}, {"image": "nii_gz/1383.nii.gz"}, {"image": "nii_gz/1375.nii.gz"}, {"image": "nii_gz/1372.nii.gz"}, {"image": "nii_gz/1364.nii.gz"}, {"image": "nii_gz/1363.nii.gz"}, {"image": "nii_gz/136.nii.gz"}, {"image": "nii_gz/1357.nii.gz"}, {"image": "nii_gz/1355.nii.gz"}, {"image": "nii_gz/1335.nii.gz"}, {"image": "nii_gz/1331.nii.gz"}, {"image": "nii_gz/1329.nii.gz"}, {"image": "nii_gz/1324.nii.gz"}, {"image": "nii_gz/1323.nii.gz"}, {"image": "nii_gz/1315.nii.gz"}, {"image": "nii_gz/1310.nii.gz"}, {"image": "nii_gz/1300.nii.gz"}, {"image": "nii_gz/1298.nii.gz"}, {"image": "nii_gz/1288.nii.gz"}, {"image": "nii_gz/1287.nii.gz"}, {"image": "nii_gz/1284.nii.gz"}, {"image": "nii_gz/1283.nii.gz"}, {"image": "nii_gz/128.nii.gz"}, {"image": "nii_gz/1269.nii.gz"}, {"image": "nii_gz/1266.nii.gz"}, {"image": "nii_gz/1264.nii.gz"}, {"image": "nii_gz/1256.nii.gz"}, {"image": "nii_gz/1249.nii.gz"}, {"image": "nii_gz/1248.nii.gz"}, {"image": "nii_gz/1242.nii.gz"}, {"image": "nii_gz/1241.nii.gz"}, {"image": "nii_gz/1239.nii.gz"}, {"image": "nii_gz/1234.nii.gz"}, {"image": "nii_gz/1231.nii.gz"}, {"image": "nii_gz/123.nii.gz"}, {"image": "nii_gz/1219.nii.gz"}, 
{"image": "nii_gz/1216.nii.gz"}, {"image": "nii_gz/1202.nii.gz"}, {"image": "nii_gz/120.nii.gz"}, {"image": "nii_gz/1195.nii.gz"}, {"image": "nii_gz/119.nii.gz"}, {"image": "nii_gz/1189.nii.gz"}, {"image": "nii_gz/1187.nii.gz"}, {"image": "nii_gz/1182.nii.gz"}, {"image": "nii_gz/1181.nii.gz"}, {"image": "nii_gz/1175.nii.gz"}, {"image": "nii_gz/1173.nii.gz"}, {"image": "nii_gz/1172.nii.gz"}, {"image": "nii_gz/117.nii.gz"}, {"image": "nii_gz/1169.nii.gz"}, {"image": "nii_gz/1164.nii.gz"}, {"image": "nii_gz/1162.nii.gz"}, {"image": "nii_gz/1161.nii.gz"}, {"image": "nii_gz/116.nii.gz"}, {"image": "nii_gz/1156.nii.gz"}, {"image": "nii_gz/1154.nii.gz"}, {"image": "nii_gz/1153.nii.gz"}, {"image": "nii_gz/1149.nii.gz"}, {"image": "nii_gz/1148.nii.gz"}, {"image": "nii_gz/1146.nii.gz"}, {"image": "nii_gz/1142.nii.gz"}, {"image": "nii_gz/114.nii.gz"}, {"image": "nii_gz/1128.nii.gz"}, {"image": "nii_gz/1119.nii.gz"}, {"image": "nii_gz/1115.nii.gz"}, {"image": "nii_gz/1111.nii.gz"}, {"image": "nii_gz/1107.nii.gz"}, {"image": "nii_gz/1103.nii.gz"}, {"image": "nii_gz/1099.nii.gz"}, {"image": "nii_gz/1094.nii.gz"}, {"image": "nii_gz/109.nii.gz"}, {"image": "nii_gz/1079.nii.gz"}, {"image": "nii_gz/1076.nii.gz"}, {"image": "nii_gz/10729.nii.gz"}, {"image": "nii_gz/1072.nii.gz"}, {"image": "nii_gz/10714.nii.gz"}, {"image": "nii_gz/10707.nii.gz"}, {"image": "nii_gz/107.nii.gz"}, {"image": "nii_gz/10694.nii.gz"}, {"image": "nii_gz/10681.nii.gz"}, {"image": "nii_gz/1068.nii.gz"}, {"image": "nii_gz/1067.nii.gz"}, {"image": "nii_gz/10665.nii.gz"}, {"image": "nii_gz/10664.nii.gz"}, {"image": "nii_gz/10657.nii.gz"}, {"image": "nii_gz/10653.nii.gz"}, {"image": "nii_gz/10652.nii.gz"}, {"image": "nii_gz/10651.nii.gz"}, {"image": "nii_gz/10650.nii.gz"}, {"image": "nii_gz/1065.nii.gz"}, {"image": "nii_gz/1063.nii.gz"}, {"image": "nii_gz/10621.nii.gz"}, {"image": "nii_gz/10620.nii.gz"}, {"image": "nii_gz/10616.nii.gz"}, {"image": "nii_gz/10610.nii.gz"}, {"image": "nii_gz/1061.nii.gz"}, {"image": 
"nii_gz/10606.nii.gz"}, {"image": "nii_gz/1060.nii.gz"}, {"image": "nii_gz/10595.nii.gz"}, {"image": "nii_gz/10593.nii.gz"}, {"image": "nii_gz/10591.nii.gz"}, {"image": "nii_gz/10590.nii.gz"}, {"image": "nii_gz/10583.nii.gz"}, {"image": "nii_gz/10582.nii.gz"}, {"image": "nii_gz/1058.nii.gz"}, {"image": "nii_gz/10577.nii.gz"}, {"image": "nii_gz/10576.nii.gz"}, {"image": "nii_gz/10575.nii.gz"}, {"image": "nii_gz/10573.nii.gz"}, {"image": "nii_gz/10572.nii.gz"}, {"image": "nii_gz/10571.nii.gz"}, {"image": "nii_gz/10570.nii.gz"}, {"image": "nii_gz/1057.nii.gz"}, {"image": "nii_gz/10569.nii.gz"}, {"image": "nii_gz/10561.nii.gz"}, {"image": "nii_gz/10541.nii.gz"}, {"image": "nii_gz/10531.nii.gz"}, {"image": "nii_gz/10529.nii.gz"}, {"image": "nii_gz/10528.nii.gz"}, {"image": "nii_gz/10525.nii.gz"}, {"image": "nii_gz/10522.nii.gz"}, {"image": "nii_gz/10521.nii.gz"}, {"image": "nii_gz/1052.nii.gz"}, {"image": "nii_gz/10516.nii.gz"}, {"image": "nii_gz/10507.nii.gz"}, {"image": "nii_gz/10502.nii.gz"}, {"image": "nii_gz/1050.nii.gz"}, {"image": "nii_gz/1049.nii.gz"}, {"image": "nii_gz/10486.nii.gz"}, {"image": "nii_gz/10484.nii.gz"}, {"image": "nii_gz/10480.nii.gz"}, {"image": "nii_gz/1048.nii.gz"}, {"image": "nii_gz/10471.nii.gz"}, {"image": "nii_gz/1047.nii.gz"}, {"image": "nii_gz/10456.nii.gz"}, {"image": "nii_gz/10451.nii.gz"}, {"image": "nii_gz/10440.nii.gz"}, {"image": "nii_gz/10435.nii.gz"}, {"image": "nii_gz/10429.nii.gz"}, {"image": "nii_gz/10424.nii.gz"}, {"image": "nii_gz/10418.nii.gz"}, {"image": "nii_gz/10417.nii.gz"}, {"image": "nii_gz/10413.nii.gz"}, {"image": "nii_gz/10407.nii.gz"}, {"image": "nii_gz/10403.nii.gz"}, {"image": "nii_gz/1040.nii.gz"}, {"image": "nii_gz/10397.nii.gz"}, {"image": "nii_gz/10389.nii.gz"}, {"image": "nii_gz/10388.nii.gz"}, {"image": "nii_gz/10385.nii.gz"}, {"image": "nii_gz/10383.nii.gz"}, {"image": "nii_gz/10373.nii.gz"}, {"image": "nii_gz/10368.nii.gz"}, {"image": "nii_gz/10364.nii.gz"}, {"image": "nii_gz/10360.nii.gz"}, {"image": 
"nii_gz/10359.nii.gz"}, {"image": "nii_gz/10351.nii.gz"}, {"image": "nii_gz/10348.nii.gz"}, {"image": "nii_gz/10341.nii.gz"}, {"image": "nii_gz/1034.nii.gz"}, {"image": "nii_gz/10338.nii.gz"}, {"image": "nii_gz/10334.nii.gz"}, {"image": "nii_gz/10307.nii.gz"}, {"image": "nii_gz/10303.nii.gz"}, {"image": "nii_gz/10300.nii.gz"}, {"image": "nii_gz/10292.nii.gz"}, {"image": "nii_gz/10291.nii.gz"}, {"image": "nii_gz/10288.nii.gz"}, {"image": "nii_gz/10286.nii.gz"}, {"image": "nii_gz/10285.nii.gz"}, {"image": "nii_gz/10282.nii.gz"}, {"image": "nii_gz/10277.nii.gz"}, {"image": "nii_gz/10276.nii.gz"}, {"image": "nii_gz/10273.nii.gz"}, {"image": "nii_gz/1027.nii.gz"}, {"image": "nii_gz/10261.nii.gz"}, {"image": "nii_gz/10260.nii.gz"}, {"image": "nii_gz/10256.nii.gz"}, {"image": "nii_gz/10254.nii.gz"}, {"image": "nii_gz/10248.nii.gz"}, {"image": "nii_gz/10242.nii.gz"}, {"image": "nii_gz/10239.nii.gz"}, {"image": "nii_gz/10226.nii.gz"}, {"image": "nii_gz/10225.nii.gz"}, {"image": "nii_gz/10223.nii.gz"}, {"image": "nii_gz/10221.nii.gz"}, {"image": "nii_gz/10212.nii.gz"}, {"image": "nii_gz/10206.nii.gz"}, {"image": "nii_gz/1020.nii.gz"}, {"image": "nii_gz/10192.nii.gz"}, {"image": "nii_gz/10190.nii.gz"}, {"image": "nii_gz/10177.nii.gz"}, {"image": "nii_gz/10175.nii.gz"}, {"image": "nii_gz/10169.nii.gz"}, {"image": "nii_gz/10164.nii.gz"}, {"image": "nii_gz/10157.nii.gz"}, {"image": "nii_gz/10155.nii.gz"}, {"image": "nii_gz/10147.nii.gz"}, {"image": "nii_gz/10134.nii.gz"}, {"image": "nii_gz/10131.nii.gz"}, {"image": "nii_gz/10127.nii.gz"}, {"image": "nii_gz/10126.nii.gz"}, {"image": "nii_gz/10112.nii.gz"}, {"image": "nii_gz/1011.nii.gz"}, {"image": "nii_gz/10109.nii.gz"}, {"image": "nii_gz/10095.nii.gz"}, {"image": "nii_gz/10085.nii.gz"}, {"image": "nii_gz/1008.nii.gz"}, {"image": "nii_gz/10074.nii.gz"}, {"image": "nii_gz/10072.nii.gz"}, {"image": "nii_gz/1007.nii.gz"}, {"image": "nii_gz/10064.nii.gz"}, {"image": "nii_gz/10058.nii.gz"}, {"image": "nii_gz/10044.nii.gz"}, {"image": 
"nii_gz/10036.nii.gz"}, {"image": "nii_gz/10033.nii.gz"}, {"image": "nii_gz/10032.nii.gz"}, {"image": "nii_gz/1003.nii.gz"}, {"image": "nii_gz/1002.nii.gz"}, {"image": "nii_gz/10011.nii.gz"}, {"image": "nii_gz/10010.nii.gz"}]}
================================================
FILE: models/voco_head.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
import numpy as np
from monai.networks.nets.swin_unetr import *
from monai.networks.blocks import PatchEmbed, UnetOutBlock, UnetrBasicBlock, UnetrUpBlock
from monai.networks.nets.swin_unetr import SwinTransformer as SwinViT
from monai.utils import ensure_tuple_rep
import argparse
import torch.nn.functional as F
class projection_head(nn.Module):
    """Three-layer MLP projection head: global average pool, two
    Linear/BatchNorm/ReLU stages, then a final Linear to ``out_dim``."""

    def __init__(self, in_dim=768, hidden_dim=2048, out_dim=2048):
        super().__init__()
        # NOTE: attribute names (layer1/2/3) are part of the state_dict and
        # must not change, or pretrained checkpoints stop loading.
        # The batch norms are stateless (no affine params, no running stats),
        # so they always normalize with the current batch statistics.
        self.layer1 = nn.Sequential(
            nn.Linear(in_dim, hidden_dim),
            nn.BatchNorm1d(hidden_dim, affine=False, track_running_stats=False),
            nn.ReLU(inplace=True),
        )
        self.layer2 = nn.Sequential(
            nn.Linear(hidden_dim, hidden_dim),
            nn.BatchNorm1d(hidden_dim, affine=False, track_running_stats=False),
            nn.ReLU(inplace=True),
        )
        self.layer3 = nn.Sequential(
            nn.Linear(hidden_dim, out_dim),
        )
        self.out_dim = out_dim

    def forward(self, input):
        # Accept either a single feature map or a sequence of maps, in which
        # case only the deepest (last) one is projected.
        feats = input if torch.is_tensor(input) else input[-1]
        batch = feats.size(0)
        # Collapse the spatial dims to 1x1x1, then flatten to (batch, C).
        pooled = F.adaptive_avg_pool3d(feats, (1, 1, 1)).view(batch, -1)
        return self.layer3(self.layer2(self.layer1(pooled)))
class Swin(nn.Module):
    """SwinViT(v2)-based encoder that pools multi-scale features into a single
    vector via a projection head, for VoCo contrastive pre-training."""

    def __init__(self, args):
        super(Swin, self).__init__()
        patch_size = ensure_tuple_rep(2, args.spatial_dims)
        window_size = ensure_tuple_rep(7, args.spatial_dims)
        self.swinViT = SwinViT(
            in_chans=args.in_channels,
            embed_dim=args.feature_size,
            window_size=window_size,
            patch_size=patch_size,
            depths=[2, 2, 2, 2],
            num_heads=[3, 6, 12, 24],
            mlp_ratio=4.0,
            qkv_bias=True,
            drop_rate=0.0,
            attn_drop_rate=0.0,
            drop_path_rate=args.dropout_path_rate,
            norm_layer=torch.nn.LayerNorm,
            use_checkpoint=args.use_checkpoint,
            spatial_dims=args.spatial_dims,
            use_v2=True,
        )

        # All per-scale residual conv blocks share the same config except for
        # their channel counts; build them through one local helper.
        def _conv_block(in_ch, out_ch):
            return UnetrBasicBlock(
                spatial_dims=args.spatial_dims,
                in_channels=in_ch,
                out_channels=out_ch,
                kernel_size=3,
                stride=1,
                norm_name='instance',
                res_block=True,
            )

        fs = args.feature_size
        # Attribute names mirror SwinUNETR so checkpoints stay compatible.
        self.encoder1 = _conv_block(args.in_channels, fs)
        self.encoder2 = _conv_block(fs, fs)
        self.encoder3 = _conv_block(2 * fs, 2 * fs)
        self.encoder4 = _conv_block(4 * fs, 4 * fs)
        self.encoder10 = _conv_block(16 * fs, 16 * fs)
        # 1152 = concatenated channel count of the five pooled feature maps
        # for the default feature_size — TODO confirm for other configs.
        self.proj_head = projection_head(in_dim=1152, hidden_dim=2048, out_dim=2048)

    def forward_encs(self, encs):
        """Globally average-pool each feature map and concatenate along channels."""
        batch = encs[0].size(0)
        pooled = [F.adaptive_avg_pool3d(enc, (1, 1, 1)).view(batch, -1) for enc in encs]
        return torch.cat(pooled, dim=1)

    def forward(self, x_in):
        batch = x_in.size(0)
        hidden = self.swinViT(x_in)
        # Stage outputs 0, 1, 2 and 4 are used; hidden[3] is intentionally
        # skipped, matching the original design.
        features = [
            self.encoder1(x_in),
            self.encoder2(hidden[0]),
            self.encoder3(hidden[1]),
            self.encoder4(hidden[2]),
            self.encoder10(hidden[4]),
        ]
        pooled = self.forward_encs(features)
        return self.proj_head(pooled.view(batch, -1))
class VoCoHead(nn.Module):
    """Student/teacher wrapper for VoCo self-supervised pre-training.

    The student encodes both random crops and base crops; the teacher (an EMA
    copy of the student) encodes the same inputs without gradients. Crop-vs-base
    similarity logits from both directions are averaged and supervised with the
    overlap ``labels``.
    """

    def __init__(self, args):
        super(VoCoHead, self).__init__()
        # Two encoders with identical architecture; the teacher is never
        # trained directly — it only receives EMA updates from the student.
        self.student = Swin(args)
        self.teacher = Swin(args)

    @torch.no_grad()
    def _EMA_update_encoder_teacher(self):
        ## no scheduler here
        # Fixed momentum: teacher <- 0.9 * teacher + 0.1 * student.
        momentum = 0.9
        for param, param_t in zip(self.student.parameters(), self.teacher.parameters()):
            param_t.data = momentum * param_t.data + (1. - momentum) * param.data

    def forward(self, img, crops, labels):
        """Return (pos_loss, neg_loss, regularization_loss), each batch-averaged.

        Args:
            img: stacked random crops, shape (batch_size * sw_size, ...).
            crops: stacked base crops; assumes 16 base crops per volume — TODO confirm.
            labels: per-volume overlap targets; labels.size(0) is the batch size.
        """
        batch_size = labels.size()[0]
        total_size = img.size()[0]
        # Number of random (sub-window) crops per volume.
        sw_size = total_size // batch_size

        pos, neg, total_b_loss = 0.0, 0.0, 0.0

        # .as_tensor() presumably strips MONAI MetaTensor metadata — verify.
        img, crops = img.as_tensor(), crops.as_tensor()
        # Single forward pass over crops and bases together...
        inputs = torch.cat([img, crops], dim=0)

        # here we do norm on all instances
        students_all = self.student(inputs)

        self._EMA_update_encoder_teacher()
        with torch.no_grad():
            teachers_all = (self.teacher(inputs)).detach()

        # First total_size rows are random-crop embeddings; the remainder are
        # base-crop embeddings.
        x_stu_all, bases_stu_all = students_all[:total_size], students_all[total_size:]
        x_tea_all, bases_tea_all = teachers_all[:total_size], teachers_all[total_size:]

        for i in range(batch_size):
            label = labels[i]
            # NOTE(review): the 16 here hard-codes the number of base crops per
            # volume (presumably a 4x4 grid) — confirm against the data pipeline.
            x_stu, bases_stu = x_stu_all[i * sw_size:(i + 1) * sw_size], bases_stu_all[i * 16:(i + 1) * 16]
            x_tea, bases_tea = x_tea_all[i * sw_size:(i + 1) * sw_size], bases_tea_all[i * 16:(i + 1) * 16]

            # Cross prediction: student crops vs teacher bases, and vice versa.
            logits1 = online_assign(x_stu, bases_tea)
            logits2 = online_assign(x_tea, bases_stu)

            logits = (logits1 + logits2) * 0.5

            if i == 0:
                print('labels and logits:', label[0].data, logits[0].data)

            pos_loss, neg_loss = ce_loss(label, logits)
            pos += pos_loss
            neg += neg_loss

            # Push base embeddings within a volume apart from each other.
            b_loss = regularization_loss(bases_stu)
            total_b_loss += b_loss

        pos, neg = pos / batch_size, neg / batch_size
        total_b_loss = total_b_loss / batch_size
        return pos, neg, total_b_loss
def online_assign(feats, bases):
    """Compute assignment logits between crop features and base features.

    Args:
        feats: (b, c) embeddings of the random crops.
        bases: (k, c) embeddings of the base crops.

    Returns:
        (b, k) tensor of ReLU-clipped pairwise cosine similarities, i.e.
        values in [0, 1]; negative similarities are zeroed.
    """
    assert feats.dim() == 2 and bases.dim() == 2 and feats.size(1) == bases.size(1), \
        f"shape mismatch: feats {tuple(feats.size())} vs bases {tuple(bases.size())}"
    # Vectorized pairwise cosine similarity via broadcasting: one fused
    # (b, k) computation instead of the original per-row Python loop.
    logits = F.cosine_similarity(feats.unsqueeze(1), bases.unsqueeze(0), dim=2)
    # Negative similarity carries no assignment signal; clip at zero.
    return F.relu(logits)
def regularization_loss(bases):
    """Penalize positive pairwise cosine similarity among base embeddings.

    Encourages the k base-crop embeddings of one volume to stay mutually
    dissimilar. For each unordered pair (i, j), i < j, only the first operand
    keeps gradients (the second is detached), matching the original pairwise
    formulation.

    Args:
        bases: (k, c) base-crop embeddings.

    Returns:
        (1,) tensor: mean of relu(cos_sim)^2 over all unordered pairs.
    """
    k, _ = bases.size()
    if k < 2:
        # No pairs to compare; the original code would divide by zero here.
        return bases.new_zeros(1)
    # Row-normalize once (eps matches F.cosine_similarity's default), then a
    # single matmul yields every pairwise similarity — one C-level matmul
    # instead of the original O(k^2) Python double loop.
    normed = F.normalize(bases, dim=1, eps=1e-8)
    sim = normed @ normed.detach().t()
    # Keep only unordered pairs i < j (strict upper triangle).
    row, col = torch.triu_indices(k, k, offset=1)
    pair_losses = F.relu(sim[row, col]) ** 2
    # .view(1) preserves the original (1,)-shaped return value.
    return pair_losses.mean().view(1)
def ce_loss(labels, logits):
    """Overlap-prediction loss split into positive and negative parts.

    Positive part: for labeled entries, a log penalty on the absolute distance
    between label and logit. Negative part: for zero-label entries, a squared
    penalty on the logit itself. Each part is normalized by the (soft) count
    of its own entries.
    """
    eps = 1e-6
    # |label - prediction| in [0, 1]; perfect prediction gives zero distance.
    distance = torch.abs(labels - logits)
    pos_loss = -(labels * torch.log(1 - distance + eps)).sum() / (labels.sum() + eps)
    # Entries with label exactly zero form the negative set.
    negatives = (labels == 0).long()
    neg_loss = (negatives * logits ** 2).sum() / (negatives.sum() + eps)
    return pos_loss, neg_loss
================================================
FILE: optimizers/__init__.py
================================================
================================================
FILE: optimizers/lr_scheduler.py
================================================
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import warnings
from typing import List
from torch import nn as nn
from torch.optim import Adam, Optimizer
from torch.optim.lr_scheduler import LambdaLR, _LRScheduler
__all__ = ["LinearLR", "ExponentialLR"]
class _LRSchedulerMONAI(_LRScheduler):
    """Base class for increasing the learning rate between two boundaries over a number
    of iterations"""

    def __init__(self, optimizer: Optimizer, end_lr: float, num_iter: int, last_epoch: int = -1) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            end_lr: the final learning rate.
            num_iter: the number of iterations over which the test occurs.
            last_epoch: the index of last epoch.

        Returns:
            None
        """
        self.end_lr = end_lr
        self.num_iter = num_iter
        # Attributes must be set before the base __init__, which calls step()
        # and therefore the subclass's get_lr() once to initialize the LRs.
        super(_LRSchedulerMONAI, self).__init__(optimizer, last_epoch)
class LinearLR(_LRSchedulerMONAI):
    """Linearly increases the learning rate between two boundaries over a number of
    iterations.
    """

    def get_lr(self):
        # Fraction of the schedule completed; reaches 1 on the final iteration.
        frac = self.last_epoch / (self.num_iter - 1)
        # Linear interpolation from each base LR toward end_lr.
        return [lr + frac * (self.end_lr - lr) for lr in self.base_lrs]
class ExponentialLR(_LRSchedulerMONAI):
    """Exponentially increases the learning rate between two boundaries over a number of
    iterations.
    """

    def get_lr(self):
        # Fraction of the schedule completed; reaches 1 on the final iteration.
        frac = self.last_epoch / (self.num_iter - 1)
        # Geometric interpolation: base_lr at frac=0, end_lr at frac=1.
        return [lr * (self.end_lr / lr) ** frac for lr in self.base_lrs]
class WarmupCosineSchedule(LambdaLR):
    """Linear warmup and then cosine decay.
    Based on https://huggingface.co/ implementation.
    """

    def __init__(
        self, optimizer: Optimizer, warmup_steps: int, t_total: int, cycles: float = 0.5, last_epoch: int = -1
    ) -> None:
        """
        Args:
            optimizer: wrapped optimizer.
            warmup_steps: number of warmup iterations.
            t_total: total number of training iterations.
            cycles: cosine cycles parameter.
            last_epoch: the index of last epoch.
        """
        self.warmup_steps = warmup_steps
        self.t_total = t_total
        self.cycles = cycles
        super().__init__(optimizer, self.lr_lambda, last_epoch)

    def lr_lambda(self, step):
        # Warmup phase: the multiplier climbs linearly from 0 toward 1.
        if step < self.warmup_steps:
            return float(step) / float(max(1.0, self.warmup_steps))
        # Decay phase: cosine curve over the remaining steps, floored at 0.
        span = max(1, self.t_total - self.warmup_steps)
        progress = float(step - self.warmup_steps) / float(span)
        return max(0.0, 0.5 * (1.0 + math.cos(math.pi * self.cycles * 2.0 * progress)))
class LinearWarmupCosineAnnealingLR(_LRScheduler):
    """Linear warmup followed by cosine annealing, implemented in the chainable
    form expected by ``torch.optim.lr_scheduler`` (each step derives the next
    LR from the current ``group["lr"]``)."""

    def __init__(
        self,
        optimizer: Optimizer,
        warmup_epochs: int,
        max_epochs: int,
        warmup_start_lr: float = 0.0,
        eta_min: float = 0.0,
        last_epoch: int = -1,
    ) -> None:
        """
        Args:
            optimizer (Optimizer): Wrapped optimizer.
            warmup_epochs (int): Maximum number of iterations for linear warmup
            max_epochs (int): Maximum number of iterations
            warmup_start_lr (float): Learning rate to start the linear warmup. Default: 0.
            eta_min (float): Minimum learning rate. Default: 0.
            last_epoch (int): The index of last epoch. Default: -1.
        """
        self.warmup_epochs = warmup_epochs
        self.max_epochs = max_epochs
        self.warmup_start_lr = warmup_start_lr
        self.eta_min = eta_min
        # Base __init__ calls step() once, which invokes get_lr() below.
        super(LinearWarmupCosineAnnealingLR, self).__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        """
        Compute learning rate using chainable form of the scheduler
        """
        if not self._get_lr_called_within_step:
            warnings.warn(
                "To get the last learning rate computed by the scheduler, " "please use `get_last_lr()`.", UserWarning
            )

        if self.last_epoch == 0:
            # Very first step: start at the bottom of the warmup ramp.
            return [self.warmup_start_lr] * len(self.base_lrs)
        elif self.last_epoch < self.warmup_epochs:
            # Chainable warmup: add one fixed linear increment per step.
            return [
                group["lr"] + (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]
        elif self.last_epoch == self.warmup_epochs:
            # Warmup complete: land exactly on the base learning rates.
            return self.base_lrs
        elif (self.last_epoch - 1 - self.max_epochs) % (2 * (self.max_epochs - self.warmup_epochs)) == 0:
            # Boundary of a cosine period (restart point of the annealing cycle).
            return [
                group["lr"]
                + (base_lr - self.eta_min) * (1 - math.cos(math.pi / (self.max_epochs - self.warmup_epochs))) / 2
                for base_lr, group in zip(self.base_lrs, self.optimizer.param_groups)
            ]

        # General step: ratio of consecutive cosine factors applied to the
        # previous step's LR, so the update chains without storing history.
        return [
            (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            / (
                1
                + math.cos(
                    math.pi * (self.last_epoch - self.warmup_epochs - 1) / (self.max_epochs - self.warmup_epochs)
                )
            )
            * (group["lr"] - self.eta_min)
            + self.eta_min
            for group in self.optimizer.param_groups
        ]

    def _get_closed_form_lr(self) -> List[float]:
        """
        Called when epoch is passed as a param to the `step` function of the scheduler.
        """
        if self.last_epoch < self.warmup_epochs:
            # Closed-form linear warmup.
            return [
                self.warmup_start_lr + self.last_epoch * (base_lr - self.warmup_start_lr) / (self.warmup_epochs - 1)
                for base_lr in self.base_lrs
            ]

        # Closed-form cosine annealing after warmup.
        return [
            self.eta_min
            + 0.5
            * (base_lr - self.eta_min)
            * (1 + math.cos(math.pi * (self.last_epoch - self.warmup_epochs) / (self.max_epochs - self.warmup_epochs)))
            for base_lr in self.base_lrs
        ]
================================================
FILE: requirements.txt
================================================
# packages in environment at /home/lwubf/anaconda3/envs/nnunet:
#
# Name Version Build Channel
_libgcc_mutex 0.1 main
absl-py 2.1.0
ca-certificates 2023.12.12 h06a4308_0
certifi 2022.12.7
charset-normalizer 2.1.1
cmake 3.25.0
contourpy 1.2.0
cycler 0.12.1
einops 0.7.0
elasticdeform 0.5.0
filelock 3.9.0
fonttools 4.50.0
fsspec 2024.2.0
grpcio 1.62.0
huggingface-hub 0.21.4
idna 3.4
importlib-metadata 7.0.1
importlib_resources 6.4.0
inquirerpy 0.3.4
Jinja2 3.1.2
kiwisolver 1.4.5
ld_impl_linux-64 2.38 h1181459_1
libffi 3.3 he6710b0_2
libgcc-ng 9.1.0 hdf63c60_0
libstdcxx-ng 9.1.0 hdf63c60_0
lit 15.0.7
Markdown 3.5.2
MarkupSafe 2.1.5
matplotlib 3.8.3
monai 1.3.0
mpmath 1.3.0
ncurses 6.3 h7f8727e_2
networkx 3.2.1
nibabel 5.2.0
numpy 1.26.4
opencv-python 4.9.0.80
openssl 1.1.1w h7f8727e_0
packaging 23.2
pfzy 0.3.4
pillow 10.2.0
pip 23.3.1 py39h06a4308_0
prompt-toolkit 3.0.43
protobuf 4.25.3
pyparsing 3.1.2
python 3.9.12 h12debd9_1
python-dateutil 2.9.0.post0
PyYAML 6.0.1
readline 8.1.2 h7f8727e_1
requests 2.28.1
scipy 1.12.0
setuptools 68.2.2 py39h06a4308_0
SimpleITK 2.0.2
six 1.16.0
sqlite 3.38.5 hc218d9a_0
sympy 1.12
tensorboard 2.16.2
tensorboard-data-server 0.7.2
tensorboardX 2.6.2.2
tk 8.6.12 h1ccaba5_0
torch 2.0.1+cu118
torchaudio 2.0.2+cu118
torchvision 0.15.2+cu118
tqdm 4.66.2
triton 2.0.0
typing_extensions 4.8.0
tzdata 2024a h04d1e81_0
urllib3 1.26.13
wcwidth 0.2.13
Werkzeug 3.0.1
wheel 0.41.2 py39h06a4308_0
xz 5.2.5 h7f8727e_1
zipp 3.17.0
zlib 1.2.12 h7f8727e_2
================================================
FILE: train.sh
================================================
# Timestamp used to name this run's log file.
now=$(date +"%Y%m%d_%H%M%S")
# Output directory for the 10k pre-training run.
logdir=runs/logs_10k
mkdir -p $logdir
# Single-node distributed launch; tee mirrors console output into a dated log.
torchrun --master_port=28802 voco_train.py \
    --logdir $logdir | tee $logdir/$now.txt
================================================
FILE: utils/__init__.py
================================================
================================================
FILE: utils/data_utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections.abc import Callable, Sequence
from torch.utils.data import Dataset as _TorchDataset
from torch.utils.data import Subset
import collections
import numpy as np
from monai.data import *
import pickle
from monai.transforms import *
from math import *
def get_loader_1k(args):
    """Build train/val DataLoaders for the ~1k-volume pre-training mix
    (BTCV + TCIAcovid19 + LUNA16).

    Each CT is intensity-rescaled, cropped, and resized to (384, 384, 96),
    then passed through VoCoAugmentation. Caching backend is selected by
    ``args.cache_dataset`` / ``args.smartcache_dataset``.
    """
    splits1 = "/btcv.json"
    splits2 = "/dataset_TCIAcovid19_0.json"
    splits3 = "/dataset_LUNA16_0.json"
    # splits3 = "/dataset_HNSCC_0.json"
    # splits4 = "/dataset_TCIAcolon_v2_0.json"
    # splits5 = "/dataset_LIDC_0.json"
    list_dir = "./jsons"
    jsonlist1 = list_dir + splits1
    jsonlist2 = list_dir + splits2
    jsonlist3 = list_dir + splits3
    # jsonlist4 = list_dir + splits4
    # jsonlist5 = list_dir + splits5
    # NOTE(review): machine-specific absolute data paths.
    datadir1 = "/data/linshan/CTs/BTCV"
    datadir2 = "/data/linshan/CTs/TCIAcovid19"
    datadir3 = "/data/linshan/CTs/Luna16-jx"
    num_workers = 8
    datalist1 = load_decathlon_datalist(jsonlist1, False, "training", base_dir=datadir1)
    print("Dataset 1 BTCV: number of data: {}".format(len(datalist1)))
    # Keep only the image path (drop segmentation labels) — pre-training is
    # self-supervised.
    new_datalist1 = []
    for item in datalist1:
        item_dict = {"image": item["image"]}
        new_datalist1.append(item_dict)
    datalist2 = load_decathlon_datalist(jsonlist2, False, "training", base_dir=datadir2)
    print("Dataset 2 Covid 19: number of data: {}".format(len(datalist2)))
    datalist3 = load_decathlon_datalist(jsonlist3, False, "training", base_dir=datadir3)
    print("Dataset 3 Luna: number of data: {}".format(len(datalist3)))
    new_datalist3 = []
    for item in datalist3:
        item_dict = {"image": item["image"]}
        new_datalist3.append(item_dict)
    vallist1 = load_decathlon_datalist(jsonlist1, False, "validation", base_dir=datadir1)
    vallist2 = load_decathlon_datalist(jsonlist2, False, "validation", base_dir=datadir2)
    vallist3 = load_decathlon_datalist(jsonlist3, False, "validation", base_dir=datadir3)
    # vallist4 = load_decathlon_datalist(jsonlist4, False, "validation", base_dir=datadir4)
    # vallist5 = load_decathlon_datalist(jsonlist5, False, "validation", base_dir=datadir5)
    datalist = new_datalist1 + datalist2 + new_datalist3  # + datalist4 + datalist5
    # datalist = new_datalist1
    val_files = vallist1 + vallist2 + vallist3  # + vallist4 + vallist5
    print("Dataset all training: number of data: {}".format(len(datalist)))
    print("Dataset all validation: number of data: {}".format(len(val_files)))

    # Deterministic preprocessing, then random augmentations, then the VoCo
    # crop generation. Pipeline order matters (pad -> crop -> resize).
    train_transforms = Compose([LoadImaged(keys=["image"], image_only=True, dtype=np.int16),
                                EnsureChannelFirstd(keys=["image"]),
                                Orientationd(keys=["image"], axcodes="RAS"),
                                ScaleIntensityRanged(
                                    keys=["image"], a_min=args.a_min, a_max=args.a_max,
                                    b_min=args.b_min, b_max=args.b_max, clip=True),
                                SpatialPadd(keys="image", spatial_size=[args.roi_x, args.roi_y,
                                                                        args.roi_z]),
                                CropForegroundd(keys=["image"], source_key="image"),
                                # Fixed crop box; the huge roi_end z keeps the
                                # full depth.
                                SpatialCropd(keys=["image"], roi_start=[60, 80, 0],
                                             roi_end=[440, 380, 10000]),
                                Resized(keys=["image"], mode="trilinear", align_corners=True,
                                        spatial_size=(384, 384, 96)),
                                # Random
                                # NOTE(review): prob=0.0 disables this shift —
                                # confirm whether that is intended.
                                RandShiftIntensityd(keys="image", offsets=0.1, prob=0.0),
                                # `threshold` is a select_fn defined elsewhere
                                # in this module — TODO confirm its semantics.
                                CropForegroundd(keys="image", source_key="image", select_fn=threshold),
                                Resized(keys="image", mode="bilinear", align_corners=True,
                                        spatial_size=(384, 384, 96)),
                                VoCoAugmentation(args, aug=True)
                                ])
    # Validation: same deterministic pipeline, no random augmentation.
    val_transforms = Compose([LoadImaged(keys=["image"], image_only=True, dtype=np.int16),
                              EnsureChannelFirstd(keys=["image"]),
                              Orientationd(keys=["image"], axcodes="RAS"),
                              ScaleIntensityRanged(
                                  keys=["image"], a_min=args.a_min, a_max=args.a_max,
                                  b_min=args.b_min, b_max=args.b_max, clip=True),
                              SpatialPadd(keys="image", spatial_size=[args.roi_x, args.roi_y,
                                                                      args.roi_z]),
                              CropForegroundd(keys=["image"], source_key="image"),
                              SpatialCropd(keys=["image"], roi_start=[60, 80, 0],
                                           roi_end=[440, 380, 10000]),
                              Resized(keys=["image"], mode="trilinear", align_corners=True,
                                      spatial_size=(384, 384, 96)),
                              VoCoAugmentation(args, aug=False)
                              ])

    if args.cache_dataset:
        print("Using MONAI Cache Dataset")
        train_ds = CacheDataset(data=datalist, transform=train_transforms,
                                cache_rate=0.5, num_workers=num_workers)
    elif args.smartcache_dataset:
        print("Using MONAI SmartCache Dataset")
        train_ds = SmartCacheDataset(
            data=datalist,
            transform=train_transforms,
            replace_rate=1.0,
            cache_num=2 * args.batch_size * args.sw_batch_size,
        )
    else:
        print("Using Persistent dataset")
        # train_ds = Dataset(data=datalist, transform=train_transforms)
        train_ds = PersistentDataset(data=datalist,
                                     transform=train_transforms,
                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                     cache_dir='/data/linshan/cache/1.5k')

    if args.distributed:
        train_sampler = DistributedSampler(dataset=train_ds, even_divisible=True, shuffle=True)
    else:
        train_sampler = None
    train_loader = DataLoader(
        train_ds, batch_size=args.batch_size, num_workers=num_workers, sampler=train_sampler,
        drop_last=True, pin_memory=True
    )

    val_ds = PersistentDataset(data=val_files,
                               transform=val_transforms,
                               pickle_protocol=pickle.HIGHEST_PROTOCOL,
                               cache_dir='/data/linshan/cache/1.5k')
    val_loader = DataLoader(val_ds, batch_size=args.batch_size, num_workers=num_workers, shuffle=False, drop_last=True)

    return train_loader, val_loader
def random_split(ls):
    """Split a list into a 90% train portion and a 10% validation portion.

    NOTE: despite the name, there is no shuffling — the first ceil(0.9 * len)
    items become the training split and the remainder the validation split.
    """
    cut = ceil(len(ls) * 0.9)
    return ls[:cut], ls[cut:]
def get_loader(args):
    """Assemble the multi-dataset pre-training corpus and return its DataLoader.

    Merges seven public CT datasets (BTCV, TCIA-Covid19, LUNA16, STOIC21,
    TotalSegmentator, FLARE23, HNSCC) into one training list; items from
    datasets 1 and 3 are reduced to ``{"image": path}`` dicts so the
    transforms only ever see the image key.  A small validation list is built
    from the first three datasets but only its size is reported here.

    Returns:
        DataLoader over the (optionally cached) training dataset, yielding
        the (imgs, labels, crops) triples produced by VoCoAugmentation.
    """
    splits1 = "/btcv.json"
    splits2 = "/dataset_TCIAcovid19_0.json"
    splits3 = "/dataset_LUNA16_0.json"
    splits4 = "/stoic21.json"
    splits5 = "/Totalsegmentator_dataset.json"
    splits6 = "/flare23.json"
    splits7 = "/HNSCC.json"
    list_dir = "./jsons/"
    jsonlist1 = list_dir + splits1
    jsonlist2 = list_dir + splits2
    jsonlist3 = list_dir + splits3
    jsonlist4 = list_dir + splits4
    jsonlist5 = list_dir + splits5
    jsonlist6 = list_dir + splits6
    jsonlist7 = list_dir + splits7
    datadir1 = "./data/BTCV"
    datadir2 = "./data/TCIAcovid19"
    datadir3 = "./data/Luna16-jx"
    datadir4 = "./data/stoic21"
    datadir5 = "./data/Totalsegmentator_dataset"
    datadir6 = "./data/Flare23"
    datadir7 = "./data/HNSCC_convert_v1"
    num_workers = 16
    datalist1 = load_decathlon_datalist(jsonlist1, False, "training", base_dir=datadir1)
    print("Dataset 1 BTCV: number of data: {}".format(len(datalist1)))
    # Keep only the image path: BTCV entries also carry labels we must drop.
    new_datalist1 = []
    for item in datalist1:
        item_dict = {"image": item["image"]}
        new_datalist1.append(item_dict)
    datalist2 = load_decathlon_datalist(jsonlist2, False, "training", base_dir=datadir2)
    print("Dataset 2 Covid 19: number of data: {}".format(len(datalist2)))
    datalist3 = load_decathlon_datalist(jsonlist3, False, "training", base_dir=datadir3)
    print("Dataset 3 Luna: number of data: {}".format(len(datalist3)))
    new_datalist3 = []
    for item in datalist3:
        item_dict = {"image": item["image"]}
        new_datalist3.append(item_dict)
    datalist4 = load_decathlon_datalist(jsonlist4, False, "training", base_dir=datadir4)
    print("Dataset 4 TCIA Colon: number of data: {}".format(len(datalist4)))
    datalist5 = load_decathlon_datalist(jsonlist5, False, "training", base_dir=datadir5)
    print("Dataset 5 Totalsegmentator: number of data: {}".format(len(datalist5)))
    datalist6 = load_decathlon_datalist(jsonlist6, False, "training", base_dir=datadir6)
    print("Dataset 6 Flare23: number of data: {}".format(len(datalist6)))
    datalist7 = load_decathlon_datalist(jsonlist7, False, "training", base_dir=datadir7)
    print("Dataset 7 HNSCC: number of data: {}".format(len(datalist7)))
    vallist1 = load_decathlon_datalist(jsonlist1, False, "validation", base_dir=datadir1)
    vallist2 = load_decathlon_datalist(jsonlist2, False, "validation", base_dir=datadir2)
    vallist3 = load_decathlon_datalist(jsonlist3, False, "validation", base_dir=datadir3)
    datalist = new_datalist1 + datalist2 + new_datalist3 + datalist4 + datalist5 + datalist6 + datalist7
    val_files = vallist1 + vallist2 + vallist3
    print("Dataset all training: number of data: {}".format(len(datalist)))
    print("Dataset all validation: number of data: {}".format(len(val_files)))
    train_transforms = Compose([LoadImaged(keys=["image"], image_only=True, dtype=np.int16),
                                EnsureChannelFirstd(keys=["image"]),
                                Orientationd(keys=["image"], axcodes="RAS"),
                                ScaleIntensityRanged(
                                    keys=["image"], a_min=args.a_min, a_max=args.a_max,
                                    b_min=args.b_min, b_max=args.b_max, clip=True),
                                CropForegroundd(keys="image", source_key="image", select_fn=threshold),
                                Resized(keys="image", mode="bilinear", align_corners=True,
                                        spatial_size=(384, 384, 96)),
                                VoCoAugmentation(args, aug=True)
                                ])
    if args.cache_dataset:
        print("Using MONAI Cache Dataset")
        train_ds = CacheDataset(data=datalist, transform=train_transforms,
                                cache_rate=0.5, num_workers=num_workers)
    elif args.smartcache_dataset:
        print("Using MONAI SmartCache Dataset")
        train_ds = SmartCacheDataset(
            data=datalist,
            transform=train_transforms,
            replace_rate=1.0,
            cache_num=2 * args.batch_size * args.sw_batch_size,
        )
    else:
        print("Using Persistent dataset")
        # NOTE(review): hard-coded cache path — adjust for your machine.
        train_ds = PersistentDataset(data=datalist,
                                     transform=train_transforms,
                                     pickle_protocol=pickle.HIGHEST_PROTOCOL,
                                     cache_dir='/data/linshan/cache/10k')
    if args.distributed:
        train_sampler = DistributedSampler(dataset=train_ds, even_divisible=True, shuffle=True)
    else:
        train_sampler = None
    train_loader = DataLoader(
        train_ds, batch_size=args.batch_size, num_workers=num_workers, sampler=train_sampler,
        # Bug fix: shuffle and sampler are mutually exclusive in PyTorch —
        # passing shuffle=True together with the DistributedSampler raised a
        # ValueError. Shuffle only when there is no sampler; the sampler
        # itself shuffles in the distributed case.
        shuffle=(train_sampler is None),
        drop_last=True, pin_memory=True
    )
    return train_loader
def threshold(x):
    """Foreground predicate for CropForegroundd: keep voxels whose
    intensity (normalised to [0, 1] earlier in the pipeline) exceeds 0.3."""
    return 0.3 < x
class VoCoAugmentation():
    """Callable transform producing VoCo's (base views, position labels, grid crops).

    Applied as the last step of the MONAI pipeline: for each input volume it
    samples `sw_batch_size` randomly positioned base views (with soft overlap
    labels against the fixed crop grid) and the full grid of reference crops.
    """

    def __init__(self, args, aug):
        self.args = args  # provides roi_x (crop edge) and sw_batch_size (views per volume)
        self.aug = aug    # enable random flips / rotations / intensity shifts

    def __call__(self, x_in):
        grid_pipelines = get_crop_transform(roi_small=self.args.roi_x, aug=self.aug)
        view_pipelines, labels = get_vanilla_transform(num=self.args.sw_batch_size,
                                                       roi_small=self.args.roi_x, aug=self.aug)
        imgs = [pipeline(x_in) for pipeline in view_pipelines]
        crops = [pipeline(x_in) for pipeline in grid_pipelines]
        return imgs, labels, crops
def get_vanilla_transform(num=2, num_crops=4, roi_small=64, roi=96, max_roi=384, aug=False):
    """Build `num` randomly positioned crop pipelines plus their overlap labels.

    Each pipeline crops a roi^3 block at a random (x, y) centre, resizes it to
    roi_small^3 and (optionally) applies light spatial/intensity augmentation.
    Labels are the fractional overlaps of each random crop with the
    num_crops x num_crops base grid, shaped (num, num_crops**2).
    """
    vanilla_trans, labels = [], []
    for _ in range(num):
        center_x, center_y, label = get_position_label(roi=roi,
                                                       max_roi=max_roi,
                                                       num_crops=num_crops)
        steps = [
            SpatialCropd(keys=['image'],
                         roi_center=[center_x, center_y, roi // 2],
                         roi_size=[roi, roi, roi]),
            Resized(keys=["image"], mode="bilinear", align_corners=True,
                    spatial_size=(roi_small, roi_small, roi_small)),
        ]
        if aug:
            steps += [
                RandFlipd(keys=["image"], prob=0.2, spatial_axis=0),
                RandFlipd(keys=["image"], prob=0.2, spatial_axis=1),
                RandFlipd(keys=["image"], prob=0.2, spatial_axis=2),
                RandRotate90d(keys=["image"], prob=0.2, max_k=3),
                RandShiftIntensityd(keys="image", offsets=0.1, prob=0.1),
            ]
        steps.append(ToTensord(keys=["image"]))
        vanilla_trans.append(Compose(steps))
        labels.append(label)
    labels = np.concatenate(labels, 0).reshape(num, num_crops * num_crops)
    return vanilla_trans, labels
def get_crop_transform(num=4, roi_small=64, roi=96, aug=False):
    """Build the num x num grid of fixed base-crop pipelines for VoCo.

    Returns one Compose per grid cell (row-major over i, j), each cropping a
    roi^3 block centred on the cell and resizing it to roi_small^3; when
    `aug` is set, light flips/rotations/intensity shifts are appended.
    """
    voco_trans = []
    # not symmetric at axis x !!!
    for i in range(num):
        for j in range(num):
            center_x = (i + 1 / 2) * roi
            center_y = (j + 1 / 2) * roi
            center_z = roi // 2
            steps = [
                SpatialCropd(keys=['image'],
                             roi_center=[center_x, center_y, center_z],
                             roi_size=[roi, roi, roi]),
                # Bug fix: the aug branch previously applied this identical
                # Resized twice in a row — the duplicate was pure wasted work.
                Resized(keys=["image"], mode="bilinear", align_corners=True,
                        spatial_size=(roi_small, roi_small, roi_small)),
            ]
            if aug:
                steps += [
                    RandFlipd(keys=["image"], prob=0.2, spatial_axis=0),
                    RandFlipd(keys=["image"], prob=0.2, spatial_axis=1),
                    RandFlipd(keys=["image"], prob=0.2, spatial_axis=2),
                    RandRotate90d(keys=["image"], prob=0.2, max_k=3),
                    RandShiftIntensityd(keys="image", offsets=0.1, prob=0.1),
                ]
            steps.append(ToTensord(keys=["image"]))
            voco_trans.append(Compose(steps))
    return voco_trans
def get_position_label(roi=96, base_roi=96, max_roi=384, num_crops=4):
    """Sample a random crop centre and its overlap labels against the base grid.

    The centre is drawn so the roi x roi box lies fully inside the
    max_roi x max_roi plane.  The label for grid cell (i, j) is the fraction
    of the box's area falling inside that base_roi-sized cell; labels are
    returned as a (1, num_crops**2) row vector (they sum to 1 by construction).
    """
    half = roi // 2
    # Two draws, x first then y, each in [half, max_roi - half).
    center_x = np.random.randint(low=half, high=max_roi - half)
    center_y = np.random.randint(low=half, high=max_roi - half)
    x_min, x_max = center_x - half, center_x + half
    y_min, y_max = center_y - half, center_y + half
    total_area = roi * roi
    labels = []
    for i in range(num_crops):
        cell_x0, cell_x1 = i * base_roi, (i + 1) * base_roi
        dx = min(cell_x1, x_max) - max(cell_x0, x_min)
        for j in range(num_crops):
            cell_y0, cell_y1 = j * base_roi, (j + 1) * base_roi
            dy = min(cell_y1, y_max) - max(cell_y0, y_min)
            if dx > 0 and dy > 0:
                labels.append((dx * dy) / total_area)
            else:
                labels.append(0)
    labels = np.asarray(labels).reshape(1, num_crops * num_crops)
    return center_x, center_y, labels
if __name__ == '__main__':
    # Smoke test: draw one random crop position and print its overlap labels.
    center_x, center_y, labels = get_position_label()
    print(center_x, center_y, labels)
================================================
FILE: utils/ops.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import torch
from numpy.random import randint
def patch_rand_drop(args, x, x_rep=None, max_drop=0.3, max_block_sz=0.25, tolr=0.05):
    """Randomly overwrite 3-D blocks of `x` (in place) until enough voxels are hit.

    When `x_rep` is None each block is filled with min-max normalised Gaussian
    noise; otherwise the block is copied from the same location in `x_rep`.
    `max_drop` bounds the fraction of voxels replaced, `max_block_sz` bounds a
    single block's edge relative to the volume, and `tolr` sets minimum block
    edges.  Note: the exact output depends on the order of the random draws
    below, so x is expected shaped (c, h, w, z).
    """
    c, h, w, z = x.size()
    # Target number of voxels to replace for this call.
    n_drop_pix = np.random.uniform(0, max_drop) * h * w * z
    mx_blk_height = int(h * max_block_sz)
    mx_blk_width = int(w * max_block_sz)
    mx_blk_slices = int(z * max_block_sz)
    # Re-bind tolr as per-axis minimum block sizes (in voxels).
    tolr = (int(tolr * h), int(tolr * w), int(tolr * z))
    total_pix = 0
    while total_pix < n_drop_pix:
        # Random block origin, then random extent clamped to the volume.
        rnd_r = randint(0, h - tolr[0])
        rnd_c = randint(0, w - tolr[1])
        rnd_s = randint(0, z - tolr[2])
        rnd_h = min(randint(tolr[0], mx_blk_height) + rnd_r, h)
        rnd_w = min(randint(tolr[1], mx_blk_width) + rnd_c, w)
        rnd_z = min(randint(tolr[2], mx_blk_slices) + rnd_s, z)
        if x_rep is None:
            # NOTE(review): device=args.local_rank relies on an int being a
            # valid CUDA device index — confirm on CPU-only setups.
            x_uninitialized = torch.empty(
                (c, rnd_h - rnd_r, rnd_w - rnd_c, rnd_z - rnd_s), dtype=x.dtype, device=args.local_rank
            ).normal_()
            # Min-max normalise the noise into [0, 1] to match image range.
            x_uninitialized = (x_uninitialized - torch.min(x_uninitialized)) / (
                torch.max(x_uninitialized) - torch.min(x_uninitialized)
            )
            x[:, rnd_r:rnd_h, rnd_c:rnd_w, rnd_s:rnd_z] = x_uninitialized
        else:
            x[:, rnd_r:rnd_h, rnd_c:rnd_w, rnd_s:rnd_z] = x_rep[:, rnd_r:rnd_h, rnd_c:rnd_w, rnd_s:rnd_z]
        # Blocks may overlap; voxel counts are accumulated regardless.
        total_pix = total_pix + (rnd_h - rnd_r) * (rnd_w - rnd_c) * (rnd_z - rnd_s)
    return x
def rot_rand(args, x_s):
    """Rotate each volume in the batch by a random multiple of 90 degrees.

    Rotation is in the (2, 3) plane of each (c, h, w, z) item.  Returns the
    rotated batch and a LongTensor of the applied k values (0-3), which serve
    as rotation-prediction targets.
    """
    img_n = x_s.size()[0]
    x_aug = x_s.detach().clone()
    device = torch.device(f"cuda:{args.local_rank}")
    x_rot = torch.zeros(img_n).long().to(device)
    for idx in range(img_n):
        # One uniform draw in {0, 1, 2, 3} per image; k=0 leaves it unchanged.
        k = np.random.randint(0, 4)
        x_aug[idx] = x_s[idx].rot90(k, (2, 3))
        x_rot[idx] = k
    return x_aug, x_rot
def aug_rand(args, samples):
    """Apply random patch-drop augmentation to every volume in the batch.

    Each item first gets noise-filled blocks; then, with a second random draw,
    blocks copied from another (already augmented) batch item.
    """
    img_n = samples.size()[0]
    x_aug = samples.detach().clone()
    for i in range(img_n):
        x_aug[i] = patch_rand_drop(args, x_aug[i])
        donor = randint(0, img_n)
        if donor != i:
            # In-paint from a different sample for extra variety.
            x_aug[i] = patch_rand_drop(args, x_aug[i], x_aug[donor])
    return x_aug
def concat_image(imgs):
    """Merge per-crop image dicts into a single (B * S, 1, x, y, z) tensor.

    `imgs` is a list of dicts whose "image" entries are (B, S_i, x, y, z)
    tensors; they are concatenated on dim 1 and flattened to single-channel.
    """
    merged = torch.concatenate([item['image'] for item in imgs], dim=1)
    _, _, x, y, z = merged.size()
    return merged.view(-1, 1, x, y, z)
def concat_label(labels):
    """Concatenate per-view label tensors along the batch dimension.

    The original accumulation loop merely copied the list element by element;
    torch.concatenate accepts the sequence directly.
    """
    return torch.concatenate(list(labels), dim=0)
================================================
FILE: utils/utils.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import scipy.ndimage as ndimage
import torch
def resample_3d(img, target_size):
    """Resample a 3-D volume to `target_size` using nearest-neighbour zoom.

    order=0 (no interpolation, no prefilter) keeps discrete label values
    intact, which is why this is used for segmentation outputs.
    """
    sx, sy, sz = img.shape
    tx, ty, tz = target_size
    return ndimage.zoom(img, (tx / sx, ty / sy, tz / sz), order=0, prefilter=False)
def dice(x, y):
    """Dice similarity coefficient between masks x and y.

    Defined as 0.0 when the reference mask `y` is empty, regardless of x.
    """
    y_total = np.sum(y)
    if y_total == 0:
        return 0.0
    overlap = np.sum(x * y)
    x_total = np.sum(x)
    return 2 * overlap / (x_total + y_total)
class AverageMeter(object):
    """Tracks the latest value and a running (weighted) average of a metric."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0    # most recent value
        self.avg = 0    # running average
        self.sum = 0    # weighted sum of all values
        self.count = 0  # total accumulated weight

    def update(self, val, n=1):
        """Record `val` with weight `n` and refresh the running average."""
        self.val = val
        self.sum += val * n
        self.count += n
        # Plain scalar conditional: the previous np.where() returned a 0-d
        # ndarray and always evaluated sum / count, even at count == 0.
        self.avg = self.sum / self.count if self.count > 0 else self.sum
def distributed_all_gather(
    tensor_list, valid_batch_size=None, out_numpy=False, world_size=None, no_barrier=False, is_valid=None
):
    """All-gather each tensor in `tensor_list` across every distributed rank.

    Returns a list (one entry per input tensor) of per-rank gathered tensors,
    optionally truncated to `valid_batch_size` ranks or filtered by the
    gathered `is_valid` flags, and optionally converted to numpy.
    Pass at most one of `valid_batch_size` / `is_valid`.
    """
    if world_size is None:
        world_size = torch.distributed.get_world_size()
    if valid_batch_size is not None:
        # Clamp so slicing below never exceeds the number of ranks.
        valid_batch_size = min(valid_batch_size, world_size)
    elif is_valid is not None:
        # Promote the local flag to a tensor so it can itself be all-gathered.
        # NOTE(review): if BOTH valid_batch_size and is_valid are supplied,
        # this conversion is skipped yet is_valid is still used below —
        # torch.zeros_like would then receive a plain bool. Confirm callers
        # never pass both.
        is_valid = torch.tensor(bool(is_valid), dtype=torch.bool, device=tensor_list[0].device)
    if not no_barrier:
        torch.distributed.barrier()
    tensor_list_out = []
    with torch.no_grad():
        if is_valid is not None:
            is_valid_list = [torch.zeros_like(is_valid) for _ in range(world_size)]
            torch.distributed.all_gather(is_valid_list, is_valid)
            is_valid = [x.item() for x in is_valid_list]
        for tensor in tensor_list:
            gather_list = [torch.zeros_like(tensor) for _ in range(world_size)]
            torch.distributed.all_gather(gather_list, tensor)
            if valid_batch_size is not None:
                gather_list = gather_list[:valid_batch_size]
            elif is_valid is not None:
                # Keep only contributions from ranks that reported valid data.
                gather_list = [g for g, v in zip(gather_list, is_valid_list) if v]
            if out_numpy:
                gather_list = [t.cpu().numpy() for t in gather_list]
            tensor_list_out.append(gather_list)
    return tensor_list_out
================================================
FILE: voco_train.py
================================================
# Copyright 2020 - 2022 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
from time import time
import logging
import numpy as np
import torch
import torch.distributed as dist
import torch.optim as optim
from models.voco_head import VoCoHead
from optimizers.lr_scheduler import WarmupCosineSchedule
from torch.cuda.amp import GradScaler, autocast
from torch.nn.parallel import DistributedDataParallel
from torch.utils.tensorboard import SummaryWriter
from utils.data_utils import *
from utils.ops import *
from utils.utils import AverageMeter, distributed_all_gather
import torch.multiprocessing

# Share tensors between DataLoader workers via the filesystem instead of
# file descriptors, avoiding "too many open files" crashes with many workers.
torch.multiprocessing.set_sharing_strategy('file_system')
# Pin the visible GPU and the rendezvous endpoint for single-node training.
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
os.environ['MASTER_ADDR'] = 'localhost'
os.environ['MASTER_PORT'] = '28890'
import resource
# Raise the soft open-file limit (keeping the hard limit) for the same reason.
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (8192, rlimit[1]))
print('Setting resource limit:', str(resource.getrlimit(resource.RLIMIT_NOFILE)))
def main():
    """CLI entry point for VoCo self-supervised pre-training.

    Parses arguments, builds the VoCoHead model (optionally wrapped in
    DistributedDataParallel), and trains for `num_steps` iterations with
    periodic checkpointing to `logdir`.
    """

    def save_ckp(state, checkpoint_dir):
        # One-line wrapper so the checkpoint call sites stay compact.
        torch.save(state, checkpoint_dir)

    def train(args, global_step, train_loader, val_best, scaler):
        """Run one pass over `train_loader`; returns (global_step, last loss, val_best)."""
        model.train()
        loss_train = []
        run_loss = AverageMeter()
        pos_avg, neg_avg, base_avg = AverageMeter(), AverageMeter(), AverageMeter()
        for step, batch in enumerate(train_loader):
            t1 = time()
            img, labels, crops = batch
            # Fold the per-crop dicts into flat (B * S, 1, x, y, z) tensors.
            img, crops = concat_image(img), concat_image(crops)
            img, crops, labels = img.cuda(), crops.cuda(), labels.cuda()
            with autocast(enabled=args.amp):
                # VoCoHead returns three loss terms: positive, negative, base.
                pos, neg, b_loss = model(img, crops, labels)
                loss = pos + neg + b_loss
            loss_train.append(loss.item())
            if args.amp:
                scaler.scale(loss).backward()
                scaler.step(optimizer)
                scaler.update()
            else:
                loss.backward()
                if args.grad_clip:
                    torch.nn.utils.clip_grad_norm_(model.parameters(), args.max_grad_norm)
                optimizer.step()
            if args.lrdecay:
                scheduler.step()
            optimizer.zero_grad()
            run_loss.update(loss.item(), n=args.batch_size)
            pos_avg.update(pos.item(), n=args.batch_size)
            neg_avg.update(neg.item(), n=args.batch_size)
            base_avg.update(b_loss.item(), n=args.batch_size)
            lr = optimizer.param_groups[0]["lr"]
            if args.distributed:
                if dist.get_rank() == 0:
                    print("Step:{}/{}, Loss:{:.4f}, Time:{:.4f}".format(
                        global_step, args.num_steps, loss, time() - t1))
            else:
                print("Step:{}/{}, Loss:{:.4f}, pos:{:.4f}, neg:{:.4f}, base:{:.4f}, "
                      "lr:{:.8f}, Time:{:.4f}".format(global_step, args.num_steps,
                                                      run_loss.avg, pos_avg.avg, neg_avg.avg, base_avg.avg,
                                                      lr, time() - t1))
            global_step += 1
            # Rank-0 rolling checkpoint every eval_num steps, plus a
            # step-tagged snapshot every `freq` steps.
            if args.distributed:
                val_cond = (dist.get_rank() == 0) and (global_step % args.eval_num == 0)
            else:
                val_cond = global_step % args.eval_num == 0
            freq = 1000
            val_freq = global_step % freq == 0
            if val_cond:
                checkpoint = {
                    "global_step": global_step,
                    "state_dict": model.state_dict(),
                    # Bug fix: store the optimizer *state dict*, not the live
                    # optimizer object — pickling the object bloats the file
                    # and ties it to the exact class layout.
                    "optimizer": optimizer.state_dict(),
                }
                save_ckp(checkpoint, logdir + "/model_current_epoch.pt")
            if val_freq:
                checkpoint = {
                    "global_step": global_step,
                    "state_dict": model.state_dict(),
                    "optimizer": optimizer.state_dict(),
                }
                save_ckp(checkpoint, logdir + "/model_step" + str(global_step) + ".pt")
        return global_step, loss, val_best

    roi = 64
    parser = argparse.ArgumentParser(description="PyTorch Training")
    parser.add_argument("--logdir", default="logs", type=str, help="directory to save logs")
    parser.add_argument("--epochs", default=100, type=int, help="number of training epochs")
    parser.add_argument("--num_steps", default=250000, type=int, help="number of training iterations")
    parser.add_argument("--eval_num", default=100, type=int, help="evaluation frequency")
    parser.add_argument("--warmup_steps", default=5000, type=int, help="warmup steps")
    parser.add_argument("--in_channels", default=1, type=int, help="number of input channels")
    parser.add_argument("--feature_size", default=48, type=int, help="embedding size")
    parser.add_argument("--dropout_path_rate", default=0.0, type=float, help="drop path rate")
    parser.add_argument("--use_checkpoint", default=True, help="use gradient checkpointing to save memory")
    parser.add_argument("--spatial_dims", default=3, type=int, help="spatial dimension of input data")
    parser.add_argument("--a_min", default=-175.0, type=float, help="a_min in ScaleIntensityRanged")
    parser.add_argument("--a_max", default=250.0, type=float, help="a_max in ScaleIntensityRanged")
    parser.add_argument("--b_min", default=0.0, type=float, help="b_min in ScaleIntensityRanged")
    parser.add_argument("--b_max", default=1.0, type=float, help="b_max in ScaleIntensityRanged")
    parser.add_argument("--space_x", default=1.5, type=float, help="spacing in x direction")
    parser.add_argument("--space_y", default=1.5, type=float, help="spacing in y direction")
    parser.add_argument("--space_z", default=1.5, type=float, help="spacing in z direction")
    parser.add_argument("--roi_x", default=roi, type=int, help="roi size in x direction")
    parser.add_argument("--roi_y", default=roi, type=int, help="roi size in y direction")
    parser.add_argument("--roi_z", default=roi, type=int, help="roi size in z direction")
    parser.add_argument("--batch_size", default=2, type=int, help="number of batch size")
    parser.add_argument("--sw_batch_size", default=2, type=int, help="number of sliding window batch size")
    parser.add_argument("--lr", default=1e-4, type=float, help="learning rate")
    parser.add_argument("--decay", default=0.1, type=float, help="decay rate")
    parser.add_argument("--momentum", default=0.9, type=float, help="momentum")
    parser.add_argument("--lrdecay", default=True, help="enable learning rate decay")
    parser.add_argument("--max_grad_norm", default=1.0, type=float, help="maximum gradient norm")
    parser.add_argument("--loss_type", default="SSL", type=str)
    parser.add_argument("--opt", default="adamw", type=str, help="optimization algorithm")
    parser.add_argument("--lr_schedule", default="warmup_cosine", type=str)
    # './runs/logs_10k/model_current_epoch.pt'
    parser.add_argument("--resume", default=None, type=str,
                        help="resume training")
    parser.add_argument("--local_rank", type=int, default=0, help="local rank")
    parser.add_argument("--grad_clip", action="store_true", help="gradient clip")
    parser.add_argument("--noamp", default=True, help="do NOT use amp for training")
    parser.add_argument("--dist-url", default="env://", help="url used to set up distributed training")
    parser.add_argument("--smartcache_dataset", default=False, help="use monai smartcache Dataset")
    parser.add_argument("--cache_dataset", action="store_true", help="use monai cache Dataset")
    args = parser.parse_args()
    logdir = args.logdir
    torch.cuda.set_device(0)
    # NOTE(review): AMP is force-enabled here, so --noamp currently has no effect.
    args.amp = True
    torch.backends.cudnn.benchmark = True
    args.distributed = False
    if "WORLD_SIZE" in os.environ:
        args.distributed = int(os.environ["WORLD_SIZE"]) > 1
    args.world_size = 1
    args.rank = 0
    if args.distributed:
        args.device = "cuda:%d" % args.local_rank
        torch.cuda.set_device(args.local_rank)
        torch.distributed.init_process_group(backend="nccl", init_method=args.dist_url)
        args.world_size = torch.distributed.get_world_size()
        args.rank = torch.distributed.get_rank()
        print(
            "Training in distributed mode with multiple processes, 1 GPU per process. Process %d, total %d."
            % (args.rank, args.world_size)
        )
    else:
        print("Training with a single process on 1 GPUs.")
    assert args.rank >= 0
    if args.rank == 0:
        os.makedirs(logdir, exist_ok=True)
    logger = init_log('global', logging.INFO)
    logger.propagate = 0
    model = VoCoHead(args)
    model.cuda()
    if args.opt == "adam":
        optimizer = optim.Adam(params=model.parameters(), lr=args.lr, weight_decay=args.decay)
    elif args.opt == "adamw":
        optimizer = optim.AdamW(params=model.parameters(), lr=args.lr, amsgrad=True)
    elif args.opt == "sgd":
        optimizer = optim.SGD(params=model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.decay)
    global_step = 0
    if args.resume:
        print('resume from previous checkpoints')
        model_dict = torch.load(args.resume)
        # Bug fix: checkpoints are saved as {"global_step", "state_dict", "optimizer"}.
        # Passing that wrapper dict straight to load_state_dict(strict=False)
        # silently matched no keys and loaded *no* weights; unwrap "state_dict"
        # when present (falling back to a raw state-dict file).
        state_dict = model_dict.get("state_dict", model_dict) if isinstance(model_dict, dict) else model_dict
        model.load_state_dict(state_dict, strict=False)
        global_step = model_dict.get("global_step", 0) if isinstance(model_dict, dict) else 0
    if args.lrdecay:
        if args.lr_schedule == "warmup_cosine":
            scheduler = WarmupCosineSchedule(optimizer, warmup_steps=args.warmup_steps, t_total=args.num_steps)
        elif args.lr_schedule == "poly":
            def lambdas(epoch):
                return (1 - float(epoch) / float(args.epochs)) ** 0.9

            scheduler = torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambdas)
    if args.distributed:
        model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        model = DistributedDataParallel(model, device_ids=[args.local_rank])
    train_loader = get_loader(args)
    best_val = 1e8
    if args.amp:
        scaler = GradScaler()
    else:
        scaler = None
    while global_step < args.num_steps:
        global_step, loss, best_val = train(args, global_step, train_loader, best_val, scaler)
    checkpoint = {"epoch": args.epochs, "state_dict": model.state_dict(), "optimizer": optimizer.state_dict()}
    # Bug fix: plain string concatenation produced "<logdir>final_model.pth"
    # (no separator); join the path properly.
    final_path = os.path.join(logdir, "final_model.pth")
    if args.distributed:
        if dist.get_rank() == 0:
            torch.save(model.state_dict(), final_path)
        dist.destroy_process_group()
    else:
        torch.save(model.state_dict(), final_path)
    save_ckp(checkpoint, logdir + "/model_final_epoch.pt")
# Set of (name, level) pairs already configured, so handlers are attached once.
logs = set()


def init_log(name, level=logging.INFO):
    """Create (once) and return a console logger called `name`.

    Bug fix: on a repeated call with the same (name, level) this previously
    returned None, so callers doing `init_log(...).propagate = 0` crashed;
    it now always returns the logger.
    """
    logger = logging.getLogger(name)
    if (name, level) in logs:
        # Already configured — just hand back the existing logger.
        return logger
    logs.add((name, level))
    logger.setLevel(level)
    ch = logging.StreamHandler()
    ch.setLevel(level)
    if "SLURM_PROCID" in os.environ:
        rank = int(os.environ["SLURM_PROCID"])
        # Under SLURM multi-process launches only rank 0 emits records.
        logger.addFilter(lambda record: rank == 0)
    else:
        rank = 0
    format_str = "[%(asctime)s][%(levelname)8s] %(message)s"
    formatter = logging.Formatter(format_str)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
if __name__ == "__main__":
    # Script entry point: launch VoCo pre-training.
    main()