[
  {
    "path": ".gitignore",
    "content": "# Byte-compiled / optimized / DLL files\nresults/*/plots\n*/log/\ndemo/\n*/para_restored.txt\n*/pc_distance/__pycache__\n.idea/\n.DS_Store\n__pycache__/\n*.py[cod]\n*$py.class\n*/*.sh\n*/data/*\n/render/dump\n*.ipynb\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\npip-wheel-metadata/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n.python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2020 Hanchen Wang\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "OcCo_TF/.gitignore",
    "content": "# others code\nresults/*/plots\nlog/\ndemo/\ndemo_data/\npara_restored.txt\npc_distance/__pycache__\n\n# Byte-compiled / optimized / DLL files\n.idea/\n.DS_Store\n__pycache__/\n*.py[cod]\n*$py.class\n*.sh\n*/*.sh\ndata/*\n/render/dump*\n\n# C extensions\n*.so\n\n# Distribution / packaging\n.Python\nbuild/\ndevelop-eggs/\ndist/\ndownloads/\neggs/\n.eggs/\nlib/\nlib64/\nparts/\nsdist/\nvar/\nwheels/\npip-wheel-metadata/\nshare/python-wheels/\n*.egg-info/\n.installed.cfg\n*.egg\nMANIFEST\n\n# PyInstaller\n#  Usually these files are written by a python script from a template\n#  before PyInstaller builds the exe, so as to inject date/other infos into it.\n*.manifest\n*.spec\n\n# Installer logs\npip-log.txt\npip-delete-this-directory.txt\n\n# Unit test / coverage reports\nhtmlcov/\n.tox/\n.nox/\n.coverage\n.coverage.*\n.cache\nnosetests.xml\ncoverage.xml\n*.cover\n*.py,cover\n.hypothesis/\n.pytest_cache/\n\n# Translations\n*.mo\n*.pot\n\n# Django stuff:\n*.log\nlocal_settings.py\ndb.sqlite3\ndb.sqlite3-journal\n\n# Flask stuff:\ninstance/\n.webassets-cache\n\n# Scrapy stuff:\n.scrapy\n\n# Sphinx documentation\ndocs/_build/\n\n# PyBuilder\ntarget/\n\n# Jupyter Notebook\n.ipynb_checkpoints\n\n# IPython\nprofile_default/\nipython_config.py\n\n# pyenv\n.python-version\n\n# pipenv\n#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.\n#   However, in case of collaboration, if having platform-specific dependencies or dependencies\n#   having no cross-platform support, pipenv may install dependencies that don't work, or not\n#   install all needed dependencies.\n#Pipfile.lock\n\n# PEP 582; used by e.g. github.com/David-OConnor/pyflow\n__pypackages__/\n\n# Celery stuff\ncelerybeat-schedule\ncelerybeat.pid\n\n# SageMath parsed files\n*.sage.py\n\n# Environments\n.env\n.venv\nenv/\nvenv/\nENV/\nenv.bak/\nvenv.bak/\n\n# Spyder project settings\n.spyderproject\n.spyproject\n\n# Rope project settings\n.ropeproject\n\n# mkdocs documentation\n/site\n\n# mypy\n.mypy_cache/\n.dmypy.json\ndmypy.json\n\n# Pyre type checker\n.pyre/\n"
  },
  {
    "path": "OcCo_TF/Requirements_TF.txt",
    "content": "# Originally Designed for Docker Environment, TensorFlow 1.12.0/1.15.0, Python 3.7, CUDA 10.0\n\nlmdb>=0.9\nnumpy>=1.14.0\nh5py >= 2.10.0\nmsgpack==0.5.6\npyarrow>=0.10.0\nopen3d>=0.9.0.0\ntensorpack>=0.8.9\nmatplotlib>=2.1.0\ntensorflow==2.4.0\nopen3d-python==0.7.0.0\n"
  },
  {
    "path": "OcCo_TF/cls_models/__init__.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\n"
  },
  {
    "path": "OcCo_TF/cls_models/dgcnn_cls.py",
    "content": "# Author: Hanchen Wang (hw501@cam.ac.uk)\n# Ref: https://github.com/WangYueFt/dgcnn/blob/master/tensorflow/models/dgcnn.py\n\nimport sys, pdb, tensorflow as tf\nsys.path.append('../')\nfrom utils import tf_util\nfrom train_cls_dgcnn_torchloader import NUM_CLASSES, BATCH_SIZE, NUM_POINT\n\n\nclass Model:\n    def __init__(self, inputs, npts, labels, is_training, **kwargs):\n        self.__dict__.update(kwargs)  # have self.bn_decay\n        self.knn = 20\n        self.is_training = is_training\n        self.features = self.create_encoder(inputs)\n        self.pred = self.create_decoder(self.features)\n        self.loss = self.create_loss(self.pred, labels)\n\n    @staticmethod\n    def get_graph_feature(x, k):\n        \"\"\"Torch: get_graph_feature = TF: adj_matrix + nn_idx + edge_feature\"\"\"\n        adj_matrix = tf_util.pairwise_distance(x)\n        nn_idx = tf_util.knn(adj_matrix, k=k)\n        x = tf_util.get_edge_feature(x, nn_idx=nn_idx, k=k)\n        return x\n\n    def create_encoder(self, point_cloud):\n        point_cloud = tf.reshape(point_cloud, (BATCH_SIZE, NUM_POINT, 3))\n\n        ''' Previous Solution Author Provided '''\n        # point_cloud_transformed = point_cloud\n        # adj_matrix = tf_util.pairwise_distance(point_cloud_transformed)\n        # nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        # x = tf_util.get_edge_feature(point_cloud_transformed, nn_idx=nn_idx, k=self.knn)\n\n        x = self.get_graph_feature(point_cloud, self.knn)\n        x = tf_util.conv2d(x, 64, [1, 1],\n                           padding='VALID', stride=[1, 1],\n                           bn=True, bias=False, is_training=self.is_training,\n                           activation_fn=tf.nn.leaky_relu, scope='conv1', bn_decay=self.bn_decay)\n        x1 = tf.reduce_max(x, axis=-2, keep_dims=True)\n\n        x = self.get_graph_feature(x1, self.knn)\n        x = tf_util.conv2d(x, 64, [1, 1],\n                           padding='VALID', stride=[1, 1],\n                           bn=True, bias=False, is_training=self.is_training,\n                           activation_fn=tf.nn.leaky_relu, scope='conv2', bn_decay=self.bn_decay)\n        x2 = tf.reduce_max(x, axis=-2, keep_dims=True)\n\n        x = self.get_graph_feature(x2, self.knn)\n        x = tf_util.conv2d(x, 128, [1, 1],\n                           padding='VALID', stride=[1, 1],\n                           bn=True, bias=False, is_training=self.is_training,\n                           activation_fn=tf.nn.leaky_relu, scope='conv3', bn_decay=self.bn_decay)\n        x3 = tf.reduce_max(x, axis=-2, keep_dims=True)\n\n        x = self.get_graph_feature(x3, self.knn)\n        x = tf_util.conv2d(x, 256, [1, 1],\n                           padding='VALID', stride=[1, 1],\n                           bn=True, bias=False, is_training=self.is_training,\n                           activation_fn=tf.nn.leaky_relu, scope='conv4', bn_decay=self.bn_decay)\n        x4 = tf.reduce_max(x, axis=-2, keep_dims=True)\n\n        x = tf_util.conv2d(tf.concat([x1, x2, x3, x4], axis=-1), 1024, [1, 1],\n                           padding='VALID', stride=[1, 1],\n                           bn=True, bias=False, is_training=self.is_training,\n                           activation_fn=tf.nn.leaky_relu, scope='agg', bn_decay=self.bn_decay)\n\n        x1 = tf.reduce_max(x, axis=1, keep_dims=True)\n        x2 = tf.reduce_mean(x, axis=1, keep_dims=True)\n        # pdb.set_trace()\n        features = tf.reshape(tf.concat([x1, x2], axis=-1), [BATCH_SIZE, -1])\n        return features\n\n    def create_decoder(self, features):\n        \"\"\"fully connected layers for classification with dropout\"\"\"\n\n        with tf.variable_scope('decoder_cls', reuse=tf.AUTO_REUSE):\n            # self.linear1 = nn.Linear(args.emb_dims*2, 512, bias=False)\n            features = tf_util.fully_connected(features, 512, bn=True, bias=False,\n                                               activation_fn=tf.nn.leaky_relu,\n                                               scope='linear1', is_training=self.is_training)\n            features = tf_util.dropout(features, keep_prob=0.5, scope='dp1', is_training=self.is_training)\n\n            # self.linear2 = nn.Linear(512, 256)\n            features = tf_util.fully_connected(features, 256, bn=True, bias=True,\n                                               activation_fn=tf.nn.leaky_relu,\n                                               scope='linear2', is_training=self.is_training)\n            features = tf_util.dropout(features, keep_prob=0.5, scope='dp2', is_training=self.is_training)\n\n            # self.linear3 = nn.Linear(256, output_channels)\n            pred = tf_util.fully_connected(features, NUM_CLASSES, bn=False, bias=True,\n                                           activation_fn=None,\n                                           scope='linear3', is_training=self.is_training)\n        return pred\n\n    @staticmethod\n    def create_loss(pred, label, smoothing=True):\n        # if smoothing:\n        # \teps = 0.2\n        # \tn_class = pred.size(1)\n        #\n        # \tone_hot = torch.zeros_like(pred).scatter(1, gold.view(-1, 1), 1)\n        # \tone_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)\n        # \tlog_prb = F.log_softmax(pred, dim=1)\n        #\n        # \tloss = -(one_hot * log_prb).sum(dim=1).mean()\n\n        if smoothing:\n            eps = 0.2\n            # pdb.set_trace()\n            one_hot = tf.one_hot(indices=label, depth=NUM_CLASSES)\n            # tf.print(one_hot, output_stream=sys.stderr)  # not working\n            # pdb.set_trace()\n            one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (NUM_CLASSES - 1)\n            log_prb = tf.nn.log_softmax(logits=pred, axis=1)\n            # pdb.set_trace()\n            cls_loss = -tf.reduce_mean(tf.reduce_sum(one_hot * log_prb, axis=1))\n        else:\n            loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=pred, labels=label)\n            cls_loss = tf.reduce_mean(loss)\n\n        tf.summary.scalar('classification loss', cls_loss)\n\n        return cls_loss\n\n\nif __name__ == '__main__':\n\n    batch_size, num_cls = 16, NUM_CLASSES\n    lr_clip, base_lr, lr_decay_steps, lr_decay_rate = 1e-6, 1e-4, 50000, .7\n\n    is_training_pl = tf.placeholder(tf.bool, shape=(), name='is_training')\n    global_step = tf.Variable(0, trainable=False, name='global_step')\n\n    inputs_pl = tf.placeholder(tf.float32, (1, None, 3), 'inputs')\n    npts_pl = tf.placeholder(tf.int32, (batch_size,), 'num_points')\n    labels_pl = tf.placeholder(tf.int32, (batch_size,), 'ground_truths')\n    learning_rate = tf.train.exponential_decay(base_lr, global_step, lr_decay_steps, lr_decay_rate,\n                                               staircase=True, name='lr')\n    learning_rate = tf.maximum(learning_rate, lr_clip)\n\n    model = Model(inputs_pl, npts_pl, labels_pl, is_training_pl)\n    trainer = tf.train.AdamOptimizer(learning_rate)\n    train_op = trainer.minimize(model.loss, global_step)\n\n    print('\\n\\n\\n==========')\n    print('pred', model.pred)\n    print('loss', model.loss)\n    # seems like different from the what the paper has claimed:\n    saver = tf.train.Saver()\n\n    config = tf.ConfigProto()\n    config.gpu_options.allow_growth = True\n    config.allow_soft_placement = True\n    config.log_device_placement = True\n    sess = tf.Session(config=config)\n\n    # Init Weights\n    init = tf.global_variables_initializer()\n    sess.run(init, {is_training_pl: True})  # restore will cover the random initialized parameters\n\n    for idx, var in enumerate(tf.trainable_variables()):\n        print(idx, var)\n"
  },
  {
    "path": "OcCo_TF/cls_models/pcn_cls.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport sys, tensorflow as tf\nsys.path.append('../')\nfrom utils.tf_util import mlp_conv, point_maxpool, point_unpool, fully_connected, dropout\nfrom train_cls import NUM_CLASSES\n# NUM_CLASSES = 40\n\n\nclass Model:\n    def __init__(self, inputs, npts, labels, is_training, **kwargs):\n        self.is_training = is_training\n        self.features = self.create_encoder(inputs, npts)\n        self.pred = self.create_decoder(self.features)\n        self.loss = self.create_loss(self.pred, labels)\n\n    def create_encoder(self, inputs, npts):\n        \"\"\"mini-PointNet encoder\"\"\"\n\n        with tf.variable_scope('encoder_0', reuse=tf.AUTO_REUSE):\n            features = mlp_conv(inputs, [128, 256])\n            features_global = point_unpool(point_maxpool(features, npts, keepdims=True), npts)\n            features = tf.concat([features, features_global], axis=2)\n        with tf.variable_scope('encoder_1', reuse=tf.AUTO_REUSE):\n            features = mlp_conv(features, [512, 1024])\n            features = point_maxpool(features, npts)\n        return features\n\n    def create_decoder(self, features):\n        \"\"\"fully connected layers for classification with dropout\"\"\"\n\n        with tf.variable_scope('decoder_cls', reuse=tf.AUTO_REUSE):\n\n            features = fully_connected(features, 512, bn=True, scope='fc1', is_training=self.is_training)\n            features = dropout(features, keep_prob=0.7, scope='dp1', is_training=self.is_training)\n            features = fully_connected(features, 256, bn=True, scope='fc2', is_training=self.is_training)\n            features = dropout(features, keep_prob=0.7, scope='dp2', is_training=self.is_training)\n            pred = fully_connected(features, NUM_CLASSES, activation_fn=None, scope='fc3',\n                                   is_training=self.is_training)\n\n        return pred\n\n    def create_loss(self, pred, label):\n        \"\"\" pred: B * NUM_CLASSES,\n            label: B, \"\"\"\n        loss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=pred, labels=label)\n        cls_loss = tf.reduce_mean(loss)\n        tf.summary.scalar('classification loss', cls_loss)\n\n        return cls_loss\n\n\nif __name__ == '__main__':\n\n    batch_size, num_cls = 16, NUM_CLASSES\n    lr_clip, base_lr, lr_decay_steps, lr_decay_rate = 1e-6, 1e-4, 50000, .7\n\n    is_training_pl = tf.placeholder(tf.bool, shape=(), name='is_training')\n    global_step = tf.Variable(0, trainable=False, name='global_step')\n\n    inputs_pl = tf.placeholder(tf.float32, (1, None, 3), 'inputs')\n    npts_pl = tf.placeholder(tf.int32, (batch_size,), 'num_points')\n    labels_pl = tf.placeholder(tf.int32, (batch_size,), 'ground_truths')\n    learning_rate = tf.train.exponential_decay(base_lr, global_step,\n                                               lr_decay_steps, lr_decay_rate,\n                                               staircase=True, name='lr')\n    learning_rate = tf.maximum(learning_rate, lr_clip)\n\n    # model_module = importlib.import_module('./pcn_cls', './')\n    model = Model(inputs_pl, npts_pl, labels_pl, is_training_pl)\n    trainer = tf.train.AdamOptimizer(learning_rate)\n    train_op = trainer.minimize(model.loss, global_step)\n\n    print('\\n\\n\\n==========')\n    print('pred', model.pred)\n    print('loss', model.loss)\n    # seems like different from the what the paper has claimed:\n    saver = tf.train.Saver()\n\n    config = tf.ConfigProto()\n    config.gpu_options.allow_growth = True\n    config.allow_soft_placement = True\n    config.log_device_placement = True\n    sess = tf.Session(config=config)\n\n    # Init variables\n    init = tf.global_variables_initializer()\n    sess.run(init, {is_training_pl: True})  # restore will cover the random initialized parameters\n\n    for idx, var in enumerate(tf.trainable_variables()):\n        print(idx, var)\n\n"
  },
  {
    "path": "OcCo_TF/cls_models/pointnet_cls.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport sys, os\nimport tensorflow as tf\nBASE_DIR = os.path.dirname(__file__)\nsys.path.append(BASE_DIR)\nsys.path.append(os.path.join(BASE_DIR, '../utils'))\nfrom utils.tf_util import fully_connected, dropout, conv2d, max_pool2d\nfrom train_cls import NUM_CLASSES, BATCH_SIZE, NUM_POINT\nfrom utils.transform_nets import input_transform_net, feature_transform_net\n\n\nclass Model:\n\tdef __init__(self, inputs, npts, labels, is_training, **kwargs):\n\t\tself.__dict__.update(kwargs)  # batch_decay and is_training\n\t\tself.is_training = is_training\n\t\tself.features = self.create_encoder(inputs, npts)\n\t\tself.pred = self.create_decoder(self.features)\n\t\tself.loss = self.create_loss(self.pred, labels)\n\n\tdef create_encoder(self, inputs, npts):\n\t\t\"\"\"PointNet encoder\"\"\"\n\t\t\n\t\tinputs = tf.reshape(inputs, (BATCH_SIZE, NUM_POINT, 3))\n\t\twith tf.variable_scope('transform_net1') as sc:\n\t\t\ttransform = input_transform_net(inputs, self.is_training, self.bn_decay, K=3)\n\t\t\n\t\tpoint_cloud_transformed = tf.matmul(inputs, transform)\n\t\tinput_image = tf.expand_dims(point_cloud_transformed, -1)\n\t\t\n\t\tnet = conv2d(inputs=input_image, num_output_channels=64, kernel_size=[1, 3],\n\t\t             scope='conv1', padding='VALID', stride=[1, 1],\n\t\t             bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n\t\tnet = conv2d(inputs=net, num_output_channels=64, kernel_size=[1, 1],\n\t\t             scope='conv2', padding='VALID', stride=[1, 1],\n\t\t             bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n\t\t\n\t\twith tf.variable_scope('transform_net2') as sc:\n\t\t\ttransform = feature_transform_net(net, self.is_training, self.bn_decay, K=64)\n\t\tnet_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform)\n\t\tnet_transformed = tf.expand_dims(net_transformed, [2])\n\t\t\n\t\t'''conv2d, with kernel size of [1,1,1,1] and stride of [1,1,1,1],\n\t\tbasically equals with the MLPs'''\n\t\t\n\t\t# use_xavier=True, stddev=1e-3, weight_decay=0.0, activation_fn=tf.nn.relu,\n\t\tnet = conv2d(net_transformed, 64, [1, 1],\n\t\t             scope='conv3', padding='VALID', stride=[1, 1],\n\t\t             bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n\t\tnet = conv2d(net, 128, [1, 1],\n\t\t             padding='VALID', stride=[1, 1],\n\t\t             bn=True, is_training=self.is_training,\n\t\t             scope='conv4', bn_decay=self.bn_decay)\n\t\tnet = conv2d(net, 1024, [1, 1],\n\t\t             padding='VALID', stride=[1, 1],\n\t\t             bn=True, is_training=self.is_training,\n\t\t             scope='conv5', bn_decay=self.bn_decay)\n\t\t\n\t\tnet = max_pool2d(net, [NUM_POINT, 1],\n\t\t                 padding='VALID', scope='maxpool')\n\t\t\n\t\tfeatures = tf.reshape(net, [BATCH_SIZE, -1])\n\t\treturn features\n\t\n\tdef create_decoder(self, features):\n\t\t\"\"\"fully connected layers for classification with dropout\"\"\"\n\t\t\n\t\twith tf.variable_scope('decoder_cls', reuse=tf.AUTO_REUSE):\n\t\t\t\n\t\t\tfeatures = fully_connected(features, 512, bn=True, scope='fc1', is_training=self.is_training)\n\t\t\tfeatures = dropout(features, keep_prob=0.7, scope='dp1', is_training=self.is_training)\n\t\t\tfeatures = fully_connected(features, 256, bn=True, scope='fc2', is_training=self.is_training)\n\t\t\tfeatures = dropout(features, keep_prob=0.7, scope='dp2', is_training=self.is_training)\n\t\t\tpred = fully_connected(features, NUM_CLASSES, activation_fn=None, scope='fc3',\n\t\t\t                       is_training=self.is_training)\n\t\t\n\t\treturn pred\n\t\n\tdef create_loss(self, pred, label):\n\t\t\"\"\" pred: B * NUM_CLASSES,\n\t\t\tlabel: B, \"\"\"\n\t\tloss = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=pred, labels=label)\n\t\tcls_loss = tf.reduce_mean(loss)\n\t\ttf.summary.scalar('classification loss', cls_loss)\n\t\t\n\t\treturn cls_loss\n\n\nif __name__ == '__main__':\n\t\n\tbatch_size, num_cls = BATCH_SIZE, NUM_CLASSES\n\tlr_clip, base_lr, lr_decay_steps, lr_decay_rate = 1e-6, 1e-4, 50000, .7\n\t\n\tis_training_pl = tf.placeholder(tf.bool, shape=(), name='is_training')\n\tglobal_step = tf.Variable(0, trainable=False, name='global_step')\n\t\n\tinputs_pl = tf.placeholder(tf.float32, (1, None, 3), 'inputs')\n\tnpts_pl = tf.placeholder(tf.int32, (batch_size,), 'num_points')\n\tlabels_pl = tf.placeholder(tf.int32, (batch_size,), 'ground_truths')\n\tlearning_rate = tf.train.exponential_decay(base_lr, global_step,\n\t                                           lr_decay_steps, lr_decay_rate,\n\t                                           staircase=True, name='lr')\n\tlearning_rate = tf.maximum(learning_rate, lr_clip)\n\t\n\t# model_module = importlib.import_module('./pcn_cls', './')\n\tmodel = Model(inputs_pl, npts_pl, labels_pl, is_training_pl)\n\ttrainer = tf.train.AdamOptimizer(learning_rate)\n\ttrain_op = trainer.minimize(model.loss, global_step)\n\t\n\tprint('\\n\\n\\n==========')\n\tprint('pred', model.pred)\n\tprint('loss', model.loss)\n\t# seems like different from the what the paper has claimed:\n\tsaver = tf.train.Saver()\n\t\n\tconfig = tf.ConfigProto()\n\tconfig.gpu_options.allow_growth = True\n\tconfig.allow_soft_placement = True\n\tconfig.log_device_placement = True\n\tsess = tf.Session(config=config)\n\t\n\t# Init variables\n\tinit = tf.global_variables_initializer()\n\tsess.run(init, {is_training_pl: True})  # restore will cover the random initialized parameters\n\t\n\tfor idx, var in enumerate(tf.trainable_variables()):\n\t\tprint(idx, var)\n\n"
  },
  {
    "path": "OcCo_TF/completion_models/__init__.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\n"
  },
  {
    "path": "OcCo_TF/completion_models/dgcnn_cd.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\n# author: Hanchen Wang\n\nimport os, sys, tensorflow as tf\nBASE_DIR = os.path.dirname(__file__)\nsys.path.append(BASE_DIR)\nsys.path.append('../')\nsys.path.append(os.path.join(BASE_DIR, '../utils'))\nfrom utils import tf_util\nfrom utils.transform_nets import input_transform_net_dgcnn\nfrom train_completion import BATCH_SIZE, NUM_POINT\n\n# BATCH_SIZE = 8  # otherwise set to 8\n# NUM_POINT = 2048  # 3000\n\n\nclass Model:\n    def __init__(self, inputs, npts, gt, alpha, **kwargs):\n        self.knn = 20\n        self.__dict__.update(kwargs)  # batch_decay and is_training\n        self.num_output_points = 16384  # 1024 * 16\n        self.num_coarse = 1024\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_fine = self.grid_size ** 2 * self.num_coarse\n        self.features = self.create_encoder(inputs, npts)\n        self.coarse, self.fine = self.create_decoder(self.features)\n        self.loss, self.update = self.create_loss(gt, alpha)\n        self.outputs = self.fine\n        self.visualize_ops = [tf.split(inputs[0], npts, axis=0), self.coarse, self.fine, gt]\n        self.visualize_titles = ['input', 'coarse output', 'fine output', 'ground truth']\n    \n    def create_encoder(self, point_cloud, npts):\n\n        point_cloud = tf.reshape(point_cloud, (BATCH_SIZE, NUM_POINT, 3))\n\n        adj_matrix = tf_util.pairwise_distance(point_cloud)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(point_cloud, nn_idx=nn_idx, k=self.knn)\n\n        with tf.variable_scope('transform_net1') as sc:\n            transform = input_transform_net_dgcnn(edge_feature, self.is_training, self.bn_decay, K=3)\n\n        point_cloud_transformed = tf.matmul(point_cloud, transform)\n        adj_matrix = tf_util.pairwise_distance(point_cloud_transformed)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(point_cloud_transformed, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 64, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn1', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n        net1 = net\n\n        adj_matrix = tf_util.pairwise_distance(net)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(net, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 64, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn2', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n        net2 = net\n\n        adj_matrix = tf_util.pairwise_distance(net)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(net, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 64, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn3', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n        net3 = net\n\n        adj_matrix = tf_util.pairwise_distance(net)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(net, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 128, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn4', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n        net4 = net\n\n        net = tf_util.conv2d(tf.concat([net1, net2, net3, net4], axis=-1), 1024, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='agg', bn_decay=self.bn_decay)\n\n        net = tf.reduce_max(net, axis=1, keep_dims=True)\n\n        features = tf.reshape(net, [BATCH_SIZE, -1])\n        return features\n\n    def create_decoder(self, features):\n        with tf.variable_scope('decoder', reuse=tf.AUTO_REUSE):\n            coarse = tf_util.mlp(features, [1024, 1024, self.num_coarse * 3])\n            coarse = tf.reshape(coarse, [-1, self.num_coarse, 3])\n    \n        with tf.variable_scope('folding', reuse=tf.AUTO_REUSE):\n            grid = tf.meshgrid(tf.linspace(-0.05, 0.05, self.grid_size), tf.linspace(-0.05, 0.05, self.grid_size))\n            grid = tf.expand_dims(tf.reshape(tf.stack(grid, axis=2), [-1, 2]), 0)\n            grid_feat = tf.tile(grid, [features.shape[0], self.num_coarse, 1])\n        \n            point_feat = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            point_feat = tf.reshape(point_feat, [-1, self.num_fine, 3])\n        \n            global_feat = tf.tile(tf.expand_dims(features, 1), [1, self.num_fine, 1])\n        \n            feat = tf.concat([grid_feat, point_feat, global_feat], axis=2)\n        \n            center = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            center = tf.reshape(center, [-1, self.num_fine, 3])\n        \n            fine = tf_util.mlp_conv(feat, [512, 512, 3]) + center\n        return coarse, fine\n\n    def create_loss(self, gt, alpha):\n    \n        loss_coarse = tf_util.chamfer(self.coarse, gt)\n        tf_util.add_train_summary('train/coarse_loss', loss_coarse)\n        update_coarse = tf_util.add_valid_summary('valid/coarse_loss', loss_coarse)\n    \n        loss_fine = tf_util.chamfer(self.fine, gt)\n        tf_util.add_train_summary('train/fine_loss', loss_fine)\n        update_fine = tf_util.add_valid_summary('valid/fine_loss', loss_fine)\n    \n        loss = loss_coarse + alpha * loss_fine\n        tf_util.add_train_summary('train/loss', loss)\n        update_loss = tf_util.add_valid_summary('valid/loss', loss)\n    \n        return loss, [update_coarse, update_fine, update_loss]\n"
  },
  {
    "path": "OcCo_TF/completion_models/dgcnn_emd.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\n# author: Hanchen Wang\n\nimport os, sys, tensorflow as tf\nBASE_DIR = os.path.dirname(__file__)\nsys.path.append(BASE_DIR)\nsys.path.append('../')\nsys.path.append(os.path.join(BASE_DIR, '../utils'))\nfrom utils import tf_util\nfrom utils.transform_nets import input_transform_net_dgcnn\nfrom train_completion import BATCH_SIZE, NUM_POINT\n\n# BATCH_SIZE = 8  # otherwise set to 8\n# NUM_POINT = 2048  # 3000\n\n\nclass Model:\n    def __init__(self, inputs, npts, gt, alpha, **kwargs):\n        self.knn = 20\n        self.__dict__.update(kwargs)  # batch_decay and is_training\n        self.num_output_points = 16384  # 1024 * 16\n        self.num_coarse = 1024\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_fine = self.grid_size ** 2 * self.num_coarse\n        self.features = self.create_encoder(inputs, npts)\n        self.coarse, self.fine = self.create_decoder(self.features)\n        self.loss, self.update = self.create_loss(gt, alpha)\n        self.outputs = self.fine\n        self.visualize_ops = [tf.split(inputs[0], npts, axis=0), self.coarse, self.fine, gt]\n        self.visualize_titles = ['input', 'coarse output', 'fine output', 'ground truth']\n    \n    def create_encoder(self, point_cloud, npts):\n\n        point_cloud = tf.reshape(point_cloud, (BATCH_SIZE, NUM_POINT, 3))\n\n        adj_matrix = tf_util.pairwise_distance(point_cloud)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(point_cloud, nn_idx=nn_idx, k=self.knn)\n\n        with tf.variable_scope('transform_net1') as sc:\n            transform = input_transform_net_dgcnn(edge_feature, self.is_training, self.bn_decay, K=3)\n\n        point_cloud_transformed = tf.matmul(point_cloud, transform)\n        adj_matrix = tf_util.pairwise_distance(point_cloud_transformed)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        
edge_feature = tf_util.get_edge_feature(point_cloud_transformed, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 64, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn1', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n        net1 = net\n\n        adj_matrix = tf_util.pairwise_distance(net)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(net, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 64, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn2', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n        net2 = net\n\n        adj_matrix = tf_util.pairwise_distance(net)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(net, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 64, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn3', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n        net3 = net\n\n        adj_matrix = tf_util.pairwise_distance(net)\n        nn_idx = tf_util.knn(adj_matrix, k=self.knn)\n        edge_feature = tf_util.get_edge_feature(net, nn_idx=nn_idx, k=self.knn)\n\n        net = tf_util.conv2d(edge_feature, 128, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='dgcnn4', bn_decay=self.bn_decay)\n        net = tf.reduce_max(net, axis=-2, keep_dims=True)\n    
    net4 = net\n\n        net = tf_util.conv2d(tf.concat([net1, net2, net3, net4], axis=-1), 1024, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='agg', bn_decay=self.bn_decay)\n\n        net = tf.reduce_max(net, axis=1, keep_dims=True)\n\n        features = tf.reshape(net, [BATCH_SIZE, -1])\n        return features\n\n    def create_decoder(self, features):\n        with tf.variable_scope('decoder', reuse=tf.AUTO_REUSE):\n            coarse = tf_util.mlp(features, [1024, 1024, self.num_coarse * 3])\n            coarse = tf.reshape(coarse, [-1, self.num_coarse, 3])\n    \n        with tf.variable_scope('folding', reuse=tf.AUTO_REUSE):\n            grid = tf.meshgrid(tf.linspace(-0.05, 0.05, self.grid_size), tf.linspace(-0.05, 0.05, self.grid_size))\n            grid = tf.expand_dims(tf.reshape(tf.stack(grid, axis=2), [-1, 2]), 0)\n            grid_feat = tf.tile(grid, [features.shape[0], self.num_coarse, 1])\n        \n            point_feat = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            point_feat = tf.reshape(point_feat, [-1, self.num_fine, 3])\n        \n            global_feat = tf.tile(tf.expand_dims(features, 1), [1, self.num_fine, 1])\n        \n            feat = tf.concat([grid_feat, point_feat, global_feat], axis=2)\n        \n            center = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            center = tf.reshape(center, [-1, self.num_fine, 3])\n        \n            fine = tf_util.mlp_conv(feat, [512, 512, 3]) + center\n        return coarse, fine\n\n    def create_loss(self, gt, alpha):\n    \n        gt_ds = gt[:, :self.coarse.shape[1], :]\n        loss_coarse = tf_util.earth_mover(self.coarse, gt_ds)\n        tf_util.add_train_summary('train/coarse_loss', loss_coarse)\n        update_coarse = tf_util.add_valid_summary('valid/coarse_loss', loss_coarse)\n    \n 
       loss_fine = tf_util.chamfer(self.fine, gt)\n        tf_util.add_train_summary('train/fine_loss', loss_fine)\n        update_fine = tf_util.add_valid_summary('valid/fine_loss', loss_fine)\n    \n        loss = loss_coarse + alpha * loss_fine\n        tf_util.add_train_summary('train/loss', loss)\n        update_loss = tf_util.add_valid_summary('valid/loss', loss)\n    \n        return loss, [update_coarse, update_fine, update_loss]\n"
  },
  {
    "path": "OcCo_TF/completion_models/pcn_cd.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/models/pcn_cd.py\n\nimport pdb, tensorflow as tf\nfrom utils.tf_util import mlp, mlp_conv, point_maxpool, point_unpool, chamfer, \\\n    add_train_summary, add_valid_summary\n\n\nclass Model:\n    def __init__(self, inputs, npts, gt, alpha, **kwargs):\n        self.__dict__.update(kwargs)  # batch_decay and is_training\n        self.num_coarse = 1024\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_fine = self.grid_size ** 2 * self.num_coarse\n        self.features = self.create_encoder(inputs, npts)\n        self.coarse, self.fine = self.create_decoder(self.features)\n        self.loss, self.update = self.create_loss(self.coarse, self.fine, gt, alpha)\n        self.outputs = self.fine\n        self.visualize_ops = [tf.split(inputs[0], npts, axis=0), self.coarse, self.fine, gt]\n        self.visualize_titles = ['input', 'coarse output', 'fine output', 'ground truth']\n\n    def create_encoder(self, inputs, npts):\n        with tf.variable_scope('encoder_0', reuse=tf.AUTO_REUSE):\n            features = mlp_conv(inputs, [128, 256])\n            features_global = point_unpool(point_maxpool(features, npts, keepdims=True), npts)\n            features = tf.concat([features, features_global], axis=2)\n        with tf.variable_scope('encoder_1', reuse=tf.AUTO_REUSE):\n            features = mlp_conv(features, [512, 1024])\n            features = point_maxpool(features, npts)\n        return features\n\n    def create_decoder(self, features):\n        with tf.variable_scope('decoder', reuse=tf.AUTO_REUSE):\n            coarse = mlp(features, [1024, 1024, self.num_coarse * 3])\n            coarse = tf.reshape(coarse, [-1, self.num_coarse, 3])\n\n        with tf.variable_scope('folding', reuse=tf.AUTO_REUSE):\n            grid = tf.meshgrid(tf.linspace(-0.05, 0.05, self.grid_size), tf.linspace(-0.05, 0.05, self.grid_size))\n 
           grid = tf.expand_dims(tf.reshape(tf.stack(grid, axis=2), [-1, 2]), 0)\n            grid_feat = tf.tile(grid, [features.shape[0], self.num_coarse, 1])\n\n            point_feat = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            point_feat = tf.reshape(point_feat, [-1, self.num_fine, 3])\n\n            global_feat = tf.tile(tf.expand_dims(features, 1), [1, self.num_fine, 1])\n\n            feat = tf.concat([grid_feat, point_feat, global_feat], axis=2)\n\n            center = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            center = tf.reshape(center, [-1, self.num_fine, 3])\n\n            fine = mlp_conv(feat, [512, 512, 3]) + center\n        return coarse, fine\n\n    def create_loss(self, coarse, fine, gt, alpha):\n\n        # print('coarse shape:', coarse.shape)\n        # print('fine shape:', fine.shape)\n        # print('gt shape:', gt.shape)\n\n        loss_coarse = chamfer(coarse, gt)\n        add_train_summary('train/coarse_loss', loss_coarse)\n        update_coarse = add_valid_summary('valid/coarse_loss', loss_coarse)\n\n        loss_fine = chamfer(fine, gt)\n        add_train_summary('train/fine_loss', loss_fine)\n        update_fine = add_valid_summary('valid/fine_loss', loss_fine)\n\n        loss = loss_coarse + alpha * loss_fine\n        add_train_summary('train/loss', loss)\n        update_loss = add_valid_summary('valid/loss', loss)\n\n        return loss, [update_coarse, update_fine, update_loss]\n"
  },
  {
    "path": "OcCo_TF/completion_models/pcn_emd.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\n#  Author: Wentao Yuan (wyuan1@cs.cmu.edu) 05/31/2018\n\nimport tensorflow as tf\nfrom utils.tf_util import mlp_conv, point_maxpool, point_unpool, mlp, add_train_summary, \\\n    add_valid_summary, earth_mover, chamfer\n\n\nclass Model:\n    def __init__(self, inputs, npts, gt, alpha, **kwargs):\n        self.num_coarse = 1024\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_fine = self.grid_size ** 2 * self.num_coarse\n        self.features = self.create_encoder(inputs, npts)\n        self.coarse, self.fine = self.create_decoder(self.features)\n        self.loss, self.update = self.create_loss(self.coarse, self.fine, gt, alpha)\n        self.outputs = self.fine\n        self.visualize_ops = [tf.split(inputs[0], npts, axis=0), self.coarse, self.fine, gt]\n        self.visualize_titles = ['input', 'coarse output', 'fine output', 'ground truth']\n\n    def create_encoder(self, inputs, npts):\n        with tf.variable_scope('encoder_0', reuse=tf.AUTO_REUSE):\n            features = mlp_conv(inputs, [128, 256])\n            features_global = point_unpool(point_maxpool(features, npts, keepdims=True), npts)\n            features = tf.concat([features, features_global], axis=2)\n        with tf.variable_scope('encoder_1', reuse=tf.AUTO_REUSE):\n            features = mlp_conv(features, [512, 1024])\n            features = point_maxpool(features, npts)\n        return features\n\n    def create_decoder(self, features):\n        with tf.variable_scope('decoder', reuse=tf.AUTO_REUSE):\n            coarse = mlp(features, [1024, 1024, self.num_coarse * 3])\n            coarse = tf.reshape(coarse, [-1, self.num_coarse, 3])\n\n        with tf.variable_scope('folding', reuse=tf.AUTO_REUSE):\n            x = tf.linspace(-self.grid_scale, self.grid_scale, self.grid_size)\n            y = tf.linspace(-self.grid_scale, self.grid_scale, self.grid_size)\n            grid = 
tf.meshgrid(x, y)\n            grid = tf.expand_dims(tf.reshape(tf.stack(grid, axis=2), [-1, 2]), 0)\n            grid_feat = tf.tile(grid, [features.shape[0], self.num_coarse, 1])\n\n            point_feat = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            point_feat = tf.reshape(point_feat, [-1, self.num_fine, 3])\n\n            global_feat = tf.tile(tf.expand_dims(features, 1), [1, self.num_fine, 1])\n\n            feat = tf.concat([grid_feat, point_feat, global_feat], axis=2)\n\n            center = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            center = tf.reshape(center, [-1, self.num_fine, 3])\n\n            fine = mlp_conv(feat, [512, 512, 3]) + center\n        return coarse, fine\n\n    def create_loss(self, coarse, fine, gt, alpha):\n        \n        gt_ds = gt[:, :coarse.shape[1], :]\n        \n        loss_coarse = earth_mover(coarse, gt_ds)\n        add_train_summary('train/coarse_loss', loss_coarse)\n        update_coarse = add_valid_summary('valid/coarse_loss', loss_coarse)\n\n        loss_fine = chamfer(fine, gt)\n        add_train_summary('train/fine_loss', loss_fine)\n        update_fine = add_valid_summary('valid/fine_loss', loss_fine)\n\n        loss = loss_coarse + alpha * loss_fine\n        add_train_summary('train/loss', loss)\n        update_loss = add_valid_summary('valid/loss', loss)\n\n        return loss, [update_coarse, update_fine, update_loss]\n"
  },
  {
    "path": "OcCo_TF/completion_models/pointnet_cd.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport os, sys, tensorflow as tf\nBASE_DIR = os.path.dirname(__file__)\nsys.path.append(BASE_DIR)\nsys.path.append(os.path.join(BASE_DIR, '../utils'))\nsys.path.append('../')\nfrom utils.tf_util import conv2d, mlp, mlp_conv, chamfer, add_valid_summary, add_train_summary, max_pool2d\nfrom utils.transform_nets import input_transform_net, feature_transform_net\nfrom train_completion import BATCH_SIZE, NUM_POINT\n\n\nclass Model:\n    def __init__(self, inputs, npts, gt, alpha, **kwargs):\n        self.__dict__.update(kwargs)  # batch_decay and is_training\n        self.num_output_points = 16384  # 1024 * 16\n        self.num_coarse = 1024\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_fine = self.grid_size ** 2 * self.num_coarse\n        self.features = self.create_encoder(inputs, npts)\n        self.coarse, self.fine = self.create_decoder(self.features)\n        self.loss, self.update = self.create_loss(gt, alpha)\n        self.outputs = self.fine\n        self.visualize_ops = [tf.split(inputs[0], npts, axis=0), self.coarse, self.fine, gt]\n        self.visualize_titles = ['input', 'coarse output', 'fine output', 'ground truth']\n\n    def create_encoder(self, inputs, npts):\n        # with tf.variable_scope('encoder_0', reuse=tf.AUTO_REUSE):\n        #     features = mlp_conv(inputs, [128, 256])\n        #     features_global = tf.reduce_max(features, axis=1, keep_dims=True, name='maxpool_0')\n        #     features = tf.concat([features, tf.tile(features_global, [1, tf.shape(inputs)[1], 1])], axis=2)\n        # with tf.variable_scope('encoder_1', reuse=tf.AUTO_REUSE):\n        #     features = mlp_conv(features, [512, 1024])\n        #     features = tf.reduce_max(features, axis=1, name='maxpool_1')\n        # end_points = {}\n\n        # if DATASET =='modelnet40':\n        inputs = tf.reshape(inputs, (BATCH_SIZE, NUM_POINT, 3))\n\n        with 
tf.variable_scope('transform_net1') as sc:\n            transform = input_transform_net(inputs, self.is_training, self.bn_decay, K=3)\n\n        point_cloud_transformed = tf.matmul(inputs, transform)\n        input_image = tf.expand_dims(point_cloud_transformed, -1)\n\n        net = conv2d(inputs=input_image, num_output_channels=64, kernel_size=[1, 3],\n                     scope='conv1', padding='VALID', stride=[1, 1],\n                     bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n        net = conv2d(inputs=net, num_output_channels=64, kernel_size=[1, 1],\n                     scope='conv2', padding='VALID', stride=[1, 1],\n                     bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n\n        with tf.variable_scope('transform_net2') as sc:\n            transform = feature_transform_net(net, self.is_training, self.bn_decay, K=64)\n        # end_points['transform'] = transform\n        net_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform)\n        net_transformed = tf.expand_dims(net_transformed, [2])\n\n        '''conv2d, with kernel size of [1,1,1,1] and stride of [1,1,1,1],\n        basically equals with the MLPs'''\n\n        # use_xavier=True, stddev=1e-3, weight_decay=0.0, activation_fn=tf.nn.relu,\n        net = conv2d(net_transformed, 64, [1, 1],\n                     scope='conv3', padding='VALID', stride=[1, 1],\n                     bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n        net = conv2d(net, 128, [1, 1],\n                     padding='VALID', stride=[1, 1],\n                     bn=True, is_training=self.is_training,\n                     scope='conv4', bn_decay=self.bn_decay)\n        net = conv2d(net, 1024, [1, 1],\n                     padding='VALID', stride=[1, 1],\n                     bn=True, is_training=self.is_training,\n                     scope='conv5', bn_decay=self.bn_decay)\n\n        net = max_pool2d(net, [NUM_POINT, 1],\n                         
padding='VALID', scope='maxpool')\n\n        features = tf.reshape(net, [BATCH_SIZE, -1])\n        return features\n\n    def create_decoder(self, features):\n        with tf.variable_scope('decoder', reuse=tf.AUTO_REUSE):\n            coarse = mlp(features, [1024, 1024, self.num_coarse * 3])\n            coarse = tf.reshape(coarse, [-1, self.num_coarse, 3])\n\n        with tf.variable_scope('folding', reuse=tf.AUTO_REUSE):\n            grid = tf.meshgrid(tf.linspace(-0.05, 0.05, self.grid_size),\n                               tf.linspace(-0.05, 0.05, self.grid_size))\n            grid = tf.expand_dims(tf.reshape(tf.stack(grid, axis=2), [-1, 2]), 0)\n            grid_feat = tf.tile(grid, [features.shape[0], self.num_coarse, 1])\n\n            point_feat = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            point_feat = tf.reshape(point_feat, [-1, self.num_fine, 3])\n\n            global_feat = tf.tile(tf.expand_dims(features, 1), [1, self.num_fine, 1])\n\n            feat = tf.concat([grid_feat, point_feat, global_feat], axis=2)\n\n            center = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            center = tf.reshape(center, [-1, self.num_fine, 3])\n\n            fine = mlp_conv(feat, [512, 512, 3]) + center\n        return coarse, fine\n\n    def create_loss(self, gt, alpha):\n\n        loss_coarse = chamfer(self.coarse, gt)\n        add_train_summary('train/coarse_loss', loss_coarse)\n        update_coarse = add_valid_summary('valid/coarse_loss', loss_coarse)\n\n        loss_fine = chamfer(self.fine, gt)\n        add_train_summary('train/fine_loss', loss_fine)\n        update_fine = add_valid_summary('valid/fine_loss', loss_fine)\n\n        loss = loss_coarse + alpha * loss_fine\n        add_train_summary('train/loss', loss)\n        update_loss = add_valid_summary('valid/loss', loss)\n\n        return loss, [update_coarse, update_fine, update_loss]\n"
  },
  {
    "path": "OcCo_TF/completion_models/pointnet_emd.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport os, sys, tensorflow as tf\nBASE_DIR = os.path.dirname(__file__)\nsys.path.append(BASE_DIR)\nsys.path.append(os.path.join(BASE_DIR, '../utils'))\nsys.path.append('../')\nfrom utils import tf_util\nfrom utils.transform_nets import input_transform_net, feature_transform_net\nfrom train_completion import BATCH_SIZE, NUM_POINT\n\n# BATCH_SIZE = 8  # otherwise set to 8\n# NUM_POINT = 2048  # 3000\n\n\nclass Model:\n    def __init__(self, inputs, npts, gt, alpha, **kwargs):\n        self.__dict__.update(kwargs)  # batch_decay and is_training\n        self.num_output_points = 16384  # 1024 * 16\n        self.num_coarse = 1024\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_fine = self.grid_size ** 2 * self.num_coarse\n        self.features = self.create_encoder(inputs, npts)\n        self.coarse, self.fine = self.create_decoder(self.features)\n        self.loss, self.update = self.create_loss(gt, alpha)\n        self.outputs = self.fine\n        self.visualize_ops = [tf.split(inputs[0], npts, axis=0), self.coarse, self.fine, gt]\n        self.visualize_titles = ['input', 'coarse output', 'fine output', 'ground truth']\n    \n    def create_encoder(self, inputs, npts):\n        # with tf.variable_scope('encoder_0', reuse=tf.AUTO_REUSE):\n        #     features = mlp_conv(inputs, [128, 256])\n        #     features_global = tf.reduce_max(features, axis=1, keep_dims=True, name='maxpool_0')\n        #     features = tf.concat([features, tf.tile(features_global, [1, tf.shape(inputs)[1], 1])], axis=2)\n        # with tf.variable_scope('encoder_1', reuse=tf.AUTO_REUSE):\n        #     features = mlp_conv(features, [512, 1024])\n        #     features = tf.reduce_max(features, axis=1, name='maxpool_1')\n        # end_points = {}\n        \n        inputs = tf.reshape(inputs, (BATCH_SIZE, NUM_POINT, 3))\n        with tf.variable_scope('transform_net1') as sc:\n   
         transform = input_transform_net(inputs, self.is_training, self.bn_decay, K=3)\n\n        point_cloud_transformed = tf.matmul(inputs, transform)\n        input_image = tf.expand_dims(point_cloud_transformed, -1)\n\n        net = tf_util.conv2d(inputs=input_image, num_output_channels=64, kernel_size=[1, 3],\n                             scope='conv1', padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n        net = tf_util.conv2d(inputs=net, num_output_channels=64, kernel_size=[1, 1],\n                             scope='conv2', padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n\n        with tf.variable_scope('transform_net2') as sc:\n            transform = feature_transform_net(net, self.is_training, self.bn_decay, K=64)\n        # end_points['transform'] = transform\n        net_transformed = tf.matmul(tf.squeeze(net, axis=[2]), transform)\n        net_transformed = tf.expand_dims(net_transformed, [2])\n\n        '''conv2d, with kernel size of [1,1,1,1] and stride of [1,1,1,1], \n        basically equals with the MLPs'''\n        \n        # use_xavier=True, stddev=1e-3, weight_decay=0.0, activation_fn=tf.nn.relu,\n        net = tf_util.conv2d(net_transformed, 64, [1, 1],\n                             scope='conv3', padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training, bn_decay=self.bn_decay)\n        net = tf_util.conv2d(net, 128, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='conv4', bn_decay=self.bn_decay)\n        net = tf_util.conv2d(net, 1024, [1, 1],\n                             padding='VALID', stride=[1, 1],\n                             bn=True, is_training=self.is_training,\n                             scope='conv5', 
bn_decay=self.bn_decay)\n\n        net = tf_util.max_pool2d(net, [NUM_POINT, 1],\n                                 padding='VALID', scope='maxpool')\n\n        features = tf.reshape(net, [BATCH_SIZE, -1])\n        return features\n\n    def create_decoder(self, features):\n        with tf.variable_scope('decoder', reuse=tf.AUTO_REUSE):\n            coarse = tf_util.mlp(features, [1024, 1024, self.num_coarse * 3])\n            coarse = tf.reshape(coarse, [-1, self.num_coarse, 3])\n    \n        with tf.variable_scope('folding', reuse=tf.AUTO_REUSE):\n            grid = tf.meshgrid(tf.linspace(-0.05, 0.05, self.grid_size), tf.linspace(-0.05, 0.05, self.grid_size))\n            grid = tf.expand_dims(tf.reshape(tf.stack(grid, axis=2), [-1, 2]), 0)\n            grid_feat = tf.tile(grid, [features.shape[0], self.num_coarse, 1])\n        \n            point_feat = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            point_feat = tf.reshape(point_feat, [-1, self.num_fine, 3])\n        \n            global_feat = tf.tile(tf.expand_dims(features, 1), [1, self.num_fine, 1])\n        \n            feat = tf.concat([grid_feat, point_feat, global_feat], axis=2)\n        \n            center = tf.tile(tf.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n            center = tf.reshape(center, [-1, self.num_fine, 3])\n        \n            fine = tf_util.mlp_conv(feat, [512, 512, 3]) + center\n        return coarse, fine\n\n    def create_loss(self, gt, alpha):\n         \n        gt_ds = gt[:, :self.coarse.shape[1], :]\n        loss_coarse = tf_util.earth_mover(self.coarse, gt_ds)\n        # loss_coarse = earth_mover(coarse, gt_ds)\n        tf_util.add_train_summary('train/coarse_loss', loss_coarse)\n        update_coarse = tf_util.add_valid_summary('valid/coarse_loss', loss_coarse)\n    \n        loss_fine = tf_util.chamfer(self.fine, gt)\n        tf_util.add_train_summary('train/fine_loss', loss_fine)\n        update_fine = 
tf_util.add_valid_summary('valid/fine_loss', loss_fine)\n    \n        loss = loss_coarse + alpha * loss_fine\n        tf_util.add_train_summary('train/loss', loss)\n        update_loss = tf_util.add_valid_summary('valid/loss', loss)\n    \n        return loss, [update_coarse, update_fine, update_loss]\n"
  },
  {
    "path": "OcCo_TF/docker/.dockerignore",
    "content": "../data/\n../log/\n"
  },
  {
    "path": "OcCo_TF/docker/Dockerfile_TF",
    "content": "FROM tensorflow/tensorflow:1.12.0-gpu-py3\n\nWORKDIR /workspace/OcCo_TF\nRUN mkdir /home/hcw\nRUN chmod -R 777 /home/hcw\nRUN chmod 777 /usr/bin\nRUN chmod 777 /bin\nRUN chmod 777 /usr/local/\nRUN apt-get -y update\nRUN apt-get -y install vim screen libgl1-mesa-glx\nCOPY ./Requirements_TF.txt /workspace/OcCo_TF\nRUN pip install -r ../Requirements_TF.txt\nCOPY ./pc_distance /workspace/OcCo_TF/pc_distance\n# RUN apt-key adv --fetch-keys http://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/7fa2af80.pub\n# RUN apt-get install wget\n# RUN wget http://developer.download.nvidia.com/compute/cuda/repos/ubuntu1604/x86_64/cuda-repo-ubuntu1604_9.1.85-1_amd64.deb\n# RUN yes|apt -y install ./cuda-repo-ubuntu1604_9.1.85-1_amd64.deb\n# RUN wget http://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1604/x86_64/nvidia-machine-learning-repo-ubuntu1604_1.0.0-1_amd64.deb\n# RUN apt -y install ./nvidia-machine-learning-repo-ubuntu1604_1.0.0-1_amd64.deb\n\n# RUN apt-get update\n# Install the NVIDIA driver\n# Issue with driver install requires creating /usr/lib/nvidia\n# RUN mkdir /usr/lib/nvidia\n# RUN apt-get -y -o Dpkg::Options::=\"--force-overwrite\" install --no-install-recommends nvidia-410\n# Reboot. Check that GPUs are visible using the command: nvidia-smi\n\n# Install CUDA and tools. 
Include optional NCCL 2.x\n# RUN apt install -y --allow-downgrades cuda9.0 cuda-cublas-9-0 cuda-cufft-9-0 cuda-curand-9-0 \\\n#    cuda-cusolver-9-0 cuda-cusparse-9-0 libcudnn7=7.2.1.38-1+cuda9.0 \\\n#    libnccl2=2.2.13-1+cuda9.0 cuda-command-line-tools-9-0\n\n# Optional: Install the TensorRT runtime (must be after CUDA install)\n# RUN apt update\n# RUN apt -y install libnvinfer4=4.1.2-1+cuda9.0\nWORKDIR /workspace/OcCo_TF/pc_distance\nRUN make\nRUN chmod -R 777  /workspace/OcCo_TF/pc_distance\n# RUN ln -s /usr/local/cuda/lib64/libcudart.so.10.0 /usr/local/cuda/lib64/libcudart.so.9.0\nRUN ln -s /usr/local/lib/python3.5/dist-packages/tensorflow/libtensorflow_framework.so /usr/local/lib/python3.5/dist-packages/tensorflow/libtensorflow_framework.so.1\nRUN mkdir -p /usr/local/nvidia/lib\nRUN cp /usr/local/lib/python3.5/dist-packages/tensorflow/libtensorflow_framework.so /usr/local/nvidia/lib/libtensorflow_framework.so.1\n\n\nRUN useradd hcw\nUSER hcw\nWORKDIR /workspace/OcCo_TF\n"
  },
  {
    "path": "OcCo_TF/pc_distance/__init__.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n"
  },
  {
    "path": "OcCo_TF/pc_distance/makefile",
    "content": "cuda_inc = /usr/local/cuda-9.0/include/\ncuda_lib = /usr/local/cuda-9.0/lib64/\nnvcc = /usr/local/cuda-9.0/bin/nvcc\ntf_inc = /usr/local/lib/python3.5/dist-packages/tensorflow/include\ntf_lib = /usr/local/lib/python3.5/dist-packages/tensorflow\n\nall: tf_nndistance_so.so tf_approxmatch_so.so\n\ntf_nndistance.cu.o: tf_nndistance.cu\n\t$(nvcc) tf_nndistance.cu -o tf_nndistance.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n\ntf_nndistance_so.so: tf_nndistance.cpp tf_nndistance.cu.o\n\tg++ tf_nndistance.cpp tf_nndistance.cu.o -o tf_nndistance_so.so \\\n\t-I $(cuda_inc) -I $(tf_inc) -L $(cuda_lib) -lcudart -L $(tf_lib) -ltensorflow_framework \\\n    -shared -D_GLIBCXX_USE_CXX11_ABI=0 -std=c++11 -fPIC -O2\n\ntf_approxmatch.cu.o: tf_approxmatch.cu\n\t$(nvcc) tf_approxmatch.cu -o tf_approxmatch.cu.o -c -O2 -DGOOGLE_CUDA=1 -x cu -Xcompiler -fPIC\n\ntf_approxmatch_so.so: tf_approxmatch.cpp tf_approxmatch.cu.o\n\tg++ -shared $(CPPFLAGS) tf_approxmatch.cpp tf_approxmatch.cu.o -o tf_approxmatch_so.so \\\n\t-I $(cuda_inc) -I $(tf_inc) -L $(cuda_lib) -lcudart -L $(tf_lib) -ltensorflow_framework \\\n    -shared -D_GLIBCXX_USE_CXX11_ABI=0 -std=c++11 -fPIC -O2\n\nclean:\n\trm -rf *.o *.so\n"
  },
  {
    "path": "OcCo_TF/pc_distance/tf_approxmatch.cpp",
    "content": "#include \"tensorflow/core/framework/op.h\"\n#include \"tensorflow/core/framework/op_kernel.h\"\n#include <algorithm>\n#include <vector>\n#include <math.h>\nusing namespace tensorflow;\nREGISTER_OP(\"ApproxMatch\")\n\t.Input(\"xyz1: float32\")\n\t.Input(\"xyz2: float32\")\n\t.Output(\"match: float32\");\nREGISTER_OP(\"MatchCost\")\n\t.Input(\"xyz1: float32\")\n\t.Input(\"xyz2: float32\")\n\t.Input(\"match: float32\")\n\t.Output(\"cost: float32\");\nREGISTER_OP(\"MatchCostGrad\")\n\t.Input(\"xyz1: float32\")\n\t.Input(\"xyz2: float32\")\n\t.Input(\"match: float32\")\n\t.Output(\"grad1: float32\")\n\t.Output(\"grad2: float32\");\n\nvoid approxmatch_cpu(int b,int n,int m,const float * xyz1,const float * xyz2,float * match){\n\tfor (int i=0;i<b;i++){\n\t\tint factorl=std::max(n,m)/n;\n\t\tint factorr=std::max(n,m)/m;\n\t\tstd::vector<double> saturatedl(n,double(factorl)),saturatedr(m,double(factorr));\n\t\tstd::vector<double> weight(n*m);\n\t\tfor (int j=0;j<n*m;j++)\n\t\t\tmatch[j]=0;\n\t\tfor (int j=8;j>=-2;j--){\n\t\t\t//printf(\"i=%d j=%d\\n\",i,j);\n\t\t\tdouble level=-powf(4.0,j);\n\t\t\tif (j==-2)\n\t\t\t\tlevel=0;\n\t\t\tfor (int k=0;k<n;k++){\n\t\t\t\tdouble x1=xyz1[k*3+0];\n\t\t\t\tdouble y1=xyz1[k*3+1];\n\t\t\t\tdouble z1=xyz1[k*3+2];\n\t\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\t\tdouble x2=xyz2[l*3+0];\n\t\t\t\t\tdouble y2=xyz2[l*3+1];\n\t\t\t\t\tdouble z2=xyz2[l*3+2];\n\t\t\t\t\tweight[k*m+l]=expf(level*((x1-x2)*(x1-x2)+(y1-y2)*(y1-y2)+(z1-z2)*(z1-z2)))*saturatedr[l];\n\t\t\t\t}\n\t\t\t}\n\t\t\tstd::vector<double> ss(m,1e-9);\n\t\t\tfor (int k=0;k<n;k++){\n\t\t\t\tdouble s=1e-9;\n\t\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\t\ts+=weight[k*m+l];\n\t\t\t\t}\n\t\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\t\tweight[k*m+l]=weight[k*m+l]/s*saturatedl[k];\n\t\t\t\t}\n\t\t\t\tfor (int l=0;l<m;l++)\n\t\t\t\t\tss[l]+=weight[k*m+l];\n\t\t\t}\n\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\tdouble s=ss[l];\n\t\t\t\tdouble 
r=std::min(saturatedr[l]/s,1.0);\n\t\t\t\tss[l]=r;\n\t\t\t}\n\t\t\tstd::vector<double> ss2(m,0);\n\t\t\tfor (int k=0;k<n;k++){\n\t\t\t\tdouble s=0;\n\t\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\t\tweight[k*m+l]*=ss[l];\n\t\t\t\t\ts+=weight[k*m+l];\n\t\t\t\t\tss2[l]+=weight[k*m+l];\n\t\t\t\t}\n\t\t\t\tsaturatedl[k]=std::max(saturatedl[k]-s,0.0);\n\t\t\t}\n\t\t\tfor (int k=0;k<n*m;k++)\n\t\t\t\tmatch[k]+=weight[k];\n\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\tsaturatedr[l]=std::max(saturatedr[l]-ss2[l],0.0);\n\t\t\t}\n\t\t}\n\t\txyz1+=n*3;\n\t\txyz2+=m*3;\n\t\tmatch+=n*m;\n\t}\n}\nvoid matchcost_cpu(int b,int n,int m,const float * xyz1,const float * xyz2,const float * match,float * cost){\n\tfor (int i=0;i<b;i++){\n\t\tdouble s=0;\n\t\tfor (int j=0;j<n;j++)\n\t\t\tfor (int k=0;k<m;k++){\n\t\t\t\tfloat x1=xyz1[j*3+0];\n\t\t\t\tfloat y1=xyz1[j*3+1];\n\t\t\t\tfloat z1=xyz1[j*3+2];\n\t\t\t\tfloat x2=xyz2[k*3+0];\n\t\t\t\tfloat y2=xyz2[k*3+1];\n\t\t\t\tfloat z2=xyz2[k*3+2];\n\t\t\t\tfloat d=sqrtf((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1))*match[j*m+k];\n\t\t\t\ts+=d;\n\t\t\t}\n\t\tcost[0]=s;\n\t\txyz1+=n*3;\n\t\txyz2+=m*3;\n\t\tmatch+=n*m;\n\t\tcost+=1;\n\t}\n}\nvoid matchcostgrad_cpu(int b,int n,int m,const float * xyz1,const float * xyz2,const float * match,float * grad1,float * grad2){\n\tfor (int i=0;i<b;i++){\n\t\tfor (int j=0;j<n;j++)\n\t\t\tgrad1[j*3+0]=0;\n\t\tfor (int j=0;j<m;j++){\n\t\t\tfloat sx=0,sy=0,sz=0;\n\t\t\tfor (int k=0;k<n;k++){\n\t\t\t\tfloat x2=xyz2[j*3+0];\n\t\t\t\tfloat y2=xyz2[j*3+1];\n\t\t\t\tfloat z2=xyz2[j*3+2];\n\t\t\t\tfloat x1=xyz1[k*3+0];\n\t\t\t\tfloat y1=xyz1[k*3+1];\n\t\t\t\tfloat z1=xyz1[k*3+2];\n\t\t\t\tfloat d=std::max(sqrtf((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1)),1e-20f);\n\t\t\t\tfloat dx=match[k*m+j]*((x2-x1)/d);\n\t\t\t\tfloat dy=match[k*m+j]*((y2-y1)/d);\n\t\t\t\tfloat 
dz=match[k*m+j]*((z2-z1)/d);\n\t\t\t\tgrad1[k*3+0]-=dx;\n\t\t\t\tgrad1[k*3+1]-=dy;\n\t\t\t\tgrad1[k*3+2]-=dz;\n\t\t\t\tsx+=dx;\n\t\t\t\tsy+=dy;\n\t\t\t\tsz+=dz;\n\t\t\t}\n\t\t\tgrad2[j*3+0]=sx;\n\t\t\tgrad2[j*3+1]=sy;\n\t\t\tgrad2[j*3+2]=sz;\n\t\t}\n\t\txyz1+=n*3;\n\t\txyz2+=m*3;\n\t\tmatch+=n*m;\n\t\tgrad1+=n*3;\n\t\tgrad2+=m*3;\n\t}\n}\nvoid approxmatchLauncher(int b,int n,int m,const float * xyz1,const float * xyz2,float * match,float * temp);\nvoid matchcostLauncher(int b,int n,int m,const float * xyz1,const float * xyz2,const float * match,float * out);\nvoid matchcostgradLauncher(int b,int n,int m,const float * xyz1,const float * xyz2,const float * match,float * grad1,float * grad2);\n\nclass ApproxMatchGpuOp: public OpKernel{\n\tpublic:\n\t\texplicit ApproxMatchGpuOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3 && xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"ApproxMatch expects (batch_size,num_points,3) xyz1 shape\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&(xyz1_flat(0));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\t\t\t//OP_REQUIRES(context,n<=4096,errors::InvalidArgument(\"ApproxMatch handles at most 4096 dataset points\"));\n\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3 && xyz2_tensor.shape().dim_size(2)==3 && xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"ApproxMatch expects (batch_size,num_points,3) xyz2 shape, and batch_size must match\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\t//OP_REQUIRES(context,m<=1024,errors::InvalidArgument(\"ApproxMatch handles at most 1024 query points\"));\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&(xyz2_flat(0));\n\t\t\tTensor * 
match_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,m,n},&match_tensor));\n\t\t\tauto match_flat=match_tensor->flat<float>();\n\t\t\tfloat * match=&(match_flat(0));\n\t\t\tTensor temp_tensor;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_temp(DataTypeToEnum<float>::value,TensorShape{b,(n+m)*2},&temp_tensor));\n\t\t\tauto temp_flat=temp_tensor.flat<float>();\n\t\t\tfloat * temp=&(temp_flat(0));\n\t\t\tapproxmatchLauncher(b,n,m,xyz1,xyz2,match,temp);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"ApproxMatch\").Device(DEVICE_GPU), ApproxMatchGpuOp);\nclass ApproxMatchOp: public OpKernel{\n\tpublic:\n\t\texplicit ApproxMatchOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3 && xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"ApproxMatch expects (batch_size,num_points,3) xyz1 shape\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&(xyz1_flat(0));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\t\t\t//OP_REQUIRES(context,n<=4096,errors::InvalidArgument(\"ApproxMatch handles at most 4096 dataset points\"));\n\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3 && xyz2_tensor.shape().dim_size(2)==3 && xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"ApproxMatch expects (batch_size,num_points,3) xyz2 shape, and batch_size must match\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\t//OP_REQUIRES(context,m<=1024,errors::InvalidArgument(\"ApproxMatch handles at most 1024 query points\"));\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&(xyz2_flat(0));\n\t\t\tTensor * match_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,m,n},&match_tensor));\n\t\t\tauto 
match_flat=match_tensor->flat<float>();\n\t\t\tfloat * match=&(match_flat(0));\n\t\t\tapproxmatch_cpu(b,n,m,xyz1,xyz2,match);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"ApproxMatch\").Device(DEVICE_CPU), ApproxMatchOp);\nclass MatchCostGpuOp: public OpKernel{\n\tpublic:\n\t\texplicit MatchCostGpuOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3 && xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"MatchCost expects (batch_size,num_points,3) xyz1 shape\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&(xyz1_flat(0));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3 && xyz2_tensor.shape().dim_size(2)==3 && xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"MatchCost expects (batch_size,num_points,3) xyz2 shape, and batch_size must match\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&(xyz2_flat(0));\n\n\t\t\tconst Tensor& match_tensor=context->input(2);\n\t\t\tOP_REQUIRES(context,match_tensor.dims()==3 && match_tensor.shape().dim_size(0)==b && match_tensor.shape().dim_size(1)==m && match_tensor.shape().dim_size(2)==n,errors::InvalidArgument(\"MatchCost expects (batch_size,#query,#dataset) match shape\"));\n\t\t\tauto match_flat=match_tensor.flat<float>();\n\t\t\tconst float * match=&(match_flat(0));\n\n\t\t\tTensor * cost_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b},&cost_tensor));\n\t\t\tauto cost_flat=cost_tensor->flat<float>();\n\t\t\tfloat * cost=&(cost_flat(0));\n\t\t\tmatchcostLauncher(b,n,m,xyz1,xyz2,match,cost);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"MatchCost\").Device(DEVICE_GPU), 
MatchCostGpuOp);\nclass MatchCostOp: public OpKernel{\n\tpublic:\n\t\texplicit MatchCostOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3 && xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"MatchCost expects (batch_size,num_points,3) xyz1 shape\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&(xyz1_flat(0));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3 && xyz2_tensor.shape().dim_size(2)==3 && xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"MatchCost expects (batch_size,num_points,3) xyz2 shape, and batch_size must match\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&(xyz2_flat(0));\n\n\t\t\tconst Tensor& match_tensor=context->input(2);\n\t\t\tOP_REQUIRES(context,match_tensor.dims()==3 && match_tensor.shape().dim_size(0)==b && match_tensor.shape().dim_size(1)==m && match_tensor.shape().dim_size(2)==n,errors::InvalidArgument(\"MatchCost expects (batch_size,#query,#dataset) match shape\"));\n\t\t\tauto match_flat=match_tensor.flat<float>();\n\t\t\tconst float * match=&(match_flat(0));\n\n\t\t\tTensor * cost_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b},&cost_tensor));\n\t\t\tauto cost_flat=cost_tensor->flat<float>();\n\t\t\tfloat * cost=&(cost_flat(0));\n\t\t\tmatchcost_cpu(b,n,m,xyz1,xyz2,match,cost);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"MatchCost\").Device(DEVICE_CPU), MatchCostOp);\n\nclass MatchCostGradGpuOp: public OpKernel{\n\tpublic:\n\t\texplicit MatchCostGradGpuOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * 
context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3 && xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"MatchCostGrad expects (batch_size,num_points,3) xyz1 shape\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&(xyz1_flat(0));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3 && xyz2_tensor.shape().dim_size(2)==3 && xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"MatchCostGrad expects (batch_size,num_points,3) xyz2 shape, and batch_size must match\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&(xyz2_flat(0));\n\n\t\t\tconst Tensor& match_tensor=context->input(2);\n\t\t\tOP_REQUIRES(context,match_tensor.dims()==3 && match_tensor.shape().dim_size(0)==b && match_tensor.shape().dim_size(1)==m && match_tensor.shape().dim_size(2)==n,errors::InvalidArgument(\"MatchCost expects (batch_size,#query,#dataset) match shape\"));\n\t\t\tauto match_flat=match_tensor.flat<float>();\n\t\t\tconst float * match=&(match_flat(0));\n\n\t\t\tTensor * grad1_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,n,3},&grad1_tensor));\n\t\t\tauto grad1_flat=grad1_tensor->flat<float>();\n\t\t\tfloat * grad1=&(grad1_flat(0));\n\t\t\tTensor * grad2_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(1,TensorShape{b,m,3},&grad2_tensor));\n\t\t\tauto grad2_flat=grad2_tensor->flat<float>();\n\t\t\tfloat * grad2=&(grad2_flat(0));\n\t\t\tmatchcostgradLauncher(b,n,m,xyz1,xyz2,match,grad1,grad2);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"MatchCostGrad\").Device(DEVICE_GPU), MatchCostGradGpuOp);\nclass MatchCostGradOp: public OpKernel{\n\tpublic:\n\t\texplicit MatchCostGradOp(OpKernelConstruction* 
context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3 && xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"MatchCost expects (batch_size,num_points,3) xyz1 shape\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&(xyz1_flat(0));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3 && xyz2_tensor.shape().dim_size(2)==3 && xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"MatchCost expects (batch_size,num_points,3) xyz2 shape, and batch_size must match\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&(xyz2_flat(0));\n\n\t\t\tconst Tensor& match_tensor=context->input(2);\n\t\t\tOP_REQUIRES(context,match_tensor.dims()==3 && match_tensor.shape().dim_size(0)==b && match_tensor.shape().dim_size(1)==m && match_tensor.shape().dim_size(2)==n,errors::InvalidArgument(\"MatchCost expects (batch_size,#query,#dataset) match shape\"));\n\t\t\tauto match_flat=match_tensor.flat<float>();\n\t\t\tconst float * match=&(match_flat(0));\n\n\t\t\tTensor * grad1_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,n,3},&grad1_tensor));\n\t\t\tauto grad1_flat=grad1_tensor->flat<float>();\n\t\t\tfloat * grad1=&(grad1_flat(0));\n\t\t\tTensor * grad2_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(1,TensorShape{b,m,3},&grad2_tensor));\n\t\t\tauto grad2_flat=grad2_tensor->flat<float>();\n\t\t\tfloat * grad2=&(grad2_flat(0));\n\t\t\tmatchcostgrad_cpu(b,n,m,xyz1,xyz2,match,grad1,grad2);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"MatchCostGrad\").Device(DEVICE_CPU), MatchCostGradOp);\n"
  },
  {
    "path": "OcCo_TF/pc_distance/tf_approxmatch.cu",
    "content": "__global__ void approxmatch(int b,int n,int m,const float * __restrict__ xyz1,const float * __restrict__ xyz2,float * __restrict__ match,float * temp){\n\tfloat * remainL=temp+blockIdx.x*(n+m)*2, * remainR=temp+blockIdx.x*(n+m)*2+n,*ratioL=temp+blockIdx.x*(n+m)*2+n+m,*ratioR=temp+blockIdx.x*(n+m)*2+n+m+n;\n\tfloat multiL,multiR;\n\tif (n>=m){\n\t\tmultiL=1;\n\t\tmultiR=n/m;\n\t}else{\n\t\tmultiL=m/n;\n\t\tmultiR=1;\n\t}\n\tconst int Block=1024;\n\t__shared__ float buf[Block*4];\n\tfor (int i=blockIdx.x;i<b;i+=gridDim.x){\n\t\tfor (int j=threadIdx.x;j<n*m;j+=blockDim.x)\n\t\t\tmatch[i*n*m+j]=0;\n\t\tfor (int j=threadIdx.x;j<n;j+=blockDim.x)\n\t\t\tremainL[j]=multiL;\n\t\tfor (int j=threadIdx.x;j<m;j+=blockDim.x)\n\t\t\tremainR[j]=multiR;\n\t\t__syncthreads();\n\t\tfor (int j=7;j>=-2;j--){\n\t\t\tfloat level=-powf(4.0f,j);\n\t\t\tif (j==-2){\n\t\t\t\tlevel=0;\n\t\t\t}\n\t\t\tfor (int k0=0;k0<n;k0+=blockDim.x){\n\t\t\t\tint k=k0+threadIdx.x;\n\t\t\t\tfloat x1=0,y1=0,z1=0;\n\t\t\t\tif (k<n){\n\t\t\t\t\tx1=xyz1[i*n*3+k*3+0];\n\t\t\t\t\ty1=xyz1[i*n*3+k*3+1];\n\t\t\t\t\tz1=xyz1[i*n*3+k*3+2];\n\t\t\t\t}\n\t\t\t\tfloat suml=1e-9f;\n\t\t\t\tfor (int l0=0;l0<m;l0+=Block){\n\t\t\t\t\tint lend=min(m,l0+Block)-l0;\n\t\t\t\t\tfor (int l=threadIdx.x;l<lend;l+=blockDim.x){\n\t\t\t\t\t\tfloat x2=xyz2[i*m*3+l0*3+l*3+0];\n\t\t\t\t\t\tfloat y2=xyz2[i*m*3+l0*3+l*3+1];\n\t\t\t\t\t\tfloat z2=xyz2[i*m*3+l0*3+l*3+2];\n\t\t\t\t\t\tbuf[l*4+0]=x2;\n\t\t\t\t\t\tbuf[l*4+1]=y2;\n\t\t\t\t\t\tbuf[l*4+2]=z2;\n\t\t\t\t\t\tbuf[l*4+3]=remainR[l0+l];\n\t\t\t\t\t}\n\t\t\t\t\t__syncthreads();\n\t\t\t\t\tfor (int l=0;l<lend;l++){\n\t\t\t\t\t\tfloat x2=buf[l*4+0];\n\t\t\t\t\t\tfloat y2=buf[l*4+1];\n\t\t\t\t\t\tfloat z2=buf[l*4+2];\n\t\t\t\t\t\tfloat d=level*((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1));\n\t\t\t\t\t\tfloat w=__expf(d)*buf[l*4+3];\n\t\t\t\t\t\tsuml+=w;\n\t\t\t\t\t}\n\t\t\t\t\t__syncthreads();\n\t\t\t\t}\n\t\t\t\tif 
(k<n)\n\t\t\t\t\tratioL[k]=remainL[k]/suml;\n\t\t\t}\n\t\t\t/*for (int k=threadIdx.x;k<n;k+=gridDim.x){\n\t\t\t\tfloat x1=xyz1[i*n*3+k*3+0];\n\t\t\t\tfloat y1=xyz1[i*n*3+k*3+1];\n\t\t\t\tfloat z1=xyz1[i*n*3+k*3+2];\n\t\t\t\tfloat suml=1e-9f;\n\t\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\t\tfloat x2=xyz2[i*m*3+l*3+0];\n\t\t\t\t\tfloat y2=xyz2[i*m*3+l*3+1];\n\t\t\t\t\tfloat z2=xyz2[i*m*3+l*3+2];\n\t\t\t\t\tfloat w=expf(level*((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1)))*remainR[l];\n\t\t\t\t\tsuml+=w;\n\t\t\t\t}\n\t\t\t\tratioL[k]=remainL[k]/suml;\n\t\t\t}*/\n\t\t\t__syncthreads();\n\t\t\tfor (int l0=0;l0<m;l0+=blockDim.x){\n\t\t\t\tint l=l0+threadIdx.x;\n\t\t\t\tfloat x2=0,y2=0,z2=0;\n\t\t\t\tif (l<m){\n\t\t\t\t\tx2=xyz2[i*m*3+l*3+0];\n\t\t\t\t\ty2=xyz2[i*m*3+l*3+1];\n\t\t\t\t\tz2=xyz2[i*m*3+l*3+2];\n\t\t\t\t}\n\t\t\t\tfloat sumr=0;\n\t\t\t\tfor (int k0=0;k0<n;k0+=Block){\n\t\t\t\t\tint kend=min(n,k0+Block)-k0;\n\t\t\t\t\tfor (int k=threadIdx.x;k<kend;k+=blockDim.x){\n\t\t\t\t\t\tbuf[k*4+0]=xyz1[i*n*3+k0*3+k*3+0];\n\t\t\t\t\t\tbuf[k*4+1]=xyz1[i*n*3+k0*3+k*3+1];\n\t\t\t\t\t\tbuf[k*4+2]=xyz1[i*n*3+k0*3+k*3+2];\n\t\t\t\t\t\tbuf[k*4+3]=ratioL[k0+k];\n\t\t\t\t\t}\n\t\t\t\t\t__syncthreads();\n\t\t\t\t\tfor (int k=0;k<kend;k++){\n\t\t\t\t\t\tfloat x1=buf[k*4+0];\n\t\t\t\t\t\tfloat y1=buf[k*4+1];\n\t\t\t\t\t\tfloat z1=buf[k*4+2];\n\t\t\t\t\t\tfloat w=__expf(level*((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1)))*buf[k*4+3];\n\t\t\t\t\t\tsumr+=w;\n\t\t\t\t\t}\n\t\t\t\t\t__syncthreads();\n\t\t\t\t}\n\t\t\t\tif (l<m){\n\t\t\t\t\tsumr*=remainR[l];\n\t\t\t\t\tfloat consumption=fminf(remainR[l]/(sumr+1e-9f),1.0f);\n\t\t\t\t\tratioR[l]=consumption*remainR[l];\n\t\t\t\t\tremainR[l]=fmaxf(0.0f,remainR[l]-sumr);\n\t\t\t\t}\n\t\t\t}\n\t\t\t/*for (int l=threadIdx.x;l<m;l+=blockDim.x){\n\t\t\t\tfloat x2=xyz2[i*m*3+l*3+0];\n\t\t\t\tfloat y2=xyz2[i*m*3+l*3+1];\n\t\t\t\tfloat z2=xyz2[i*m*3+l*3+2];\n\t\t\t\tfloat sumr=0;\n\t\t\t\tfor (int k=0;k<n;k++){\n\t\t\t\t\tfloat 
x1=xyz1[i*n*3+k*3+0];\n\t\t\t\t\tfloat y1=xyz1[i*n*3+k*3+1];\n\t\t\t\t\tfloat z1=xyz1[i*n*3+k*3+2];\n\t\t\t\t\tfloat w=expf(level*((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1)))*ratioL[k];\n\t\t\t\t\tsumr+=w;\n\t\t\t\t}\n\t\t\t\tsumr*=remainR[l];\n\t\t\t\tfloat consumption=fminf(remainR[l]/(sumr+1e-9f),1.0f);\n\t\t\t\tratioR[l]=consumption*remainR[l];\n\t\t\t\tremainR[l]=fmaxf(0.0f,remainR[l]-sumr);\n\t\t\t}*/\n\t\t\t__syncthreads();\n\t\t\tfor (int k0=0;k0<n;k0+=blockDim.x){\n\t\t\t\tint k=k0+threadIdx.x;\n\t\t\t\tfloat x1=0,y1=0,z1=0;\n\t\t\t\tif (k<n){\n\t\t\t\t\tx1=xyz1[i*n*3+k*3+0];\n\t\t\t\t\ty1=xyz1[i*n*3+k*3+1];\n\t\t\t\t\tz1=xyz1[i*n*3+k*3+2];\n\t\t\t\t}\n\t\t\t\tfloat suml=0;\n\t\t\t\tfor (int l0=0;l0<m;l0+=Block){\n\t\t\t\t\tint lend=min(m,l0+Block)-l0;\n\t\t\t\t\tfor (int l=threadIdx.x;l<lend;l+=blockDim.x){\n\t\t\t\t\t\tbuf[l*4+0]=xyz2[i*m*3+l0*3+l*3+0];\n\t\t\t\t\t\tbuf[l*4+1]=xyz2[i*m*3+l0*3+l*3+1];\n\t\t\t\t\t\tbuf[l*4+2]=xyz2[i*m*3+l0*3+l*3+2];\n\t\t\t\t\t\tbuf[l*4+3]=ratioR[l0+l];\n\t\t\t\t\t}\n\t\t\t\t\t__syncthreads();\n\t\t\t\t\tfloat rl=ratioL[k];\n\t\t\t\t\tif (k<n){\n\t\t\t\t\t\tfor (int l=0;l<lend;l++){\n\t\t\t\t\t\t\tfloat x2=buf[l*4+0];\n\t\t\t\t\t\t\tfloat y2=buf[l*4+1];\n\t\t\t\t\t\t\tfloat z2=buf[l*4+2];\n\t\t\t\t\t\t\tfloat w=__expf(level*((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1)))*rl*buf[l*4+3];\n\t\t\t\t\t\t\tmatch[i*n*m+(l0+l)*n+k]+=w;\n\t\t\t\t\t\t\tsuml+=w;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\t__syncthreads();\n\t\t\t\t}\n\t\t\t\tif (k<n)\n\t\t\t\t\tremainL[k]=fmaxf(0.0f,remainL[k]-suml);\n\t\t\t}\n\t\t\t/*for (int k=threadIdx.x;k<n;k+=blockDim.x){\n\t\t\t\tfloat x1=xyz1[i*n*3+k*3+0];\n\t\t\t\tfloat y1=xyz1[i*n*3+k*3+1];\n\t\t\t\tfloat z1=xyz1[i*n*3+k*3+2];\n\t\t\t\tfloat suml=0;\n\t\t\t\tfor (int l=0;l<m;l++){\n\t\t\t\t\tfloat x2=xyz2[i*m*3+l*3+0];\n\t\t\t\t\tfloat y2=xyz2[i*m*3+l*3+1];\n\t\t\t\t\tfloat z2=xyz2[i*m*3+l*3+2];\n\t\t\t\t\tfloat 
w=expf(level*((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1)))*ratioL[k]*ratioR[l];\n\t\t\t\t\tmatch[i*n*m+l*n+k]+=w;\n\t\t\t\t\tsuml+=w;\n\t\t\t\t}\n\t\t\t\tremainL[k]=fmaxf(0.0f,remainL[k]-suml);\n\t\t\t}*/\n\t\t\t__syncthreads();\n\t\t}\n\t}\n}\nvoid approxmatchLauncher(int b,int n,int m,const float * xyz1,const float * xyz2,float * match,float * temp){\n\tapproxmatch<<<32,512>>>(b,n,m,xyz1,xyz2,match,temp);\n}\n__global__ void matchcost(int b,int n,int m,const float * __restrict__ xyz1,const float * __restrict__ xyz2,const float * __restrict__ match,float * __restrict__ out){\n\t__shared__ float allsum[512];\n\tconst int Block=1024;\n\t__shared__ float buf[Block*3];\n\tfor (int i=blockIdx.x;i<b;i+=gridDim.x){\n\t\tfloat subsum=0;\n\t\tfor (int k0=0;k0<n;k0+=blockDim.x){\n\t\t\tint k=k0+threadIdx.x;\n\t\t\tfloat x1=0,y1=0,z1=0;\n\t\t\tif (k<n){\n\t\t\t\tx1=xyz1[i*n*3+k*3+0];\n\t\t\t\ty1=xyz1[i*n*3+k*3+1];\n\t\t\t\tz1=xyz1[i*n*3+k*3+2];\n\t\t\t}\n\t\t\tfor (int l0=0;l0<m;l0+=Block){\n\t\t\t\tint lend=min(m,l0+Block)-l0;\n\t\t\t\tfor (int l=threadIdx.x;l<lend*3;l+=blockDim.x)\n\t\t\t\t\tbuf[l]=xyz2[i*m*3+l0*3+l];\n\t\t\t\t__syncthreads();\n\t\t\t\tif (k<n){\n\t\t\t\t\tfor (int l=0;l<lend;l++){\n\t\t\t\t\t\tfloat x2=buf[l*3+0];\n\t\t\t\t\t\tfloat y2=buf[l*3+1];\n\t\t\t\t\t\tfloat z2=buf[l*3+2];\n\t\t\t\t\t\tfloat d=sqrtf((x2-x1)*(x2-x1)+(y2-y1)*(y2-y1)+(z2-z1)*(z2-z1));\n\t\t\t\t\t\tsubsum+=d*match[i*n*m+(l0+l)*n+k];\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t__syncthreads();\n\t\t\t}\n\t\t}\n\t\tallsum[threadIdx.x]=subsum;\n\t\tfor (int j=1;j<blockDim.x;j<<=1){\n\t\t\t__syncthreads();\n\t\t\tif ((threadIdx.x&j)==0 && threadIdx.x+j<blockDim.x){\n\t\t\t\tallsum[threadIdx.x]+=allsum[threadIdx.x+j];\n\t\t\t}\n\t\t}\n\t\tif (threadIdx.x==0)\n\t\t\tout[i]=allsum[0];\n\t\t__syncthreads();\n\t}\n}\nvoid matchcostLauncher(int b,int n,int m,const float * xyz1,const float * xyz2,const float * match,float * out){\n\tmatchcost<<<32,512>>>(b,n,m,xyz1,xyz2,match,out);\n}\n__global__ void 
matchcostgrad2(int b,int n,int m,const float * __restrict__ xyz1,const float * __restrict__ xyz2,const float * __restrict__ match,float * __restrict__ grad2){\n\t__shared__ float sum_grad[256*3];\n\tfor (int i=blockIdx.x;i<b;i+=gridDim.x){\n\t\tint kbeg=m*blockIdx.y/gridDim.y;\n\t\tint kend=m*(blockIdx.y+1)/gridDim.y;\n\t\tfor (int k=kbeg;k<kend;k++){\n\t\t\tfloat x2=xyz2[(i*m+k)*3+0];\n\t\t\tfloat y2=xyz2[(i*m+k)*3+1];\n\t\t\tfloat z2=xyz2[(i*m+k)*3+2];\n\t\t\tfloat subsumx=0,subsumy=0,subsumz=0;\n\t\t\tfor (int j=threadIdx.x;j<n;j+=blockDim.x){\n\t\t\t\tfloat x1=x2-xyz1[(i*n+j)*3+0];\n\t\t\t\tfloat y1=y2-xyz1[(i*n+j)*3+1];\n\t\t\t\tfloat z1=z2-xyz1[(i*n+j)*3+2];\n\t\t\t\tfloat d=match[i*n*m+k*n+j]*rsqrtf(fmaxf(x1*x1+y1*y1+z1*z1,1e-20f));\n\t\t\t\tsubsumx+=x1*d;\n\t\t\t\tsubsumy+=y1*d;\n\t\t\t\tsubsumz+=z1*d;\n\t\t\t}\n\t\t\tsum_grad[threadIdx.x*3+0]=subsumx;\n\t\t\tsum_grad[threadIdx.x*3+1]=subsumy;\n\t\t\tsum_grad[threadIdx.x*3+2]=subsumz;\n\t\t\tfor (int j=1;j<blockDim.x;j<<=1){\n\t\t\t\t__syncthreads();\n\t\t\t\tint j1=threadIdx.x;\n\t\t\t\tint j2=threadIdx.x+j;\n\t\t\t\tif ((j1&j)==0 && j2<blockDim.x){\n\t\t\t\t\tsum_grad[j1*3+0]+=sum_grad[j2*3+0];\n\t\t\t\t\tsum_grad[j1*3+1]+=sum_grad[j2*3+1];\n\t\t\t\t\tsum_grad[j1*3+2]+=sum_grad[j2*3+2];\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (threadIdx.x==0){\n\t\t\t\tgrad2[(i*m+k)*3+0]=sum_grad[0];\n\t\t\t\tgrad2[(i*m+k)*3+1]=sum_grad[1];\n\t\t\t\tgrad2[(i*m+k)*3+2]=sum_grad[2];\n\t\t\t}\n\t\t\t__syncthreads();\n\t\t}\n\t}\n}\n__global__ void matchcostgrad1(int b,int n,int m,const float * __restrict__ xyz1,const float * __restrict__ xyz2,const float * __restrict__ match,float * __restrict__ grad1){\n\tfor (int i=blockIdx.x;i<b;i+=gridDim.x){\n\t\tfor (int l=threadIdx.x;l<n;l+=blockDim.x){\n\t\t\tfloat x1=xyz1[i*n*3+l*3+0];\n\t\t\tfloat y1=xyz1[i*n*3+l*3+1];\n\t\t\tfloat z1=xyz1[i*n*3+l*3+2];\n\t\t\tfloat dx=0,dy=0,dz=0;\n\t\t\tfor (int k=0;k<m;k++){\n\t\t\t\tfloat x2=xyz2[i*m*3+k*3+0];\n\t\t\t\tfloat 
y2=xyz2[i*m*3+k*3+1];\n\t\t\t\tfloat z2=xyz2[i*m*3+k*3+2];\n\t\t\t\tfloat d=match[i*n*m+k*n+l]*rsqrtf(fmaxf((x1-x2)*(x1-x2)+(y1-y2)*(y1-y2)+(z1-z2)*(z1-z2),1e-20f));\n\t\t\t\tdx+=(x1-x2)*d;\n\t\t\t\tdy+=(y1-y2)*d;\n\t\t\t\tdz+=(z1-z2)*d;\n\t\t\t}\n\t\t\tgrad1[i*n*3+l*3+0]=dx;\n\t\t\tgrad1[i*n*3+l*3+1]=dy;\n\t\t\tgrad1[i*n*3+l*3+2]=dz;\n\t\t}\n\t}\n}\nvoid matchcostgradLauncher(int b,int n,int m,const float * xyz1,const float * xyz2,const float * match,float * grad1,float * grad2){\n\tmatchcostgrad1<<<32,512>>>(b,n,m,xyz1,xyz2,match,grad1);\n\tmatchcostgrad2<<<dim3(32,32),256>>>(b,n,m,xyz1,xyz2,match,grad2);\n}\n\n"
  },
  {
    "path": "OcCo_TF/pc_distance/tf_approxmatch.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport tensorflow as tf\nfrom tensorflow.python.framework import ops  # it turns out work\nimport os.path as osp\n\nbase_dir = osp.dirname(osp.abspath(__file__))\napproxmatch_module = tf.load_op_library(osp.join(base_dir, 'tf_approxmatch_so.so'))\n\n\ndef approx_match(xyz1, xyz2):\n\t\"\"\"\n\t:param xyz1: batch_size * #dataset_points * 3\n\t:param xyz2: batch_size * #query_points * 3\n\t:return:\n\t\tmatch : batch_size * #query_points * #dataset_points\n\t\"\"\"\n\n\treturn approxmatch_module.approx_match(xyz1, xyz2)\n\n\nops.NoGradient('ApproxMatch')\n# @tf.RegisterShape('ApproxMatch')\n@ops.RegisterShape('ApproxMatch')\ndef _approx_match_shape(op):\n\tshape1 = op.inputs[0].get_shape().with_rank(3)\n\tshape2 = op.inputs[1].get_shape().with_rank(3)\n\treturn [tf.TensorShape([shape1.dims[0], shape2.dims[1], shape1.dims[1]])]\n\n\ndef match_cost(xyz1, xyz2, match):\n\t\"\"\"\n\t:param xyz1: batch_size * #dataset_points * 3\n\t:param xyz2: batch_size * #query_points * 3\n\t:param match: batch_size * #query_points * #dataset_points\n\t:return: cost : batch_size,\n\t\"\"\"\n\treturn approxmatch_module.match_cost(xyz1, xyz2, match)\n\n\n# @tf.RegisterShape('MatchCost')\n@ops.RegisterShape('MatchCost')\ndef _match_cost_shape(op):\n\tshape1 = op.inputs[0].get_shape().with_rank(3)\n\t# shape2 = op.inputs[1].get_shape().with_rank(3)\n\t# shape3 = op.inputs[2].get_shape().with_rank(3)\n\treturn [tf.TensorShape([shape1.dims[0]])]\n\n\n@tf.RegisterGradient('MatchCost')\ndef _match_cost_grad(op,grad_cost):\n\txyz1 = op.inputs[0]\n\txyz2 = op.inputs[1]\n\tmatch = op.inputs[2]\n\tgrad_1, grad_2 = approxmatch_module.match_cost_grad(xyz1, xyz2, match)\n\treturn [grad_1 * tf.expand_dims(tf.expand_dims(grad_cost, 1), 2),\n\t\t\tgrad_2 * tf.expand_dims(tf.expand_dims(grad_cost, 1), 2), None]\n\n\nif __name__ == '__main__':\n\talpha = 0.5\n\tbeta = 2.0\n\t# import bestmatch\n\timport numpy as np\n\t# 
import math\n\timport random\n\timport cv2\n\n\t# import tf_nndistance\n\n\tnpoint = 100\n\n\twith tf.device('/gpu:2'):\n\t\tpt_in = tf.placeholder(tf.float32, shape=(1, npoint * 4, 3))\n\t\tmypoints = tf.Variable(np.random.randn(1, npoint, 3).astype('float32'))\n\t\tmatch = approx_match(pt_in, mypoints)\n\t\tloss = tf.reduce_sum(match_cost(pt_in, mypoints, match))\n\t\t# match=approx_match(mypoints,pt_in)\n\t\t# loss=tf.reduce_sum(match_cost(mypoints,pt_in,match))\n\t\t# distf,_,distb,_=tf_nndistance.nn_distance(pt_in,mypoints)\n\t\t# loss=tf.reduce_sum((distf+1e-9)**0.5)*0.5+tf.reduce_sum((distb+1e-9)**0.5)*0.5\n\t\t# loss=tf.reduce_max((distf+1e-9)**0.5)*0.5*npoint+tf.reduce_max((distb+1e-9)**0.5)*0.5*npoint\n\n\t\toptimizer = tf.train.GradientDescentOptimizer(1e-4).minimize(loss)\n\twith tf.Session('') as sess:\n\t\t# sess.run(tf.initialize_all_variables())\n\t\tsess.run(tf.global_variables_initializer())\n\t\twhile True:\n\t\t\tmeanloss = 0\n\t\t\tmeantrueloss = 0\n\t\t\tfor i in range(1001):\n\t\t\t\t# phi=np.random.rand(4*npoint)*math.pi*2\n\t\t\t\t# tpoints=(np.hstack([np.cos(phi)[:,None],np.sin(phi)[:,None],(phi*0)[:,None]])*random.random())[None,:,:]\n\t\t\t\t# tpoints=((np.random.rand(400)-0.5)[:,None]*[0,2,0]+[(random.random()-0.5)*2,0,0]).astype('float32')[None,:,:]\n\t\t\t\ttpoints = np.hstack([np.linspace(-1, 1, 400)[:, None],\n\t\t\t\t\t\t\t\t\t (random.random() * 2 * np.linspace(1,0,400)**2)[:, None],\n\t\t\t\t\t\t\t\t\t np.zeros((400,1))])[None, :, :]\n\t\t\t\ttrainloss, _ = sess.run([loss, optimizer], feed_dict={pt_in: tpoints.astype('float32')})\n\t\t\ttrainloss, trainmatch = sess.run([loss, match], feed_dict={pt_in: tpoints.astype('float32')})\n\t\t\t# trainmatch=trainmatch.transpose((0,2,1))\n\t\t\tprint('trainloss: %f'%trainloss)\n\t\t\tshow = np.zeros((400,400,3), dtype='uint8')^255\n\t\t\ttrainmypoints = sess.run(mypoints)\n\t\t\t''' === visualisation ===\n\t\t\tfor i in range(len(tpoints[0])):\n\t\t\t\tu = 
np.random.choice(range(len(trainmypoints[0])), p=trainmatch[0].T[i])\n\t\t\t\tcv2.line(show,\n\t\t\t\t\t\t (int(tpoints[0][i,1]*100+200),int(tpoints[0][i,0]*100+200)),\n\t\t\t\t\t\t (int(trainmypoints[0][u,1]*100+200),int(trainmypoints[0][u,0]*100+200)),\n\t\t\t\t\t\t cv2.cv.CV_RGB(0,255,0))\n\t\t\tfor x, y, z in tpoints[0]:\n\t\t\t\tcv2.circle(show, (int(y*100+200), int(x*100+200)), 2, cv2.cv.CV_RGB(255, 0, 0))\n\t\t\tfor x, y, z in trainmypoints[0]:\n\t\t\t\tcv2.circle(show, (int(y*100+200),int(x*100+200)), 3, cv2.cv.CV_RGB(0, 0, 255))\n\t\t\t'''\n\t\t\tcost = ((tpoints[0][:, None, :] - np.repeat(trainmypoints[0][None, :, :], 4, axis=1))**2).sum(axis=2)**0.5\n\t\t\t# trueloss=bestmatch.bestmatch(cost)[0]\n\t\t\tprint(trainloss)  # true loss\n\t\t\t# cv2.imshow('show', show)\n\t\t\tcmd = cv2.waitKey(10) % 256\n\t\t\tif cmd == ord('q'):\n\t\t\t\tbreak\n"
  },
  {
    "path": "OcCo_TF/pc_distance/tf_nndistance.cpp",
    "content": "#include \"tensorflow/core/framework/op.h\"\n#include \"tensorflow/core/framework/op_kernel.h\"\nREGISTER_OP(\"NnDistance\")\n\t.Input(\"xyz1: float32\")\n\t.Input(\"xyz2: float32\")\n\t.Output(\"dist1: float32\")\n\t.Output(\"idx1: int32\")\n\t.Output(\"dist2: float32\")\n\t.Output(\"idx2: int32\");\nREGISTER_OP(\"NnDistanceGrad\")\n\t.Input(\"xyz1: float32\")\n\t.Input(\"xyz2: float32\")\n\t.Input(\"grad_dist1: float32\")\n\t.Input(\"idx1: int32\")\n\t.Input(\"grad_dist2: float32\")\n\t.Input(\"idx2: int32\")\n\t.Output(\"grad_xyz1: float32\")\n\t.Output(\"grad_xyz2: float32\");\nusing namespace tensorflow;\n\nstatic void nnsearch(int b,int n,int m,const float * xyz1,const float * xyz2,float * dist,int * idx){\n\tfor (int i=0;i<b;i++){\n\t\tfor (int j=0;j<n;j++){\n\t\t\tfloat x1=xyz1[(i*n+j)*3+0];\n\t\t\tfloat y1=xyz1[(i*n+j)*3+1];\n\t\t\tfloat z1=xyz1[(i*n+j)*3+2];\n\t\t\tdouble best=0;\n\t\t\tint besti=0;\n\t\t\tfor (int k=0;k<m;k++){\n\t\t\t\tfloat x2=xyz2[(i*m+k)*3+0]-x1;\n\t\t\t\tfloat y2=xyz2[(i*m+k)*3+1]-y1;\n\t\t\t\tfloat z2=xyz2[(i*m+k)*3+2]-z1;\n\t\t\t\tdouble d=x2*x2+y2*y2+z2*z2;\n\t\t\t\tif (k==0 || d<best){\n\t\t\t\t\tbest=d;\n\t\t\t\t\tbesti=k;\n\t\t\t\t}\n\t\t\t}\n\t\t\tdist[i*n+j]=best;\n\t\t\tidx[i*n+j]=besti;\n\t\t}\n\t}\n}\n\nclass NnDistanceOp : public OpKernel{\n\tpublic:\n\t\texplicit NnDistanceOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3,errors::InvalidArgument(\"NnDistance requires xyz1 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistance only accepts 3d point set xyz1\"));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint 
n=xyz1_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3,errors::InvalidArgument(\"NnDistance requires xyz2 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistance only accepts 3d point set xyz2\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"NnDistance expects xyz1 and xyz2 have same batch size\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&xyz1_flat(0);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&xyz2_flat(0);\n\t\t\tTensor * dist1_tensor=NULL;\n\t\t\tTensor * idx1_tensor=NULL;\n\t\t\tTensor * dist2_tensor=NULL;\n\t\t\tTensor * idx2_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,n},&dist1_tensor));\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(1,TensorShape{b,n},&idx1_tensor));\n\t\t\tauto dist1_flat=dist1_tensor->flat<float>();\n\t\t\tauto idx1_flat=idx1_tensor->flat<int>();\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(2,TensorShape{b,m},&dist2_tensor));\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(3,TensorShape{b,m},&idx2_tensor));\n\t\t\tauto dist2_flat=dist2_tensor->flat<float>();\n\t\t\tauto idx2_flat=idx2_tensor->flat<int>();\n\t\t\tfloat * dist1=&(dist1_flat(0));\n\t\t\tint * idx1=&(idx1_flat(0));\n\t\t\tfloat * dist2=&(dist2_flat(0));\n\t\t\tint * idx2=&(idx2_flat(0));\n\t\t\tnnsearch(b,n,m,xyz1,xyz2,dist1,idx1);\n\t\t\tnnsearch(b,m,n,xyz2,xyz1,dist2,idx2);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"NnDistance\").Device(DEVICE_CPU), NnDistanceOp);\nclass NnDistanceGradOp : public OpKernel{\n\tpublic:\n\t\texplicit NnDistanceGradOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tconst Tensor& 
xyz2_tensor=context->input(1);\n\t\t\tconst Tensor& grad_dist1_tensor=context->input(2);\n\t\t\tconst Tensor& idx1_tensor=context->input(3);\n\t\t\tconst Tensor& grad_dist2_tensor=context->input(4);\n\t\t\tconst Tensor& idx2_tensor=context->input(5);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3,errors::InvalidArgument(\"NnDistanceGrad requires xyz1 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistanceGrad only accepts 3d point set xyz1\"));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3,errors::InvalidArgument(\"NnDistanceGrad requires xyz2 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistanceGrad only accepts 3d point set xyz2\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"NnDistanceGrad expects xyz1 and xyz2 have same batch size\"));\n\t\t\tOP_REQUIRES(context,grad_dist1_tensor.shape()==(TensorShape{b,n}),errors::InvalidArgument(\"NnDistanceGrad requires grad_dist1 be of shape(batch,#points)\"));\n\t\t\tOP_REQUIRES(context,idx1_tensor.shape()==(TensorShape{b,n}),errors::InvalidArgument(\"NnDistanceGrad requires idx1 be of shape(batch,#points)\"));\n\t\t\tOP_REQUIRES(context,grad_dist2_tensor.shape()==(TensorShape{b,m}),errors::InvalidArgument(\"NnDistanceGrad requires grad_dist2 be of shape(batch,#points)\"));\n\t\t\tOP_REQUIRES(context,idx2_tensor.shape()==(TensorShape{b,m}),errors::InvalidArgument(\"NnDistanceGrad requires idx2 be of shape(batch,#points)\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&xyz1_flat(0);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&xyz2_flat(0);\n\t\t\tauto idx1_flat=idx1_tensor.flat<int>();\n\t\t\tconst int * 
idx1=&idx1_flat(0);\n\t\t\tauto idx2_flat=idx2_tensor.flat<int>();\n\t\t\tconst int * idx2=&idx2_flat(0);\n\t\t\tauto grad_dist1_flat=grad_dist1_tensor.flat<float>();\n\t\t\tconst float * grad_dist1=&grad_dist1_flat(0);\n\t\t\tauto grad_dist2_flat=grad_dist2_tensor.flat<float>();\n\t\t\tconst float * grad_dist2=&grad_dist2_flat(0);\n\t\t\tTensor * grad_xyz1_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,n,3},&grad_xyz1_tensor));\n\t\t\tTensor * grad_xyz2_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(1,TensorShape{b,m,3},&grad_xyz2_tensor));\n\t\t\tauto grad_xyz1_flat=grad_xyz1_tensor->flat<float>();\n\t\t\tfloat * grad_xyz1=&grad_xyz1_flat(0);\n\t\t\tauto grad_xyz2_flat=grad_xyz2_tensor->flat<float>();\n\t\t\tfloat * grad_xyz2=&grad_xyz2_flat(0);\n\t\t\tfor (int i=0;i<b*n*3;i++)\n\t\t\t\tgrad_xyz1[i]=0;\n\t\t\tfor (int i=0;i<b*m*3;i++)\n\t\t\t\tgrad_xyz2[i]=0;\n\t\t\tfor (int i=0;i<b;i++){\n\t\t\t\tfor (int j=0;j<n;j++){\n\t\t\t\t\tfloat x1=xyz1[(i*n+j)*3+0];\n\t\t\t\t\tfloat y1=xyz1[(i*n+j)*3+1];\n\t\t\t\t\tfloat z1=xyz1[(i*n+j)*3+2];\n\t\t\t\t\tint j2=idx1[i*n+j];\n\t\t\t\t\tfloat x2=xyz2[(i*m+j2)*3+0];\n\t\t\t\t\tfloat y2=xyz2[(i*m+j2)*3+1];\n\t\t\t\t\tfloat z2=xyz2[(i*m+j2)*3+2];\n\t\t\t\t\tfloat g=grad_dist1[i*n+j]*2;\n\t\t\t\t\tgrad_xyz1[(i*n+j)*3+0]+=g*(x1-x2);\n\t\t\t\t\tgrad_xyz1[(i*n+j)*3+1]+=g*(y1-y2);\n\t\t\t\t\tgrad_xyz1[(i*n+j)*3+2]+=g*(z1-z2);\n\t\t\t\t\tgrad_xyz2[(i*m+j2)*3+0]-=(g*(x1-x2));\n\t\t\t\t\tgrad_xyz2[(i*m+j2)*3+1]-=(g*(y1-y2));\n\t\t\t\t\tgrad_xyz2[(i*m+j2)*3+2]-=(g*(z1-z2));\n\t\t\t\t}\n\t\t\t\tfor (int j=0;j<m;j++){\n\t\t\t\t\tfloat x1=xyz2[(i*m+j)*3+0];\n\t\t\t\t\tfloat y1=xyz2[(i*m+j)*3+1];\n\t\t\t\t\tfloat z1=xyz2[(i*m+j)*3+2];\n\t\t\t\t\tint j2=idx2[i*m+j];\n\t\t\t\t\tfloat x2=xyz1[(i*n+j2)*3+0];\n\t\t\t\t\tfloat y2=xyz1[(i*n+j2)*3+1];\n\t\t\t\t\tfloat z2=xyz1[(i*n+j2)*3+2];\n\t\t\t\t\tfloat 
g=grad_dist2[i*m+j]*2;\n\t\t\t\t\tgrad_xyz2[(i*m+j)*3+0]+=g*(x1-x2);\n\t\t\t\t\tgrad_xyz2[(i*m+j)*3+1]+=g*(y1-y2);\n\t\t\t\t\tgrad_xyz2[(i*m+j)*3+2]+=g*(z1-z2);\n\t\t\t\t\tgrad_xyz1[(i*n+j2)*3+0]-=(g*(x1-x2));\n\t\t\t\t\tgrad_xyz1[(i*n+j2)*3+1]-=(g*(y1-y2));\n\t\t\t\t\tgrad_xyz1[(i*n+j2)*3+2]-=(g*(z1-z2));\n\t\t\t\t}\n\t\t\t}\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"NnDistanceGrad\").Device(DEVICE_CPU), NnDistanceGradOp);\n\nvoid NmDistanceKernelLauncher(int b,int n,const float * xyz,int m,const float * xyz2,float * result,int * result_i,float * result2,int * result2_i);\nclass NnDistanceGpuOp : public OpKernel{\n\tpublic:\n\t\texplicit NnDistanceGpuOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3,errors::InvalidArgument(\"NnDistance requires xyz1 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistance only accepts 3d point set xyz1\"));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint n=xyz1_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3,errors::InvalidArgument(\"NnDistance requires xyz2 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistance only accepts 3d point set xyz2\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"NnDistance expects xyz1 and xyz2 have same batch size\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&xyz1_flat(0);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&xyz2_flat(0);\n\t\t\tTensor * dist1_tensor=NULL;\n\t\t\tTensor * idx1_tensor=NULL;\n\t\t\tTensor * dist2_tensor=NULL;\n\t\t\tTensor * 
idx2_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,n},&dist1_tensor));\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(1,TensorShape{b,n},&idx1_tensor));\n\t\t\tauto dist1_flat=dist1_tensor->flat<float>();\n\t\t\tauto idx1_flat=idx1_tensor->flat<int>();\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(2,TensorShape{b,m},&dist2_tensor));\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(3,TensorShape{b,m},&idx2_tensor));\n\t\t\tauto dist2_flat=dist2_tensor->flat<float>();\n\t\t\tauto idx2_flat=idx2_tensor->flat<int>();\n\t\t\tfloat * dist1=&(dist1_flat(0));\n\t\t\tint * idx1=&(idx1_flat(0));\n\t\t\tfloat * dist2=&(dist2_flat(0));\n\t\t\tint * idx2=&(idx2_flat(0));\n\t\t\tNmDistanceKernelLauncher(b,n,xyz1,m,xyz2,dist1,idx1,dist2,idx2);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"NnDistance\").Device(DEVICE_GPU), NnDistanceGpuOp);\n\nvoid NmDistanceGradKernelLauncher(int b,int n,const float * xyz1,int m,const float * xyz2,const float * grad_dist1,const int * idx1,const float * grad_dist2,const int * idx2,float * grad_xyz1,float * grad_xyz2);\nclass NnDistanceGradGpuOp : public OpKernel{\n\tpublic:\n\t\texplicit NnDistanceGradGpuOp(OpKernelConstruction* context):OpKernel(context){}\n\t\tvoid Compute(OpKernelContext * context)override{\n\t\t\tconst Tensor& xyz1_tensor=context->input(0);\n\t\t\tconst Tensor& xyz2_tensor=context->input(1);\n\t\t\tconst Tensor& grad_dist1_tensor=context->input(2);\n\t\t\tconst Tensor& idx1_tensor=context->input(3);\n\t\t\tconst Tensor& grad_dist2_tensor=context->input(4);\n\t\t\tconst Tensor& idx2_tensor=context->input(5);\n\t\t\tOP_REQUIRES(context,xyz1_tensor.dims()==3,errors::InvalidArgument(\"NnDistanceGrad requires xyz1 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz1_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistanceGrad only accepts 3d point set xyz1\"));\n\t\t\tint b=xyz1_tensor.shape().dim_size(0);\n\t\t\tint 
n=xyz1_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.dims()==3,errors::InvalidArgument(\"NnDistanceGrad requires xyz2 be of shape (batch,#points,3)\"));\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(2)==3,errors::InvalidArgument(\"NnDistanceGrad only accepts 3d point set xyz2\"));\n\t\t\tint m=xyz2_tensor.shape().dim_size(1);\n\t\t\tOP_REQUIRES(context,xyz2_tensor.shape().dim_size(0)==b,errors::InvalidArgument(\"NnDistanceGrad expects xyz1 and xyz2 have same batch size\"));\n\t\t\tOP_REQUIRES(context,grad_dist1_tensor.shape()==(TensorShape{b,n}),errors::InvalidArgument(\"NnDistanceGrad requires grad_dist1 be of shape(batch,#points)\"));\n\t\t\tOP_REQUIRES(context,idx1_tensor.shape()==(TensorShape{b,n}),errors::InvalidArgument(\"NnDistanceGrad requires idx1 be of shape(batch,#points)\"));\n\t\t\tOP_REQUIRES(context,grad_dist2_tensor.shape()==(TensorShape{b,m}),errors::InvalidArgument(\"NnDistanceGrad requires grad_dist2 be of shape(batch,#points)\"));\n\t\t\tOP_REQUIRES(context,idx2_tensor.shape()==(TensorShape{b,m}),errors::InvalidArgument(\"NnDistanceGrad requires idx2 be of shape(batch,#points)\"));\n\t\t\tauto xyz1_flat=xyz1_tensor.flat<float>();\n\t\t\tconst float * xyz1=&xyz1_flat(0);\n\t\t\tauto xyz2_flat=xyz2_tensor.flat<float>();\n\t\t\tconst float * xyz2=&xyz2_flat(0);\n\t\t\tauto idx1_flat=idx1_tensor.flat<int>();\n\t\t\tconst int * idx1=&idx1_flat(0);\n\t\t\tauto idx2_flat=idx2_tensor.flat<int>();\n\t\t\tconst int * idx2=&idx2_flat(0);\n\t\t\tauto grad_dist1_flat=grad_dist1_tensor.flat<float>();\n\t\t\tconst float * grad_dist1=&grad_dist1_flat(0);\n\t\t\tauto grad_dist2_flat=grad_dist2_tensor.flat<float>();\n\t\t\tconst float * grad_dist2=&grad_dist2_flat(0);\n\t\t\tTensor * grad_xyz1_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(0,TensorShape{b,n,3},&grad_xyz1_tensor));\n\t\t\tTensor * 
grad_xyz2_tensor=NULL;\n\t\t\tOP_REQUIRES_OK(context,context->allocate_output(1,TensorShape{b,m,3},&grad_xyz2_tensor));\n\t\t\tauto grad_xyz1_flat=grad_xyz1_tensor->flat<float>();\n\t\t\tfloat * grad_xyz1=&grad_xyz1_flat(0);\n\t\t\tauto grad_xyz2_flat=grad_xyz2_tensor->flat<float>();\n\t\t\tfloat * grad_xyz2=&grad_xyz2_flat(0);\n\t\t\tNmDistanceGradKernelLauncher(b,n,xyz1,m,xyz2,grad_dist1,idx1,grad_dist2,idx2,grad_xyz1,grad_xyz2);\n\t\t}\n};\nREGISTER_KERNEL_BUILDER(Name(\"NnDistanceGrad\").Device(DEVICE_GPU), NnDistanceGradGpuOp);\n"
  },
  {
    "path": "OcCo_TF/pc_distance/tf_nndistance.cu",
    "content": "#if GOOGLE_CUDA\n#define EIGEN_USE_GPU\n// #include \"third_party/eigen3/unsupported/Eigen/CXX11/Tensor\"\n\n__global__ void NmDistanceKernel(int b,int n,const float * xyz,int m,const float * xyz2,float * result,int * result_i){\n\tconst int batch=512;\n\t__shared__ float buf[batch*3];\n\tfor (int i=blockIdx.x;i<b;i+=gridDim.x){\n\t\tfor (int k2=0;k2<m;k2+=batch){\n\t\t\tint end_k=min(m,k2+batch)-k2;\n\t\t\tfor (int j=threadIdx.x;j<end_k*3;j+=blockDim.x){\n\t\t\t\tbuf[j]=xyz2[(i*m+k2)*3+j];\n\t\t\t}\n\t\t\t__syncthreads();\n\t\t\tfor (int j=threadIdx.x+blockIdx.y*blockDim.x;j<n;j+=blockDim.x*gridDim.y){\n\t\t\t\tfloat x1=xyz[(i*n+j)*3+0];\n\t\t\t\tfloat y1=xyz[(i*n+j)*3+1];\n\t\t\t\tfloat z1=xyz[(i*n+j)*3+2];\n\t\t\t\tint best_i=0;\n\t\t\t\tfloat best=0;\n\t\t\t\tint end_ka=end_k-(end_k&3);\n\t\t\t\tif (end_ka==batch){\n\t\t\t\t\tfor (int k=0;k<batch;k+=4){\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+0]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+1]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+2]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (k==0 || d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+3]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+4]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+5]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+1;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+6]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+7]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+8]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+9]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+10]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+11]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif 
(d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+3;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}else{\n\t\t\t\t\tfor (int k=0;k<end_ka;k+=4){\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+0]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+1]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+2]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (k==0 || d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+3]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+4]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+5]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+1;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+6]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+7]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+8]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+9]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+10]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+11]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+3;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tfor (int k=end_ka;k<end_k;k++){\n\t\t\t\t\tfloat x2=buf[k*3+0]-x1;\n\t\t\t\t\tfloat y2=buf[k*3+1]-y1;\n\t\t\t\t\tfloat z2=buf[k*3+2]-z1;\n\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\tif (k==0 || d<best){\n\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\tbest_i=k+k2;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (k2==0 || result[(i*n+j)]>best){\n\t\t\t\t\tresult[(i*n+j)]=best;\n\t\t\t\t\tresult_i[(i*n+j)]=best_i;\n\t\t\t\t}\n\t\t\t}\n\t\t\t__syncthreads();\n\t\t}\n\t}\n}\nvoid NmDistanceKernelLauncher(int b,int n,const float * xyz,int m,const float * xyz2,float * result,int * result_i,float * result2,int * 
result2_i){\n\tNmDistanceKernel<<<dim3(32,16,1),512>>>(b,n,xyz,m,xyz2,result,result_i);\n\tNmDistanceKernel<<<dim3(32,16,1),512>>>(b,m,xyz2,n,xyz,result2,result2_i);\n}\n__global__ void NmDistanceGradKernel(int b,int n,const float * xyz1,int m,const float * xyz2,const float * grad_dist1,const int * idx1,float * grad_xyz1,float * grad_xyz2){\n\tfor (int i=blockIdx.x;i<b;i+=gridDim.x){\n\t\tfor (int j=threadIdx.x+blockIdx.y*blockDim.x;j<n;j+=blockDim.x*gridDim.y){\n\t\t\tfloat x1=xyz1[(i*n+j)*3+0];\n\t\t\tfloat y1=xyz1[(i*n+j)*3+1];\n\t\t\tfloat z1=xyz1[(i*n+j)*3+2];\n\t\t\tint j2=idx1[i*n+j];\n\t\t\tfloat x2=xyz2[(i*m+j2)*3+0];\n\t\t\tfloat y2=xyz2[(i*m+j2)*3+1];\n\t\t\tfloat z2=xyz2[(i*m+j2)*3+2];\n\t\t\tfloat g=grad_dist1[i*n+j]*2;\n\t\t\tatomicAdd(&(grad_xyz1[(i*n+j)*3+0]),g*(x1-x2));\n\t\t\tatomicAdd(&(grad_xyz1[(i*n+j)*3+1]),g*(y1-y2));\n\t\t\tatomicAdd(&(grad_xyz1[(i*n+j)*3+2]),g*(z1-z2));\n\t\t\tatomicAdd(&(grad_xyz2[(i*m+j2)*3+0]),-(g*(x1-x2)));\n\t\t\tatomicAdd(&(grad_xyz2[(i*m+j2)*3+1]),-(g*(y1-y2)));\n\t\t\tatomicAdd(&(grad_xyz2[(i*m+j2)*3+2]),-(g*(z1-z2)));\n\t\t}\n\t}\n}\nvoid NmDistanceGradKernelLauncher(int b,int n,const float * xyz1,int m,const float * xyz2,const float * grad_dist1,const int * idx1,const float * grad_dist2,const int * idx2,float * grad_xyz1,float * grad_xyz2){\n\tcudaMemset(grad_xyz1,0,b*n*3*4);\n\tcudaMemset(grad_xyz2,0,b*m*3*4);\n\tNmDistanceGradKernel<<<dim3(1,16,1),256>>>(b,n,xyz1,m,xyz2,grad_dist1,idx1,grad_xyz1,grad_xyz2);\n\tNmDistanceGradKernel<<<dim3(1,16,1),256>>>(b,m,xyz2,n,xyz1,grad_dist2,idx2,grad_xyz2,grad_xyz1);\n}\n\n#endif\n"
  },
  {
    "path": "OcCo_TF/pc_distance/tf_nndistance.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\"\"\"Scripts for Chamfer Distance\"\"\"\nimport os, tensorflow as tf\nfrom tensorflow.python.framework import ops\nos.environ[\"LD_LIBRARY_PATH\"] = \"/usr/local/lib/python3.5/dist-packages/tensorflow/\"\nBASE_DIR = os.path.dirname(os.path.abspath(__file__))\nnn_distance_module = tf.load_op_library(os.path.join(BASE_DIR, 'tf_nndistance_so.so'))\n\n\ndef nn_distance(xyz1, xyz2):\n    \"\"\"\n    Computes the distance of nearest neighbors for a pair of point clouds\n    input: xyz1: (batch_size,#points_1,3)  the first point cloud\n    input: xyz2: (batch_size,#points_2,3)  the second point cloud\n    output: dist1: (batch_size,#point_1)   distance from first to second\n    output: idx1:  (batch_size,#point_1)   nearest neighbor from first to second\n    output: dist2: (batch_size,#point_2)   distance from second to first\n    output: idx2:  (batch_size,#point_2)   nearest neighbor from second to first\n    \"\"\"\n    return nn_distance_module.nn_distance(xyz1, xyz2)\n\n\n@ops.RegisterGradient('NnDistance')\ndef _nn_distance_grad(op, grad_dist1, grad_idx1, grad_dist2, grad_idx2):\n    xyz1 = op.inputs[0]\n    xyz2 = op.inputs[1]\n    idx1 = op.outputs[1]\n    idx2 = op.outputs[3]\n    return nn_distance_module.nn_distance_grad(xyz1, xyz2, grad_dist1, idx1, grad_dist2, idx2)\n\n\nif __name__ == '__main__':\n    import random, numpy as np\n    random.seed(100)\n    np.random.seed(100)\n\n    ''' === test code ==='''\n    with tf.Session('') as sess:\n        xyz1 = np.random.randn(32, 16384, 3).astype('float32')\n        xyz2 = np.random.randn(32, 1024, 3).astype('float32')\n        # with tf.device('/gpu:0'):\n        if True:\n            inp1 = tf.Variable(xyz1)\n            inp2 = tf.constant(xyz2)\n            reta, retb, retc, retd = nn_distance(inp1, inp2)\n            loss = tf.reduce_mean(reta) + tf.reduce_mean(retc)\n            train = 
tf.train.GradientDescentOptimizer(learning_rate=0.05).minimize(loss)\n        sess.run(tf.initialize_all_variables())\n\n        best = 1e100\n        for i in range(1):\n            # loss, _ = sess.run([loss, train])\n            loss, = sess.run([loss])\n            best = min(best, loss)\n            print(i, loss, best)\n"
  },
  {
    "path": "OcCo_TF/readme.md",
    "content": "## OcCo in TensorFlow\n\n\n\n"
  },
  {
    "path": "OcCo_TF/train_cls.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, sys, pdb, time, argparse, datetime, importlib, numpy as np, tensorflow as tf\nfrom termcolor import colored\nfrom utils.Dataset_Assign import Dataset_Assign\nfrom utils.EarlyStoppingCriterion import EarlyStoppingCriterion\nfrom utils.tf_util import add_train_summary, get_bn_decay, get_learning_rate\nfrom utils.io_util import shuffle_data, loadh5DataFile\nfrom utils.pc_util import rotate_point_cloud, jitter_point_cloud, random_point_dropout, \\\n    random_scale_point_cloud, random_shift_point_cloud\n\n# from utils.transfer_pretrained_w import load_pretrained_var\n\nparser = argparse.ArgumentParser()\n\n''' === Basic Learning Settings === '''\nparser.add_argument('--gpu', type=int, default=1)\nparser.add_argument('--log_dir', default='log/log_cls/pointnet_cls')\nparser.add_argument('--model', default='pointnet_cls')\nparser.add_argument('--epoch', type=int, default=200)\nparser.add_argument('--restore', action='store_true')\nparser.add_argument('--restore_path', default='log/pointnet_cls')\nparser.add_argument('--batch_size', type=int, default=32)\nparser.add_argument('--num_point', type=int, default=1024)\nparser.add_argument('--base_lr', type=float, default=0.001)\nparser.add_argument('--lr_clip', type=float, default=1e-5)\nparser.add_argument('--decay_steps', type=int, default=20)\nparser.add_argument('--decay_rate', type=float, default=0.7)\n# parser.add_argument('--verbose', type=bool, default=True)\nparser.add_argument('--dataset', type=str, default='modelnet40')\nparser.add_argument('--partial', action='store_true')\nparser.add_argument('--filename', type=str, default='')\nparser.add_argument('--data_bn', action='store_true')\n\n''' === Data Augmentation Settings === '''\nparser.add_argument('--data_aug', action='store_true')\nparser.add_argument('--just_save', action='store_true')  # pretrained encoder restoration\nparser.add_argument('--patience', type=int, default=200)  # 
early stopping, set it as 200 for deprecation\nparser.add_argument('--fewshot', action='store_true')\n\nargs = parser.parse_args()\n\nNUM_CLASSES, NUM_TRAINOBJECTS, TRAIN_FILES, VALID_FILES = Dataset_Assign(\n    dataset=args.dataset, fname=args.filename, partial=args.partial, bn=args.data_bn, few_shot=args.fewshot)\n\nBATCH_SIZE = args.batch_size\nNUM_POINT = args.num_point\nBASE_LR = args.base_lr\nLR_CLIP = args.lr_clip\nDECAY_RATE = args.decay_rate\n# DECAY_STEP = args.decay_steps\nDECAY_STEP = NUM_TRAINOBJECTS//BATCH_SIZE * args.decay_steps\nBN_INIT_DECAY = 0.5\nBN_DECAY_RATE = 0.5\nBN_DECAY_STEP = float(DECAY_STEP)\nBN_DECAY_CLIP = 0.99\nLOG_DIR = args.log_dir\nBEST_EVAL_ACC = 0\nos.system('mkdir -p %s' % LOG_DIR) if not os.path.exists(LOG_DIR) else None\nLOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'a+')\n\ndef log_string(out_str):\n    LOG_FOUT.write(out_str + '\\n')\n    LOG_FOUT.flush()\n    print(out_str)\n\n\ndef train(args):\n\n    log_string('\\n\\n' + '=' * 44)\n    log_string('Start Training, Time: %s' % datetime.datetime.now())\n    log_string('=' * 44 + '\\n\\n')\n\n    is_training_pl = tf.placeholder(tf.bool, shape=(), name='is_training')\n    global_step = tf.Variable(0, trainable=False, name='global_step')  # will be used in defining train_op\n    inputs_pl = tf.placeholder(tf.float32, (1, None, 3), 'inputs')\n    labels_pl = tf.placeholder(tf.int32, (BATCH_SIZE,), 'labels')\n    npts_pl = tf.placeholder(tf.int32, (BATCH_SIZE,), 'num_points')\n\n    bn_decay = get_bn_decay(global_step, BN_INIT_DECAY, BATCH_SIZE, BN_DECAY_STEP, BN_DECAY_RATE, BN_DECAY_CLIP)\n\n    # model_module = importlib.import_module('.%s' % args.model, 'cls_models')\n    # MODEL = model_module.Model(inputs_pl, npts_pl, labels_pl, is_training_pl, bn_decay=bn_decay)\n    ''' === To fix issues when running on woma === '''\n    ldic = locals()\n    exec('from cls_models.%s import Model' % args.model, globals(), ldic)\n    MODEL = ldic['Model'](inputs_pl, npts_pl, 
labels_pl, is_training_pl, bn_decay=bn_decay)\n    pred, loss = MODEL.pred, MODEL.loss\n    tf.summary.scalar('loss', loss)\n\n    # useful information in displaying during training\n    correct = tf.equal(tf.argmax(pred, 1), tf.to_int64(labels_pl))\n    accuracy = tf.reduce_sum(tf.cast(correct, tf.float32)) / float(BATCH_SIZE)\n    tf.summary.scalar('accuracy', accuracy)\n\n    learning_rate = get_learning_rate(global_step, BASE_LR, BATCH_SIZE, DECAY_STEP, DECAY_RATE, LR_CLIP)\n    add_train_summary('learning_rate', learning_rate)\n    trainer = tf.train.AdamOptimizer(learning_rate)\n    train_op = trainer.minimize(MODEL.loss, global_step)\n    saver = tf.train.Saver()\n\n    config = tf.ConfigProto()\n    config.gpu_options.allow_growth = True\n    config.allow_soft_placement = True\n    # config.log_device_placement = True\n    sess = tf.Session(config=config)\n\n    merged = tf.summary.merge_all()\n    train_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'train'), sess.graph)\n    val_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'val'))\n\n    # Init variables\n    init = tf.global_variables_initializer()\n    log_string('\\nModel Parameters has been Initialized\\n')\n    sess.run(init, {is_training_pl: True})  # restore will cover the random initialized parameters\n\n    # to save the randomized variables\n    if not args.restore and args.just_save:\n        save_path = saver.save(sess, os.path.join(LOG_DIR, \"model.ckpt\"))\n        print(colored('random initialised model saved at %s' % save_path, 'white', 'on_blue'))\n        print(colored('just save the model, now exit', 'white', 'on_red'))\n        sys.exit()\n\n    '''current solution: first load pretrained head, assemble with output layers then save as a checkpoint'''\n    # to partially load the saved head from:\n    # if args.load_pretrained_head:\n    #   sess.close()\n    #   load_pretrained_head(args.pretrained_head_path, os.path.join(LOG_DIR, 'model.ckpt'), None, args.verbose)\n    #  
 print('shared varibles have been restored from ', args.pretrained_head_path)\n    #\n    #   sess = tf.Session(config=config)\n    #   log_string('\\nModel Parameters has been Initialized\\n')\n    #   sess.run(init, {is_training_pl: True})\n    #   saver.restore(sess, tf.train.latest_checkpoint(LOG_DIR))\n    #   log_string('\\nModel Parameters have been restored with pretrained weights from %s' % args.pretrained_head_path)\n\n    if args.restore:\n        # load_pretrained_var(args.restore_path, os.path.join(LOG_DIR, \"model.ckpt\"), args.verbose)\n        saver.restore(sess, tf.train.latest_checkpoint(args.restore_path))\n        log_string('\\n')\n        log_string(colored('Model Parameters have been restored from %s' % args.restore_path, 'white', 'on_red'))\n\n    for arg in sorted(vars(args)):\n        print(arg + ': ' + str(getattr(args, arg)) + '\\n')  # log of arguments\n    os.system('cp cls_models/%s.py %s' % (args.model, LOG_DIR))  # bkp of model def\n    os.system('cp train_cls.py %s' % LOG_DIR)  # bkp of train procedure\n\n    train_start = time.time()\n\n    ops = {'pointclouds_pl': inputs_pl,\n           'labels_pl': labels_pl,\n           'is_training_pl': is_training_pl,\n           'npts_pl': npts_pl,\n           'pred': pred,\n           'loss': loss,\n           'train_op': train_op,\n           'merged': merged,\n           'step': global_step}\n\n    ESC = EarlyStoppingCriterion(patience=args.patience)\n\n    for epoch in range(args.epoch):\n        log_string('\\n\\n')\n        log_string(colored('**** EPOCH %03d ****' % epoch, 'grey', 'on_green'))\n        sys.stdout.flush()\n\n        '''=== training the model ==='''\n        train_one_epoch(sess, ops, train_writer)\n\n        '''=== evaluating the model ==='''\n        eval_mean_loss, eval_acc, eval_cls_acc = eval_one_epoch(sess, ops, val_writer)\n\n        '''=== check whether to early stop ==='''\n        early_stop, save_checkpoint = ESC.step(eval_acc, epoch=epoch)\n        if 
save_checkpoint:\n            save_path = saver.save(sess, os.path.join(LOG_DIR, \"model.ckpt\"))\n            log_string(colored('model saved at %s' % save_path, 'white', 'on_blue'))\n        if early_stop:\n            break\n\n    log_string('total time: %s' % datetime.timedelta(seconds=time.time() - train_start))\n    log_string('stop epoch: %d, best eval acc: %f' % (ESC.best_epoch, ESC.best_dev_score))\n    sess.close()\n\n\ndef train_one_epoch(sess, ops, train_writer):\n    is_training = True\n\n    total_correct, total_seen, loss_sum = 0, 0, 0\n    train_file_idxs = np.arange(0, len(TRAIN_FILES))\n    np.random.shuffle(train_file_idxs)\n\n    for fn in range(len(TRAIN_FILES)):\n        current_data, current_label = loadh5DataFile(TRAIN_FILES[train_file_idxs[fn]])\n        current_data = current_data[:, :NUM_POINT, :]\n        current_data, current_label, _ = shuffle_data(current_data, np.squeeze(current_label))\n        current_label = np.squeeze(current_label)\n\n        file_size = current_data.shape[0]\n        num_batches = file_size // BATCH_SIZE\n\n        for batch_idx in range(num_batches):\n            start_idx = batch_idx * BATCH_SIZE\n            end_idx = (batch_idx + 1) * BATCH_SIZE\n            feed_data = current_data[start_idx:end_idx, :, :]\n\n            if args.data_aug:\n                feed_data = random_point_dropout(feed_data)\n                feed_data[:, :, 0:3] = random_scale_point_cloud(feed_data[:, :, 0:3])\n                feed_data[:, :, 0:3] = random_shift_point_cloud(feed_data[:, :, 0:3])\n\n            feed_dict = {\n                ops['pointclouds_pl']: feed_data.reshape([1, BATCH_SIZE * NUM_POINT, 3]),\n                ops['labels_pl']: current_label[start_idx:end_idx].reshape(BATCH_SIZE, ),\n                ops['npts_pl']: [NUM_POINT] * BATCH_SIZE,\n                ops['is_training_pl']: is_training}\n\n            summary, step, _, loss_val, pred_val = sess.run([\n                ops['merged'], ops['step'], 
ops['train_op'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n            train_writer.add_summary(summary, step)\n\n            pred_val = np.argmax(pred_val, 1)\n            correct = np.sum(pred_val == current_label[start_idx:end_idx].reshape(BATCH_SIZE, ))\n            total_correct += correct\n            total_seen += BATCH_SIZE\n            loss_sum += loss_val\n\n    log_string('\\n=== training ===')\n    log_string('total correct: %d, total_seen: %d' % (total_correct, total_seen))\n    # log_string('mean batch loss: %f' % (loss_sum / num_batches))\n    log_string('accuracy: %f' % (total_correct / float(total_seen)))\n\n\ndef eval_one_epoch(sess, ops, val_writer):\n    is_training = False\n\n    total_correct, total_seen, loss_sum = 0, 0, 0\n    total_seen_class = [0 for _ in range(NUM_CLASSES)]\n    total_correct_class = [0 for _ in range(NUM_CLASSES)]\n\n    for fn in VALID_FILES:\n        current_data, current_label = loadh5DataFile(fn)\n        current_data = current_data[:, :NUM_POINT, :]\n        file_size = current_data.shape[0]\n        num_batches = file_size // BATCH_SIZE\n\n        for batch_idx in range(num_batches):\n            start_idx, end_idx = batch_idx * BATCH_SIZE, (batch_idx + 1) * BATCH_SIZE\n\n            feed_dict = {\n                ops['pointclouds_pl']: current_data[start_idx:end_idx, :, :].reshape([1, BATCH_SIZE * NUM_POINT, 3]),\n                ops['labels_pl']: current_label[start_idx:end_idx].reshape(BATCH_SIZE, ),\n                ops['npts_pl']: np.array([NUM_POINT] * BATCH_SIZE),\n                ops['is_training_pl']: is_training}\n\n            summary, step, loss_val, pred_val = sess.run(\n                [ops['merged'], ops['step'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n            val_writer.add_summary(summary, step)\n            pred_val = np.argmax(pred_val, 1)\n            correct = np.sum(pred_val == current_label[start_idx:end_idx].reshape(BATCH_SIZE, ))\n            total_correct += correct\n   
         total_seen += BATCH_SIZE\n            loss_sum += (loss_val * BATCH_SIZE)\n\n            for i in range(start_idx, end_idx):\n                l = int(current_label.reshape(-1)[i])\n                total_seen_class[l] += 1\n                total_correct_class[l] += (pred_val[i - start_idx] == l)\n\n    eval_mean_loss = loss_sum / float(total_seen)\n    eval_acc = total_correct / float(total_seen)\n    eval_cls_acc = np.mean(np.array(total_correct_class) / np.array(total_seen_class, dtype=np.float))\n    log_string('\\n=== evaluating ===')\n    log_string('total correct: %d, total_seen: %d' % (total_correct, total_seen))\n    log_string('eval mean loss: %f' % eval_mean_loss)\n    log_string('eval accuracy: %f' % eval_acc)\n    log_string('eval avg class acc: %f' % eval_cls_acc)\n\n    global BEST_EVAL_ACC\n    if eval_acc > BEST_EVAL_ACC:\n        BEST_EVAL_ACC = eval_acc\n    log_string('best eval accuracy: %f' % BEST_EVAL_ACC)\n    return eval_mean_loss, eval_acc, eval_cls_acc\n\n\nif __name__ == '__main__':\n    print('Now Using GPU:%d to train the model' % args.gpu)\n    os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'\n    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)\n\n    train(args)\n    LOG_FOUT.close()\n"
  },
  {
    "path": "OcCo_TF/train_cls_dgcnn_torchloader.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n#  Ref: https://github.com/hansen7/NRS_3D/blob/master/train_dgcnn_cls.py\nimport os, sys, pdb, shutil, argparse, numpy as np, tensorflow as tf\nfrom tqdm import tqdm\nfrom termcolor import colored\nfrom utils.Train_Logger import TrainLogger\nfrom utils.Dataset_Assign import Dataset_Assign\n# from utils.tf_util import get_bn_decay, get_lr_dgcnn\n# from utils.io_util import shuffle_data, loadh5DataFile\n# from utils.transfer_pretrained_w import load_pretrained_var\nfrom utils.pc_util import random_point_dropout, random_scale_point_cloud, random_shift_point_cloud\nfrom utils.ModelNetDataLoader import General_CLSDataLoader_HDF5\nfrom torch.utils.data import DataLoader\n\ndef parse_args():\n\tparser = argparse.ArgumentParser(description='DGCNN Point Cloud Recognition Training Configuration')\n\n\tparser.add_argument('--gpu', type=str, default='0')\n\tparser.add_argument('--log_dir', default='occo_dgcnn_cls')\n\tparser.add_argument('--model', default='dgcnn_cls')\n\tparser.add_argument('--epoch', type=int, default=250)\n\tparser.add_argument('--restore', action='store_true')\n\tparser.add_argument('--restore_path', type=str, default='')\n\tparser.add_argument('--batch_size', type=int, default=24)\n\tparser.add_argument('--num_points', type=int, default=1024)\n\tparser.add_argument('--base_lr', type=float, default=0.001)\n\t# parser.add_argument('--decay_steps', type=int, default=20)\n\t# parser.add_argument('--decay_rate', type=float, default=0.7)\n\tparser.add_argument('--momentum', type=float, default=0.9)\n\n\tparser.add_argument('--dataset', type=str, default='modelnet40')\n\tparser.add_argument('--filename', type=str, default='')\n\tparser.add_argument('--data_bn', action='store_true')\n\tparser.add_argument('--partial', action='store_true')\n\tparser.add_argument('--data_aug', action='store_true')\n\tparser.add_argument('--just_save', action='store_true')  # use only in the pretrained 
encoder restoration\n\tparser.add_argument('--fewshot', action='store_true')\n\n\treturn parser.parse_args()\n\n\nargs = parse_args()\n\nDATA_PATH = 'data/modelnet40_normal_resampled/'\nNUM_CLASSES, NUM_TRAINOBJECTS, TRAIN_FILES, VALID_FILES = Dataset_Assign(\n\tdataset=args.dataset, fname=args.filename, partial=args.partial, bn=args.data_bn, few_shot=args.fewshot)\nBATCH_SIZE, NUM_POINT = args.batch_size, args.num_points\n# DECAY_STEP = NUM_TRAINOBJECTS//BATCH_SIZE * args.decay_steps\n\nTRAIN_DATASET = General_CLSDataLoader_HDF5(file_list=TRAIN_FILES, num_point=1024)\nTEST_DATASET = General_CLSDataLoader_HDF5(file_list=VALID_FILES, num_point=1024)\ntrainDataLoader = DataLoader(TRAIN_DATASET, batch_size=BATCH_SIZE, shuffle=True, num_workers=4, drop_last=True)\ntestDataLoader = DataLoader(TEST_DATASET, batch_size=BATCH_SIZE, shuffle=False, num_workers=4, drop_last=True)\n# reduce the num_workers if the loaded data are huge, ref: https://github.com/pytorch/pytorch/issues/973\n\ndef main(args):\n\tMyLogger = TrainLogger(args, name=args.model.upper(), subfold='log_cls')\n\tshutil.copy(os.path.join('cls_models', '%s.py' % args.model), MyLogger.log_dir)\n\tshutil.copy(os.path.abspath(__file__), MyLogger.log_dir)\n\t\n\t# is_training_pl -> to decide whether to apply batch normalisation\n\tis_training_pl = tf.placeholder(tf.bool, shape=(), name='is_training')\n\tglobal_step = tf.Variable(0, trainable=False, name='global_step')\n\n\tinputs_pl = tf.placeholder(tf.float32, (1, None, 3), 'inputs')\n\tlabels_pl = tf.placeholder(tf.int32, (BATCH_SIZE,), 'labels')\n\tnpts_pl = tf.placeholder(tf.int32, (BATCH_SIZE,), 'num_points')\n\n\t# bn_decay = get_bn_decay(batch=global_step, bn_init_decay=0.5, batch_size=args.batch_size,\n\t# \t\t\t\t\t\tbn_decay_step=DECAY_STEP, bn_decay_rate=0.5, bn_decay_clip=0.99)\n\n\tbn_decay = 0.9\n\t# See \"BatchNorm1d\" in https://pytorch.org/docs/stable/nn.html\n\t''' === fix issues of importlib when running on some servers (i.e., woma) === '''\n\t# 
model_module = importlib.import_module('.%s' % args.model_type, 'cls_models')\n\t# MODEL = model_module.Model(inputs_pl, npts_pl, labels_pl, is_training_pl, bn_decay=bn_decay)\n\tldic = locals()\n\texec('from cls_models.%s import Model' % args.model, globals(), ldic)\n\tMODEL = ldic['Model'](inputs_pl, npts_pl, labels_pl, is_training_pl, bn_decay=bn_decay)\n\tpred, loss = MODEL.pred, MODEL.loss\n\ttf.summary.scalar('loss', loss)\n\n\tcorrect = tf.equal(tf.argmax(pred, 1), tf.to_int64(labels_pl))\n\taccuracy = tf.reduce_sum(tf.cast(correct, tf.float32)) / float(args.batch_size)\n\ttf.summary.scalar('accuracy', accuracy)\n\n\t''' === Learning Rate === '''\n\tdef get_lr_dgcnn(args, global_step, alpha):\n\t\tlearning_rate = tf.train.cosine_decay(\n\t\t\tlearning_rate=100 * args.base_lr,  # Base Learning Rate, 0.1\n\t\t\tglobal_step=global_step,  # Training Step Index\n\t\t\tdecay_steps=NUM_TRAINOBJECTS//BATCH_SIZE * args.epoch,  # Total Training Step\n\t\t\talpha=alpha  # Fraction of the Minimum Value of the Set lr\n\t\t)\n\t\t# learning_rate = tf.maximum(learning_rate, args.base_lr)\n\t\treturn learning_rate\n\n\tlearning_rate = get_lr_dgcnn(args, global_step, alpha=0.01)\n\ttf.summary.scalar('learning rate', learning_rate)\n\t# scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(opt, args.epoch, eta_min=args.lr)\n\t# doc: https://pytorch.org/docs/stable/optim.html\n\t# doc: https://www.tensorflow.org/api_docs/python/tf/compat/v1/train/cosine_decay\n\n\t''' === Optimiser === '''\n\t# trainer = tf.train.GradientDescentOptimizer(learning_rate)\n\ttrainer = tf.train.MomentumOptimizer(learning_rate, momentum=args.momentum)\n\t# equivalent to torch.optim.SGD\n\t# doc: https://www.tensorflow.org/api_docs/python/tf/compat/v1/train/MomentumOptimizer\n\t# another alternative is to use keras\n\t# trainer = tf.keras.optimizers.SGD(learning_rate, momentum=args.momentum)\n\t# doc: https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/keras/optimizers/SGD\n\t# opt = 
torch.optim.SGD(model.parameters(), lr=args.lr * 100, momentum=args.momentum, weight_decay=1e-4)\n\n\ttrain_op = trainer.minimize(loss=MODEL.loss, global_step=global_step)\n\tsaver = tf.train.Saver()\n\n\t# ref: https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/protobuf/config.proto\n\tconfig = tf.ConfigProto()\n\t# config.gpu_options.allow_growth = True\n\t# config.allow_soft_placement = True  # Uncomment it if GPU option is not available\n\t# config.log_device_placement = True  # Uncomment it if you want device placements to be logged\n\tsess = tf.Session(config=config)\n\n\tmerged = tf.summary.merge_all()\n\ttrain_writer = tf.summary.FileWriter(os.path.join(MyLogger.experiment_dir, 'runs', 'train'), sess.graph)\n\tval_writer = tf.summary.FileWriter(os.path.join(MyLogger.experiment_dir, 'runs', 'valid'), sess.graph)\n\n\t# Initialise all the variables of the models\n\tinit = tf.global_variables_initializer()\n\n\tsess.run(init, {is_training_pl: True})\n\n\t# to save the randomized initialised models then exit\n\tif args.just_save:\n\t\tsave_path = saver.save(sess, os.path.join(MyLogger.checkpoints_dir, \"model.ckpt\"))\n\t\tprint(colored('random initialised model saved at %s' % save_path, 'white', 'on_blue'))\n\t\tprint(colored('just save the model, now exit', 'white', 'on_red'))\n\t\tsys.exit()\n\n\t'''current solution: first load pretrained encoder, \n\tassemble with randomly initialised FC layers then save to the checkpoint'''\n\n\tif args.restore:\n\t\tsaver.restore(sess, tf.train.latest_checkpoint(args.restore_path))\n\t\tMyLogger.logger.info('Model Parameters has been Restored')\n\n\tops = {'pointclouds_pl': inputs_pl,\n\t\t   'labels_pl': labels_pl,\n\t\t   'is_training_pl': is_training_pl,\n\t\t   'npts_pl': npts_pl,\n\t\t   'pred': pred,\n\t\t   'loss': loss,\n\t\t   'train_op': train_op,\n\t\t   'merged': merged,\n\t\t   'step': global_step}\n\n\tfor epoch in range(args.epoch):\n\n\t\t'''=== training the model 
==='''\n\t\ttrain_one_epoch(sess, ops, MyLogger, train_writer)\n\n\t\t'''=== evaluating the model ==='''\n\t\tsave_checkpoint = eval_one_epoch(sess, ops, MyLogger, val_writer)\n\n\t\t'''=== check whether to store the checkpoints ==='''\n\t\tif save_checkpoint:\n\t\t\tsave_path = saver.save(sess, os.path.join(MyLogger.savepath, \"model.ckpt\"))\n\t\t\tMyLogger.logger.info('model saved at %s' % MyLogger.savepath)\n\n\tsess.close()\n\tMyLogger.train_summary()\n\n\ndef train_one_epoch(sess, ops, MyLogger, train_writer):\n\tis_training = True\n\tMyLogger.epoch_init(training=is_training)\n\n\tfor points, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n\t\t# pdb.set_trace()\n\t\tpoints, target = points.numpy(), target.numpy()\n\n\t\tif args.data_aug:\n\t\t\tpoints = random_point_dropout(points)\n\t\t\tpoints[:, :, 0:3] = random_scale_point_cloud(points[:, :, 0:3])\n\t\t\tpoints[:, :, 0:3] = random_shift_point_cloud(points[:, :, 0:3])\n\n\t\tfeed_dict = {\n\t\t\tops['pointclouds_pl']: points.reshape([1, BATCH_SIZE * NUM_POINT, 3]),\n\t\t\tops['labels_pl']: target.reshape(BATCH_SIZE, ),\n\t\t\tops['npts_pl']: [NUM_POINT] * BATCH_SIZE,\n\t\t\tops['is_training_pl']: is_training}\n\n\t\tsummary, step, _, loss, pred = sess.run([\n\t\t\tops['merged'], ops['step'], ops['train_op'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n\t\ttrain_writer.add_summary(summary, step)\n\n\t\t# pdb.set_trace()\n\t\tMyLogger.step_update(np.argmax(pred, 1), target.reshape(BATCH_SIZE, ), loss)\n\n\tMyLogger.epoch_summary(writer=None, training=is_training)\n\n\treturn None\n\n\ndef eval_one_epoch(sess, ops, MyLogger, val_writer):\n\tis_training = False\n\tMyLogger.epoch_init(training=is_training)\n\n\tfor points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n\t\t# pdb.set_trace()\n\t\tpoints, target = points.numpy(), target.numpy()\t\t\n\n\t\tfeed_dict = {\n\t\t\t\tops['pointclouds_pl']: points.reshape([1, BATCH_SIZE * NUM_POINT, 
3]),\n\t\t\t\tops['labels_pl']: target.reshape(BATCH_SIZE, ),\n\t\t\t\tops['npts_pl']: np.array([NUM_POINT] * BATCH_SIZE),\n\t\t\t\tops['is_training_pl']: is_training}\n\n\t\tsummary, step, loss_val, pred_val = sess.run(\n\t\t\t[ops['merged'], ops['step'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n\t\tval_writer.add_summary(summary, step)\n\t\t# pdb.set_trace()\n\t\tMyLogger.step_update(np.argmax(pred_val, 1), target.reshape(BATCH_SIZE, ), loss_val)\n\n\tMyLogger.epoch_summary(writer=None, training=is_training)\n\n\treturn MyLogger.save_model\n\n\nif __name__ == '__main__':\n\n\tprint('Now Using GPU:%s to train the model' % args.gpu)\n\tos.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'\n\tos.environ['CUDA_VISIBLE_DEVICES'] = args.gpu\n\n\tmain(args)\n"
  },
  {
    "path": "OcCo_TF/train_cls_torchloader.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport os, sys, pdb, time, argparse, datetime, importlib, numpy as np, tensorflow as tf\nfrom tqdm import tqdm\nfrom termcolor import colored\nfrom utils.Dataset_Assign import Dataset_Assign\nfrom utils.io_util import shuffle_data, loadh5DataFile\nfrom utils.EarlyStoppingCriterion import EarlyStoppingCriterion\nfrom utils.tf_util import add_train_summary, get_bn_decay, get_learning_rate\nfrom utils.pc_util import rotate_point_cloud, jitter_point_cloud, random_point_dropout, \\\n\trandom_scale_point_cloud, random_shift_point_cloud\n\n# from utils.transfer_pretrained_w import load_pretrained_var\nfrom utils.ModelNetDataLoader import General_CLSDataLoader_HDF5\nfrom torch.utils.data import DataLoader\n\nparser = argparse.ArgumentParser()\n\n''' === Basic Learning Settings === '''\nparser.add_argument('--gpu', type=int, default=1)\nparser.add_argument('--log_dir', default='log/log_cls/pointnet_cls')\nparser.add_argument('--model', default='pointnet_cls')\nparser.add_argument('--epoch', type=int, default=200)\nparser.add_argument('--restore', action='store_true')\nparser.add_argument('--restore_path', default='log/pointnet_cls')\nparser.add_argument('--batch_size', type=int, default=32)\nparser.add_argument('--num_point', type=int, default=1024)\nparser.add_argument('--base_lr', type=float, default=0.001)\nparser.add_argument('--lr_clip', type=float, default=1e-5)\nparser.add_argument('--decay_steps', type=int, default=20)\nparser.add_argument('--decay_rate', type=float, default=0.7)\n# parser.add_argument('--verbose', type=bool, default=True)\nparser.add_argument('--dataset', type=str, default='modelnet40')\nparser.add_argument('--partial', action='store_true')\nparser.add_argument('--filename', type=str, default='')\nparser.add_argument('--data_bn', action='store_true')\n\n''' === Data Augmentation Settings === '''\nparser.add_argument('--data_aug', 
action='store_true')\nparser.add_argument('--just_save', action='store_true')  # pretrained encoder restoration\nparser.add_argument('--patience', type=int, default=200)  # early stopping, set it as 200 for deprecation\nparser.add_argument('--fewshot', action='store_true')\n\nargs = parser.parse_args()\n\nDATA_PATH = 'data/modelnet40_normal_resampled/'\nNUM_CLASSES, NUM_TRAINOBJECTS, TRAIN_FILES, VALID_FILES = Dataset_Assign(\n\tdataset=args.dataset, fname=args.filename, partial=args.partial, bn=args.data_bn, few_shot=args.fewshot)\nTRAIN_DATASET = General_CLSDataLoader_HDF5(root=DATA_PATH, file_list=TRAIN_FILES, num_point=1024)\nTEST_DATASET = General_CLSDataLoader_HDF5(root=DATA_PATH, file_list=VALID_FILES, num_point=1024)\ntrainDataLoader = DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4, drop_last=True)\ntestDataLoader = DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=4, drop_last=True)\n\nBATCH_SIZE = args.batch_size\nNUM_POINT = args.num_point\nBASE_LR = args.base_lr\nLR_CLIP = args.lr_clip\nDECAY_RATE = args.decay_rate\n# DECAY_STEP = args.decay_steps\nDECAY_STEP = NUM_TRAINOBJECTS//BATCH_SIZE * args.decay_steps\nBN_INIT_DECAY = 0.5\nBN_DECAY_RATE = 0.5\nBN_DECAY_STEP = float(DECAY_STEP)\nBN_DECAY_CLIP = 0.99\nLOG_DIR = args.log_dir\nBEST_EVAL_ACC = 0\nos.system('mkdir -p %s' % LOG_DIR) if not os.path.exists(LOG_DIR) else None\nLOG_FOUT = open(os.path.join(LOG_DIR, 'log_train.txt'), 'a+')\n\ndef log_string(out_str):\n\tLOG_FOUT.write(out_str + '\\n')\n\tLOG_FOUT.flush()\n\tprint(out_str)\n\n\ndef train(args):\n\n\tlog_string('\\n\\n' + '=' * 50)\n\tlog_string('Start Training, Time: %s' % datetime.datetime.now())\n\tlog_string('=' * 50 + '\\n\\n')\n\n\tis_training_pl = tf.placeholder(tf.bool, shape=(), name='is_training')\n\tglobal_step = tf.Variable(0, trainable=False, name='global_step')  # will be used in defining train_op\n\tinputs_pl = tf.placeholder(tf.float32, (1, None, 3), 
'inputs')\n\tlabels_pl = tf.placeholder(tf.int32, (BATCH_SIZE,), 'labels')\n\tnpts_pl = tf.placeholder(tf.int32, (BATCH_SIZE,), 'num_points')\n\n\tbn_decay = get_bn_decay(global_step, BN_INIT_DECAY, BATCH_SIZE, BN_DECAY_STEP, BN_DECAY_RATE, BN_DECAY_CLIP)\n\n\t# model_module = importlib.import_module('.%s' % args.model, 'cls_models')\n\t# MODEL = model_module.Model(inputs_pl, npts_pl, labels_pl, is_training_pl, bn_decay=bn_decay)\n\t''' === To fix issues when running on woma === '''\n\tldic = locals()\n\texec('from cls_models.%s import Model' % args.model, globals(), ldic)\n\tMODEL = ldic['Model'](inputs_pl, npts_pl, labels_pl, is_training_pl, bn_decay=bn_decay)\n\tpred, loss = MODEL.pred, MODEL.loss\n\ttf.summary.scalar('loss', loss)\n\t# pdb.set_trace()\n\n\t# useful information in displaying during training\n\tcorrect = tf.equal(tf.argmax(pred, 1), tf.to_int64(labels_pl))\n\taccuracy = tf.reduce_sum(tf.cast(correct, tf.float32)) / float(BATCH_SIZE)\n\ttf.summary.scalar('accuracy', accuracy)\n\n\tlearning_rate = get_learning_rate(global_step, BASE_LR, BATCH_SIZE, DECAY_STEP, DECAY_RATE, LR_CLIP)\n\tadd_train_summary('learning_rate', learning_rate)\n\ttrainer = tf.train.AdamOptimizer(learning_rate)\n\ttrain_op = trainer.minimize(MODEL.loss, global_step)\n\tsaver = tf.train.Saver()\n\n\tconfig = tf.ConfigProto()\n\tconfig.gpu_options.allow_growth = True\n\tconfig.allow_soft_placement = True\n\t# config.log_device_placement = True\n\tsess = tf.Session(config=config)\n\n\tmerged = tf.summary.merge_all()\n\ttrain_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'train'), sess.graph)\n\tval_writer = tf.summary.FileWriter(os.path.join(LOG_DIR, 'val'))\n\n\t# Init variables\n\tinit = tf.global_variables_initializer()\n\tlog_string('\\nModel Parameters has been Initialized\\n')\n\tsess.run(init, {is_training_pl: True})  # restore will cover the random initialized parameters\n\n\t# to save the randomized variables\n\tif not args.restore and 
args.just_save:\n\t\tsave_path = saver.save(sess, os.path.join(LOG_DIR, \"model.ckpt\"))\n\t\tprint(colored('random initialised model saved at %s' % save_path, 'white', 'on_blue'))\n\t\tprint(colored('just save the model, now exit', 'white', 'on_red'))\n\t\tsys.exit()\n\n\t'''current solution: first load pretrained head, assemble with output layers then save as a checkpoint'''\n\t# to partially load the saved head from:\n\t# if args.load_pretrained_head:\n\t#   sess.close()\n\t#   load_pretrained_head(args.pretrained_head_path, os.path.join(LOG_DIR, 'model.ckpt'), None, args.verbose)\n\t#   print('shared varibles have been restored from ', args.pretrained_head_path)\n\t#\n\t#   sess = tf.Session(config=config)\n\t#   log_string('\\nModel Parameters has been Initialized\\n')\n\t#   sess.run(init, {is_training_pl: True})\n\t#   saver.restore(sess, tf.train.latest_checkpoint(LOG_DIR))\n\t#   log_string('\\nModel Parameters have been restored with pretrained weights from %s' % args.pretrained_head_path)\n\n\tif args.restore:\n\t\t# load_pretrained_var(args.restore_path, os.path.join(LOG_DIR, \"model.ckpt\"), args.verbose)\n\t\tsaver.restore(sess, tf.train.latest_checkpoint(args.restore_path))\n\t\tlog_string('\\n')\n\t\tlog_string(colored('Model Parameters have been restored from %s' % args.restore_path, 'white', 'on_red'))\n\n\tfor arg in sorted(vars(args)):\n\t\tprint(arg + ': ' + str(getattr(args, arg)) + '\\n')  # log of arguments\n\tos.system('cp cls_models/%s.py %s' % (args.model, LOG_DIR))  # bkp of model def\n\tos.system('cp train_cls.py %s' % LOG_DIR)  # bkp of train procedure\n\n\ttrain_start = time.time()\n\n\tops = {'pointclouds_pl': inputs_pl,\n\t\t   'labels_pl': labels_pl,\n\t\t   'is_training_pl': is_training_pl,\n\t\t   'npts_pl': npts_pl,\n\t\t   'pred': pred,\n\t\t   'loss': loss,\n\t\t   'train_op': train_op,\n\t\t   'merged': merged,\n\t\t   'step': global_step}\n\n\tESC = EarlyStoppingCriterion(patience=args.patience)\n\n\tfor epoch in 
range(args.epoch):\n\t\tlog_string('\\n\\n')\n\t\tlog_string(colored('**** EPOCH %03d ****' % epoch, 'grey', 'on_green'))\n\t\tsys.stdout.flush()\n\n\t\t'''=== training the model ==='''\n\t\ttrain_one_epoch(sess, ops, train_writer)\n\n\t\t'''=== evaluating the model ==='''\n\t\teval_mean_loss, eval_acc, eval_cls_acc = eval_one_epoch(sess, ops, val_writer)\n\n\t\t'''=== check whether to early stop ==='''\n\t\tearly_stop, save_checkpoint = ESC.step(eval_acc, epoch=epoch)\n\t\tif save_checkpoint:\n\t\t\tsave_path = saver.save(sess, os.path.join(LOG_DIR, \"model.ckpt\"))\n\t\t\tlog_string(colored('model saved at %s' % save_path, 'white', 'on_blue'))\n\t\tif early_stop:\n\t\t\tbreak\n\n\tlog_string('total time: %s' % datetime.timedelta(seconds=time.time() - train_start))\n\tlog_string('stop epoch: %d, best eval acc: %f' % (ESC.best_epoch + 1, ESC.best_dev_score))\n\tsess.close()\n\n\ndef train_one_epoch(sess, ops, train_writer):\n\tis_training = True\n\ttotal_correct, total_seen, loss_sum = 0, 0, 0\n\n\tfor points, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n\t\t# pdb.set_trace()\n\t\tpoints, target = points.numpy(), target.numpy()\n\n\t\tif args.data_aug:\n\t\t\tpoints = random_point_dropout(points)\n\t\t\tpoints[:, :, 0:3] = random_scale_point_cloud(points[:, :, 0:3])\n\t\t\tpoints[:, :, 0:3] = random_shift_point_cloud(points[:, :, 0:3])\n\n\t\tfeed_dict = {\n\t\t\tops['pointclouds_pl']: points.reshape([1, BATCH_SIZE * NUM_POINT, 3]),\n\t\t\tops['labels_pl']: target.reshape(BATCH_SIZE, ),\n\t\t\tops['npts_pl']: [NUM_POINT] * BATCH_SIZE,\n\t\t\tops['is_training_pl']: is_training}\n\n\t\tsummary, step, _, loss_val, pred_val = sess.run([\n\t\t\tops['merged'], ops['step'], ops['train_op'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n\t\ttrain_writer.add_summary(summary, step)\n\n\t\tpred_val = np.argmax(pred_val, 1)\n\t\tcorrect = np.sum(pred_val == target.reshape(BATCH_SIZE, ))\n\t\ttotal_correct += correct\n\t\ttotal_seen += 
BATCH_SIZE\n\t\t# loss_sum += loss_val\n\n\t# train_file_idxs = np.arange(0, len(TRAIN_FILES))\n\t# np.random.shuffle(train_file_idxs)\n\t#\n\t# for fn in range(len(TRAIN_FILES)):\n\t# \tcurrent_data, current_label = loadh5DataFile(TRAIN_FILES[train_file_idxs[fn]])\n\t# \tcurrent_data = current_data[:, :NUM_POINT, :]\n\t# \tcurrent_data, current_label, _ = shuffle_data(current_data, np.squeeze(current_label))\n\t# \tcurrent_label = np.squeeze(current_label)\n\t#\n\t# \tfile_size = current_data.shape[0]\n\t# \tnum_batches = file_size // BATCH_SIZE\n\t#\n\t# \tfor batch_idx in range(num_batches):\n\t# \t\tstart_idx = batch_idx * BATCH_SIZE\n\t# \t\tend_idx = (batch_idx + 1) * BATCH_SIZE\n\t# \t\tfeed_data = current_data[start_idx:end_idx, :, :]\n\t#\n\t# \t\tif args.data_aug:\n\t# \t\t\tfeed_data = random_point_dropout(feed_data)\n\t# \t\t\tfeed_data[:, :, 0:3] = random_scale_point_cloud(feed_data[:, :, 0:3])\n\t# \t\t\tfeed_data[:, :, 0:3] = random_shift_point_cloud(feed_data[:, :, 0:3])\n\t#\n\t# \t\tfeed_dict = {\n\t# \t\t\tops['pointclouds_pl']: feed_data.reshape([1, BATCH_SIZE * NUM_POINT, 3]),\n\t# \t\t\tops['labels_pl']: current_label[start_idx:end_idx].reshape(BATCH_SIZE, ),\n\t# \t\t\tops['npts_pl']: [NUM_POINT] * BATCH_SIZE,\n\t# \t\t\tops['is_training_pl']: is_training}\n\t#\n\t# \t\tsummary, step, _, loss_val, pred_val = sess.run([\n\t# \t\t\tops['merged'], ops['step'], ops['train_op'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n\t# \t\ttrain_writer.add_summary(summary, step)\n\t#\n\t# \t\tpred_val = np.argmax(pred_val, 1)\n\t# \t\tcorrect = np.sum(pred_val == current_label[start_idx:end_idx].reshape(BATCH_SIZE, ))\n\t# \t\ttotal_correct += correct\n\t# \t\ttotal_seen += BATCH_SIZE\n\t# \t\tloss_sum += loss_val\n\n\tlog_string('\\n=== training ===')\n\tlog_string('total correct: %d, total_seen: %d' % (total_correct, total_seen))\n\t# log_string('mean batch loss: %f' % (loss_sum / num_batches))\n\tlog_string('accuracy: %f' % (total_correct / 
float(total_seen)))\n\n\ndef eval_one_epoch(sess, ops, val_writer):\n\tis_training = False\n\n\ttotal_correct, total_seen, loss_sum = 0, 0, 0\n\ttotal_seen_class = [0 for _ in range(NUM_CLASSES)]\n\ttotal_correct_class = [0 for _ in range(NUM_CLASSES)]\n\n\tfor points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n\t\t# pdb.set_trace()\n\t\tpoints, target = points.numpy(), target.numpy()\n\n\t\tfeed_dict = {\n\t\t\tops['pointclouds_pl']: points.reshape([1, BATCH_SIZE * NUM_POINT, 3]),\n\t\t\tops['labels_pl']: target.reshape(BATCH_SIZE, ),\n\t\t\tops['npts_pl']: np.array([NUM_POINT] * BATCH_SIZE),\n\t\t\tops['is_training_pl']: is_training}\n\n\t\tsummary, step, loss_val, pred_val = sess.run(\n\t\t\t[ops['merged'], ops['step'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n\t\tval_writer.add_summary(summary, step)\n\t\tpred_val = np.argmax(pred_val, 1)\n\t\tcorrect = np.sum(pred_val == target.reshape(BATCH_SIZE, ))\n\t\ttotal_correct += correct\n\t\ttotal_seen += BATCH_SIZE\n\t\tloss_sum += (loss_val * BATCH_SIZE)\n\n\t\tfor i, l in enumerate(target):\n\t\t\t# l = int(target.reshape(-1)[i])\n\t\t\t# pdb.set_trace()\n\t\t\ttotal_seen_class[int(l)] += 1\n\t\t\ttotal_correct_class[int(l)] += (int(pred_val[i]) == int(l))\n\n\t# for fn in VALID_FILES:\n\t# \tcurrent_data, current_label = loadh5DataFile(fn)\n\t# \tcurrent_data = current_data[:, :NUM_POINT, :]\n\t# \tfile_size = current_data.shape[0]\n\t# \tnum_batches = file_size // BATCH_SIZE\n\t#\n\t# \tfor batch_idx in range(num_batches):\n\t# \t\tstart_idx, end_idx = batch_idx * BATCH_SIZE, (batch_idx + 1) * BATCH_SIZE\n\t#\n\t# \t\tfeed_dict = {\n\t# \t\t\tops['pointclouds_pl']: current_data[start_idx:end_idx, :, :].reshape([1, BATCH_SIZE * NUM_POINT, 3]),\n\t# \t\t\tops['labels_pl']: current_label[start_idx:end_idx].reshape(BATCH_SIZE, ),\n\t# \t\t\tops['npts_pl']: np.array([NUM_POINT] * BATCH_SIZE),\n\t# \t\t\tops['is_training_pl']: is_training}\n\t#\n\t# \t\tsummary, step, loss_val, 
pred_val = sess.run(\n\t# \t\t\t[ops['merged'], ops['step'], ops['loss'], ops['pred']], feed_dict=feed_dict)\n\t# \t\tval_writer.add_summary(summary, step)\n\t# \t\tpred_val = np.argmax(pred_val, 1)\n\t# \t\tcorrect = np.sum(pred_val == current_label[start_idx:end_idx].reshape(BATCH_SIZE, ))\n\t# \t\ttotal_correct += correct\n\t# \t\ttotal_seen += BATCH_SIZE\n\t# \t\tloss_sum += (loss_val * BATCH_SIZE)\n\t#\n\t# \t\tfor i in range(start_idx, end_idx):\n\t# \t\t\tl = int(current_label.reshape(-1)[i])\n\t# \t\t\ttotal_seen_class[l] += 1\n\t# \t\t\ttotal_correct_class[l] += (pred_val[i - start_idx] == l)\n\n\teval_mean_loss = loss_sum / float(total_seen)\n\teval_acc = total_correct / float(total_seen)\n\teval_cls_acc = np.mean(np.array(total_correct_class) / np.array(total_seen_class, dtype=np.float))\n\tlog_string('\\n=== evaluating ===')\n\tlog_string('total correct: %d, total_seen: %d' % (total_correct, total_seen))\n\tlog_string('eval mean loss: %f' % eval_mean_loss)\n\tlog_string('eval accuracy: %f' % eval_acc)\n\tlog_string('eval avg class acc: %f' % eval_cls_acc)\n\n\tglobal BEST_EVAL_ACC\n\tif eval_acc > BEST_EVAL_ACC:\n\t\tBEST_EVAL_ACC = eval_acc\n\tlog_string('best eval accuracy: %f' % BEST_EVAL_ACC)\n\treturn eval_mean_loss, eval_acc, eval_cls_acc\n\n\nif __name__ == '__main__':\n\tprint('Now Using GPU:%d to train the model' % args.gpu)\n\tos.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'\n\tos.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)\n\n\ttrain(args)\n\tLOG_FOUT.close()\n"
  },
  {
    "path": "OcCo_TF/train_completion.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, pdb, time, argparse, datetime, importlib, numpy as np, tensorflow as tf\nfrom utils import lmdb_dataflow, add_train_summary, plot_pcd_three_views\nfrom termcolor import colored\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--gpu', type=str, default='1')\nparser.add_argument('--lmdb_train', default='data/modelnet/train.lmdb')\nparser.add_argument('--lmdb_valid', default='data/modelnet/test.lmdb')\nparser.add_argument('--log_dir', type=str, default='')\nparser.add_argument('--model_type', default='pcn_cd')\nparser.add_argument('--restore', action='store_true')\nparser.add_argument('--restore_path', default='log/pcn_cd')\nparser.add_argument('--batch_size', type=int, default=16)\nparser.add_argument('--num_gt_points', type=int, default=16384)\nparser.add_argument('--base_lr', type=float, default=1e-4)\nparser.add_argument('--lr_decay', action='store_true')\nparser.add_argument('--lr_decay_steps', type=int, default=50000)\nparser.add_argument('--lr_decay_rate', type=float, default=0.7)\nparser.add_argument('--lr_clip', type=float, default=1e-6)\nparser.add_argument('--max_step', type=int, default=3000000)\nparser.add_argument('--epoch', type=int, default=50)\nparser.add_argument('--steps_per_print', type=int, default=100)\nparser.add_argument('--steps_per_eval', type=int, default=1000)\nparser.add_argument('--steps_per_visu', type=int, default=3456)\nparser.add_argument('--epochs_per_save', type=int, default=5)\nparser.add_argument('--visu_freq', type=int, default=10)\nparser.add_argument('--store_grad', action='store_true')\nparser.add_argument('--num_input_points', type=int, default=1024)\nparser.add_argument('--dataset', default='modelnet40')\n\nargs = parser.parse_args()\n\nBATCH_SIZE = args.batch_size\nNUM_POINT = args.num_input_points\nNUM_GT_POINT = args.num_gt_points\nDECAY_STEP = args.lr_decay_steps\nDATASET = args.dataset\n\nBN_INIT_DECAY = 
0.5\nBN_DECAY_DECAY_RATE = 0.5\nBN_DECAY_DECAY_STEP = float(DECAY_STEP)\nBN_DECAY_CLIP = 0.99\n\n\ndef get_bn_decay(batch):\n    bn_momentum = tf.train.exponential_decay(\n        BN_INIT_DECAY,\n        batch * BATCH_SIZE,\n        BN_DECAY_DECAY_STEP,\n        BN_DECAY_DECAY_RATE,\n        staircase=True)\n    bn_decay = tf.minimum(BN_DECAY_CLIP, 1 - bn_momentum)\n    return bn_decay\n\n\ndef vary2fix(inputs, npts):\n    inputs_ls = np.split(inputs[0], npts.cumsum())\n    ret_inputs = np.zeros((1, BATCH_SIZE * NUM_POINT, 3), dtype=np.float32)\n    ret_npts = npts.copy()\n    for idx, obj in enumerate(inputs_ls[:-1]):\n        if len(obj) <= NUM_POINT:\n            select_idx = np.concatenate([\n                np.arange(len(obj)), np.random.choice(len(obj), NUM_POINT - len(obj))])\n        else:\n            select_idx = np.arange(len(obj))\n            np.random.shuffle(select_idx)\n            pdb.set_trace()\n\n        ret_inputs[0][idx * NUM_POINT:(idx + 1) * NUM_POINT] = obj[select_idx].copy()\n        ret_npts[idx] = NUM_POINT\n    return ret_inputs, ret_npts\n\n\ndef train(args):\n\n    is_training_pl = tf.placeholder(tf.bool, shape=(), name='is_training')\n    global_step = tf.Variable(0, trainable=False, name='global_step')\n    alpha = tf.train.piecewise_constant(global_step, [10000, 20000, 50000],\n                                        [0.01, 0.1, 0.5, 1.0], 'alpha_op')\n\n    # for ModelNet, it is with Fixed Number of Input Points\n    # for ShapeNet, it is with Varying Number of Input Points\n    inputs_pl = tf.placeholder(tf.float32, (1, BATCH_SIZE * NUM_POINT, 3), 'inputs')\n    npts_pl = tf.placeholder(tf.int32, (BATCH_SIZE,), 'num_points')\n    gt_pl = tf.placeholder(tf.float32, (BATCH_SIZE, args.num_gt_points, 3), 'ground_truths')\n    add_train_summary('alpha', alpha)\n    bn_decay = get_bn_decay(global_step)\n    add_train_summary('bn_decay', bn_decay)\n\n    model_module = importlib.import_module('.%s' % args.model_type, 
'completion_models')\n    model = model_module.Model(inputs_pl, npts_pl, gt_pl, alpha,\n                               bn_decay=bn_decay, is_training=is_training_pl)\n\n    # Another Solution instead of importlib:\n    # ldic = locals()\n    # exec('from completion_models.%s import Model' % args.model_type, globals(), ldic)\n    # model = ldic['Model'](inputs_pl, npts_pl, gt_pl, alpha,\n    # bn_decay=bn_decay, is_training=is_training_pl)\n\n    if args.lr_decay:\n        learning_rate = tf.train.exponential_decay(args.base_lr, global_step,\n                                                   args.lr_decay_steps, args.lr_decay_rate,\n                                                   staircase=True, name='lr')\n        learning_rate = tf.maximum(learning_rate, args.lr_clip)\n        add_train_summary('learning_rate', learning_rate)\n    else:\n        learning_rate = tf.constant(args.base_lr, name='lr')\n\n    trainer = tf.train.AdamOptimizer(learning_rate)\n    train_op = trainer.minimize(model.loss, global_step)\n    saver = tf.train.Saver(max_to_keep=10)\n    ''' from PCN paper:\n    All our completion_models are trained using the Adam optimizer \n    with an initial learning rate of 0.0001 for 50 epochs\n    and a batch size of 32. 
The learning rate is decayed by 0.7 every 50K iterations.\n    '''\n\n    if args.store_grad:\n        grads_and_vars = trainer.compute_gradients(model.loss)\n        for g, v in grads_and_vars:\n            tf.summary.histogram(v.name, v, collections=['train_summary'])\n            tf.summary.histogram(v.name + '_grad', g, collections=['train_summary'])\n\n    train_summary = tf.summary.merge_all('train_summary')\n    valid_summary = tf.summary.merge_all('valid_summary')\n\n    # the input number of points for the partial observed data is not a fixed number\n    df_train, num_train = lmdb_dataflow(\n        args.lmdb_train, args.batch_size,\n        args.num_input_points, args.num_gt_points, is_training=True)\n    train_gen = df_train.get_data()\n    df_valid, num_valid = lmdb_dataflow(\n        args.lmdb_valid, args.batch_size,\n        args.num_input_points, args.num_gt_points, is_training=False)\n    valid_gen = df_valid.get_data()\n\n    config = tf.ConfigProto()\n    config.gpu_options.allow_growth = True\n    config.allow_soft_placement = True\n    sess = tf.Session(config=config)\n\n    if args.restore:\n        saver.restore(sess, tf.train.latest_checkpoint(args.log_dir))\n        writer = tf.summary.FileWriter(args.log_dir)\n    else:\n        sess.run(tf.global_variables_initializer())\n        if os.path.exists(args.log_dir):\n            delete_key = input(colored('%s exists. Delete? 
[y/n]' % args.log_dir, 'white', 'on_red'))\n            if delete_key == 'y' or delete_key == \"yes\":\n                os.system('rm -rf %s/*' % args.log_dir)\n                os.makedirs(os.path.join(args.log_dir, 'plots'))\n        else:\n            os.makedirs(os.path.join(args.log_dir, 'plots'))\n        with open(os.path.join(args.log_dir, 'args.txt'), 'w') as log:\n            for arg in sorted(vars(args)):\n                log.write(arg + ': ' + str(getattr(args, arg)) + '\\n')\n        log.close()\n        os.system('cp completion_models/%s.py %s' % (args.model_type, args.log_dir))  # bkp of model scripts\n        os.system('cp train_completion.py %s' % args.log_dir)  # bkp of train procedure\n        writer = tf.summary.FileWriter(args.log_dir, sess.graph)  # GOOD habit\n\n    log_fout = open(os.path.join(args.log_dir, 'log_train.txt'), 'a+')\n    for arg in sorted(vars(args)):\n        log_fout.write(arg + ': ' + str(getattr(args, arg)) + '\\n')\n        log_fout.flush()\n\n    total_time = 0\n    train_start = time.time()\n    init_step = sess.run(global_step)\n\n    for step in range(init_step + 1, args.max_step + 1):\n        epoch = step * args.batch_size // num_train + 1\n        ids, inputs, npts, gt = next(train_gen)\n        if epoch > args.epoch:\n            break\n        if DATASET == 'shapenet8':\n            inputs, npts = vary2fix(inputs, npts)\n\n        start = time.time()\n        feed_dict = {inputs_pl: inputs, npts_pl: npts, gt_pl: gt, is_training_pl: True}\n        _, loss, summary = sess.run([train_op, model.loss, train_summary], feed_dict=feed_dict)\n        total_time += time.time() - start\n        writer.add_summary(summary, step)\n\n        if step % args.steps_per_print == 0:\n            print('epoch %d  step %d  loss %.8f - time per batch %.4f' %\n                  (epoch, step, loss, total_time / args.steps_per_print))\n            total_time = 0\n\n        if step % args.steps_per_eval == 0:\n            
print(colored('Testing...', 'grey', 'on_green'))\n            num_eval_steps = num_valid // args.batch_size\n            total_loss, total_time = 0, 0\n            sess.run(tf.local_variables_initializer())\n            for i in range(num_eval_steps):\n                start = time.time()\n                _, inputs, npts, gt = next(valid_gen)\n                if DATASET == 'shapenet8':\n                    inputs, npts = vary2fix(inputs, npts)\n                feed_dict = {inputs_pl: inputs, npts_pl: npts, gt_pl: gt, is_training_pl: False}\n                loss, _ = sess.run([model.loss, model.update], feed_dict=feed_dict)\n                total_loss += loss\n                total_time += time.time() - start\n            summary = sess.run(valid_summary, feed_dict={is_training_pl: False})\n            writer.add_summary(summary, step)\n            print(colored('epoch %d  step %d  loss %.8f - time per batch %.4f' %\n                          (epoch, step, total_loss / num_eval_steps, total_time / num_eval_steps),\n                          'grey', 'on_green'))\n            total_time = 0\n\n        if step % args.steps_per_visu == 0:\n            all_pcds = sess.run(model.visualize_ops, feed_dict=feed_dict)\n            for i in range(0, args.batch_size, args.visu_freq):\n                plot_path = os.path.join(args.log_dir, 'plots',\n                                         'epoch_%d_step_%d_%s.png' % (epoch, step, ids[i]))\n                pcds = [x[i] for x in all_pcds]\n                plot_pcd_three_views(plot_path, pcds, model.visualize_titles)\n\n        if (epoch % args.epochs_per_save == 0) and \\\n                not os.path.exists(os.path.join(args.log_dir, 'model-%d.meta' % epoch)):\n            saver.save(sess, os.path.join(args.log_dir, 'model'), epoch)\n            print(colored('Epoch:%d, Model saved at %s' % (epoch, args.log_dir), 'white', 'on_blue'))\n\n    print('Total time', datetime.timedelta(seconds=time.time() - train_start))\n    
sess.close()\n\n\nif __name__ == '__main__':\n\n    print('Now Using GPU:%s to train the model' % args.gpu)\n    os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'\n    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu\n\n    train(args)\n"
  },
  {
    "path": "OcCo_TF/utils/Dataset_Assign.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport h5py\n\ndef Dataset_Assign(dataset, fname, partial=True, bn=False, few_shot=False):\n\n    def fetch_files(filelist):\n        return [item.strip() for item in open(filelist).readlines()]\n\n    def loadh5DataFile(PathtoFile):\n        f = h5py.File(PathtoFile, 'r')\n        return f['data'][:], f['label'][:]\n\n    dataset = dataset.lower()\n\n    if dataset == 'shapenet8':\n        NUM_CLASSES = 8\n        if partial:\n            NUM_TRAINOBJECTS = 231792\n            TRAIN_FILES = fetch_files('./data/shapenet/hdf5_partial_1024/train_file.txt')\n            VALID_FILES = fetch_files('./data/shapenet/hdf5_partial_1024/valid_file.txt')\n        else:\n            raise ValueError(\"For ShapeNet we are only interested in the partial objects recognition\")\n\n    elif dataset == 'shapenet10':\n        # Number of Objects:  17378\n        # Number of Objects:  2492\n        NUM_CLASSES, NUM_TRAINOBJECTS = 10, 17378\n        TRAIN_FILES = fetch_files('./data/ShapeNet10/Cleaned/train_file.txt')\n        VALID_FILES = fetch_files('./data/ShapeNet10/Cleaned/test_file.txt')\n\n    elif dataset == 'modelnet40':\n        '''Actually we find that using data from PointNet++: \n        https://shapenet.cs.stanford.edu/media/modelnet40_normal_resampled.zip\n        will increase the accuracy a bit, however to make a fair comparison: we use the same data as \n        the original data provided by PointNet: https://shapenet.cs.stanford.edu/media/modelnet40_ply_hdf5_2048.zip'''\n        NUM_CLASSES = 40\n        if partial:\n            NUM_TRAINOBJECTS = 98430\n            TRAIN_FILES = fetch_files('./data/modelnet40_pcn/hdf5_partial_1024/train_file.txt')\n            VALID_FILES = fetch_files('./data/modelnet40_pcn/hdf5_partial_1024/test_file.txt')\n        else:\n            VALID_FILES = fetch_files('./data/modelnet40_ply_hdf5_2048/test_files.txt')\n            if few_shot:\n             
   TRAIN_FILES = fetch_files('./data/modelnet40_ply_hdf5_2048/few_labels/%s.h5' % fname)\n                data, _ = loadh5DataFile('./data/modelnet40_ply_hdf5_2048/few_labels/%s.h5' % fname)\n                NUM_TRAINOBJECTS = len(data)\n            else:\n                NUM_TRAINOBJECTS = 9843\n                TRAIN_FILES = fetch_files('./data/modelnet40_ply_hdf5_2048/train_files.txt')\n\n    elif dataset == 'scannet10':\n        NUM_CLASSES, NUM_TRAINOBJECTS = 10, 6110\n        TRAIN_FILES = fetch_files('./data/ScanNet10/ScanNet_Cleaned/train_file.txt')\n        VALID_FILES = fetch_files('./data/ScanNet10/ScanNet_Cleaned/test_file.txt')\n\n    elif dataset == 'scanobjectnn':\n        NUM_CLASSES = 15\n        if bn:\n            TRAIN_FILES = ['./data/ScanNetObjectNN/h5_files/main_split/training_objectdataset' + fname + '.h5']\n            VALID_FILES = ['./data/ScanNetObjectNN/h5_files/main_split/test_objectdataset' + fname + '.h5']\n            data, _ = loadh5DataFile('./data/ScanNetObjectNN/h5_files/main_split/training_objectdataset' + fname + '.h5')\n            NUM_TRAINOBJECTS = len(data)\n        else:\n            TRAIN_FILES = ['./data/ScanNetObjectNN/h5_files/main_split_nobg/training_objectdataset' + fname + '.h5']\n            VALID_FILES = ['./data/ScanNetObjectNN/h5_files/main_split_nobg/test_objectdataset' + fname + '.h5']\n            data, _ = loadh5DataFile('./data/ScanNetObjectNN/h5_files/main_split_nobg/training_objectdataset' + fname + '.h5')\n            NUM_TRAINOBJECTS = len(data)\n    else:\n        raise ValueError('dataset not exists')\n\n    return NUM_CLASSES, NUM_TRAINOBJECTS, TRAIN_FILES, VALID_FILES\n"
  },
  {
    "path": "OcCo_TF/utils/EarlyStoppingCriterion.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nclass EarlyStoppingCriterion(object):\n    \"\"\"\n    adapted from https://github.com/facebookresearch/hgnn/blob/master/utils/EarlyStoppingCriterion.py\n    Arguments:\n        patience (int): The maximum number of epochs with no improvement before early stopping should take place\n        mode (str, can only be 'max' or 'min'): To take the maximum or minimum of the score for optimization\n        min_delta (float, optional): Minimum change in the score to qualify as an improvement (default: 0.0)\n    \"\"\"\n\n    def __init__(self, patience=10, mode='max', min_delta=0.0):\n        assert patience >= 0\n        assert mode in {'min', 'max'}\n        assert min_delta >= 0.0\n        self.patience = patience\n        self.mode = mode\n        self.min_delta = min_delta\n\n        self._count = 0\n        self.best_dev_score = None\n        self.best_test_score = None\n        self.best_epoch = None\n        self.is_improved = None\n\n    def step(self, cur_dev_score, epoch):\n        \"\"\"\n        Checks if training should be continued given the current score.\n        Arguments:\n            cur_dev_score (float): the current development score\n            # cur_test_score (float): the current test score\n        Output:\n            bool: if training should be continued\n        \"\"\"\n        save_checkpoint = False\n\n        if self.best_dev_score is None:\n            self.best_dev_score = cur_dev_score\n            self.best_epoch = epoch\n            save_checkpoint = True\n            return False, save_checkpoint\n        else:\n            if self.mode == 'max':\n                self.is_improved = (cur_dev_score > self.best_dev_score + self.min_delta)\n            else:\n                self.is_improved = (cur_dev_score < self.best_dev_score - self.min_delta)\n\n            if self.is_improved:\n                self._count = 0\n                self.best_dev_score = cur_dev_score\n 
               self.best_epoch = epoch\n                save_checkpoint = True\n            else:\n                self._count += 1\n            return self._count >= self.patience, save_checkpoint\n"
  },
  {
    "path": "OcCo_TF/utils/ModelNetDataLoader.py",
    "content": "import os, torch, h5py, warnings, numpy as np\nfrom torch.utils.data import Dataset\n\nwarnings.filterwarnings('ignore')\n\n\ndef pc_normalize(pc):\n    centroid = np.mean(pc, axis=0)\n    pc = pc - centroid\n    m = np.max(np.sqrt(np.sum(pc ** 2, axis=1)))\n    pc = pc / m\n    return pc\n\n\ndef farthest_point_sample(point, npoint):\n    \"\"\"\n    Input:\n        xyz: point cloud data, [N, D]\n        npoint: number of samples\n    Return:\n        centroids: sampled point cloud index, [npoint, D]\n    \"\"\"\n    N, D = point.shape\n    xyz = point[:, :3]\n    centroids = np.zeros((npoint,))\n    distance = np.ones((N,)) * 1e10\n    farthest = np.random.randint(0, N)\n    for i in range(npoint):\n        centroids[i] = farthest\n        centroid = xyz[farthest, :]\n        dist = np.sum((xyz - centroid) ** 2, -1)\n        mask = dist < distance\n        distance[mask] = dist[mask]\n        farthest = np.argmax(distance, -1)\n    point = point[centroids.astype(np.int32)]\n    return point\n\n\nclass ModelNetDataLoader(Dataset):\n    def __init__(self, root, npoint=1024, split='train', uniform=False, normal_channel=True, cache_size=15000):\n        self.root = root\n        self.npoints = npoint\n        self.uniform = uniform\n        self.catfile = os.path.join(self.root, 'modelnet40_shape_names.txt')\n\n        self.cat = [line.rstrip() for line in open(self.catfile)]\n        self.classes = dict(zip(self.cat, range(len(self.cat))))\n        self.normal_channel = normal_channel\n\n        shape_ids = {'train': [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_train.txt'))],\n                     'test': [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_test.txt'))]}\n\n        assert (split == 'train' or split == 'test')\n        shape_names = ['_'.join(x.split('_')[0:-1]) for x in shape_ids[split]]\n        # list of (shape_name, shape_txt_file_path) tuple\n        self.datapath = [(shape_names[i], 
os.path.join(self.root, shape_names[i], shape_ids[split][i]) + '.txt') for i\n                         in range(len(shape_ids[split]))]\n        print('The size of %s data is %d' % (split, len(self.datapath)))\n\n        self.cache_size = cache_size  # how many data points to cache in memory\n        self.cache = {}  # from index to (point_set, cls) tuple\n\n    def __len__(self):\n        return len(self.datapath)\n\n    def _get_item(self, index):\n        if index in self.cache:\n            point_set, cls = self.cache[index]\n        else:\n            fn = self.datapath[index]\n            cls = self.classes[self.datapath[index][0]]\n            cls = np.array([cls]).astype(np.int32)\n            point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32)\n            if self.uniform:\n                point_set = farthest_point_sample(point_set, self.npoints)\n            else:\n                point_set = point_set[0:self.npoints, :]\n\n            point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])\n\n            if not self.normal_channel:\n                point_set = point_set[:, 0:3]\n\n            if len(self.cache) < self.cache_size:\n                self.cache[index] = (point_set, cls)\n\n        return point_set, cls\n\n    def __getitem__(self, index):\n        return self._get_item(index)\n\n\nclass General_CLSDataLoader_HDF5(Dataset):\n    def __init__(self, file_list, num_point=1024):\n        # self.root = root\n        self.num_point = num_point\n        self.file_list = file_list\n        self.points_list = np.zeros((1, num_point, 3))\n        self.labels_list = np.zeros((1,))\n\n        for file in self.file_list:\n            # pdb.set_trace()\n            # file = os.path.join(root, file)\n            # pdb.set_trace()\n            data, label = self.loadh5DataFile(file)\n            self.points_list = np.concatenate([self.points_list,\n                                               data[:, :self.num_point, :]], axis=0)\n            
self.labels_list = np.concatenate([self.labels_list, label.ravel()], axis=0)\n\n        self.points_list = self.points_list[1:]\n        self.labels_list = self.labels_list[1:]\n        assert len(self.points_list) == len(self.labels_list)\n        print('Number of Objects: ', len(self.labels_list))\n\n    @staticmethod\n    def loadh5DataFile(PathtoFile):\n        f = h5py.File(PathtoFile, 'r')\n        return f['data'][:], f['label'][:]\n\n    def __len__(self):\n        return len(self.points_list)\n\n    def __getitem__(self, index):\n\n        point_xyz = self.points_list[index][:, 0:3]\n        point_label = self.labels_list[index].astype(np.int32)\n\n        return point_xyz, point_label\n\n\nclass ModelNetJigsawDataLoader(Dataset):\n    def __init__(self, root=r'./data/modelnet40_ply_hdf5_2048/jigsaw',\n                 n_points=1024, split='train', k=3):\n        self.npoints = n_points\n        self.root = root\n        self.split = split\n        assert split in ['train', 'test']\n        if self.split == 'train':\n            self.file_list = [d for d in os.listdir(root) if d.find('train') is not -1]\n        else:\n            self.file_list = [d for d in os.listdir(root) if d.find('test') is not -1]\n        self.points_list = np.zeros((1, n_points, 3))\n        self.labels_list = np.zeros((1, n_points))\n\n        for file in self.file_list:\n            file = os.path.join(root, file)\n            data, label = self.loadh5DataFile(file)\n            # data = np.load(root + file)\n            self.points_list = np.concatenate([self.points_list, data], axis=0)  # .append(data)\n            self.labels_list = np.concatenate([self.labels_list, label], axis=0)\n        # self.labels_list.append(label)\n\n        self.points_list = self.points_list[1:]\n        self.labels_list = self.labels_list[1:]\n        assert len(self.points_list) == len(self.labels_list)\n        print('Number of %s Objects: '%self.split, len(self.labels_list))\n\n        # just 
use the average weights\n        self.labelweights = np.ones(k ** 3)\n\n    # pdb.set_trace()\n\n    @staticmethod\n    def loadh5DataFile(PathtoFile):\n        f = h5py.File(PathtoFile, 'r')\n        return f['data'][:], f['label'][:]\n\n    def __getitem__(self, index):\n\n        point_set = self.points_list[index][:, 0:3]\n        semantic_seg = self.labels_list[index].astype(np.int32)\n        # sample_weight = self.labelweights[semantic_seg]\n\n        # return point_set, semantic_seg, sample_weight\n        return point_set, semantic_seg\n\n    def __len__(self):\n        return len(self.points_list)\n\n\nif __name__ == '__main__':\n\n    data = ModelNetDataLoader('/data/modelnet40_normal_resampled/', split='train', uniform=False, normal_channel=True, )\n    DataLoader = torch.utils.data.DataLoader(data, batch_size=12, shuffle=True)\n    for point, label in DataLoader:\n        print(point.shape)\n        print(label.shape)\n"
  },
  {
    "path": "OcCo_TF/utils/Train_Logger.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, logging, datetime, numpy as np, sklearn.metrics as metrics\nfrom pathlib import Path\n\n\nclass TrainLogger:\n\n    def __init__(self, args, name='Model', subfold='cls', cls2name=None):\n        self.step = 1\n        self.epoch = 1\n        self.args = args\n        self.name = name\n        self.sf = subfold\n        self.make_logdir()\n        self.logger_setup()\n        self.epoch_init()\n        self.save_model = False\n        self.cls2name = cls2name\n        self.best_instance_acc, self.best_class_acc = 0., 0.\n        self.best_instance_epoch, self.best_class_epoch = 0, 0\n        self.savepath = str(self.checkpoints_dir) + '/best_model.pth'\n\n    def logger_setup(self):\n        self.logger = logging.getLogger(self.name)\n        self.logger.setLevel(logging.INFO)\n        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n        file_handler = logging.FileHandler(os.path.join(self.log_dir, 'train_log.txt'))\n        file_handler.setLevel(logging.INFO)\n        file_handler.setFormatter(formatter)\n        # ref: https://stackoverflow.com/a/53496263/12525201\n        # define a Handler which writes INFO messages or higher to the sys.stderr\n        console = logging.StreamHandler()\n        console.setLevel(logging.INFO)\n        # logging.getLogger('').addHandler(console) # this is root logger\n        self.logger.addHandler(console)\n        self.logger.addHandler(file_handler)\n        self.logger.info('PARAMETER ...')\n        self.logger.info(self.args)\n        self.logger.removeHandler(console)\n\n    def make_logdir(self):\n        timestr = str(datetime.datetime.now().strftime('%Y-%m-%d_%H-%M'))\n        experiment_dir = Path('./log/')\n        experiment_dir.mkdir(exist_ok=True)\n        experiment_dir = experiment_dir.joinpath(self.sf)\n        experiment_dir.mkdir(exist_ok=True)\n\n        if self.args.log_dir is None:\n    
        self.experiment_dir = experiment_dir.joinpath(timestr)\n        else:\n            self.experiment_dir = experiment_dir.joinpath(self.args.log_dir)\n\n        self.experiment_dir.mkdir(exist_ok=True)\n        self.checkpoints_dir = self.experiment_dir.joinpath('checkpoints/')\n        self.checkpoints_dir.mkdir(exist_ok=True)\n        self.log_dir = self.experiment_dir.joinpath('logs/')\n        self.log_dir.mkdir(exist_ok=True)\n        self.experiment_dir.joinpath('runs').mkdir(exist_ok=True)\n\n    # @property.setter\n    def epoch_init(self, training=True):\n        self.loss, self.count, self.pred, self.gt = 0., 0., [], []\n        if training:\n            self.logger.info('\\nEpoch %d/%d:' % (self.epoch, self.args.epoch))\n\n    def step_update(self, pred, gt, loss, training=True):\n        if training:\n            self.step += 1\n        self.gt.append(gt)\n        self.pred.append(pred)\n        batch_size = len(pred)\n        self.count += batch_size\n        self.loss += loss * batch_size\n\n    def cls_epoch_update(self, training=True):\n        self.save_model = False\n        self.gt = np.concatenate(self.gt)\n        self.pred = np.concatenate(self.pred)\n        instance_acc = metrics.accuracy_score(self.gt, self.pred)\n        class_acc = metrics.balanced_accuracy_score(self.gt, self.pred)\n\n        if instance_acc > self.best_instance_acc and not training:\n            self.best_instance_acc = instance_acc\n            self.best_instance_epoch = self.epoch\n            self.save_model = True\n        if class_acc > self.best_class_acc and not training:\n            self.best_class_acc = class_acc\n            self.best_class_epoch = self.epoch\n\n        if not training:\n            self.epoch += 1\n        return instance_acc, class_acc\n\n    def seg_epoch_update(self, training=True):\n        self.save_model = False\n        self.gt = np.concatenate(self.gt)\n        self.pred = np.concatenate(self.pred)\n        instance_acc = 
metrics.accuracy_score(self.gt, self.pred)\n        if instance_acc > self.best_instance_acc and not training:\n            self.best_instance_acc = instance_acc\n            self.best_instance_epoch = self.epoch\n            self.save_model = True\n\n        if not training:\n            self.epoch += 1\n        return instance_acc\n\n    def epoch_summary(self, writer=None, training=True):\n        instance_acc, class_acc = self.cls_epoch_update(training)\n        if training:\n            if writer is not None:\n                writer.add_scalar('Train Class Accuracy', class_acc, self.step)\n                writer.add_scalar('Train Instance Accuracy', instance_acc, self.step)\n            self.logger.info('Train Instance Accuracy: %.3f, Class Accuracy: %.3f' % (instance_acc, class_acc))\n        else:\n            if writer is not None:\n                writer.add_scalar('Test Class Accuracy', class_acc, self.step)\n                writer.add_scalar('Test Instance Accuracy', instance_acc, self.step)\n            self.logger.info('Test Instance Accuracy: %.3f, Class Accuracy: %.3f' % (instance_acc, class_acc))\n            self.logger.info('Best Instance Accuracy: %.3f at Epoch %d ' % (\n                self.best_instance_acc, self.best_instance_epoch))\n            self.logger.info('Best Class Accuracy: %.3f at Epoch %d' % (\n                self.best_class_acc, self.best_class_epoch))\n\n        if self.save_model:\n            self.logger.info('Saving the Model Params to %s' % self.savepath)\n\n    def train_summary(self):\n        self.logger.info('\\n\\nEnd of Training...')\n        self.logger.info('Best Instance Accuracy: %.3f at Epoch %d ' % (\n            self.best_instance_acc, self.best_instance_epoch))\n        self.logger.info('Best Class Accuracy: %.3f at Epoch %d' % (\n            self.best_class_acc, self.best_class_epoch))\n\n    def update_from_checkpoints(self, checkpoint):\n        self.logger.info('Use Pre-Trained Weights')\n        self.step 
= checkpoint['step']\n        self.epoch = checkpoint['epoch']\n        self.best_instance_epoch, self.best_instance_acc = checkpoint['epoch'], checkpoint['instance_acc']\n        self.best_class_epoch, self.best_class_acc = checkpoint['best_class_epoch'], checkpoint['best_class_acc']\n        self.logger.info('Best Class Acc {:.3f} at Epoch {}'.format(self.best_instance_acc, self.best_class_epoch))\n        self.logger.info('Best Instance Acc {:.3f} at Epoch {}'.format(self.best_instance_acc, self.best_instance_epoch))\n\n    def update_from_checkpoints_tf(self, checkpoint):\n        self.logger.info('Use Pre-Trained Weights')\n        self.step = checkpoint['step']\n        self.epoch = checkpoint['epoch']\n        self.best_instance_epoch, self.best_instance_acc = checkpoint['epoch'], checkpoint['instance_acc']\n        self.best_class_epoch, self.best_class_acc = checkpoint['best_class_epoch'], checkpoint['best_class_acc']\n        self.logger.info('Best Class Acc {:.3f} at Epoch {}'.format(self.best_instance_acc, self.best_class_epoch))\n        self.logger.info('Best Instance Acc {:.3f} at Epoch {}'.format(self.best_instance_acc, self.best_instance_epoch))\n"
  },
  {
    "path": "OcCo_TF/utils/__init__.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#\nfrom tf_util import *\nfrom pc_util import *\nfrom io_util import *\nfrom data_util import *\nfrom visu_util import *"
  },
  {
    "path": "OcCo_TF/utils/check_num_point.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\n# Author: Hanchen Wang, hw501@cam.ac.uk\n\nimport numpy as np\nimport os, json, argparse\nfrom data_util import lmdb_dataflow\nfrom io_util import read_pcd\nfrom tqdm import tqdm\n\nMODELNET40_PATH = r\"../render/dump_modelnet_normalised_\"\nSCANNET10_PATH = r\"../data/ScanNet10\"\nSHAPENET8_PATH = r\"../data/shapenet\"\n\n\nif __name__ == \"__main__\":\n\t\n\tparser = argparse.ArgumentParser()\n\tparser.add_argument('--dataset', type=str, default='modelnet40', help=\"modelnet40, shapenet8 or scannet10\")\n\t\n\targs = parser.parse_args()\n\tos.system(\"mkdir -p ./dump_sum_points\")\n\t\n\tif args.dataset == 'modelnet40':\n\t\tshape_names = open(r'../render/shape_names.txt').read().splitlines()\n\t\tfile_ = open(r'../render/ModelNet_flist_normalised.txt').read().splitlines()\n\t\t\n\t\tprint(\"=== ModelNet40 ===\\n\")\n\t\tfor t in ['train', 'test']:\n\t\t\t# for res in ['fine', 'middle', 'coarse', 'supercoarse']:\n\t\t\tfor res in ['supercoarse']:\n\t\t\t\tsum_dict = {}\n\t\t\t\tfor shape in shape_names:\n\t\t\t\t\tsum_dict[shape] = np.zeros(3,dtype=np.int32)  # num of objects, num of points, average\n\t\t\t\t\n\t\t\t\tmodel_list = [_file for _file in file_ if t in _file]\n\t\t\t\tfor model_id in tqdm(model_list):\n\t\t\t\t\tmodel_name = model_id.split('/')[0]\n\t\t\t\t\tfor i in range(10):\n\t\t\t\t\t\tpartial_pc = read_pcd(os.path.join(MODELNET40_PATH + res, 'pcd', model_id + '_%d.pcd' % i))\n\t\t\t\t\t\tsum_dict[model_name][1] += len(partial_pc)\n\t\t\t\t\t\tsum_dict[model_name][0] += 1\n\t\t\t\t\t\n\t\t\t\t\tsum_dict[model_name][2] = sum_dict[model_name][1]/sum_dict[model_name][0]\n\t\t\t\t\n\t\t\t\tf = open(\"./dump_sum_points/modelnet40_%s_%s.txt\" % (t, res), \"w+\")\n\t\t\t\tfor key in sum_dict.keys():\n\t\t\t\t\tf.writelines([key, str(sum_dict[key]), '\\n'])\n\t\t\t\tf.close()\n\t\t\t\tprint(\"=== ModelNet40 %s %s Done ===\\n\" % (t, res))\n\t\n\telif args.dataset == 
'shapenet8':\n\t\tprint(\"\\n\\n=== ShapeNet8 ===\\n\")\n\t\tfor t in ['train', 'valid']:\n\t\t\tsum_dict = json.loads(open(os.path.join(SHAPENET8_PATH, 'keys.json')).read())\n\t\t\tfor key in sum_dict.keys():\n\t\t\t\tsum_dict[key] = np.zeros(3)  # num of objects, num of points, average\n\t\t\t\n\t\t\t# the data stored in the lmdb files is with varying number of points\n\t\t\tdf, num = lmdb_dataflow(lmdb_path=os.path.join(SHAPENET8_PATH, '%s.lmdb' % t),\n\t\t\t                        batch_size=1, input_size=1000000, output_size=1, is_training=False)\n\t\t\t\n\t\t\tdata_gen = df.get_data()\n\t\t\tfor _ in tqdm(range(num)):\n\t\t\t\tids, _, npts, _ = next(data_gen)\n\t\t\t\tmodel_name = ids[0][:8]\n\t\t\t\tsum_dict[model_name][1] += npts[0]\n\t\t\t\tsum_dict[model_name][0] += 1\n\t\t\t\t\n\t\t\t\tsum_dict[model_name][2] = sum_dict[model_name][1] / sum_dict[model_name][0]\n\t\t\t\t\n\t\t\tf = open(\"./dump_sum_points/shapenet8_%s.json\" % t, \"w+\")\n\t\t\tfor key in sum_dict.keys():\n\t\t\t\tf.writelines([key, str(sum_dict[key]), '\\n'])\n\t\t\t# f.write(json.dumps(sum_dict))\n\t\t\tf.close()\n\t\t\tprint(\"=== ShapeNet8 %s Done ===\\n\" % t)\n\t\n\telif args.dataset == 'scannet10':\n\t\tprint(\"\\n\\n=== ScanNet10 is not ready yet ===\\n\")\n\t\t\n\telse:\n\t\traise ValueError('Assigned dataset do not exist.')\n"
  },
  {
    "path": "OcCo_TF/utils/check_scale.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport numpy as np\nimport os, open3d, sys\n\nLOG_F = open(r'./scale_sum_modelnet40raw.txt', 'w+')\nopen3d.utility.set_verbosity_level = 0\n\ndef log_string(msg):\n    print(msg)\n    LOG_F.writelines(msg + '\\n')\n\n\nif __name__ == \"__main__\":\n\n    lmdb_f = r'./data/shapenet/train.lmdb'\n    modelnet_raw_path = r'./data/modelnet40_raw/'\n    shapenet_raw_path = r'./data/ShapeNet_raw/'\n    modelnet40_pn_processed_f = r'./data/'\n\n    off_set, max_radius = 0, 0\n\n    '''=== ModelNet40 ==='''\n    log_string('=== ModelNet40 Raw ===\\n\\n\\n')\n    for root, dirs, files in os.walk(modelnet_raw_path):\n        for name in files:\n            if '.ply' in name:\n                mesh = open3d.io.read_triangle_mesh(os.path.join(root, name))\n                off_set_bias = (mesh.get_center()**2).sum()\n\n                if off_set_bias > off_set:\n                    off_set = off_set_bias\n                    log_string('update offset: %f by %s' % (off_set, os.path.join(root, name)))\n                    radius_bias = (np.asarray(mesh.vertices)**2).sum(axis=1).max()\n\n                    if radius_bias > max_radius:\n                        max_radius = radius_bias\n                        log_string('update max radius: %f by %s' %(max_radius, os.path.join(root, name)))\n    log_string('\\n\\n\\n=== sum for ShapeNetCorev2 ===')\n    log_string('===offset:%f,  radius:%f===\\n\\n\\n'%(off_set, max_radius))\n\n    sys.exit('finish computing ModelNet40')\n\n\n    '''=== ShapeNetCore ==='''\n    log_string('=== now on ShapeNetCorev2 ===\\n\\n\\n')\n    for root, dirs, files in os.walk(shapenet_raw_path):\n        for name in files:\n            if '.obj' in name:\n                mesh = open3d.io.read_triangle_mesh(os.path.join(root, name))\n                off_set_bias = (mesh.get_center()**2).sum()\n                if off_set_bias > off_set:\n                    off_set = 
off_set_bias\n                    log_string('update offset: %f by %s' % (off_set, os.path.join(root, name)))\n\n                radius_bias = (np.asarray(mesh.vertices)**2).sum(axis=1).max()\n\n                if radius_bias > max_radius:\n                    max_radius = radius_bias\n                    log_string('update max radius: %f by %s' %(max_radius, os.path.join(root, name)))\n\n    log_string('\\n\\n\\n=== sum for ShapeNetCorev2 ===')\n    log_string('===offset:%f,  radius:%f===\\n\\n\\n'%(off_set, max_radius))\n\n    sys.exit('finish computing ShapeNetCorev2')\n\n    '''=== PCN ==='''\n    log_string('===now on PCN cleaned subset of ShapeNet===\\n\\n\\n')\n    df_train, num_train = lmdb_dataflow(lmdb_path = lmdb_f, batch_size=1,\n                                        input_size=3000, output_size=16384, is_training=True)\n    train_gen = df_train.get_data()\n\n    for idx in range(231792):\n        ids, _, _, gt = next(train_gen)\n        off_set_bias = (gt.mean(axis=1)**2).sum()\n\n        if off_set_bias > off_set:\n            off_set = off_set_bias\n            log_string('update offset: %f by %d, %s' % (off_set, idx, ids))\n\n        radius_bias = (gt**2).sum(axis=2).max()\n\n        if radius_bias > max_radius:\n            max_radius = radius_bias\n            log_string('update max radius: %f by %d, %s' %(max_radius, idx, ids))\n\n    log_string('\\n\\n\\n===for PCN cleaned subset of ShapeNet===')\n    log_string('===offset:%f,  radius:%f===\\n\\n\\n'%(off_set, max_radius))\n\n"
  },
  {
    "path": "OcCo_TF/utils/data_util.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref:\n\nimport numpy as np, tensorflow as tf\nfrom tensorpack import dataflow\n\n\ndef resample_pcd(pcd, n):\n    \"\"\"drop or duplicate points so that input of each object has exactly n points\"\"\"\n    idx = np.random.permutation(pcd.shape[0])\n    if idx.shape[0] < n:\n        idx = np.concatenate([idx, np.random.randint(pcd.shape[0], size=n-pcd.shape[0])])\n    return pcd[idx[:n]]\n\n\nclass PreprocessData(dataflow.ProxyDataFlow):\n    def __init__(self, ds, input_size, output_size):\n        # what is ds doing..?\n        super(PreprocessData, self).__init__(ds)\n        self.input_size = input_size\n        self.output_size = output_size\n\n    def get_data(self):\n        for id, input, gt in self.ds.get_data():\n            input = resample_pcd(input, self.input_size)\n            gt = resample_pcd(gt, self.output_size)\n            yield id, input, gt\n\n\nclass BatchData(dataflow.ProxyDataFlow):\n    def __init__(self, ds, batch_size, input_size, gt_size, remainder=False, use_list=False):\n        super(BatchData, self).__init__(ds)\n        self.batch_size = batch_size\n        self.input_size = input_size\n        self.gt_size = gt_size\n        self.remainder = remainder\n        self.use_list = use_list\n\n    def __len__(self):\n        \"\"\"get the number of batches\"\"\"\n        ds_size = len(self.ds)\n        div = ds_size // self.batch_size\n        rem = ds_size % self.batch_size\n        if rem == 0:\n            return div\n        return div + int(self.remainder)  # int(False) == 0\n\n    def __iter__(self):\n        \"\"\"generating data in batches\"\"\"\n        holder = []\n        for data in self.ds:\n            holder.append(data)\n            if len(holder) == self.batch_size:\n                yield self._aggregate_batch(holder, self.use_list)\n                del holder[:]  # reset holder as empty list => holder = []\n        if self.remainder and 
len(holder) > 0:\n            yield self._aggregate_batch(holder, self.use_list)\n\n    def _aggregate_batch(self, data_holder, use_list=False):\n        \"\"\"\n        Concatenate input points along the 0-th dimension\n            Stack all other data along the 0-th dimension\n        \"\"\"\n        ids = np.stack([x[0] for x in data_holder])\n        inputs = [resample_pcd(x[1], self.input_size) if x[1].shape[0] > self.input_size else x[1]\n            for x in data_holder]\n        inputs = np.expand_dims(np.concatenate([x for x in inputs]), 0).astype(np.float32)\n        npts = np.stack([x[1].shape[0] if x[1].shape[0] < self.input_size else self.input_size\n            for x in data_holder]).astype(np.int32)\n        gts = np.stack([resample_pcd(x[2], self.gt_size) for x in data_holder]).astype(np.float32)\n        return ids, inputs, npts, gts\n\n\ndef lmdb_dataflow(lmdb_path, batch_size, input_size, output_size, is_training, test_speed=False):\n    \"\"\"load LMDB files, then generate batches??\"\"\"\n    df = dataflow.LMDBSerializer.load(lmdb_path, shuffle=False)\n    size = df.size()\n    if is_training:\n        df = dataflow.LocallyShuffleData(df, buffer_size=2000)  # buffer_size\n        df = dataflow.PrefetchData(df, nr_prefetch=500, nr_proc=1)  # multiprocess the data\n    df = BatchData(df, batch_size, input_size, output_size)\n    if is_training:\n        df = dataflow.PrefetchDataZMQ(df, nr_proc=8)\n    df = dataflow.RepeatedData(df, -1)\n    if test_speed:\n        dataflow.TestDataSpeed(df, size=1000).start()\n    df.reset_state()\n    return df, size\n\n\ndef get_queued_data(generator, dtypes, shapes, queue_capacity=10):\n    assert len(dtypes) == len(shapes), 'dtypes and shapes must have the same length'\n    queue = tf.FIFOQueue(queue_capacity, dtypes, shapes)\n    placeholders = [tf.placeholder(dtype, shape) for dtype, shape in zip(dtypes, shapes)]\n    enqueue_op = queue.enqueue(placeholders)\n    close_op = 
queue.close(cancel_pending_enqueues=True)\n    feed_fn = lambda: {placeholder: value for placeholder, value in zip(placeholders, next(generator))}\n    queue_runner = tf.contrib.training.FeedingQueueRunner(\n        queue, [enqueue_op], close_op, feed_fns=[feed_fn])\n    tf.train.add_queue_runner(queue_runner)\n    return queue.dequeue()\n"
  },
  {
    "path": "OcCo_TF/utils/io_util.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport h5py, numpy as np\nfrom open3d.open3d.geometry import PointCloud\nfrom open3d.open3d.utility import Vector3dVector\nfrom open3d.open3d.io import read_point_cloud, write_point_cloud\n\n\ndef read_pcd(filename):\n    pcd = read_point_cloud(filename)\n    return np.array(pcd.points)\n\n\ndef save_pcd(filename, points):\n    pcd = PointCloud()\n    pcd.points = Vector3dVector(points)\n    write_point_cloud(filename, pcd)\n\n\ndef shuffle_data(data, labels):\n    \"\"\" Shuffle data and labels \"\"\"\n    idx = np.arange(len(labels))\n    np.random.shuffle(idx)\n    return data[idx, ...], labels[idx], idx\n\n\ndef loadh5DataFile(PathtoFile):\n    f = h5py.File(PathtoFile, 'r')\n    return f['data'][:], f['label'][:]\n\n\ndef getDataFiles(list_filename):\n    return [line.rstrip() for line in open(list_filename)]\n\n\ndef save_h5(h5_filename, data, label, data_dtype='uint8', label_dtype='uint8'):\n    h5_fout = h5py.File(h5_filename)\n    h5_fout.create_dataset(\n        name='data', data=data,\n        compression='gzip', compression_opts=4,\n        dtype=data_dtype)\n    h5_fout.create_dataset(\n        name='label', data=label,\n        compression='gzip', compression_opts=1,\n        dtype=label_dtype)\n    h5_fout.close()\n"
  },
  {
    "path": "OcCo_TF/utils/pc_util.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport numpy as np\n\n\ndef jitter_point_cloud(batch_data, sigma=0.01, clip=0.05):\n    \"\"\" Randomly jitter points. jittering is per point.\n        Input:\n          BxNx3 array, original batch of point clouds\n        Return:\n          BxNx3 array, jittered batch of point clouds\n    \"\"\"\n    B, N, C = batch_data.shape\n    assert(clip > 0)\n    jittered_data = np.clip(sigma * np.random.randn(B, N, C), -1*clip, clip)\n    jittered_data += batch_data\n    return jittered_data\n\n\ndef rotate_point_cloud(batch_data):\n    \"\"\" Randomly rotate the point clouds to argument the dataset\n        rotation is per shape based along up direction\n        Input:\n          BxNx3 array, original batch of point clouds\n        Return:\n          BxNx3 array, rotated batch of point clouds\n    \"\"\"\n    rotated_data = np.zeros(batch_data.shape, dtype=np.float32)\n    for k in range(batch_data.shape[0]):\n        rotation_angle = np.random.uniform() * 2 * np.pi\n        cosval = np.cos(rotation_angle)\n        sinval = np.sin(rotation_angle)\n        rotation_matrix = np.array([[cosval, 0, sinval],\n                                    [0, 1, 0],\n                                    [-sinval, 0, cosval]])\n        shape_pc = batch_data[k, ...]\n        rotated_data[k, ...] 
= np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)\n    return rotated_data\n\n\ndef rotate_point_cloud_by_angle(batch_data, rotation_angle):\n    \"\"\" Rotate the point cloud along up direction with certain angle.\n        Input:\n          BxNx3 array, original batch of point clouds\n        Return:\n          BxNx3 array, rotated batch of point clouds\n    \"\"\"\n    rotated_data = np.zeros(batch_data.shape, dtype=np.float32)\n    for k in range(batch_data.shape[0]):\n        # rotation_angle = np.random.uniform() * 2 * np.pi\n        cosval = np.cos(rotation_angle)\n        sinval = np.sin(rotation_angle)\n        rotation_matrix = np.array([[cosval, 0, sinval],\n                                    [0, 1, 0],\n                                    [-sinval, 0, cosval]])\n        shape_pc = batch_data[k, ...]\n        rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix)\n    return rotated_data\n\n\ndef random_point_dropout(batch_pc, max_dropout_ratio=0.875):\n    \"\"\" batch_pc: BxNx3 \"\"\"\n    for b in range(batch_pc.shape[0]):\n        # np.random.random() -> Return random floats in the half-open interval [0.0, 1.0).\n        dropout_ratio = np.random.random() * max_dropout_ratio  # 0 ~ 0.875\n        drop_idx = np.where(np.random.random((batch_pc.shape[1])) <= dropout_ratio)[0]\n        if len(drop_idx) > 0:\n            batch_pc[b, drop_idx, :] = batch_pc[b, 0, :]  # set to the first point\n    return batch_pc\n\n\ndef random_scale_point_cloud(batch_data, scale_low=0.8, scale_high=1.25):\n    \"\"\" Randomly scale the point cloud. 
Scale is per point cloud.\n        Input:\n            BxNx3 array, original batch of point clouds\n        Return:\n            BxNx3 array, scaled batch of point clouds\n    \"\"\"\n    B, N, C = batch_data.shape\n    scales = np.random.uniform(scale_low, scale_high, B)\n    for batch_index in range(B):\n        batch_data[batch_index, :, :] *= scales[batch_index]\n    return batch_data\n\n\ndef random_shift_point_cloud(batch_data, shift_range=0.1):\n    \"\"\" Randomly shift point cloud. Shift is per point cloud.\n        Input:\n          BxNx3 array, original batch of point clouds\n        Return:\n          BxNx3 array, shifted batch of point clouds\n    \"\"\"\n    B, N, C = batch_data.shape\n    shifts = np.random.uniform(-shift_range, shift_range, (B, 3))\n    for batch_index in range(B):\n        batch_data[batch_index, :, :] += shifts[batch_index, :]\n    return batch_data\n"
  },
  {
    "path": "OcCo_TF/utils/tf_util.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport tensorflow as tf\ntry:\n    from pc_distance import tf_nndistance, tf_approxmatch\nexcept:\n    pass\n\n'''mlp and conv1d with stride 1 are different'''\n\n\ndef mlp(features, layer_dims, bn=None, bn_params=None):\n    # doc: https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/contrib/layers/fully_connected\n    for i, num_outputs in enumerate(layer_dims[:-1]):\n        features = tf.contrib.layers.fully_connected(\n            features, num_outputs,\n            normalizer_fn=bn,\n            normalizer_params=bn_params,\n            scope='fc_%d' % i)\n    outputs = tf.contrib.layers.fully_connected(\n        features, layer_dims[-1],\n        activation_fn=None,\n        scope='fc_%d' % (len(layer_dims) - 1))\n    return outputs\n\n\ndef mlp_conv(inputs, layer_dims, bn=None, bn_params=None):\n    # doc: https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/contrib/layers/conv1d\n    for i, num_out_channel in enumerate(layer_dims[:-1]):\n        inputs = tf.contrib.layers.conv1d(\n            inputs, num_out_channel,\n            kernel_size=1,\n            normalizer_fn=bn,\n            normalizer_params=bn_params,\n            scope='conv_%d' % i)\n    # kernel size -> single value for all spatial dimensions\n    # the size of filter should be (1, 3)\n    outputs = tf.contrib.layers.conv1d(\n        inputs, layer_dims[-1],\n        kernel_size=1,\n        activation_fn=None,\n        scope='conv_%d' % (len(layer_dims) - 1))\n    return outputs\n\n\ndef point_maxpool(inputs, npts, keepdims=False):\n    # number of points, number of channels -> get the maximum value along the number of channels\n    outputs = [tf.reduce_max(f, axis=1, keepdims=keepdims) for f in tf.split(inputs, npts, axis=1)]\n    return tf.concat(outputs, axis=0)\n\n\ndef point_unpool(inputs, npts):\n    inputs = tf.split(inputs, inputs.shape[0], axis=0)\n    outputs = [tf.tile(f, [1, 
npts[i], 1]) for i, f in enumerate(inputs)]\n    return tf.concat(outputs, axis=1)\n\n\ndef chamfer(pcd1, pcd2):\n    \"\"\"Normalised Chamfer Distance\"\"\"\n    dist1, _, dist2, _ = tf_nndistance.nn_distance(pcd1, pcd2)\n    dist1 = tf.reduce_mean(tf.sqrt(dist1))\n    dist2 = tf.reduce_mean(tf.sqrt(dist2))\n    return (dist1 + dist2) / 2\n\n\ndef earth_mover(pcd1, pcd2):\n    \"\"\"Normalised Earth Mover Distance\"\"\"\n    assert pcd1.shape[1] == pcd2.shape[1]  # has the same number of points\n    num_points = tf.cast(pcd1.shape[1], tf.float32)\n    match = tf_approxmatch.approx_match(pcd1, pcd2)\n    cost = tf_approxmatch.match_cost(pcd1, pcd2, match)\n    return tf.reduce_mean(cost / num_points)\n\n\ndef add_train_summary(name, value):\n    tf.summary.scalar(name, value, collections=['train_summary'])\n\n\ndef add_valid_summary(name, value):\n    avg, update = tf.metrics.mean(value)\n    tf.summary.scalar(name, avg, collections=['valid_summary'])\n    return update\n\n\n''' === borrow from PointNet === '''\n\n\ndef _variable_on_cpu(name, shape, initializer, use_fp16=False):\n    \"\"\"Helper to create a Variable stored on CPU memory.\n    Args:\n        name: name of the variable\n        shape: list of ints\n        initializer: initializer for Variable\n        use_fp16: use 16 bit float or 32 bit float\n    Returns:\n        Variable Tensor\n    \"\"\"\n    with tf.device('/cpu:0'):\n        dtype = tf.float16 if use_fp16 else tf.float32\n        var = tf.get_variable(name, shape, initializer=initializer, dtype=dtype)\n    return var\n\n\ndef _variable_with_weight_decay(name, shape, stddev, wd, use_xavier=True):\n    \"\"\"Helper to create an initialized Variable with weight decay.\n\n    Note that the Variable is initialized with a truncated normal distribution.\n    A weight decay is added only if one is specified.\n\n    Args:\n        name: name of the variable\n        shape: list of ints\n        stddev: standard deviation of a truncated Gaussian\n    
    wd: add L2Loss weight decay multiplied by this float. If None, weight\n            decay is not added for this Variable.\n        use_xavier: bool, whether to use xavier initializer\n\n    Returns:\n        Variable Tensor\n    \"\"\"\n    if use_xavier:\n        initializer = tf.contrib.layers.xavier_initializer()\n    else:\n        initializer = tf.truncated_normal_initializer(stddev=stddev)\n    var = _variable_on_cpu(name, shape, initializer)\n    if wd is not None:\n        weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')\n        tf.add_to_collection('losses', weight_decay)\n    return var\n\n\ndef batch_norm_template(inputs, is_training, scope, moments_dims, bn_decay):\n    \"\"\" Batch normalization on convolutional maps and beyond...\n    Ref.: http://stackoverflow.com/questions/33949786/how-could-i-use-batch-normalization-in-tensorflow\n\n    Args:\n        inputs:        Tensor, k-D input ... x C could be BC or BHWC or BDHWC\n        is_training:   boolean tf.Variable, true indicates training phase\n        scope:         string, variable scope\n        moments_dims:  a list of ints, indicating dimensions for moments calculation\n        bn_decay:      float or float tensor variable, controlling moving average weight\n    Return:\n        normed:        batch-normalized maps\n    \"\"\"\n    with tf.variable_scope(scope) as sc:\n        num_channels = inputs.get_shape()[-1].value\n        beta = tf.Variable(tf.constant(0.0, shape=[num_channels]),\n                           name='beta', trainable=True)\n        gamma = tf.Variable(tf.constant(1.0, shape=[num_channels]),\n                            name='gamma', trainable=True)\n        batch_mean, batch_var = tf.nn.moments(inputs, moments_dims, name='moments')  # basically just mean and variance\n        decay = bn_decay if bn_decay is not None else 0.9\n        ema = tf.train.ExponentialMovingAverage(decay=decay)\n        # Operator that maintains moving averages of 
variables.\n        ema_apply_op = tf.cond(is_training,\n                               lambda: ema.apply([batch_mean, batch_var]),\n                               lambda: tf.no_op())\n\n        # Update moving average and return current batch's avg and var.\n        def mean_var_with_update():\n            with tf.control_dependencies([ema_apply_op]):\n                return tf.identity(batch_mean), tf.identity(batch_var)\n\n        # ema.average returns the Variable holding the average of var.\n        mean, var = tf.cond(is_training,\n                            mean_var_with_update,\n                            lambda: (ema.average(batch_mean), ema.average(batch_var)))\n        normed = tf.nn.batch_normalization(inputs, mean, var, beta, gamma, 1e-3)\n        '''tf.nn.batch_normalization(x, mean, variance, offset, scale, variance_epsilon, name=None)'''\n    # 这里的beta, gamma不是3rd/4th moment, 是transferred mean and variance\n    # y_i = gamma * x_i + beta, 其中 x_i 是 normalized 之后的结果\n    # ref: https://towardsdatascience.com/batch-normalization-theory-and-how-to-use-it-with-tensorflow-1892ca0173ad\n    return normed\n\n\ndef batch_norm_for_fc(inputs, is_training, bn_decay, scope):\n    \"\"\" Batch normalization on FC data.\n\n    Args:\n        inputs:      Tensor, 2D BxC input\n        is_training: boolean tf.Variable, true indicates training phase\n        bn_decay:    float or float tensor variable, controlling moving average weight\n        scope:       string, variable scope\n    Return:\n        normed:      batch-normalized maps\n    \"\"\"\n    return batch_norm_template(inputs, is_training, scope, [0,], bn_decay)\n\n\ndef fully_connected(inputs,\n                    num_outputs,\n                    scope,\n                    use_xavier=True,\n                    stddev=1e-3,\n                    weight_decay=0.0,\n                    activation_fn=tf.nn.relu,\n                    bn=False,\n                    bias=True,\n                    
bn_decay=None,\n                    is_training=None):\n    \"\"\" Fully connected layer with non-linear operation.\n\n    Args:\n      inputs: 2-D tensor BxN\n      num_outputs: int\n\n    Returns:\n      Variable tensor of size B x num_outputs.\n    \"\"\"\n    with tf.variable_scope(scope) as sc:\n        num_input_units = inputs.get_shape()[-1].value\n        weights = _variable_with_weight_decay('weights',\n                                              shape=[num_input_units, num_outputs],\n                                              use_xavier=use_xavier,\n                                              stddev=stddev,\n                                              wd=weight_decay)\n        outputs = tf.matmul(inputs, weights)\n        if bias:\n            biases = _variable_on_cpu('biases', [num_outputs],\n                                      tf.constant_initializer(0.0))\n            outputs = tf.nn.bias_add(outputs, biases)\n\n        if bn:\n            outputs = batch_norm_for_fc(outputs, is_training, bn_decay, 'bn')\n\n        if activation_fn is not None:\n            outputs = activation_fn(outputs)\n        return outputs\n\n\ndef max_pool2d(inputs,\n               kernel_size,\n               scope,\n               stride=[2, 2],\n               padding='VALID'):\n    \"\"\" 2D max pooling.\n\n    Args:\n      inputs: 4-D tensor BxHxWxC\n      kernel_size: a list of 2 ints\n      stride: a list of 2 ints\n\n    Returns:\n      Variable tensor\n    \"\"\"\n    with tf.variable_scope(scope) as sc:\n        kernel_h, kernel_w = kernel_size\n        stride_h, stride_w = stride\n        outputs = tf.nn.max_pool(inputs,\n                                 ksize=[1, kernel_h, kernel_w, 1],\n                                 strides=[1, stride_h, stride_w, 1],\n                                 padding=padding,\n                                 name=sc.name)\n        '''\n        tf.nn.max_pool(value, ksize, strides, padding, data_format='NHWC',\n              
          name=None, input=None)\n        value: (NHWC) -> Number of Batch * In Height * In Width * In Channel\n        kzise:\n\n        '''\n        return outputs\n\n\ndef dropout(inputs,\n            is_training,\n            scope,\n            keep_prob=0.5,\n            noise_shape=None):\n    \"\"\" Dropout layer.\n\n    Args:\n      inputs: tensor\n      is_training: boolean tf.Variable\n      scope: string\n      keep_prob: float in [0,1]\n      noise_shape: list of ints\n\n    Returns:\n      tensor variable\n    \"\"\"\n    with tf.variable_scope(scope) as sc:\n        outputs = tf.cond(is_training,\n                          lambda: tf.nn.dropout(inputs, keep_prob, noise_shape),\n                          lambda: inputs)\n        return outputs\n\n\ndef conv2d(inputs,\n           num_output_channels,\n           kernel_size,\n           scope,\n           stride=[1, 1],\n           padding='SAME',\n           use_xavier=True,\n           stddev=1e-3,\n           weight_decay=0.0,\n           activation_fn=tf.nn.relu,\n           bn=False,\n           bias=True,\n           bn_decay=None,\n           is_training=None):\n    \"\"\" 2D convolution with non-linear operation.\n\n    Args:\n      inputs: 4-D tensor variable BxHxWxC (Batch Size * Height * Width * Channel)\n      num_output_channels: int\n      kernel_size: a list of 2 ints\n      scope: string\n      stride: a list of 2 ints\n      padding: 'SAME' or 'VALID'\n      use_xavier: bool, use xavier_initializer if true,\n                    xavier initializer is the weights initialization technique\n                    that tries to make the variance of the outputs of a layer\n                    to be equal to the variance of its inputs\n      stddev: float, stddev for truncated_normal init\n      weight_decay: float\n      activation_fn: function\n      bn: bool, whether to use batch norm\n      bias: bool, whether to add bias or not\n      bn_decay: float or float tensor variable in [0,1] -> 
actually no idea = =\n      is_training: bool Tensor variable\n\n    Returns:\n      Variable tensor\n    \"\"\"\n    with tf.variable_scope(scope) as sc:\n        # either [1, 1] or [1, 3]\n        kernel_h, kernel_w = kernel_size\n        # 64, 128, 256, 512, 1028\n        num_in_channels = inputs.get_shape()[-1].value\n        kernel_shape = [kernel_h, kernel_w,\n                        num_in_channels, num_output_channels]\n\n        # not using weight_dacay, since we are using xavier initializer,\n        # so stddev is not used since it is the setting for truncated_normal_initializer()\n        kernel = _variable_with_weight_decay('weights',\n                                             shape=kernel_shape,\n                                             use_xavier=use_xavier,\n                                             stddev=stddev,\n                                             wd=weight_decay)\n        # always [1, 1]\n        stride_h, stride_w = stride\n\n        # tf.nn.conv2d(input, filters, strides, padding, data_format='NHWC', dilations=None, name=None)\n        # filters -> [filter_height, filter_width, in_channels, out_channels], [1,1,1,1] or [1,1,3,1]\n        # -> Point-Based MLPs\n        outputs = tf.nn.conv2d(inputs, kernel,\n                               [1, stride_h, stride_w, 1],\n                               padding=padding)\n        if bias:\n            biases = _variable_on_cpu('biases', [num_output_channels], tf.constant_initializer(0.0))\n            outputs = tf.nn.bias_add(outputs, biases)\n\n        # always use batch normalisation\n        if bn:\n            outputs = batch_norm_for_conv2d(outputs, is_training, bn_decay=bn_decay, scope='bn')\n\n        # always use relu activation function\n        if activation_fn is not None:\n            outputs = activation_fn(outputs)\n        return outputs\n\n\ndef batch_norm_for_conv2d(inputs, is_training, bn_decay, scope):\n    \"\"\" Batch normalization on 2D convolutional maps. 
\"\"\"\n    return batch_norm_template(inputs, is_training, scope, [0, 1, 2], bn_decay)\n\n\ndef batch_norm_template(inputs, is_training, scope, moments_dims, bn_decay):\n    \"\"\" Batch normalization on convolutional maps and beyond...\n    Ref.: http://stackoverflow.com/questions/33949786/how-could-i-use-batch-normalization-in-tensorflow\n\n    Args:\n        inputs:        Tensor, k-D input ... x C could be BC or BHWC or BDHWC\n        is_training:   boolean tf.Variable, true indicates training phase\n        scope:         string, variable scope\n        moments_dims:  a list of ints, indicating dimensions for moments calculation\n        bn_decay:      float or float tensor variable, controlling moving average weight\n    Return:\n        normed:        batch-normalized maps\n    \"\"\"\n    with tf.variable_scope(scope) as sc:\n        num_channels = inputs.get_shape()[-1].value\n        beta = tf.Variable(tf.constant(0.0, shape=[num_channels]),\n                           name='beta', trainable=True)\n        gamma = tf.Variable(tf.constant(1.0, shape=[num_channels]),\n                            name='gamma', trainable=True)\n        batch_mean, batch_var = tf.nn.moments(inputs, moments_dims, name='moments')\n        # basically mean and variance\n        decay = bn_decay if bn_decay is not None else 0.9\n        # it seems that in the PointNet it is setting as 0.7\n\n        ema = tf.train.ExponentialMovingAverage(decay=decay)\n        # Operator that maintains moving averages of variables.\n        ema_apply_op = tf.cond(is_training,\n                               lambda: ema.apply([batch_mean, batch_var]),\n                               lambda: tf.no_op())\n\n        # Update moving average and return current batch's avg and var\n        # TODO: what is the window size???\n        def mean_var_with_update():\n            with tf.control_dependencies([ema_apply_op]):\n                return tf.identity(batch_mean), tf.identity(batch_var)\n\n        # 
ema.average returns the Variable holding the average of var.\n        mean, var = tf.cond(is_training,\n                            mean_var_with_update,\n                            lambda: (ema.average(batch_mean), ema.average(batch_var)))\n        normed = tf.nn.batch_normalization(inputs, mean, var, beta, gamma, 1e-3)\n        '''tf.nn.batch_normalization(x, mean, variance, offset, scale, variance_epsilon, name=None)'''\n    # 这里的beta, gamma不是3rd/4th moment, 是transferred mean and variance\n    # y_i = gamma * x_i + beta, 其中 x_i 是 normalized 之后的结果\n    # ref: https://towardsdatascience.com/batch-normalization-theory-and-how-to-use-it-with-tensorflow-1892ca0173ad\n    return normed\n\n\ndef pairwise_distance(point_cloud):\n    \"\"\"Compute pairwise distance of a point cloud.\n    Args:\n        point_cloud: tensor (batch_size, num_points, num_dims)\n\n    Returns:\n        pairwise distance: (batch_size, num_points, num_points)\n    \"\"\"\n\n    og_batch_size = point_cloud.get_shape().as_list()[0]\n    point_cloud = tf.squeeze(point_cloud)\n    if og_batch_size == 1:\n        point_cloud = tf.expand_dims(point_cloud, 0)\n\n    point_cloud_transpose = tf.transpose(point_cloud, perm=[0, 2, 1])\n    point_cloud_inner = tf.matmul(point_cloud, point_cloud_transpose)\n    point_cloud_inner = -2 * point_cloud_inner\n    point_cloud_square = tf.reduce_sum(tf.square(point_cloud), axis=-1, keep_dims=True)\n    point_cloud_square_tranpose = tf.transpose(point_cloud_square, perm=[0, 2, 1])\n    return point_cloud_square + point_cloud_inner + point_cloud_square_tranpose\n\n\ndef knn(adj_matrix, k=20):\n    \"\"\" Get KNN based on the pairwise distance.\n    Args:\n        pairwise distance: (batch_size, num_points, num_points)\n        k: int\n\n    Returns:\n        nearest neighbors: (batch_size, num_points, k)\n        \"\"\"\n    neg_adj = - adj_matrix\n    _, nn_idx = tf.nn.top_k(neg_adj, k=k)\n    return nn_idx\n\n\ndef get_edge_feature(point_cloud, nn_idx, k=20):\n   
 \"\"\"Construct edge feature for each point\n    Args:\n        point_cloud: (batch_size, num_points, 1, num_dims)\n        nn_idx: (batch_size, num_points, k)\n        k: int\n    Returns:\n        edge features: (batch_size, num_points, k, num_dims)\n        \"\"\"\n    og_batch_size = point_cloud.get_shape().as_list()[0]\n    point_cloud = tf.squeeze(point_cloud)\n    if og_batch_size == 1:\n        point_cloud = tf.expand_dims(point_cloud, 0)\n\n    point_cloud_central = point_cloud\n\n    point_cloud_shape = point_cloud.get_shape()\n    batch_size = point_cloud_shape[0].value\n    num_points = point_cloud_shape[1].value\n    num_dims = point_cloud_shape[2].value\n\n    idx_ = tf.range(batch_size) * num_points\n    idx_ = tf.reshape(idx_, [batch_size, 1, 1])\n\n    point_cloud_flat = tf.reshape(point_cloud, [-1, num_dims])\n    point_cloud_neighbors = tf.gather(point_cloud_flat, nn_idx + idx_)\n    point_cloud_central = tf.expand_dims(point_cloud_central, axis=-2)\n\n    point_cloud_central = tf.tile(point_cloud_central, [1, 1, k, 1])\n\n    # edge_feature = tf.concat([point_cloud_central, point_cloud_neighbors - point_cloud_central], axis=-1)\n    edge_feature = tf.concat([point_cloud_neighbors - point_cloud_central, point_cloud_central], axis=-1)\n\n    return edge_feature\n\n\ndef get_learning_rate(batch, base_lr, batch_size, decay_step, decay_rate, lr_clip):\n    learning_rate = tf.train.exponential_decay(\n        base_lr,             # Base learning rate.\n        batch * batch_size,  # Current index into the dataset.\n        decay_step,\t\t\t # Decay step.\n        decay_rate,\t\t\t # Decay rate.\n        staircase=True)\n    learning_rate = tf.maximum(learning_rate, lr_clip)  # CLIP THE LEARNING RATE!\n    return learning_rate\n\n\ndef get_lr_dgcnn(batch, base_lr, batch_size, decay_step, alpha):\n    learning_rate = tf.train.cosine_decay(\n        base_lr,             # Base learning rate.\n        batch * batch_size,  # Current index into the 
dataset.\n        decay_step,\t\t\t # Decay step.\n        alpha)               # alpha.\n    return learning_rate\n\n\ndef get_bn_decay(batch, bn_init_decay, batch_size, bn_decay_step, bn_decay_rate, bn_decay_clip):\n    bn_momentum = tf.train.exponential_decay(\n        bn_init_decay,\n        batch * batch_size,\n        bn_decay_step,\n        bn_decay_rate,\n        staircase=True)\n    bn_decay = tf.minimum(bn_decay_clip, 1 - bn_momentum)\n    return bn_decay\n"
  },
  {
    "path": "OcCo_TF/utils/transfer_pretrained_w.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport os, argparse, tensorflow as tf\nfrom tensorflow.python import pywrap_tensorflow\nfrom termcolor import colored\n\ndef load_para_from_saved_model(model_path, verbose=False):\n    \"\"\"load the all parameters from the saved TensorFlow checkpoint\n    the format is dict -> {var_name(str): var_value(numpy array)}\"\"\"\n    reader = pywrap_tensorflow.NewCheckpointReader(model_path)\n    var_to_map = reader.get_variable_to_shape_map()\n\n    print('\\n============================')\n    print('model checkpoint: ', model_path)\n    print('checkpoint has been loaded')\n    for key in var_to_map.keys():\n        var_to_map[key] = reader.get_tensor(key)\n        if verbose:\n            print('tensor_name:', key, ' shape:', reader.get_tensor(key).shape)\n    print('============================\\n')\n    return var_to_map\n\n\ndef intersec_saved_var(model_path1, model_path2, verbose=False):\n    \"\"\"find the intersection of two saved models in terms of variable names\"\"\"\n    var_to_map_1 = load_para_from_saved_model(model_path1, verbose=verbose)\n    var_to_map_2 = load_para_from_saved_model(model_path2, verbose=verbose)\n\n    # list of shared variable\n    intersect = [*set(var_to_map_1.keys()).intersection(set(var_to_map_2.keys())), ]\n\n    if verbose:\n        print('\\n=======================')\n        print('the shared variables are:')\n        print(intersect)\n\n    return var_to_map_1, var_to_map_2, intersect\n\n\ndef load_pretrained_var(source_model_path, target_model_path, verbose=False):\n    \"\"\"save the parameters from source to target for variables in the intersection\"\"\"\n    var_map_source, var_map_target, intersect = intersec_saved_var(\n        source_model_path, target_model_path, verbose=verbose)\n\n    out_f = open('para_restored.txt', 'w+')\n\n    with tf.Session() as my_sess:\n        new_var_list = []\n        for var in var_map_target.keys():\n     
       # pdb.set_trace()\n            if (var in intersect) and (var_map_source[var].shape == var_map_target[var].shape):\n                new_var = tf.Variable(var_map_source[var], name=var)\n                if verbose:\n                    print('%s has been restored from the pre-trained %s' % (var, source_model_path))\n                out_f.writelines('Restored: %s has been restored from the pre-trained %s\\n' % (var, source_model_path))\n            else:\n                new_var = tf.Variable(var_map_target[var], name=var)\n                if verbose:\n                    print('%s has been restored from the random initialized %s' % (var, target_model_path))\n                out_f.writelines('Random Initialised: %s\\n' % var)\n            new_var_list.append(new_var)\n        print('start to write the new checkpoint')\n        my_sess.run(tf.global_variables_initializer())\n        my_saver = tf.train.Saver(var_list=new_var_list)\n        my_saver.save(my_sess, target_model_path)\n        print(colored('source weights has been restored', 'white', 'on_blue'))\n\n    my_sess.close()\n    out_f.close()\n    return None\n\n\nif __name__ == '__main__':\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument('--source_path', default='./pretrained/pcn_cd')\n    parser.add_argument('--target_path', default='./log/pcn_cls_shapenet8_pretrained_init/model.ckpt')\n    parser.add_argument('--gpu', default='0')\n    parser.add_argument('--verbose', type=bool, default=True)\n    args = parser.parse_args()\n\n    os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'\n    os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)\n\n    load_pretrained_var(args.source_path, args.target_path, args.verbose)\n"
  },
  {
    "path": "OcCo_TF/utils/transform_nets.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n\nimport os, sys, numpy as np, tensorflow as tf\nimport tf_util\n\n\ndef input_transform_net_dgcnn(edge_feature, is_training, bn_decay=None, K=3):\n    \"\"\" Input (XYZ) Transform Net, input is BxNx3 gray image\n    Return:\n        Transformation matrix of size 3xK \"\"\"\n\n    batch_size = edge_feature.get_shape()[0].value\n    num_point = edge_feature.get_shape()[1].value\n\n    net = tf_util.conv2d(edge_feature, 64, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         scope='tconv1', bn_decay=bn_decay)\n    net = tf_util.conv2d(net, 128, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         scope='tconv2', bn_decay=bn_decay)\n\n    net = tf.reduce_max(net, axis=-2, keep_dims=True)\n\n    net = tf_util.conv2d(net, 1024, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         scope='tconv3', bn_decay=bn_decay)\n    net = tf_util.max_pool2d(net, [num_point, 1],\n                             padding='VALID', scope='tmaxpool')\n\n    net = tf.reshape(net, [batch_size, -1])\n    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,\n                                  scope='tfc1', bn_decay=bn_decay)\n    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,\n                                  scope='tfc2', bn_decay=bn_decay)\n\n    with tf.variable_scope('transform_XYZ') as sc:\n        # assert(K==3)\n        with tf.device('/cpu:0'):\n            weights = tf.get_variable('weights', [256, K * K],\n                                      initializer=tf.constant_initializer(0.0),\n                                      dtype=tf.float32)\n            biases = 
tf.get_variable('biases', [K * K],\n                                     initializer=tf.constant_initializer(0.0),\n                                     dtype=tf.float32)\n        biases += tf.constant(np.eye(K).flatten(), dtype=tf.float32)\n        transform = tf.matmul(net, weights)\n        transform = tf.nn.bias_add(transform, biases)\n\n    transform = tf.reshape(transform, [batch_size, K, K])\n    return transform\n\n\ndef input_transform_net(point_cloud, is_training, bn_decay=None, K=3):\n    \"\"\" Input (XYZ) Transform Net, input is BxNx3 gray image\n        Return:\n            Transformation matrix of size 3xK \"\"\"\n    # print('the input shape for t-net:', point_cloud.get_shape())\n    batch_size = point_cloud.get_shape()[0].value\n    num_point = point_cloud.get_shape()[1].value\n    # point_cloud -> Tensor of (batch size, number of points, 3d coordinates)\n\n    input_image = tf.expand_dims(point_cloud, -1)\n    # point_cloud -> (batch size, number of points, 3d coordinates, 1)\n    # batch size * height * width * channel\n\n    '''tf_util.conv2d(inputs, num_output_channels, kernel_size, scope, stride=[1, 1], padding='SAME',\n                        use_xavier=True, stddev=1e-3, weight_decay=0.0, activation_fn=tf.nn.relu,\n                        bn=False, bn_decay=None(default is set to 0.9), is_training=None)'''\n    net = tf_util.conv2d(input_image, 64, [1, 3],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         scope='tconv1', bn_decay=bn_decay)\n    net = tf_util.conv2d(net, 128, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         scope='tconv2', bn_decay=bn_decay)\n    net = tf_util.conv2d(net, 1024, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         
scope='tconv3', bn_decay=bn_decay)\n\n    # net = mlp_conv(input_image, [64, 128, 1024])\n    net = tf_util.max_pool2d(net, [num_point, 1],\n                             padding='VALID', scope='tmaxpool')\n    '''(default stride: (2, 2))'''\n    # net = tf.reduce_max(net, axis=1, keep_dims=True, name='tmaxpool')\n\n    net = tf.reshape(net, [batch_size, -1])\n    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,\n                                  scope='tfc1', bn_decay=bn_decay)\n    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,\n                                  scope='tfc2', bn_decay=bn_decay)\n\n    with tf.variable_scope('transform_XYZ') as sc:\n        assert (K == 3)\n        weights = tf.get_variable('weights', [256, 3 * K],\n                                  initializer=tf.constant_initializer(0.0),\n                                  dtype=tf.float32)\n        biases = tf.get_variable('biases', [3 * K],\n                                 initializer=tf.constant_initializer(0.0),\n                                 dtype=tf.float32)\n        biases += tf.constant([1, 0, 0, 0, 1, 0, 0, 0, 1], dtype=tf.float32)\n        transform = tf.matmul(net, weights)\n        transform = tf.nn.bias_add(transform, biases)\n\n    transform = tf.reshape(transform, [batch_size, 3, K])\n    return transform\n\n\ndef feature_transform_net(inputs, is_training, bn_decay=None, K=64):\n    \"\"\" Feature Transform Net, input is BxNx1xK\n        Return:\n            Transformation matrix of size KxK \"\"\"\n    batch_size = inputs.get_shape()[0].value\n    num_point = inputs.get_shape()[1].value\n\n    net = tf_util.conv2d(inputs, 64, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         scope='tconv1', bn_decay=bn_decay)\n    net = tf_util.conv2d(net, 128, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                
         bn=True, is_training=is_training,\n                         scope='tconv2', bn_decay=bn_decay)\n    net = tf_util.conv2d(net, 1024, [1, 1],\n                         padding='VALID', stride=[1, 1],\n                         bn=True, is_training=is_training,\n                         scope='tconv3', bn_decay=bn_decay)\n    net = tf_util.max_pool2d(net, [num_point, 1],\n                             padding='VALID', scope='tmaxpool')\n\n    net = tf.reshape(net, [batch_size, -1])\n    net = tf_util.fully_connected(net, 512, bn=True, is_training=is_training,\n                                  scope='tfc1', bn_decay=bn_decay)\n    net = tf_util.fully_connected(net, 256, bn=True, is_training=is_training,\n                                  scope='tfc2', bn_decay=bn_decay)\n\n    with tf.variable_scope('transform_feat') as sc:\n        weights = tf.get_variable('weights', [256, K * K],\n                                  initializer=tf.constant_initializer(0.0),\n                                  dtype=tf.float32)\n        biases = tf.get_variable('biases', [K * K],\n                                 initializer=tf.constant_initializer(0.0),\n                                 dtype=tf.float32)\n        biases += tf.constant(np.eye(K).flatten(), dtype=tf.float32)\n        transform = tf.matmul(net, weights)\n        transform = tf.nn.bias_add(transform, biases)\n\n    transform = tf.reshape(transform, [batch_size, K, K])\n    return transform\n"
  },
  {
    "path": "OcCo_TF/utils/visu_util.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n#  Original Author: Wentao Yuan (wyuan1@cs.cmu.edu) 05/31/2018\n\nimport numpy as np\nfrom matplotlib import pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\nfrom open3d.open3d.io import read_point_cloud\n# from open3d.open3d_pybind.io import read_point_cloud\n\n\ndef plot_pcd_three_views(filename, pcds, titles, suptitle='', sizes=None, cmap='Reds', zdir='y',\n                         xlim=(-0.3, 0.3), ylim=(-0.3, 0.3), zlim=(-0.3, 0.3)):\n    if sizes is None:\n        sizes = [0.5 for _ in range(len(pcds))]\n    fig = plt.figure(figsize=(len(pcds) * 3, 9))\n    for i in range(3):\n        elev = 30\n        azim = -45 + 90 * i\n        for j, (pcd, size) in enumerate(zip(pcds, sizes)):\n            color = pcd[:, 0]\n            ax = fig.add_subplot(3, len(pcds), i * len(pcds) + j + 1, projection='3d')\n            ax.view_init(elev, azim)\n            ax.scatter(pcd[:, 0], pcd[:, 1], pcd[:, 2], zdir=zdir, c=color, s=size, cmap=cmap, vmin=-1, vmax=0.5)\n            ax.set_title(titles[j])\n            ax.set_axis_off()\n            ax.set_xlim(xlim)\n            ax.set_ylim(ylim)\n            ax.set_zlim(zlim)\n    plt.subplots_adjust(left=0.05, right=0.95, bottom=0.05, top=0.9, wspace=0.1, hspace=0.1)\n    plt.suptitle(suptitle)\n    fig.savefig(filename)\n    plt.close(fig)\n\n\nif __name__ == \"__main__\":\n    filenames = ['airplane.pcd', 'car.pcd', 'chair.pcd', 'lamp.pcd']  # '../demo_data'\n    for file in filenames:\n        filename = file.replace('.pcd', '')\n        pcds = [np.asarray(read_point_cloud('../demo_data/' + file).points)]\n        titles = ['viewpoint 1', 'viewpoint 2', 'viewpoint 3']\n        plot_pcd_three_views(\n            filename, pcds, titles, suptitle=filename, sizes=None, cmap='viridis', zdir='y',\n            xlim=(-0.3, 0.3), ylim=(-0.3, 0.3), zlim=(-0.3, 0.3))\n"
  },
  {
    "path": "OcCo_Torch/Requirements_Torch.txt",
    "content": "# Originally Designed for Docker Environment:\n# PyTorch 1.3.0, Python 3.6, CUDA 10.1\n# install PyTorch first if not use docker\nlmdb >= 0.98\nh5py >= 2.10.0\nfuture >= 0.18.2\npyarrow >= 1.0.0\nopen3d == 0.9.0.0\nmatplotlib >= 3.3.0\ntensorpack == 0.9.8\ntensorboard >= 1.15.0\npython-prctl >= 1.5.0\nopen3d-python==0.7.0.0\nscikit-learn >= 0.23.1\n"
  },
  {
    "path": "OcCo_Torch/bash_template/train_cls_template.sh",
    "content": "#!/usr/bin/env bash\n\ncd ../\n\n# training pointnet on ModelNet40, from scratch\npython train_cls.py \\\n\t--gpu 0 \\\n\t--model pointnet_cls \\\n\t--dataset modelnet40 \\\n\t--log_dir modelnet40_pointnet_scratch ;\n\n\n# fine tuning pcn on ScanNet10, using jigsaw pre-trained checkpoints\npython train_cls.py \\\n\t--gpu 0 \\\n\t--model pcn_cls \\\n\t--dataset scannet10 \\\n\t--log_dir scannet10_pcn_jigsaw \\\n\t--restore \\\n\t--restore_path log/jigsaw/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\n\n\n# fine tuning dgcnn on ScanObjectNN(OBJ_BG), using jigsaw pre-trained checkpoints\npython train_cls.py \\\n\t--gpu 0,1 \\\n\t--epoch 250 \\\n\t--use_sgd \\\n\t--scheduler cos \\\n\t--model dgcnn_cls \\\n\t--dataset scanobjectnn \\\n\t--bn \\\n\t--log_dir scanobjectnn_dgcnn_occo \\\n\t--restore \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\n\n\n# test pointnet on ModelNet40 from pre-trained checkpoints\npython train_cls.py \\\n\t--gpu 1 \\\n\t--epoch 1 \\\n\t--mode test \\\n\t--model pointnet_cls \\\n\t--dataset modelnet40 \\\n\t--log_dir modelnet40_pointnet_scratch \\\n\t--restore \\\n\t--restore_path log/cls/modelnet40_pointnet_scratch/checkpoints/best_model.pth ;\n"
  },
  {
    "path": "OcCo_Torch/bash_template/train_completion_template.sh",
    "content": "#!/usr/bin/env bash\n\ncd ../\n\n# train pointnet-occo model on ModelNet, from scratch\npython train_completion.py \\\n\t--gpu 0,1 \\\n\t--dataset modelnet \\\n\t--model pointnet_occo \\\n\t--log_dir modelnet_pointnet_vanilla ;\n\n# train dgcnn-occo model on ShapeNet, from scratch\npython train_completion.py \\\n\t--gpu 0,1 \\\n\t--batch_size 16 \\\n\t--dataset shapenet \\\n\t--model dgcnn_occo \\\n\t--log_dir shapenet_dgcnn_vanilla ;\n"
  },
  {
    "path": "OcCo_Torch/bash_template/train_jigsaw_template.sh",
    "content": "#!/usr/bin/env bash\n\ncd ../\n\n# train pointnet_jigsaw on ModelNet40, from scratch\npython train_jigsaw.py \\\n\t--gpu 0 \\\n\t--model pointnet_jigsaw \\\n\t--bn_decay \\\n\t--xavier_init \\\n\t--optimiser Adam \\\n\t--scheduler step \\\n\t--log_dir modelnet40_pointnet_scratch ;\n\n\n# train dgcnn_jigsaw on ModelNet40, from scratch\npython train_jigsaw.py \\\n\t--gpu 0 \\\n\t--model dgcnn_jigsaw \\\n\t--optimiser SGD \\\n\t--scheduler cos \\\n\t--log_dir modelnet40_dgcnn_scartch ; \n"
  },
  {
    "path": "OcCo_Torch/bash_template/train_partseg_template.sh",
    "content": "#!/usr/bin/env bash\n\ncd ../\n\n# training pointnet on ShapeNetPart, from scratch\npython train_partseg.py \\\n\t--gpu 0 \\\n\t--normal \\\n\t--bn_decay \\\n\t--xavier_init \\\n\t--model pointnet_partseg \\\n    --log_dir pointnet_scratch ;\n\n\n# fine tuning pcn on ShapeNetPart, using jigsaw pre-trained checkpoints\npython train_partseg.py \\\n\t--gpu 0 \\\n\t--normal \\\n\t--bn_decay \\\n\t--xavier_init \\\n\t--model pcn_partseg \\\n\t--log_dir pcn_jigsaw \\\n\t--restore \\\n\t--restore_path log/jigsaw/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\n\n\n# fine tuning dgcnn on ShapeNetPart, using occo pre-trained checkpoints\npython train_partseg.py \\\n\t--gpu 0, 1 \\\n\t--normal \\\n\t--use_sgd \\\n\t--xavier_init \\\n\t--scheduler cos \\\n\t--model dgcnn_partseg \\\n\t--log_dir dgcnn_occo \\\n\t--restore \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\n\n\n# test fine tuned pointnet on ShapeNetPart, using multiple votes\npython train_partseg.py \\\n\t--gpu 0 \\\n\t--epoch 1 \\\n\t--mode test \\\n\t--num_votes 3 \\\n\t--model pointnet_partseg \\\n\t--log_dir pointnet_scratch \\\n\t--restore \\\n\t--restore_path log/partseg/pointnet_occo/checkpoints/best_model.pth ;\n"
  },
  {
    "path": "OcCo_Torch/bash_template/train_semseg_template.sh",
    "content": "#!/usr/bin/env bash\n\ncd ../\n\n# train pointnet_semseg on 6-fold cv of S3DIS, from scratch\nfor area in $(seq 1 1 6)\ndo\npython train_semseg.py \\\n\t--gpu 0,1 \\\n\t--model pointnet_semseg \\\n\t--bn_decay \\\n\t--xavier_init \\\n\t--test_area ${area} \\\n\t--scheduler step \\\n\t--log_dir pointnet_area${area}_scratch ;\ndone\n\n# fine tune pcn_semseg on 6-fold cv of S3DIS, using jigsaw pre-trained weights\nfor area in $(seq 1 1 6)\ndo\npython train_semseg.py \\\n\t--gpu 0,1 \\\n\t--model pcn_semseg \\\n\t--bn_decay \\\n\t--test_area ${area} \\\n\t--log_dir pcn_area${area}_jigsaw \\\n\t--restore \\\n\t--restore_path log/jigsaw/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\ndone\n\n# fine tune dgcnn_semseg on 6-fold cv of S3DIS, using occo pre-trained weights\nfor area in $(seq 1 1 6)\ndo\npython train_semseg.py \\\n\t--gpu 0,1 \\\n\t--test_area ${area} \\\n\t--optimizer sgd \\\n\t--scheduler cos \\\n\t--model dgcnn_semseg \\\n\t--log_dir dgcnn_area${area}_occo \\\n\t--restore \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\ndone\n"
  },
  {
    "path": "OcCo_Torch/bash_template/train_svm_template.sh",
    "content": "#!/usr/bin/env bash\n\ncd ../\n\n# fit a linear svm on ModelNet40 encoded by OcCo PointNet\npython train_svm.py \\\n\t--gpu 0 \\\n\t--model pointnet_util \\\n\t--dataset modelnet40 \\\n\t--restore_path log/completion/modelnet_pointnet_vanilla/checkpoints/best_model.pth ;\n\n\n# grid search the best parameters of a svm with rbf kernel on ModelNet40 encoded by OcCo PCN\npython train_svm.py \\\n\t--gpu 0 \\\n\t--grid_search \\\n\t--model pcn_util \\\n\t--dataset modelnet40 \\\n\t--restore_path log/completion/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\n\n\n# ... on ScanObjectNN(OBJ_BG) encoded by OcCo DGCNN\npython train_svm.py \\\n\t--gpu 0 \\\n\t--grid_search \\\n\t--batch_size 8 \\\n\t--model dgcnn_util \\\n\t--dataset scanobjectnn \\\n\t--bn \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\n"
  },
  {
    "path": "OcCo_Torch/chamfer_distance/__init__.py",
    "content": "from .chamfer_distance import ChamferDistance\n"
  },
  {
    "path": "OcCo_Torch/chamfer_distance/chamfer_distance.cpp",
    "content": "#include <torch/torch.h>\n\n// CUDA forward declarations\nint ChamferDistanceKernelLauncher(\n    const int b, const int n,\n    const float* xyz,\n    const int m,\n    const float* xyz2,\n    float* result,\n    int* result_i,\n    float* result2,\n    int* result2_i);\n\nint ChamferDistanceGradKernelLauncher(\n    const int b, const int n,\n    const float* xyz1,\n    const int m,\n    const float* xyz2,\n    const float* grad_dist1,\n    const int* idx1,\n    const float* grad_dist2,\n    const int* idx2,\n    float* grad_xyz1,\n    float* grad_xyz2);\n\n\nvoid chamfer_distance_forward_cuda(\n    const at::Tensor xyz1, \n    const at::Tensor xyz2, \n    const at::Tensor dist1, \n    const at::Tensor dist2, \n    const at::Tensor idx1, \n    const at::Tensor idx2) \n{\n    ChamferDistanceKernelLauncher(xyz1.size(0), xyz1.size(1), xyz1.data<float>(),\n                                            xyz2.size(1), xyz2.data<float>(),\n                                            dist1.data<float>(), idx1.data<int>(),\n                                            dist2.data<float>(), idx2.data<int>());\n}\n\nvoid chamfer_distance_backward_cuda(\n    const at::Tensor xyz1,\n    const at::Tensor xyz2, \n    at::Tensor gradxyz1, \n    at::Tensor gradxyz2, \n    at::Tensor graddist1, \n    at::Tensor graddist2, \n    at::Tensor idx1, \n    at::Tensor idx2)\n{\n    ChamferDistanceGradKernelLauncher(xyz1.size(0), xyz1.size(1), xyz1.data<float>(),\n                                           xyz2.size(1), xyz2.data<float>(),\n                                           graddist1.data<float>(), idx1.data<int>(),\n                                           graddist2.data<float>(), idx2.data<int>(),\n                                           gradxyz1.data<float>(), gradxyz2.data<float>());\n}\n\n\nvoid nnsearch(\n    const int b, const int n, const int m,\n    const float* xyz1,\n    const float* xyz2,\n    float* dist,\n    int* idx)\n{\n    for (int i = 0; i < b; 
i++) {\n        for (int j = 0; j < n; j++) {\n            const float x1 = xyz1[(i*n+j)*3+0];\n            const float y1 = xyz1[(i*n+j)*3+1];\n            const float z1 = xyz1[(i*n+j)*3+2];\n            double best = 0;\n            int besti = 0;\n            for (int k = 0; k < m; k++) {\n                const float x2 = xyz2[(i*m+k)*3+0] - x1;\n                const float y2 = xyz2[(i*m+k)*3+1] - y1;\n                const float z2 = xyz2[(i*m+k)*3+2] - z1;\n                const double d=x2*x2+y2*y2+z2*z2;\n                if (k==0 || d < best){\n                    best = d;\n                    besti = k;\n                }\n            }\n            dist[i*n+j] = best;\n            idx[i*n+j] = besti;\n        }\n    }\n}\n\n\nvoid chamfer_distance_forward(\n    const at::Tensor xyz1, \n    const at::Tensor xyz2, \n    const at::Tensor dist1, \n    const at::Tensor dist2, \n    const at::Tensor idx1, \n    const at::Tensor idx2) \n{\n    const int batchsize = xyz1.size(0);\n    const int n = xyz1.size(1);\n    const int m = xyz2.size(1);\n\n    const float* xyz1_data = xyz1.data<float>();\n    const float* xyz2_data = xyz2.data<float>();\n    float* dist1_data = dist1.data<float>();\n    float* dist2_data = dist2.data<float>();\n    int* idx1_data = idx1.data<int>();\n    int* idx2_data = idx2.data<int>();\n\n    nnsearch(batchsize, n, m, xyz1_data, xyz2_data, dist1_data, idx1_data);\n    nnsearch(batchsize, m, n, xyz2_data, xyz1_data, dist2_data, idx2_data);\n}\n\n\nvoid chamfer_distance_backward(\n    const at::Tensor xyz1, \n    const at::Tensor xyz2, \n    at::Tensor gradxyz1, \n    at::Tensor gradxyz2, \n    at::Tensor graddist1, \n    at::Tensor graddist2, \n    at::Tensor idx1, \n    at::Tensor idx2) \n{\n    const int b = xyz1.size(0);\n    const int n = xyz1.size(1);\n    const int m = xyz2.size(1);\n\n    const float* xyz1_data = xyz1.data<float>();\n    const float* xyz2_data = xyz2.data<float>();\n    float* gradxyz1_data = 
gradxyz1.data<float>();\n    float* gradxyz2_data = gradxyz2.data<float>();\n    float* graddist1_data = graddist1.data<float>();\n    float* graddist2_data = graddist2.data<float>();\n    const int* idx1_data = idx1.data<int>();\n    const int* idx2_data = idx2.data<int>();\n\n    for (int i = 0; i < b*n*3; i++)\n        gradxyz1_data[i] = 0;\n    for (int i = 0; i < b*m*3; i++)\n        gradxyz2_data[i] = 0;\n    for (int i = 0;i < b; i++) {\n        for (int j = 0; j < n; j++) {\n            const float x1 = xyz1_data[(i*n+j)*3+0];\n            const float y1 = xyz1_data[(i*n+j)*3+1];\n            const float z1 = xyz1_data[(i*n+j)*3+2];\n            const int j2 = idx1_data[i*n+j];\n\n            const float x2 = xyz2_data[(i*m+j2)*3+0];\n            const float y2 = xyz2_data[(i*m+j2)*3+1];\n            const float z2 = xyz2_data[(i*m+j2)*3+2];\n            const float g = graddist1_data[i*n+j]*2;\n\n            gradxyz1_data[(i*n+j)*3+0] += g*(x1-x2);\n            gradxyz1_data[(i*n+j)*3+1] += g*(y1-y2);\n            gradxyz1_data[(i*n+j)*3+2] += g*(z1-z2);\n            gradxyz2_data[(i*m+j2)*3+0] -= (g*(x1-x2));\n            gradxyz2_data[(i*m+j2)*3+1] -= (g*(y1-y2));\n            gradxyz2_data[(i*m+j2)*3+2] -= (g*(z1-z2));\n        }\n        for (int j = 0; j < m; j++) {\n            const float x1 = xyz2_data[(i*m+j)*3+0];\n            const float y1 = xyz2_data[(i*m+j)*3+1];\n            const float z1 = xyz2_data[(i*m+j)*3+2];\n            const int j2 = idx2_data[i*m+j];\n            const float x2 = xyz1_data[(i*n+j2)*3+0];\n            const float y2 = xyz1_data[(i*n+j2)*3+1];\n            const float z2 = xyz1_data[(i*n+j2)*3+2];\n            const float g = graddist2_data[i*m+j]*2;\n            gradxyz2_data[(i*m+j)*3+0] += g*(x1-x2);\n            gradxyz2_data[(i*m+j)*3+1] += g*(y1-y2);\n            gradxyz2_data[(i*m+j)*3+2] += g*(z1-z2);\n            gradxyz1_data[(i*n+j2)*3+0] -= (g*(x1-x2));\n            gradxyz1_data[(i*n+j2)*3+1] -= 
(g*(y1-y2));\n            gradxyz1_data[(i*n+j2)*3+2] -= (g*(z1-z2));\n        }\n    }\n}\n\n\nPYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {\n    m.def(\"forward\", &chamfer_distance_forward, \"ChamferDistance forward\");\n    m.def(\"forward_cuda\", &chamfer_distance_forward_cuda, \"ChamferDistance forward (CUDA)\");\n    m.def(\"backward\", &chamfer_distance_backward, \"ChamferDistance backward\");\n    m.def(\"backward_cuda\", &chamfer_distance_backward_cuda, \"ChamferDistance backward (CUDA)\");\n}\n"
  },
  {
    "path": "OcCo_Torch/chamfer_distance/chamfer_distance.cu",
    "content": "#include <ATen/ATen.h>\n\n#include <cuda.h>\n#include <cuda_runtime.h>\n\n__global__ \nvoid ChamferDistanceKernel(\n\tint b,\n\tint n,\n\tconst float* xyz,\n\tint m,\n\tconst float* xyz2,\n\tfloat* result,\n\tint* result_i)\n{\n\tconst int batch=512;\n\t__shared__ float buf[batch*3];\n\tfor (int i=blockIdx.x;i<b;i+=gridDim.x){\n\t\tfor (int k2=0;k2<m;k2+=batch){\n\t\t\tint end_k=min(m,k2+batch)-k2;\n\t\t\tfor (int j=threadIdx.x;j<end_k*3;j+=blockDim.x){\n\t\t\t\tbuf[j]=xyz2[(i*m+k2)*3+j];\n\t\t\t}\n\t\t\t__syncthreads();\n\t\t\tfor (int j=threadIdx.x+blockIdx.y*blockDim.x;j<n;j+=blockDim.x*gridDim.y){\n\t\t\t\tfloat x1=xyz[(i*n+j)*3+0];\n\t\t\t\tfloat y1=xyz[(i*n+j)*3+1];\n\t\t\t\tfloat z1=xyz[(i*n+j)*3+2];\n\t\t\t\tint best_i=0;\n\t\t\t\tfloat best=0;\n\t\t\t\tint end_ka=end_k-(end_k&3);\n\t\t\t\tif (end_ka==batch){\n\t\t\t\t\tfor (int k=0;k<batch;k+=4){\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+0]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+1]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+2]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (k==0 || d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+3]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+4]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+5]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+1;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+6]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+7]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+8]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+9]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+10]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+11]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif 
(d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+3;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}else{\n\t\t\t\t\tfor (int k=0;k<end_ka;k+=4){\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+0]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+1]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+2]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (k==0 || d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+3]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+4]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+5]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+1;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+6]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+7]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+8]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+2;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tfloat x2=buf[k*3+9]-x1;\n\t\t\t\t\t\t\tfloat y2=buf[k*3+10]-y1;\n\t\t\t\t\t\t\tfloat z2=buf[k*3+11]-z1;\n\t\t\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\t\t\tif (d<best){\n\t\t\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\t\t\tbest_i=k+k2+3;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tfor (int k=end_ka;k<end_k;k++){\n\t\t\t\t\tfloat x2=buf[k*3+0]-x1;\n\t\t\t\t\tfloat y2=buf[k*3+1]-y1;\n\t\t\t\t\tfloat z2=buf[k*3+2]-z1;\n\t\t\t\t\tfloat d=x2*x2+y2*y2+z2*z2;\n\t\t\t\t\tif (k==0 || d<best){\n\t\t\t\t\t\tbest=d;\n\t\t\t\t\t\tbest_i=k+k2;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (k2==0 || result[(i*n+j)]>best){\n\t\t\t\t\tresult[(i*n+j)]=best;\n\t\t\t\t\tresult_i[(i*n+j)]=best_i;\n\t\t\t\t}\n\t\t\t}\n\t\t\t__syncthreads();\n\t\t}\n\t}\n}\n\nvoid ChamferDistanceKernelLauncher(\n    const int b, const int n,\n    const float* xyz,\n    const int m,\n    const float* xyz2,\n    float* result,\n    int* 
result_i,\n    float* result2,\n    int* result2_i)\n{\n\tChamferDistanceKernel<<<dim3(32,16,1),512>>>(b, n, xyz, m, xyz2, result, result_i);\n\tChamferDistanceKernel<<<dim3(32,16,1),512>>>(b, m, xyz2, n, xyz, result2, result2_i);\n\n\tcudaError_t err = cudaGetLastError();\n\tif (err != cudaSuccess)\n\t    printf(\"error in chamfer distance updateOutput: %s\\n\", cudaGetErrorString(err));\n}\n\n\n__global__ \nvoid ChamferDistanceGradKernel(\n\tint b, int n,\n\tconst float* xyz1,\n\tint m,\n\tconst float* xyz2,\n\tconst float* grad_dist1,\n\tconst int* idx1,\n\tfloat* grad_xyz1,\n\tfloat* grad_xyz2)\n{\n\tfor (int i = blockIdx.x; i<b; i += gridDim.x) {\n\t\tfor (int j = threadIdx.x + blockIdx.y * blockDim.x; j < n; j += blockDim.x*gridDim.y) {\n\t\t\tfloat x1=xyz1[(i*n+j)*3+0];\n\t\t\tfloat y1=xyz1[(i*n+j)*3+1];\n\t\t\tfloat z1=xyz1[(i*n+j)*3+2];\n\t\t\tint j2=idx1[i*n+j];\n\t\t\tfloat x2=xyz2[(i*m+j2)*3+0];\n\t\t\tfloat y2=xyz2[(i*m+j2)*3+1];\n\t\t\tfloat z2=xyz2[(i*m+j2)*3+2];\n\t\t\tfloat g=grad_dist1[i*n+j]*2;\n\t\t\tatomicAdd(&(grad_xyz1[(i*n+j)*3+0]),g*(x1-x2));\n\t\t\tatomicAdd(&(grad_xyz1[(i*n+j)*3+1]),g*(y1-y2));\n\t\t\tatomicAdd(&(grad_xyz1[(i*n+j)*3+2]),g*(z1-z2));\n\t\t\tatomicAdd(&(grad_xyz2[(i*m+j2)*3+0]),-(g*(x1-x2)));\n\t\t\tatomicAdd(&(grad_xyz2[(i*m+j2)*3+1]),-(g*(y1-y2)));\n\t\t\tatomicAdd(&(grad_xyz2[(i*m+j2)*3+2]),-(g*(z1-z2)));\n\t\t}\n\t}\n}\n\nvoid ChamferDistanceGradKernelLauncher(\n    const int b, const int n,\n    const float* xyz1,\n    const int m,\n    const float* xyz2,\n    const float* grad_dist1,\n    const int* idx1,\n    const float* grad_dist2,\n    const int* idx2,\n    float* grad_xyz1,\n    float* grad_xyz2)\n{\n\tcudaMemset(grad_xyz1, 0, b*n*3*4);\n\tcudaMemset(grad_xyz2, 0, b*m*3*4);\n\tChamferDistanceGradKernel<<<dim3(1,16,1), 256>>>(b, n, xyz1, m, xyz2, grad_dist1, idx1, grad_xyz1, grad_xyz2);\n\tChamferDistanceGradKernel<<<dim3(1,16,1), 256>>>(b, m, xyz2, n, xyz1, grad_dist2, idx2, grad_xyz2, grad_xyz1);\n\n\tcudaError_t 
err = cudaGetLastError();\n  \tif (err != cudaSuccess)\n\t    printf(\"error in chamfer distance get grad: %s\\n\", cudaGetErrorString(err));\n}\n"
  },
  {
    "path": "OcCo_Torch/chamfer_distance/chamfer_distance.py",
    "content": "#  Ref: https://github.com/chrdiller/pyTorchChamferDistance\nimport os, torch, torch.nn as nn\nfrom torch.utils.cpp_extension import load\n\nbasedir = os.path.dirname(__file__)\ncd = load(name=\"cd\", sources=[\n\tos.path.join(basedir, \"chamfer_distance.cpp\"),\n\tos.path.join(basedir, \"chamfer_distance.cu\")])\n\nclass ChamferDistanceFunction(torch.autograd.Function):\n\t@staticmethod\n\tdef forward(ctx, xyz1, xyz2):\n\t\tbatchsize, n, _ = xyz1.size()\n\t\t_, m, _ = xyz2.size()\n\t\txyz1 = xyz1.contiguous()\n\t\txyz2 = xyz2.contiguous()\n\t\tdist1 = torch.zeros(batchsize, n)\n\t\tdist2 = torch.zeros(batchsize, m)\n\n\t\tidx1 = torch.zeros(batchsize, n, dtype=torch.int)\n\t\tidx2 = torch.zeros(batchsize, m, dtype=torch.int)\n\n\t\tif not xyz1.is_cuda:\n\t\t\tcd.forward(xyz1, xyz2, dist1, dist2, idx1, idx2)\n\t\telse:\n\t\t\tdist1 = dist1.cuda()\n\t\t\tdist2 = dist2.cuda()\n\t\t\tidx1 = idx1.cuda()\n\t\t\tidx2 = idx2.cuda()\n\t\t\tcd.forward_cuda(xyz1, xyz2, dist1, dist2, idx1, idx2)\n\n\t\tctx.save_for_backward(xyz1, xyz2, idx1, idx2)\n\n\t\treturn dist1, dist2\n\n\t@staticmethod\n\tdef backward(ctx, graddist1, graddist2):\n\t\txyz1, xyz2, idx1, idx2 = ctx.saved_tensors\n\n\t\tgraddist1 = graddist1.contiguous()\n\t\tgraddist2 = graddist2.contiguous()\n\n\t\tgradxyz1 = torch.zeros(xyz1.size())\n\t\tgradxyz2 = torch.zeros(xyz2.size())\n\n\t\tif not graddist1.is_cuda:\n\t\t\tcd.backward(xyz1, xyz2, gradxyz1, gradxyz2, graddist1, graddist2, idx1, idx2)\n\t\telse:\n\t\t\tgradxyz1 = gradxyz1.cuda()\n\t\t\tgradxyz2 = gradxyz2.cuda()\n\t\t\tcd.backward_cuda(xyz1, xyz2, gradxyz1, gradxyz2, graddist1, graddist2, idx1, idx2)\n\n\t\treturn gradxyz1, gradxyz2\n\n\nclass ChamferDistance(nn.Module):\n\tdef forward(self, xyz1, xyz2):\n\t\treturn ChamferDistanceFunction.apply(xyz1, xyz2)\n\n\nclass get_model(nn.Module):\n\tdef __init__(self, channel=3):\n\t\tsuper(get_model, self).__init__()\n\n\t\tself.conv1 = nn.Conv1d(channel, 128, 1)\n\n\tdef forward(self, 
x):\n\t\t_, D, N = x.size()\n\t\tx = self.conv1(x)\n\t\tx = x.view(-1, 128, 1).repeat(1, 1, 3)\n\t\treturn x\n\n\nif __name__ == '__main__':\n\n\timport random, numpy as np\n\n\t'''Sanity Check on the Consistency with TensorFlow'''\n\trandom.seed(100)\n\tnp.random.seed(100)\n\n\tchamfer_dist = ChamferDistance()\n\t# model = get_model().to(torch.device(\"cuda\"))\n\t# model.train()\n\n\txyz1 = np.random.randn(32, 16384, 3).astype('float32')\n\txyz2 = np.random.randn(32, 1024, 3).astype('float32')\n\n\t# pdb.set_trace()\n\t# pc1 = torch.randn(1, 100, 3).cuda().contiguous()\n\t# pc1_new = model(pc1.transpose(2, 1))\n\t# pc2 = torch.randn(1, 50, 3).cuda().contiguous()\n\n\tdist1, dist2 = chamfer_dist(torch.Tensor(xyz1), torch.Tensor(xyz2))\n\tloss = (torch.mean(dist1)) + (torch.mean(dist2))\n\tprint(loss)\n\t# loss.backward()\n"
  },
  {
    "path": "OcCo_Torch/chamfer_distance/readme.md",
    "content": "# Chamfer Distance for PyTorch\n\nThis is an implementation of the Chamfer Distance as a module for PyTorch. It is written as a custom C++/CUDA extension. It is developed by [Chris](https://github.com/chrdiller/pyTorchChamferDistance) at TUM.\n\nAs it is using PyTorch's [JIT compilation](https://pytorch.org/tutorials/advanced/cpp_extension.html), there are no additional prerequisite steps (e.g., `build` or `setup`) that have to be taken. Simply import the module as shown below, CUDA and C++ code will be compiled on the first run, which additionally takes a few seconds.\n\n### Usage\n```python\nimport torch\nfrom chamfer_distance import ChamferDistance\nchamfer_dist = ChamferDistance()\n\n# both points clouds have shapes of (batch_size, n_points, 3), wherer n_points can be different\n\ndist1, dist2 = chamfer_dist(points, points_reconstructed)\nloss = (torch.mean(torch.sqrt(dist1)) + torch.mean(torch.sqrt(dist2)))/2  \n```\n\n### Integration\nThis code has been integrated into the [Kaolin](https://github.com/NVIDIAGameWorks/kaolin) library for 3D Deep Learning by NVIDIAGameWorks. You probably want to take a look at it if you are working on some 3D ([pytorch3d](https://github.com/facebookresearch/pytorch3d) is also recommended)\n\n### Earth Mover Distance\nFor the implementation of earth mover distance, we recommend [Kaichun's](https://github.com/daerduoCarey/PyTorchEMD) :)\n"
  },
  {
    "path": "OcCo_Torch/docker/.dockerignore",
    "content": "*/data\n*/log\n*/__pycache__\n"
  },
  {
    "path": "OcCo_Torch/docker/Dockerfile_Torch",
    "content": "# https://github.com/pytorch/pytorch/issues/31171#issuecomment-565887573\nFROM pytorch/pytorch:1.3-cuda10.1-cudnn7-devel\n\nWORKDIR /workspace/OcCo_Torch\nRUN apt-get update\nRUN apt-get -y install apt-file apt-utils \nRUN apt-file update\nRUN apt-get -y install build-essential libcap-dev vim screen\nCOPY ./Requirements_Torch.txt /workspace/OcCo_Torch\nRUN pip install -r Requirements_Torch.txt\n\nRUN mkdir /home/hcw\nRUN chmod -R 777 /home/hcw\nRUN chmod 777 /usr/bin\nRUN chmod 777 /bin\nRUN chmod 777 /usr/local/\nRUN apt-get -y update\nRUN apt-get -y install libgl1-mesa-glx\n\n# RUN apt-get -y install gcc\n# RUN apt-get -y install g++\n# RUN apt-get -y upgrade libstdc++6\n\n# Optional: Install the TensorRT runtime (must be after CUDA install)\n# RUN apt update\n# RUN apt -y install libnvinfer4=4.1.2-1+cuda9.0\n\nRUN useradd hcw\nWORKDIR /workspace/OcCo_Torch\n"
  },
  {
    "path": "OcCo_Torch/docker/build_docker_torch.sh",
    "content": "#!/bin/bash\ndocker build ../ --rm -t occo_torch -f ./Dockerfile_Torch\n"
  },
  {
    "path": "OcCo_Torch/docker/launch_docker_torch.sh",
    "content": "#!/bin/bash\n\ndocker run -it \\\n\t--rm \\\n\t--shm-size=1g \\\n\t--runtime=nvidia \\\n\t--ulimit memlock=-1 \\\n\t--ulimit stack=67108864 \\\n\t-v \"$(dirname $PWD):/workspace/OcCo_Torch\" \\\n\t-v \"/scratch/hw501/data_source/:/scratch/hw501/data_source/\" \\\n\t-v \"/scratches/mario/hw501/data_source:/scratches/mario/hw501/data_source/\" \\\n\t-v \"/scratches/weatherwax_2/hwang/OcCo/data/:/scratches/weatherwax_2/hwang/OcCo/data/\" \\\n\tocco_torch bash\n\n# -v + any external directories if you are using them\n"
  },
  {
    "path": "OcCo_Torch/models/dgcnn_cls.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/WangYueFt/dgcnn/blob/master/pytorch/model.py\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom dgcnn_util import get_graph_feature\n\nclass get_model(nn.Module):\n\n    def __init__(self, args, num_channel=3, num_class=40, **kwargs):\n        super(get_model, self).__init__()\n        self.args = args\n        self.bn1 = nn.BatchNorm2d(64)\n        self.bn2 = nn.BatchNorm2d(64)\n        self.bn3 = nn.BatchNorm2d(128)\n        self.bn4 = nn.BatchNorm2d(256)\n        self.bn5 = nn.BatchNorm1d(args.emb_dims)\n\n        self.conv1 = nn.Sequential(nn.Conv2d(num_channel*2, 64, kernel_size=1, bias=False),\n                                   self.bn1,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv2 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),\n                                   self.bn2,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv3 = nn.Sequential(nn.Conv2d(64*2, 128, kernel_size=1, bias=False),\n                                   self.bn3,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv4 = nn.Sequential(nn.Conv2d(128*2, 256, kernel_size=1, bias=False),\n                                   self.bn4,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv5 = nn.Sequential(nn.Conv1d(512, args.emb_dims, kernel_size=1, bias=False),\n                                   self.bn5,\n                                   nn.LeakyReLU(negative_slope=0.2))\n\n        self.linear1 = nn.Linear(args.emb_dims*2, 512, bias=False)\n        self.bn6 = nn.BatchNorm1d(512)\n        self.dp1 = nn.Dropout(p=args.dropout)\n        self.linear2 = nn.Linear(512, 256)\n        self.bn7 = nn.BatchNorm1d(256)\n        self.dp2 = nn.Dropout(p=args.dropout)\n        self.linear3 = nn.Linear(256, num_class)\n\n    def 
forward(self, x):\n        batch_size = x.size()[0]\n        x = get_graph_feature(x, k=self.args.k)\n        x = self.conv1(x)\n        x1 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x1, k=self.args.k)\n        x = self.conv2(x)\n        x2 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x2, k=self.args.k)\n        x = self.conv3(x)\n        x3 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x3, k=self.args.k)\n        x = self.conv4(x)\n        x4 = x.max(dim=-1, keepdim=False)[0]\n\n        x = torch.cat((x1, x2, x3, x4), dim=1)\n\n        x = self.conv5(x)\n        x1 = F.adaptive_max_pool1d(x, 1).view(batch_size, -1)\n        x2 = F.adaptive_avg_pool1d(x, 1).view(batch_size, -1)\n        x = torch.cat((x1, x2), 1)\n\n        x = F.leaky_relu(self.bn6(self.linear1(x)), negative_slope=0.2)\n        x = self.dp1(x)\n        x = F.leaky_relu(self.bn7(self.linear2(x)), negative_slope=0.2)\n        x = self.dp2(x)\n        x = self.linear3(x)\n        return x\n\n\nclass get_loss(torch.nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    @staticmethod\n    def cal_loss(pred, gold, smoothing=True):\n        \"\"\"Calculate cross entropy loss, apply label smoothing if needed.\"\"\"\n        gold = gold.contiguous().view(-1)\n\n        if smoothing:\n            eps = 0.2\n            n_class = pred.size()[1]\n            one_hot = torch.zeros_like(pred).scatter(1, gold.view(-1, 1), 1)  # (num_points, num_class)\n            one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)\n            log_prb = F.log_softmax(pred, dim=1)\n            loss = -(one_hot * log_prb).sum(dim=1).mean()  # ~ F.nll_loss(log_prb, gold)\n        else:\n            loss = F.cross_entropy(pred, gold, reduction='mean')\n\n        return loss\n\n    def forward(self, pred, target):\n        return self.cal_loss(pred, target, smoothing=True)\n"
  },
  {
    "path": "OcCo_Torch/models/dgcnn_jigsaw.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/AnTao97/dgcnn.pytorch/blob/master/model.py\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom dgcnn_util import get_graph_feature\n\n\nclass get_model(nn.Module):\n    def __init__(self, args, num_class, **kwargs):\n        super(get_model, self).__init__()\n        self.args = args\n        self.k = args.k\n\n        self.bn1 = nn.BatchNorm2d(64)\n        self.bn2 = nn.BatchNorm2d(64)\n        self.bn3 = nn.BatchNorm2d(64)\n        self.bn4 = nn.BatchNorm2d(64)\n        self.bn5 = nn.BatchNorm2d(64)\n        self.bn6 = nn.BatchNorm1d(args.emb_dims)\n        self.bn7 = nn.BatchNorm1d(512)\n        self.bn8 = nn.BatchNorm1d(256)\n\n        self.conv1 = nn.Sequential(nn.Conv2d(6, 64, kernel_size=1, bias=False),\n                                   self.bn1,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv2 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),\n                                   self.bn2,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv3 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),\n                                   self.bn3,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv4 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),\n                                   self.bn4,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv5 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),\n                                   self.bn5,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv6 = nn.Sequential(nn.Conv1d(192, args.emb_dims, kernel_size=1, bias=False),\n                                   self.bn6,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv7 = 
nn.Sequential(nn.Conv1d(1216, 512, kernel_size=1, bias=False),\n                                   self.bn7,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv8 = nn.Sequential(nn.Conv1d(512, 256, kernel_size=1, bias=False),\n                                   self.bn8,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.dp1 = nn.Dropout(p=args.dropout)\n        self.conv9 = nn.Conv1d(256, num_class, kernel_size=1, bias=False)\n\n    def forward(self, x):\n\n        batch_size, _, num_points = x.size()\n\n        x = get_graph_feature(x, self.k)\n        x = self.conv1(x)\n        x = self.conv2(x)\n        x1 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x1, k=self.k)\n        x = self.conv3(x)\n        x = self.conv4(x)\n        x2 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x2, k=self.k)\n        x = self.conv5(x)\n        x3 = x.max(dim=-1, keepdim=False)[0]\n\n        x = torch.cat((x1, x2, x3), dim=1)\n\n        x = self.conv6(x)\n        x = x.max(dim=-1, keepdim=True)[0]\n\n        x = x.repeat(1, 1, num_points)\n        x = torch.cat((x, x1, x2, x3), dim=1)\n\n        x = self.conv7(x)\n        x = self.conv8(x)\n        x = self.dp1(x)\n        x = self.conv9(x)\n        # x = F.softmax(x, dim=1)\n        # x = F.log_softmax(x, dim=1)\n        '''add softmax: \n            https://towardsdatascience.com/cuda-error-device-side-assert-triggered-c6ae1c8fa4c3\n            https://github.com/pytorch/pytorch/issues/1204\n        '''\n        return x\n\n\nclass get_loss(torch.nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    @staticmethod\n    def cal_loss(pred, gold, smoothing=False):\n        \"\"\"Calculate cross entropy loss, apply label smoothing if needed.\"\"\"\n\n        gold = gold.contiguous().view(-1)\n\n        if smoothing:\n            eps = 0.2\n            n_class = pred.size(1)\n            
one_hot = torch.zeros_like(pred).scatter(1, gold.view(-1, 1), 1)\n            one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)\n            log_prb = F.log_softmax(pred, dim=1)\n            loss = -(one_hot * log_prb).sum(dim=1).mean()  # ~ F.nll_loss(log_prb, gold)\n        else:\n            loss = F.cross_entropy(pred, gold, reduction='mean')\n\n        return loss\n\n    def forward(self, pred, target):\n        return self.cal_loss(pred, target, smoothing=False)\n"
  },
  {
    "path": "OcCo_Torch/models/dgcnn_occo.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/models/pcn_cd.py\n#  Ref: https://github.com/AnTao97/UnsupervisedPointCloudReconstruction/blob/master/model.py\n\nimport sys, torch, itertools, numpy as np, torch.nn as nn, torch.nn.functional as F\nfrom dgcnn_util import get_graph_feature\nsys.path.append(\"../chamfer_distance\")\nfrom chamfer_distance import ChamferDistance\n\n\nclass get_model(nn.Module):\n    def __init__(self, **kwargs):\n        super(get_model, self).__init__()\n\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_coarse = 1024\n        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n        self.__dict__.update(kwargs)  # to update args, num_coarse, grid_size, grid_scale\n\n        self.num_fine = self.grid_size ** 2 * self.num_coarse  # 16384\n        self.meshgrid = [[-self.grid_scale, self.grid_scale, self.grid_size],\n                         [-self.grid_scale, self.grid_scale, self.grid_size]]\n\n        self.bn1 = nn.BatchNorm2d(64)\n        self.bn2 = nn.BatchNorm2d(64)\n        self.bn3 = nn.BatchNorm2d(128)\n        self.bn4 = nn.BatchNorm2d(256)\n        self.bn5 = nn.BatchNorm1d(self.args.emb_dims)\n\n        self.conv1 = nn.Sequential(nn.Conv2d(6, 64, kernel_size=1, bias=False),\n                                   self.bn1,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv2 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),\n                                   self.bn2,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv3 = nn.Sequential(nn.Conv2d(64*2, 128, kernel_size=1, bias=False),\n                                   self.bn3,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv4 = nn.Sequential(nn.Conv2d(128*2, 256, kernel_size=1, bias=False),\n                                  
 self.bn4,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv5 = nn.Sequential(nn.Conv1d(512, self.args.emb_dims, kernel_size=1, bias=False),\n                                   self.bn5,\n                                   nn.LeakyReLU(negative_slope=0.2))\n\n        self.folding1 = nn.Sequential(\n            nn.Linear(self.args.emb_dims, 1024),\n            nn.ReLU(),\n            nn.Linear(1024, 1024),\n            nn.ReLU(),\n            nn.Linear(1024, self.num_coarse * 3))\n\n        self.folding2 = nn.Sequential(\n            nn.Conv1d(1024+2+3, 512, 1),\n            nn.ReLU(),\n            nn.Conv1d(512, 512, 1),\n            nn.ReLU(),\n            nn.Conv1d(512, 3, 1))\n\n    def build_grid(self, batch_size):\n\n        x, y = np.linspace(*self.meshgrid[0]), np.linspace(*self.meshgrid[1])\n        points = np.array(list(itertools.product(x, y)))\n        points = np.repeat(points[np.newaxis, ...], repeats=batch_size, axis=0)\n\n        return torch.tensor(points).float().to(self.device)\n\n    def tile(self, tensor, multiples):\n        # substitute for tf.tile:\n        # https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/tile\n        # Ref: https://discuss.pytorch.org/t/how-to-tile-a-tensor/13853/3\n        def tile_single_axis(a, dim, n_tile):\n            init_dim = a.size(dim)\n            repeat_idx = [1] * a.dim()\n            repeat_idx[dim] = n_tile\n            a = a.repeat(*repeat_idx)\n            order_index = torch.Tensor(\n                np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])).long()\n            return torch.index_select(a, dim, order_index.to(self.device))\n\n        for dim, n_tile in enumerate(multiples):\n            if n_tile == 1:\n                continue\n            tensor = tile_single_axis(tensor, dim, n_tile)\n        return tensor\n\n    @staticmethod\n    def expand_dims(tensor, dim):\n        # substitute for tf.expand_dims:\n        # 
https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/expand_dims\n        return tensor.unsqueeze(-1).transpose(-1, dim)\n\n    def forward(self, x):\n\n        batch_size = x.size()[0]\n        x = get_graph_feature(x, k=self.args.k)\n        x = self.conv1(x)\n        x1 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x1, k=self.args.k)\n        x = self.conv2(x)\n        x2 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x2, k=self.args.k)\n        x = self.conv3(x)\n        x3 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x3, k=self.args.k)\n        x = self.conv4(x)\n        x4 = x.max(dim=-1, keepdim=False)[0]\n\n        x = torch.cat((x1, x2, x3, x4), dim=1)\n        x = self.conv5(x)\n        feature = F.adaptive_max_pool1d(x, 1).view(batch_size, -1)\n        # x1 = F.adaptive_max_pool1d(x, 1).view(batch_size, -1)\n        # x2 = F.adaptive_avg_pool1d(x, 1).view(batch_size, -1)\n        # feature = torch.cat((x1, x2), 1)\n\n        coarse = self.folding1(feature)\n        coarse = coarse.view(-1, self.num_coarse, 3)\n\n        grid = self.build_grid(x.size()[0])\n        grid_feat = grid.repeat(1, self.num_coarse, 1)\n\n        point_feat = self.tile(self.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n        point_feat = point_feat.view([-1, self.num_fine, 3])\n\n        global_feat = self.tile(self.expand_dims(feature, 1), [1, self.num_fine, 1])\n        feat = torch.cat([grid_feat, point_feat, global_feat], dim=2)\n\n        center = self.tile(self.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n        center = center.view([-1, self.num_fine, 3])\n\n        fine = self.folding2(feat.transpose(2, 1)).transpose(2, 1) + center\n\n        return coarse, fine\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    @staticmethod\n    def dist_cd(pc1, pc2):\n        chamfer_dist = ChamferDistance()\n        dist1, dist2 = 
chamfer_dist(pc1, pc2)\n        return (torch.mean(torch.sqrt(dist1)) + torch.mean(torch.sqrt(dist2)))/2\n\n    def forward(self, coarse, fine, gt, alpha):\n        return self.dist_cd(coarse, gt) + alpha * self.dist_cd(fine, gt)\n\n\nif __name__ == '__main__':\n\n    model = get_model()\n    print(model)\n    input_pc = torch.rand(7, 3, 1024)\n    x = model(input_pc)\n"
  },
  {
    "path": "OcCo_Torch/models/dgcnn_partseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/AnTao97/dgcnn.pytorch/blob/master/model.py\n#  Ref: https://github.com/WangYueFt/dgcnn/blob/master/tensorflow/part_seg/train_multi_gpu.py\n\nimport pdb, torch, torch.nn as nn, torch.nn.functional as F\nfrom dgcnn_util import get_graph_feature, T_Net\n\n\nclass get_model(nn.Module):\n    def __init__(self, args, part_num=50, num_channel=3, **kwargs):\n        super(get_model, self).__init__()\n        self.k = args.k\n        self.part_num = part_num\n        self.transform_net = T_Net(channel=num_channel)\n\n        self.bn1 = nn.BatchNorm2d(64)\n        self.bn2 = nn.BatchNorm2d(64)\n        self.bn3 = nn.BatchNorm2d(64)\n        self.bn4 = nn.BatchNorm2d(64)\n        self.bn5 = nn.BatchNorm2d(64)\n        self.bn6 = nn.BatchNorm1d(args.emb_dims)\n        self.bn7 = nn.BatchNorm1d(64)\n        self.bn8 = nn.BatchNorm1d(256)\n        self.bn9 = nn.BatchNorm1d(256)\n        self.bn10 = nn.BatchNorm1d(128)\n\n        self.conv1 = nn.Sequential(nn.Conv2d(num_channel*2, 64, kernel_size=1, bias=False),\n                                   self.bn1,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv2 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),\n                                   self.bn2,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv3 = nn.Sequential(nn.Conv2d(64 * 2, 64, kernel_size=1, bias=False),\n                                   self.bn3,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv4 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),\n                                   self.bn4,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv5 = nn.Sequential(nn.Conv2d(64 * 2, 64, kernel_size=1, bias=False),\n                                   self.bn5,\n                                   
nn.LeakyReLU(negative_slope=0.2))\n        self.conv6 = nn.Sequential(nn.Conv1d(192, args.emb_dims, kernel_size=1, bias=False),\n                                   self.bn6,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv7 = nn.Sequential(nn.Conv1d(16, 64, kernel_size=1, bias=False),\n                                   self.bn7,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv8 = nn.Sequential(nn.Conv1d(1280, 256, kernel_size=1, bias=False),\n                                   self.bn8,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.dp1 = nn.Dropout(p=args.dropout)\n        self.conv9 = nn.Sequential(nn.Conv1d(256, 256, kernel_size=1, bias=False),\n                                   self.bn9,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.dp2 = nn.Dropout(p=args.dropout)\n        self.conv10 = nn.Sequential(nn.Conv1d(256, 128, kernel_size=1, bias=False),\n                                    self.bn10,\n                                    nn.LeakyReLU(negative_slope=0.2))\n        self.conv11 = nn.Conv1d(128, self.part_num, kernel_size=1, bias=False)\n\n    def forward(self, x, l):\n        B, D, N = x.size()\n\n        x0 = get_graph_feature(x, k=self.k)\n        t = self.transform_net(x0)\n        x = x.transpose(2, 1)\n        if D > 3:\n            x, feature = x.split(3, dim=2)\n        x = torch.bmm(x, t)\n        if D > 3:\n            x = torch.cat([x, feature], dim=2)\n        x = x.transpose(2, 1)\n\n        x = get_graph_feature(x, k=self.k)\n        x = self.conv1(x)\n        x = self.conv2(x)\n        x1 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x1, k=self.k)\n        x = self.conv3(x)\n        x = self.conv4(x)\n        x2 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x2, k=self.k)\n        x = self.conv5(x)\n        x3 = x.max(dim=-1, 
keepdim=False)[0]\n\n        x = torch.cat((x1, x2, x3), dim=1)\n\n        x = self.conv6(x)\n        x = x.max(dim=-1, keepdim=True)[0]\n\n        l = l.view(B, -1, 1)\n        l = self.conv7(l)\n\n        x = torch.cat((x, l), dim=1)\n        x = x.repeat(1, 1, N)\n\n        x = torch.cat((x, x1, x2, x3), dim=1)\n\n        x = self.conv8(x)\n        x = self.dp1(x)\n        x = self.conv9(x)\n        x = self.dp2(x)\n        x = self.conv10(x)\n        x = self.conv11(x)\n\n        return x.permute(0, 2, 1).contiguous()\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    @staticmethod\n    def cal_loss(pred, gold, smoothing=False):\n        \"\"\"Calculate cross entropy loss, apply label smoothing if needed.\"\"\"\n\n        gold = gold.contiguous().view(-1)\n\n        if smoothing:\n            eps = 0.2\n            n_class = pred.size()[1]\n            one_hot = torch.zeros_like(pred).scatter(1, gold.view(-1, 1), 1)\n            one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)\n            log_prb = F.log_softmax(pred, dim=1)\n            loss = -(one_hot * log_prb).sum(dim=1).mean()  # ~ F.nll_loss(log_prb, gold)\n        else:\n            loss = F.cross_entropy(pred, gold, reduction='mean')\n\n        return loss\n\n    def forward(self, pred, target):\n\n        return self.cal_loss(pred, target, smoothing=False)\n"
  },
  {
    "path": "OcCo_Torch/models/dgcnn_semseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/AnTao97/dgcnn.pytorch/blob/master/model.py\n#  Ref: https://github.com/WangYueFt/dgcnn/blob/master/tensorflow/sem_seg/train.py\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom dgcnn_util import get_graph_feature\n\n\nclass get_model(nn.Module):\n    def __init__(self, args, num_class, num_channel=9, **kwargs):\n        super(get_model, self).__init__()\n        self.k = args.k\n\n        self.bn1 = nn.BatchNorm2d(64)\n        self.bn2 = nn.BatchNorm2d(64)\n        self.bn3 = nn.BatchNorm2d(64)\n        self.bn4 = nn.BatchNorm2d(64)\n        self.bn5 = nn.BatchNorm2d(64)\n        self.bn6 = nn.BatchNorm1d(args.emb_dims)\n        self.bn7 = nn.BatchNorm1d(512)\n        self.bn8 = nn.BatchNorm1d(256)\n\n        self.conv1 = nn.Sequential(nn.Conv2d(num_channel*2, 64, kernel_size=1, bias=False),\n                                   self.bn1,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv2 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),\n                                   self.bn2,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv3 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),\n                                   self.bn3,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv4 = nn.Sequential(nn.Conv2d(64, 64, kernel_size=1, bias=False),\n                                   self.bn4,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv5 = nn.Sequential(nn.Conv2d(64*2, 64, kernel_size=1, bias=False),\n                                   self.bn5,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv6 = nn.Sequential(nn.Conv1d(192, args.emb_dims, kernel_size=1, bias=False),\n                                   self.bn6,\n                                 
  nn.LeakyReLU(negative_slope=0.2))\n        self.conv7 = nn.Sequential(nn.Conv1d(1216, 512, kernel_size=1, bias=False),\n                                   self.bn7,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv8 = nn.Sequential(nn.Conv1d(512, 256, kernel_size=1, bias=False),\n                                   self.bn8,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.dp1 = nn.Dropout(p=args.dropout)\n        self.conv9 = nn.Conv1d(256, num_class, kernel_size=1, bias=False)\n\n    def forward(self, x):\n        batch_size, _, num_points = x.size()\n\n        x = get_graph_feature(x, self.k, extra_dim=True)\n        x = self.conv1(x)\n        x = self.conv2(x)\n        x1 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x1, self.k)\n        x = self.conv3(x)\n        x = self.conv4(x)\n        x2 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x2, self.k)\n        x = self.conv5(x)\n        x3 = x.max(dim=-1, keepdim=False)[0]\n\n        x = torch.cat((x1, x2, x3), dim=1)\n\n        x = self.conv6(x)\n        x = x.max(dim=-1, keepdim=True)[0]\n\n        x = x.repeat(1, 1, num_points)\n        x = torch.cat((x, x1, x2, x3), dim=1)\n\n        x = self.conv7(x)\n        x = self.conv8(x)\n        x = self.dp1(x)\n        x = self.conv9(x)\n\n        return x.permute(0, 2, 1).contiguous()\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    @staticmethod\n    def cal_loss(pred, gold, smoothing=False):\n        \"\"\"Calculate cross entropy loss, apply label smoothing if needed.\"\"\"\n\n        gold = gold.contiguous().view(-1)\n\n        if smoothing:\n            eps = 0.2\n            n_class = pred.size()[1]\n            one_hot = torch.zeros_like(pred).scatter(1, gold.view(-1, 1), 1)\n            one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1)\n            log_prb = 
F.log_softmax(pred, dim=1)\n            loss = -(one_hot * log_prb).sum(dim=1).mean()  # ~ F.nll_loss(log_prb, gold)\n        else:\n            loss = F.cross_entropy(pred, gold, reduction='mean')\n\n        return loss\n\n    def forward(self, pred, target):\n\n        return self.cal_loss(pred, target, smoothing=False)\n"
  },
  {
    "path": "OcCo_Torch/models/dgcnn_util.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/WangYueFt/dgcnn/blob/master/pytorch/model.py\n\nimport torch, torch.nn as nn, torch.nn.init as init, torch.nn.functional as F\n\ndef knn(x, k):\n    inner = -2 * torch.matmul(x.transpose(2, 1), x)\n    xx = torch.sum(x ** 2, dim=1, keepdim=True)\n    pairwise_distance = -xx - inner - xx.transpose(2, 1)\n    idx = pairwise_distance.topk(k=k, dim=-1)[1]\n    return idx\n\n\ndef get_graph_feature(x, k=20, idx=None, extra_dim=False):\n\n    batch_size, num_dims, num_points = x.size()\n    x = x.view(batch_size, -1, num_points)\n    if idx is None:\n        if extra_dim is False:\n            idx = knn(x, k=k)\n        else:\n            idx = knn(x[:, 6:], k=k)  # idx = knn(x[:, :3], k=k)\n\n    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n    idx_base = torch.arange(0, batch_size, device=device).view(-1, 1, 1) * num_points\n    idx += idx_base\n    idx = idx.view(-1)\n\n    x = x.transpose(2, 1).contiguous()\n    feature = x.view(batch_size*num_points, -1)[idx, :]\n    feature = feature.view(batch_size, num_points, k, num_dims)\n    x = x.view(batch_size, num_points, 1, num_dims).repeat(1, 1, k, 1)\n    feature = torch.cat((feature-x, x), dim=3).permute(0, 3, 1, 2)\n\n    return feature  # (batch_size, 2 * num_dims, num_points, k)\n\n\nclass T_Net(nn.Module):\n    \"\"\"Similar to STN3d/STNkd in pointnet_util.py,\n    but with leaky relu and zero bias conv1d\"\"\"\n    def __init__(self, channel=3, k=3):\n        super(T_Net, self).__init__()\n        self.k = k\n        self.bn1 = nn.BatchNorm2d(64)\n        self.bn2 = nn.BatchNorm2d(128)\n        self.bn3 = nn.BatchNorm1d(1024)\n\n        self.conv1 = nn.Sequential(nn.Conv2d(channel*2, 64, kernel_size=1, bias=False),\n                                   self.bn1,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv2 = nn.Sequential(nn.Conv2d(64, 128, 
kernel_size=1, bias=False),\n                                   self.bn2,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv3 = nn.Sequential(nn.Conv1d(128, 1024, kernel_size=1, bias=False),\n                                   self.bn3,\n                                   nn.LeakyReLU(negative_slope=0.2))\n\n        self.linear1 = nn.Linear(1024, 512, bias=False)\n        self.bn4 = nn.BatchNorm1d(512)\n        self.linear2 = nn.Linear(512, 256, bias=False)\n        self.bn5 = nn.BatchNorm1d(256)\n\n        self.transform = nn.Linear(256, self.k**2)\n        init.constant_(self.transform.weight, 0)\n        init.eye_(self.transform.bias.view(self.k, self.k))\n\n    def forward(self, x):\n        B = x.size(0)\n\n        x = self.conv1(x)\n        x = self.conv2(x)\n        x = x.max(dim=-1, keepdim=False)[0]\n\n        x = self.conv3(x)\n        x = x.max(dim=-1, keepdim=False)[0]\n\n        x = F.leaky_relu(self.bn4(self.linear1(x)), negative_slope=0.2)\n        x = F.leaky_relu(self.bn5(self.linear2(x)), negative_slope=0.2)\n\n        x = self.transform(x)\n        x = x.view(B, self.k, self.k)\n\n        return x\n\n\nclass encoder(nn.Module):\n    def __init__(self, channel=3, **kwargs):\n        super(encoder, self).__init__()\n        self.bn1 = nn.BatchNorm2d(64)\n        self.bn2 = nn.BatchNorm2d(64)\n        self.bn3 = nn.BatchNorm2d(128)\n        self.bn4 = nn.BatchNorm2d(256)\n        self.bn5 = nn.BatchNorm1d(1024)\n\n        self.conv1 = nn.Sequential(nn.Conv2d(channel*2, 64, kernel_size=1, bias=False),\n                                   self.bn1,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv2 = nn.Sequential(nn.Conv2d(64 * 2, 64, kernel_size=1, bias=False),\n                                   self.bn2,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv3 = nn.Sequential(nn.Conv2d(64 * 2, 128, kernel_size=1, bias=False),\n              
                     self.bn3,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv4 = nn.Sequential(nn.Conv2d(128 * 2, 256, kernel_size=1, bias=False),\n                                   self.bn4,\n                                   nn.LeakyReLU(negative_slope=0.2))\n        self.conv5 = nn.Sequential(nn.Conv1d(256 * 2, 1024, kernel_size=1, bias=False),\n                                   self.bn5,\n                                   nn.LeakyReLU(negative_slope=0.2))\n\n    def forward(self, x):\n        batch_size = x.size()[0]\n        x = get_graph_feature(x, k=20)\n        x = self.conv1(x)\n        x1 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x1, k=20)\n        x = self.conv2(x)\n        x2 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x2, k=20)\n        x = self.conv3(x)\n        x3 = x.max(dim=-1, keepdim=False)[0]\n\n        x = get_graph_feature(x3, k=20)\n        x = self.conv4(x)\n        x4 = x.max(dim=-1, keepdim=False)[0]\n\n        x = torch.cat((x1, x2, x3, x4), dim=1)\n\n        x = self.conv5(x)\n        x1 = F.adaptive_max_pool1d(x, 1).view(batch_size, -1)\n        # x2 = F.adaptive_avg_pool1d(x, 1).view(batch_size, -1)\n        # x = torch.cat((x1, x2), 1)\n\n        return x1\n\n"
  },
  {
    "path": "OcCo_Torch/models/pcn_cls.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport pdb, torch, torch.nn as nn, torch.nn.functional as F\nfrom pcn_util import PCNEncoder\n\nclass get_model(nn.Module):\n\tdef __init__(self, num_class=40, num_channel=3, **kwargs):\n\t\tsuper(get_model, self).__init__()\n\t\tself.feat = PCNEncoder(global_feat=True, channel=num_channel)\n\t\tself.fc1 = nn.Linear(1024, 512)\n\t\tself.fc2 = nn.Linear(512, 256)\n\t\tself.fc3 = nn.Linear(256, num_class)\n\n\t\tself.dp1 = nn.Dropout(p=0.3)\n\t\tself.bn1 = nn.BatchNorm1d(512)\n\t\tself.dp2 = nn.Dropout(p=0.3)\n\t\tself.bn2 = nn.BatchNorm1d(256)\n\n\tdef forward(self, x):\n\t\tx = self.feat(x)\n\t\tx = F.relu(self.bn1(self.fc1(x)))\n\t\tx = self.dp1(x)\n\t\t\n\t\tx = F.relu(self.bn2(self.fc2(x)))\n\t\tx = self.dp2(x)\n\n\t\tx = self.fc3(x)\n\t\tx = F.log_softmax(x, dim=1)\n\t\treturn x\n\n\nclass get_loss(nn.Module):\n\tdef __init__(self):\n\t\tsuper(get_loss, self).__init__()\n\n\tdef forward(self, pred, target):\n\t\tloss = F.nll_loss(pred, target)\n\t\treturn loss\n\n\nif __name__ == '__main__':\n\n\tmodel = get_model()\n\txyz = torch.rand(12, 3, 1024)\n\tx = model(xyz)\n"
  },
  {
    "path": "OcCo_Torch/models/pcn_jigsaw.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom pcn_util import PCNEncoder\n\n\nclass get_model(nn.Module):\n    def __init__(self, num_class, num_channel=3, **kwargs):\n        super(get_model, self).__init__()\n        self.num_class = num_class\n        self.feat = PCNEncoder(global_feat=False, channel=num_channel)\n        self.conv1 = nn.Conv1d(1280, 512, 1)\n        self.conv2 = nn.Conv1d(512, 256, 1)\n        self.conv3 = nn.Conv1d(256, 128, 1)\n        self.conv4 = nn.Conv1d(128, self.num_class, 1)\n        self.bn1 = nn.BatchNorm1d(512)\n        self.bn2 = nn.BatchNorm1d(256)\n        self.bn3 = nn.BatchNorm1d(128)\n\n    def forward(self, x):\n        batch_size, _, num_points = x.size()\n        x = self.feat(x)\n        x = F.relu(self.bn1(self.conv1(x)))\n        x = F.relu(self.bn2(self.conv2(x)))\n        x = F.relu(self.bn3(self.conv3(x)))\n        x = self.conv4(x)\n        x = x.transpose(2, 1).contiguous()\n        x = F.log_softmax(x.view(-1, self.num_class), dim=-1)\n        x = x.view(batch_size, num_points, self.num_class)\n        return x\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    def forward(self, pred, target, trans_feat, weight):\n        loss = F.nll_loss(pred, target)\n        return loss\n\n\nif __name__ == '__main__':\n    model = get_model(num_class=13, num_channel=3)\n    xyz = torch.rand(12, 3, 2048)\n    model(xyz)\n"
  },
  {
    "path": "OcCo_Torch/models/pcn_occo.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/models/pcn_cd.py\n#  Ref: https://github.com/AnTao97/UnsupervisedPointCloudReconstruction/blob/master/model.py\n#  Sanity Check: https://github.com/vinits5/learning3d/blob/master/models/pcn.py\n\nimport sys, torch, itertools, numpy as np, torch.nn as nn\nfrom pcn_util import PCNEncoder\nsys.path.append(\"../chamfer_distance\")\nfrom chamfer_distance import ChamferDistance\n\n\nclass get_model(nn.Module):\n    def __init__(self, **kwargs):\n        super(get_model, self).__init__()\n\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_coarse = 1024\n        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n        self.__dict__.update(kwargs)  # to update args, num_coarse, grid_size, grid_scale\n\n        self.num_fine = self.grid_size ** 2 * self.num_coarse  # 16384\n        self.meshgrid = [[-self.grid_scale, self.grid_scale, self.grid_size],\n                         [-self.grid_scale, self.grid_scale, self.grid_size]]\n\n        self.feat = PCNEncoder(global_feat=True, channel=3)\n\n        # batch normalisation would limit the expressiveness, hence left disabled\n        self.folding1 = nn.Sequential(\n            nn.Linear(1024, 1024),\n            # nn.BatchNorm1d(1024),\n            nn.ReLU(),\n            nn.Linear(1024, 1024),\n            # nn.BatchNorm1d(1024),\n            nn.ReLU(),\n            nn.Linear(1024, self.num_coarse * 3))\n\n        self.folding2 = nn.Sequential(\n            nn.Conv1d(1024+2+3, 512, 1),\n            # nn.BatchNorm1d(512),\n            nn.ReLU(),\n            nn.Conv1d(512, 512, 1),\n            # nn.BatchNorm1d(512),\n            nn.ReLU(),\n            nn.Conv1d(512, 3, 1))\n\n    def build_grid(self, batch_size):\n        # a simpler alternative would be: torch.meshgrid()\n        x, y = np.linspace(*self.meshgrid[0]), np.linspace(*self.meshgrid[1])\n        points = 
np.array(list(itertools.product(x, y)))\n        points = np.repeat(points[np.newaxis, ...], repeats=batch_size, axis=0)\n\n        return torch.tensor(points).float().to(self.device)\n\n    def tile(self, tensor, multiples):\n        # substitute for tf.tile:\n        # https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/tile\n        # Ref: https://discuss.pytorch.org/t/how-to-tile-a-tensor/13853/3\n        def tile_single_axis(a, dim, n_tile):\n            init_dim = a.size()[dim]\n            repeat_idx = [1] * a.dim()\n            repeat_idx[dim] = n_tile\n            a = a.repeat(*repeat_idx)\n            order_index = torch.Tensor(\n                np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])).long()\n            return torch.index_select(a, dim, order_index.to(self.device))\n\n        for dim, n_tile in enumerate(multiples):\n            if n_tile == 1:  # increase the speed effectively\n                continue\n            tensor = tile_single_axis(tensor, dim, n_tile)\n        return tensor\n\n    @staticmethod\n    def expand_dims(tensor, dim):\n        # substitute for tf.expand_dims:\n        # https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/expand_dims\n        # another solution is: torch.unsqueeze(tensor, dim=dim)\n        return tensor.unsqueeze(-1).transpose(-1, dim)\n\n    def forward(self, x):\n        # use the same variable naming as:\n        # https://github.com/wentaoyuan/pcn/blob/master/models/pcn_cd.py\n        feature = self.feat(x)\n\n        coarse = self.folding1(feature)\n        coarse = coarse.view(-1, self.num_coarse, 3)\n\n        grid = self.build_grid(x.shape[0])\n        grid_feat = grid.repeat(1, self.num_coarse, 1)\n\n        point_feat = self.tile(self.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n        point_feat = point_feat.view([-1, self.num_fine, 3])\n\n        global_feat = self.tile(self.expand_dims(feature, 1), [1, self.num_fine, 1])\n        feat = 
torch.cat([grid_feat, point_feat, global_feat], dim=2)\n\n        center = self.tile(self.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n        center = center.view([-1, self.num_fine, 3])\n\n        fine = self.folding2(feat.transpose(2, 1)).transpose(2, 1) + center\n\n        return coarse, fine\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    @staticmethod\n    def dist_cd(pc1, pc2):\n        chamfer_dist = ChamferDistance()\n        dist1, dist2 = chamfer_dist(pc1, pc2)\n        return (torch.mean(torch.sqrt(dist1)) + torch.mean(torch.sqrt(dist2)))/2\n\n    def forward(self, coarse, fine, gt, alpha):\n        return self.dist_cd(coarse, gt) + alpha * self.dist_cd(fine, gt)\n\n\nif __name__ == '__main__':\n\n    model = get_model()\n    print(model)\n    input_pc = torch.rand(7, 3, 1024)\n    x = model(input_pc)\n"
  },
  {
    "path": "OcCo_Torch/models/pcn_partseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom pcn_util import PCNPartSegEncoder\n\n\nclass get_model(nn.Module):\n    def __init__(self, part_num=50, num_channel=3, **kwargs):\n        super(get_model, self).__init__()\n        self.part_num = part_num\n        self.feat = PCNPartSegEncoder(channel=num_channel)\n\n        self.convs1 = nn.Conv1d(5264, 512, 1)\n        self.convs2 = nn.Conv1d(512, 256, 1)\n        self.convs3 = nn.Conv1d(256, 128, 1)\n        self.convs4 = nn.Conv1d(128, self.part_num, 1)\n        self.bns1 = nn.BatchNorm1d(512)\n        self.bns2 = nn.BatchNorm1d(256)\n        self.bns3 = nn.BatchNorm1d(128)\n\n    def forward(self, point_cloud, label):\n        B, _, N = point_cloud.size()\n        x = self.feat(point_cloud, label)\n        x = F.relu(self.bns1(self.convs1(x)))\n        x = F.relu(self.bns2(self.convs2(x)))\n        x = F.relu(self.bns3(self.convs3(x)))\n        x = self.convs4(x).transpose(2, 1).contiguous()\n        x = F.log_softmax(x.view(-1, self.part_num), dim=-1)\n        x = x.view(B, N, self.part_num)\n        return x\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    def forward(self, pred, target):\n        loss = F.nll_loss(pred, target)\n        return loss\n\n\nif __name__ == '__main__':\n    model = get_model(part_num=50, num_channel=3)\n    xyz = torch.rand(16, 3, 4096)\n    label = torch.randint(low=0, high=20, size=(16, 1, 16)).float()\n    model(xyz, label)\n"
  },
  {
    "path": "OcCo_Torch/models/pcn_semseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom pcn_util import PCNEncoder\n\n\nclass get_model(nn.Module):\n    def __init__(self, num_class, num_channel=9, **kwargs):\n        super(get_model, self).__init__()\n        self.num_class = num_class\n        self.feat = PCNEncoder(global_feat=False, channel=num_channel)\n        self.conv1 = nn.Conv1d(1280, 512, 1)\n        self.conv2 = nn.Conv1d(512, 256, 1)\n        self.conv3 = nn.Conv1d(256, 128, 1)\n        self.conv4 = nn.Conv1d(128, self.num_class, 1)\n        self.bn1 = nn.BatchNorm1d(512)\n        self.bn2 = nn.BatchNorm1d(256)\n        self.bn3 = nn.BatchNorm1d(128)\n\n    def forward(self, x):\n        batch_size, _, num_points = x.size()\n        x = self.feat(x)\n        x = F.relu(self.bn1(self.conv1(x)))\n        x = F.relu(self.bn2(self.conv2(x)))\n        x = F.relu(self.bn3(self.conv3(x)))\n        x = self.conv4(x)\n        x = x.transpose(2, 1).contiguous()\n        x = F.log_softmax(x.view(-1, self.num_class), dim=-1)\n        x = x.view(batch_size, num_points, self.num_class)\n        return x\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, self).__init__()\n\n    def forward(self, pred, target):\n        loss = F.nll_loss(pred, target)\n        return loss\n\n\nif __name__ == '__main__':\n    model = get_model(num_class=13, num_channel=3)\n    xyz = torch.rand(12, 3, 2048)\n    model(xyz)\n"
  },
  {
    "path": "OcCo_Torch/models/pcn_util.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport torch, torch.nn as nn, torch.nn.functional as F\n\nclass PCNEncoder(nn.Module):\n    def __init__(self, global_feat=False, channel=3):\n        super(PCNEncoder, self).__init__()\n\n        self.conv1 = nn.Conv1d(channel, 128, 1)\n        # self.bn1 = nn.BatchNorm1d(128)  no bn in PCN\n        self.conv2 = nn.Conv1d(128, 256, 1)\n        self.conv3 = nn.Conv1d(512, 512, 1)\n        self.conv4 = nn.Conv1d(512, 1024, 1)\n        self.global_feat = global_feat\n\n    def forward(self, x):\n        _, D, N = x.size()\n        x = F.relu(self.conv1(x))\n        pointfeat = self.conv2(x)\n\n        # 'encoder_0'\n        feat = torch.max(pointfeat, 2, keepdim=True)[0]\n        feat = feat.view(-1, 256, 1).repeat(1, 1, N)\n        x = torch.cat([pointfeat, feat], 1)\n\n        # 'encoder_1'\n        x = F.relu(self.conv3(x))\n        x = self.conv4(x)\n        x = torch.max(x, 2, keepdim=False)[0]\n\n        if self.global_feat:  # used in completion and classification tasks\n            return x\n        else:  # concatenate global and local features, for segmentation tasks\n            x = x.view(-1, 1024, 1).repeat(1, 1, N)\n            return torch.cat([x, pointfeat], 1)\n\n\nclass PCNPartSegEncoder(nn.Module):\n    def __init__(self, channel=3):\n        super(PCNPartSegEncoder, self).__init__()\n\n        self.conv1 = nn.Conv1d(channel, 128, 1)\n        self.conv2 = nn.Conv1d(128, 256, 1)\n        self.conv3 = nn.Conv1d(512, 512, 1)\n        self.conv4 = nn.Conv1d(512, 2048, 1)\n\n    def forward(self, x, label):\n        _, D, N = x.size()\n        out1 = F.relu(self.conv1(x))\n        out2 = self.conv2(out1)\n\n        # 'encoder_0'\n        feat = torch.max(out2, 2, keepdim=True)[0]\n        feat = feat.repeat(1, 1, N)\n        out3 = torch.cat([out2, feat], 1)\n\n        # 'encoder_1'\n        out4 = F.relu(self.conv3(out3))\n        out5 = self.conv4(out4)\n\n        out_max = 
torch.max(out5, 2, keepdim=False)[0]\n        out_max = torch.cat([out_max, label.squeeze(1)], 1)\n\n        expand = out_max.view(-1, 2064, 1).repeat(1, 1, N)  # (batch, 2064, num_point)\n        concat = torch.cat([expand, out1, out3, out4, out5], 1)\n\n        return concat\n\n\nclass encoder(nn.Module):\n    def __init__(self, num_channel=3, **kwargs):\n        super(encoder, self).__init__()\n        self.feat = PCNEncoder(global_feat=True, channel=num_channel)\n\n    def forward(self, x):\n        return self.feat(x)\n\n\nif __name__ == \"__main__\":\n    # model = PCNEncoder()\n    model = PCNPartSegEncoder()\n    xyz = torch.rand(16, 3, 100)  # batch, channel, num_point\n    label = torch.randint(low=0, high=20, size=(16, 1, 12)).float()\n    x = model(xyz, label)\n    print(x.size())\n"
  },
  {
    "path": "OcCo_Torch/models/pointnet_cls.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/models/pointnet_cls.py\n\nimport torch.nn as nn, torch.nn.functional as F\nfrom pointnet_util import PointNetEncoder, feature_transform_regularizer\n\n\nclass get_model(nn.Module):\n\tdef __init__(self, num_class=40, num_channel=3, **kwargs):\n\t\tsuper(get_model, self).__init__()\n\t\tself.feat = PointNetEncoder(\n\t\t\tglobal_feat=True, feature_transform=True, channel=num_channel)\n\t\tself.fc1 = nn.Linear(1024, 512)\n\t\tself.fc2 = nn.Linear(512, 256)\n\t\tself.fc3 = nn.Linear(256, num_class)\n\t\tself.dropout = nn.Dropout(p=0.3)\n\t\tself.bn1 = nn.BatchNorm1d(512)\n\t\tself.bn2 = nn.BatchNorm1d(256)\n\n\tdef forward(self, x):\n\t\tx, trans, trans_feat = self.feat(x)\n\t\tx = F.relu(self.bn1(self.fc1(x)))\n\t\tx = F.relu(self.bn2(self.dropout(self.fc2(x))))\n\t\tx = self.fc3(x)\n\t\tx = F.log_softmax(x, dim=1)\n\t\treturn x, trans_feat\n\n\nclass get_loss(nn.Module):\n\tdef __init__(self, mat_diff_loss_scale=0.001):\n\t\tsuper(get_loss, self).__init__()\n\t\tself.mat_diff_loss_scale = mat_diff_loss_scale\n\n\tdef forward(self, pred, target, trans_feat):\n\t\tloss = F.nll_loss(pred, target)\n\t\tmat_diff_loss = feature_transform_regularizer(trans_feat)\n\t\ttotal_loss = loss + mat_diff_loss * self.mat_diff_loss_scale\n\t\treturn total_loss\n"
  },
  {
    "path": "OcCo_Torch/models/pointnet_jigsaw.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom pointnet_util import PointNetEncoder, feature_transform_regularizer\n\n\nclass get_model(nn.Module):\n    def __init__(self, num_class, num_channel=3, **kwargs):\n        super(get_model, self).__init__()\n        self.num_class = num_class\n        self.feat = PointNetEncoder(global_feat=False,\n                                    feature_transform=True,\n                                    channel=num_channel)\n        self.conv1 = nn.Conv1d(1088, 512, 1)\n        self.conv2 = nn.Conv1d(512, 256, 1)\n        self.conv3 = nn.Conv1d(256, 128, 1)\n        self.conv4 = nn.Conv1d(128, self.num_class, 1)\n        self.bn1 = nn.BatchNorm1d(512)\n        self.bn2 = nn.BatchNorm1d(256)\n        self.bn3 = nn.BatchNorm1d(128)\n\n    def forward(self, x):\n        batch_size, _, num_points = x.size()\n        x, trans, trans_feat = self.feat(x)\n        x = F.relu(self.bn1(self.conv1(x)))\n        x = F.relu(self.bn2(self.conv2(x)))\n        x = F.relu(self.bn3(self.conv3(x)))\n        x = self.conv4(x)\n        x = x.transpose(2, 1).contiguous()\n        x = F.log_softmax(x.view(-1, self.num_class), dim=-1)\n        x = x.view(batch_size, num_points, self.num_class)\n        return x, trans_feat\n\n\nclass get_loss(nn.Module):\n    def __init__(self, mat_diff_loss_scale=0.001):\n        super(get_loss, self).__init__()\n        self.mat_diff_loss_scale = mat_diff_loss_scale\n\n    def forward(self, pred, target, trans_feat):\n        loss = F.nll_loss(pred, target)\n        mat_diff_loss = feature_transform_regularizer(trans_feat)\n        total_loss = loss + mat_diff_loss * self.mat_diff_loss_scale\n        return total_loss\n\n\nif __name__ == '__main__':\n    model = get_model(num_class=13, num_channel=3)\n    xyz = torch.rand(12, 3, 2048)\n    model(xyz)\n"
  },
  {
    "path": "OcCo_Torch/models/pointnet_occo.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/krrish94/chamferdist\n#  Ref: https://github.com/chrdiller/pyTorchChamferDistance\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/models/pcn_cd.py\n#  Ref: https://github.com/AnTao97/UnsupervisedPointCloudReconstruction/blob/master/model.py\n\n\n\nimport sys, torch, itertools, numpy as np, torch.nn as nn\nfrom pointnet_util import PointNetEncoder\nsys.path.append(\"../chamfer_distance\")\nfrom chamfer_distance import ChamferDistance\n\nclass get_model(nn.Module):\n    def __init__(self, **kwargs):\n        super(get_model, self).__init__()\n\n        self.grid_size = 4\n        self.grid_scale = 0.05\n        self.num_coarse = 1024\n        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n        self.__dict__.update(kwargs)  # to update args, num_coarse, grid_size, grid_scale\n\n        self.num_fine = self.grid_size ** 2 * self.num_coarse  # 16384\n        self.meshgrid = [[-self.grid_scale, self.grid_scale, self.grid_size],\n                         [-self.grid_scale, self.grid_scale, self.grid_size]]\n\n        self.feat = PointNetEncoder(global_feat=True, feature_transform=False, channel=3)\n        self.folding1 = nn.Sequential(\n            nn.Linear(1024, 1024),\n            nn.ReLU(),\n            nn.Linear(1024, 1024),\n            nn.ReLU(),\n            nn.Linear(1024, self.num_coarse * 3))\n\n        self.folding2 = nn.Sequential(\n            nn.Conv1d(1024+2+3, 512, 1),\n            nn.ReLU(),\n            nn.Conv1d(512, 512, 1),\n            nn.ReLU(),\n            nn.Conv1d(512, 3, 1))\n\n    def build_grid(self, batch_size):\n\n        x, y = np.linspace(*self.meshgrid[0]), np.linspace(*self.meshgrid[1])\n        points = np.array(list(itertools.product(x, y)))\n        points = np.repeat(points[np.newaxis, ...], repeats=batch_size, axis=0)\n\n        return torch.tensor(points).float().to(self.device)\n\n    def 
tile(self, tensor, multiples):\n        # substitute for tf.tile:\n        # https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/tile\n        # Ref: https://discuss.pytorch.org/t/how-to-tile-a-tensor/13853/3\n        def tile_single_axis(a, dim, n_tile):\n            init_dim = a.size()[dim]\n            repeat_idx = [1] * a.dim()\n            repeat_idx[dim] = n_tile\n            a = a.repeat(*repeat_idx)\n            order_index = torch.Tensor(\n                np.concatenate([init_dim * np.arange(n_tile) + i for i in range(init_dim)])).long()\n            return torch.index_select(a, dim, order_index.to(self.device))\n\n        for dim, n_tile in enumerate(multiples):\n            if n_tile == 1:\n                continue\n            tensor = tile_single_axis(tensor, dim, n_tile)\n        return tensor\n\n    @staticmethod\n    def expand_dims(tensor, dim):\n        # substitute for tf.expand_dims:\n        # https://www.tensorflow.org/versions/r1.15/api_docs/python/tf/expand_dims\n        return tensor.unsqueeze(-1).transpose(-1, dim)\n\n    def forward(self, x):\n        feature, _, _ = self.feat(x)\n\n        coarse = self.folding1(feature)\n        coarse = coarse.view(-1, self.num_coarse, 3)\n\n        grid = self.build_grid(x.shape[0])\n        grid_feat = grid.repeat(1, self.num_coarse, 1)\n\n        point_feat = self.tile(self.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n        point_feat = point_feat.view([-1, self.num_fine, 3])\n\n        global_feat = self.tile(self.expand_dims(feature, 1), [1, self.num_fine, 1])\n        feat = torch.cat([grid_feat, point_feat, global_feat], dim=2)\n\n        center = self.tile(self.expand_dims(coarse, 2), [1, 1, self.grid_size ** 2, 1])\n        center = center.view([-1, self.num_fine, 3])\n\n        fine = self.folding2(feat.transpose(2, 1)).transpose(2, 1) + center\n\n        return coarse, fine\n\n\nclass get_loss(nn.Module):\n    def __init__(self):\n        super(get_loss, 
self).__init__()\n\n    @staticmethod\n    def dist_cd(pc1, pc2):\n        chamfer_dist = ChamferDistance()\n        dist1, dist2 = chamfer_dist(pc1, pc2)\n        return (torch.mean(torch.sqrt(dist1)) + torch.mean(torch.sqrt(dist2)))/2\n\n    def forward(self, coarse, fine, gt, alpha):\n        return self.dist_cd(coarse, gt) + alpha * self.dist_cd(fine, gt)\n\n\nif __name__ == '__main__':\n\n    model = get_model()\n    print(model)\n    input_pc = torch.rand(7, 3, 1024)\n    x = model(input_pc)\n"
  },
  {
    "path": "OcCo_Torch/models/pointnet_partseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/models/pointnet_part_seg.py\n\nimport torch.nn as nn, torch.nn.functional as F\nfrom pointnet_util import PointNetPartSegEncoder, feature_transform_regularizer\n\n\nclass get_model(nn.Module):\n    def __init__(self, part_num=50, num_channel=3, **kwargs):\n        super(get_model, self).__init__()\n        self.part_num = part_num\n        self.feat = PointNetPartSegEncoder(feature_transform=True,\n                                           channel=num_channel)\n\n        self.convs1 = nn.Conv1d(4944, 256, 1)\n        self.convs2 = nn.Conv1d(256, 256, 1)\n        self.convs3 = nn.Conv1d(256, 128, 1)\n        self.convs4 = nn.Conv1d(128, part_num, 1)\n        self.bns1 = nn.BatchNorm1d(256)\n        self.bns2 = nn.BatchNorm1d(256)\n        self.bns3 = nn.BatchNorm1d(128)\n\n    def forward(self, point_cloud, label):\n        B, D, N = point_cloud.size()\n        concat, trans_feat = self.feat(point_cloud, label)\n\n        net = F.relu(self.bns1(self.convs1(concat)))\n        net = F.relu(self.bns2(self.convs2(net)))\n        net = F.relu(self.bns3(self.convs3(net)))\n        net = self.convs4(net).transpose(2, 1).contiguous()\n        net = F.log_softmax(net.view(-1, self.part_num), dim=-1)\n        net = net.view(B, N, self.part_num)  # [B, N, 50]\n\n        return net, trans_feat\n\n\nclass get_loss(nn.Module):\n    def __init__(self, mat_diff_loss_scale=0.001):\n        super(get_loss, self).__init__()\n        self.mat_diff_loss_scale = mat_diff_loss_scale\n\n    def forward(self, pred, target, trans_feat):\n        loss = F.nll_loss(pred, target)\n        mat_diff_loss = feature_transform_regularizer(trans_feat)\n        total_loss = loss + mat_diff_loss * self.mat_diff_loss_scale\n        return total_loss\n\n"
  },
  {
    "path": "OcCo_Torch/models/pointnet_semseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport torch, torch.nn as nn, torch.nn.functional as F\nfrom pointnet_util import PointNetEncoder, feature_transform_regularizer\n\n\nclass get_model(nn.Module):\n    def __init__(self, num_class=13, num_channel=9, **kwargs):\n        super(get_model, self).__init__()\n\n        self.num_class = num_class\n        self.feat = PointNetEncoder(global_feat=False,\n                                    feature_transform=True,\n                                    channel=num_channel)\n        self.conv1 = nn.Conv1d(1088, 512, 1)\n        self.conv2 = nn.Conv1d(512, 256, 1)\n        self.conv3 = nn.Conv1d(256, 128, 1)\n        self.conv4 = nn.Conv1d(128, self.num_class, 1)\n        self.bn1 = nn.BatchNorm1d(512)\n        self.bn2 = nn.BatchNorm1d(256)\n        self.bn3 = nn.BatchNorm1d(128)\n\n    def forward(self, x):\n        batch_size, _, num_points = x.size()\n        x, trans, trans_feat = self.feat(x)\n        x = F.relu(self.bn1(self.conv1(x)))\n        x = F.relu(self.bn2(self.conv2(x)))\n        x = F.relu(self.bn3(self.conv3(x)))\n        x = self.conv4(x)\n        x = x.transpose(2, 1).contiguous()\n        x = F.log_softmax(x.view(-1, self.num_class), dim=-1)\n        x = x.view(batch_size, num_points, self.num_class)\n        return x, trans_feat\n\n\nclass get_loss(nn.Module):\n    def __init__(self, mat_diff_loss_scale=0.001):\n        super(get_loss, self).__init__()\n        self.mat_diff_loss_scale = mat_diff_loss_scale\n\n    def forward(self, pred, target, trans_feat):\n        loss = F.nll_loss(pred, target)\n        mat_diff_loss = feature_transform_regularizer(trans_feat)\n        total_loss = loss + mat_diff_loss * self.mat_diff_loss_scale\n        return total_loss\n\n\nif __name__ == '__main__':\n    model = get_model(13, num_channel=9)\n    xyz = torch.rand(12, 9, 2048)\n    model(xyz)\n"
  },
  {
    "path": "OcCo_Torch/models/pointnet_util.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/fxia22/pointnet.pytorch/pointnet/model.py\n\nimport torch, torch.nn as nn, numpy as np, torch.nn.functional as F\nfrom torch.autograd import Variable\n\n\ndef feature_transform_regularizer(trans):\n    d = trans.size()[1]\n    I = torch.eye(d)[None, :, :]\n    if trans.is_cuda:\n        I = I.cuda()\n    loss = torch.mean(torch.norm(torch.bmm(trans, trans.transpose(2, 1) - I), dim=(1, 2)))\n    return loss\n\n\n# STN -> Spatial Transformer Network\nclass STN3d(nn.Module):\n    def __init__(self, channel):\n        super(STN3d, self).__init__()\n        self.conv1 = nn.Conv1d(channel, 64, 1)  # in-channel, out-channel, kernel size\n        self.conv2 = nn.Conv1d(64, 128, 1)\n        self.conv3 = nn.Conv1d(128, 1024, 1)\n        self.fc1 = nn.Linear(1024, 512)\n        self.fc2 = nn.Linear(512, 256)\n        self.fc3 = nn.Linear(256, 9)\n        self.relu = nn.ReLU()\n\n        self.bn1 = nn.BatchNorm1d(64)\n        self.bn2 = nn.BatchNorm1d(128)\n        self.bn3 = nn.BatchNorm1d(1024)\n        self.bn4 = nn.BatchNorm1d(512)\n        self.bn5 = nn.BatchNorm1d(256)\n\n    def forward(self, x):\n        B = x.size()[0]\n        x = F.relu(self.bn1(self.conv1(x)))\n        x = F.relu(self.bn2(self.conv2(x)))\n        x = F.relu(self.bn3(self.conv3(x)))\n        x = torch.max(x, 2, keepdim=False)[0]  # global descriptors\n\n        x = F.relu(self.bn4(self.fc1(x)))\n        x = F.relu(self.bn5(self.fc2(x)))\n        x = self.fc3(x)\n\n        iden = Variable(torch.from_numpy(np.eye(3).flatten().astype(np.float32))).view(1, 9).repeat(B, 1)\n        if x.is_cuda:\n            iden = iden.cuda()\n        x = x + iden\n        x = x.view(-1, 3, 3)\n        return x\n\n\nclass STNkd(nn.Module):\n    def __init__(self, k=64):\n        super(STNkd, self).__init__()\n        self.conv1 = nn.Conv1d(k, 64, 1)\n        self.conv2 = nn.Conv1d(64, 128, 1)\n        self.conv3 = 
nn.Conv1d(128, 1024, 1)\n        self.fc1 = nn.Linear(1024, 512)\n        self.fc2 = nn.Linear(512, 256)\n        self.fc3 = nn.Linear(256, k * k)\n        self.relu = nn.ReLU()\n\n        self.bn1 = nn.BatchNorm1d(64)\n        self.bn2 = nn.BatchNorm1d(128)\n        self.bn3 = nn.BatchNorm1d(1024)\n        self.bn4 = nn.BatchNorm1d(512)\n        self.bn5 = nn.BatchNorm1d(256)\n\n        self.k = k\n\n    def forward(self, x):\n        B = x.size()[0]\n        x = F.relu(self.bn1(self.conv1(x)))\n        x = F.relu(self.bn2(self.conv2(x)))\n        x = F.relu(self.bn3(self.conv3(x)))\n        x = torch.max(x, 2, keepdim=False)[0]\n\n        x = F.relu(self.bn4(self.fc1(x)))\n        x = F.relu(self.bn5(self.fc2(x)))\n        x = self.fc3(x)\n\n        iden = Variable(torch.from_numpy(np.eye(self.k).flatten().astype(np.float32))).view(\n            1, self.k ** 2).repeat(B, 1)\n        if x.is_cuda:\n            iden = iden.cuda()\n        x = x + iden\n        x = x.view(-1, self.k, self.k)\n        return x\n\n\nclass PointNetEncoder(nn.Module):\n    def __init__(self, global_feat=True, feature_transform=False,\n                 channel=3, detailed=False):\n        # when input include normals, it\n        super(PointNetEncoder, self).__init__()\n        self.stn = STN3d(channel)  # Batch * 3 * 3\n        self.conv1 = nn.Conv1d(channel, 64, 1)\n        self.conv2 = nn.Conv1d(64, 128, 1)\n        self.conv3 = nn.Conv1d(128, 1024, 1)\n        self.bn1 = nn.BatchNorm1d(64)\n        self.bn2 = nn.BatchNorm1d(128)\n        self.bn3 = nn.BatchNorm1d(1024)\n        self.global_feat = global_feat\n        self.feature_transform = feature_transform\n        if self.feature_transform:\n            self.fstn = STNkd(k=64)\n        self.detailed = detailed\n\n    def forward(self, x):\n\n        _, D, N = x.size()  # Batch Size, Dimension of Point Features, Num of Points\n        trans = self.stn(x)\n        x = x.transpose(2, 1)\n        if D > 3:\n            # 
pdb.set_trace()\n            x, feature = x.split([3, D-3], dim=2)\n        x = torch.bmm(x, trans)\n        # feature = torch.bmm(feature, trans)  # feature -> normals\n\n        if D > 3:\n            x = torch.cat([x, feature], dim=2)\n        x = x.transpose(2, 1)\n        out1 = self.bn1(self.conv1(x))\n        x = F.relu(out1)\n\n        if self.feature_transform:\n            trans_feat = self.fstn(x)\n            x = x.transpose(2, 1)\n            x = torch.bmm(x, trans_feat)\n            x = x.transpose(2, 1)\n        else:\n            trans_feat = None\n\n        pointfeat = x\n\n        out2 = self.bn2(self.conv2(x))\n        x = F.relu(out2)\n\n        out3 = self.bn3(self.conv3(x))\n        # x = self.bn3(self.conv3(x))\n        x = torch.max(out3, 2, keepdim=False)[0]\n        if self.global_feat:\n            return x, trans, trans_feat\n        elif self.detailed:\n            return out1, out2, out3, x\n        else:  # concatenate global and local feature together\n            x = x.view(-1, 1024, 1).repeat(1, 1, N)\n            return torch.cat([x, pointfeat], 1), trans, trans_feat\n\n\nclass PointNetPartSegEncoder(nn.Module):\n    def __init__(self, feature_transform=True, channel=3):\n        super(PointNetPartSegEncoder, self).__init__()\n        self.stn = STN3d(channel)\n        self.conv1 = nn.Conv1d(channel, 64, 1)\n        self.conv2 = nn.Conv1d(64, 128, 1)\n        self.conv3 = nn.Conv1d(128, 128, 1)\n        self.conv4 = nn.Conv1d(128, 512, 1)\n        self.conv5 = nn.Conv1d(512, 2048, 1)\n\n        self.bn1 = nn.BatchNorm1d(64)\n        self.bn2 = nn.BatchNorm1d(128)\n        self.bn3 = nn.BatchNorm1d(128)\n        self.bn4 = nn.BatchNorm1d(512)\n        self.bn5 = nn.BatchNorm1d(2048)\n\n        self.feature_transform = feature_transform\n        if self.feature_transform:\n            self.fstn = STNkd(k=128)\n\n    def forward(self, point_cloud, label):\n        B, D, N = point_cloud.size()\n\n        trans = 
self.stn(point_cloud)\n        point_cloud = point_cloud.transpose(2, 1)\n        if D > 3:\n            point_cloud, feature = point_cloud.split(3, dim=2)\n        point_cloud = torch.bmm(point_cloud, trans)\n        if D > 3:\n            point_cloud = torch.cat([point_cloud, feature], dim=2)\n        point_cloud = point_cloud.transpose(2, 1)\n\n        out1 = F.relu(self.bn1(self.conv1(point_cloud)))\n        out2 = F.relu(self.bn2(self.conv2(out1)))\n        out3 = F.relu(self.bn3(self.conv3(out2)))\n\n        if self.feature_transform:\n            trans_feat = self.fstn(out3)\n            net_transformed = torch.bmm(out3.transpose(2, 1), trans_feat)\n            out3 = net_transformed.transpose(2, 1)\n\n        out4 = F.relu(self.bn4(self.conv4(out3)))\n        out5 = self.bn5(self.conv5(out4))\n\n        out_max = torch.max(out5, 2, keepdim=False)[0]\n        out_max = torch.cat([out_max, label.squeeze(1)], 1)\n        expand = out_max.view(-1, 2048 + 16, 1).repeat(1, 1, N)\n        concat = torch.cat([expand, out1, out2, out3, out4, out5], 1)\n\n        if self.feature_transform:\n            return concat, trans_feat\n        return concat\n\n\nclass encoder(nn.Module):\n    def __init__(self, num_channel=3, **kwargs):\n        super(encoder, self).__init__()\n        self.feat = PointNetEncoder(global_feat=True, channel=num_channel)\n\n    def forward(self, x):\n        feat, _, _ = self.feat(x)\n        return feat\n\n\nclass detailed_encoder(nn.Module):\n    def __init__(self, num_channel=3, **kwargs):\n        super(detailed_encoder, self).__init__()\n        self.feat = PointNetEncoder(global_feat=False,\n                                    channel=num_channel,\n                                    detailed=True)\n\n    def forward(self, x):\n        out1, out2, out3, x = self.feat(x)\n        return out1, out2, out3, x"
  },
  {
    "path": "OcCo_Torch/readme.md",
    "content": "## OcCo in PyTorch\n\n"
  },
  {
    "path": "OcCo_Torch/train_cls.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/WangYueFt/dgcnn/blob/master/pytorch/main.py\n#  Ref: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/train_cls.py\n\nimport os, sys, torch, shutil, importlib, argparse\nsys.path.append('utils')\nsys.path.append('models')\nfrom PC_Augmentation import random_point_dropout, random_scale_point_cloud, random_shift_point_cloud\nfrom torch.optim.lr_scheduler import CosineAnnealingLR, StepLR\nfrom ModelNetDataLoader import General_CLSDataLoader_HDF5\nfrom Torch_Utility import copy_parameters, seed_torch\nfrom torch.utils.tensorboard import SummaryWriter\n# from Inference_Timer import Inference_Timer\nfrom torch.utils.data import DataLoader\nfrom Dataset_Loc import Dataset_Loc\nfrom TrainLogger import TrainLogger\nfrom tqdm import tqdm\n\n\ndef parse_args():\n    parser = argparse.ArgumentParser('Point Cloud Classification')\n\n    ''' === Training and Model === '''\n    parser.add_argument('--log_dir', type=str, help='log folder [default: ]')\n    parser.add_argument('--gpu', type=str, default='0', help='GPU [default: 0]')\n    parser.add_argument('--mode', type=str, default='train', help='train or test')\n    parser.add_argument('--epoch', type=int, default=200, help='epochs [default: 200]')\n    # parser.add_argument('--seed', type=int, default=1, help='random seed (default: 1)')\n    parser.add_argument('--batch_size', type=int, default=24, help='batch size [default: 24]')\n    parser.add_argument('--model', default='pointnet_cls', help='model [default: pointnet_cls]')\n    parser.add_argument('--dropout', type=float, default=0.5, help='dropout rate [default: 0.5]')\n    parser.add_argument('--momentum', type=float, default=0.9, help='SGD momentum [default: 0.9]')\n    parser.add_argument('--lr_decay', type=float, default=0.5, help='lr decay rate [default: 0.5]')\n    parser.add_argument('--step_size', type=int, default=20, help='lr decay step [default: 20 
eps]')\n    parser.add_argument('--num_point', type=int, default=1024, help='points number [default: 1024]')\n    parser.add_argument('--restore', action='store_true', help='using pre-trained [default: False]')\n    parser.add_argument('--restore_path', type=str, help=\"path to pretrained weights [default: None]\")\n    parser.add_argument('--emb_dims', type=int, default=1024, help='dimension of embeddings [default: 1024]')\n    parser.add_argument('--k', type=int, default=20, help='number of nearest neighbors to use [default: 20]')\n    parser.add_argument('--use_sgd', action='store_true', default=False, help='use SGD optimiser [default: False]')\n    parser.add_argument('--lr', type=float, default=0.001, help='learning rate [default: 0.001, 0.1 if using sgd]')\n    parser.add_argument('--scheduler', type=str, default='step', help='lr decay scheduler [default: step, or cos]')\n\n    ''' === Dataset === '''\n    parser.add_argument('--partial', action='store_true', help='partial objects [default: False]')\n    parser.add_argument('--bn', action='store_true', help='with background noise [default: False]')\n    parser.add_argument('--data_aug', action='store_true', help='data Augmentation [default: False]')\n    parser.add_argument('--dataset', type=str, default='modelnet40', help='dataset [default: modelnet40]')\n    parser.add_argument('--fname', type=str, help='filename, used in ScanObjectNN or fewer data [default:]')\n\n    return parser.parse_args()\n\n\ndef main(args):\n\n    os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n    os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu\n    # seed_torch(args.seed)\n\n    ''' === Set up Loggers and Load Data === '''\n    MyLogger = TrainLogger(args, name=args.model.upper(), subfold='cls', filename=args.mode + '_log')\n    writer = SummaryWriter(os.path.join(MyLogger.experiment_dir, 'runs'))\n\n    MyLogger.logger.info('Load dataset %s' % args.dataset)\n    NUM_CLASSES, TRAIN_FILES, TEST_FILES = 
Dataset_Loc(dataset=args.dataset, fname=args.fname,\n                                                       partial=args.partial, bn=args.bn)\n    TRAIN_DATASET = General_CLSDataLoader_HDF5(file_list=TRAIN_FILES, num_point=1024)\n    TEST_DATASET = General_CLSDataLoader_HDF5(file_list=TEST_FILES, num_point=1024)\n    trainDataLoader = DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4, drop_last=True)\n    testDataLoader = DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=4)\n\n    ''' === Load Model and Backup Scripts === '''\n    MODEL = importlib.import_module(args.model)\n    shutil.copy(os.path.abspath(__file__), MyLogger.log_dir)\n    shutil.copy('./models/%s.py' % args.model, MyLogger.log_dir)\n\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    classifier = MODEL.get_model(args=args, num_channel=3, num_class=NUM_CLASSES).to(device)\n    criterion = MODEL.get_loss().to(device)\n    classifier = torch.nn.DataParallel(classifier)\n    # nn.DataParallel has its own issues (slow, memory expensive),\n    # here are some advanced solutions: https://zhuanlan.zhihu.com/p/145427849\n    print('=' * 27)\n    print('Using %d GPU,' % torch.cuda.device_count(), 'Indices: %s' % args.gpu)\n    print('=' * 27)\n\n    ''' === Restore Model from Pre-Trained Checkpoints: OcCo/Jigsaw etc === '''\n    if args.restore:\n        checkpoint = torch.load(args.restore_path)\n        classifier = copy_parameters(classifier, checkpoint, verbose=True)\n        MyLogger.logger.info('Use pre-trained weights from %s' % args.restore_path)\n    else:\n        MyLogger.logger.info('No pre-trained weights, start training from scratch...')\n\n    if not args.use_sgd:\n        optimizer = torch.optim.Adam(\n            classifier.parameters(),\n            lr=args.lr,\n            betas=(0.9, 0.999),\n            eps=1e-08,\n            weight_decay=1e-4\n        )\n    else:\n        optimizer = 
torch.optim.SGD(classifier.parameters(),\n                                    lr=args.lr * 100,\n                                    momentum=args.momentum,\n                                    weight_decay=1e-4)\n\n    if args.scheduler == 'cos':\n        scheduler = CosineAnnealingLR(optimizer, T_max=args.epoch, eta_min=1e-3)\n    else:\n        scheduler = StepLR(optimizer, step_size=args.step_size, gamma=args.lr_decay)\n    LEARNING_RATE_CLIP = 0.01 * args.lr\n\n    if args.mode == 'test':\n        with torch.no_grad():\n            classifier.eval()\n            MyLogger.epoch_init(training=False)\n\n            for points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n                points, target = points.float().transpose(2, 1).cuda(), target.long().cuda()\n                if args.model == 'pointnet_cls':\n                    pred, trans_feat = classifier(points)\n                    loss = criterion(pred, target, trans_feat)\n                else:\n                    pred = classifier(points)\n                    loss = criterion(pred, target)\n                MyLogger.step_update(pred.data.max(1)[1].cpu().numpy(),\n                                     target.long().cpu().numpy(),\n                                     loss.cpu().detach().numpy())\n\n            MyLogger.epoch_summary(writer=writer, training=False)\n        sys.exit(\"Test Finished\")\n\n    for epoch in range(MyLogger.epoch, args.epoch + 1):\n\n        ''' === Training === '''\n        MyLogger.epoch_init()\n\n        for points, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n            writer.add_scalar('Learning Rate', scheduler.get_lr()[-1], MyLogger.step)\n\n            # Augmentation, might bring performance gains\n            if args.data_aug:\n                points = random_point_dropout(points.data.numpy())\n                points[:, :, :3] = random_scale_point_cloud(points[:, :, :3])\n                points[:, :, :3] = 
random_shift_point_cloud(points[:, :, :3])\n                points = torch.Tensor(points)\n\n            points, target = points.transpose(2, 1).float().cuda(), target.long().cuda()\n\n            # FP and BP\n            classifier.train()\n            optimizer.zero_grad()\n            if args.model == 'pointnet_cls':\n                pred, trans_feat = classifier(points)\n                loss = criterion(pred, target, trans_feat)\n            else:\n                pred = classifier(points)\n                loss = criterion(pred, target)\n            loss.backward()\n            optimizer.step()\n            MyLogger.step_update(pred.data.max(1)[1].cpu().numpy(),\n                                 target.long().cpu().numpy(),\n                                 loss.cpu().detach().numpy())\n        MyLogger.epoch_summary(writer=writer, training=True)\n\n        ''' === Validating === '''\n        with torch.no_grad():\n            classifier.eval()\n            MyLogger.epoch_init(training=False)\n\n            for points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n                points, target = points.float().transpose(2, 1).cuda(), target.long().cuda()\n                if args.model == 'pointnet_cls':\n                    pred, trans_feat = classifier(points)\n                    loss = criterion(pred, target, trans_feat)\n                else:\n                    pred = classifier(points)\n                    loss = criterion(pred, target)\n                MyLogger.step_update(pred.data.max(1)[1].cpu().numpy(),\n                                     target.long().cpu().numpy(),\n                                     loss.cpu().detach().numpy())\n\n            MyLogger.epoch_summary(writer=writer, training=False)\n            if MyLogger.save_model:\n                state = {\n                    'step': MyLogger.step,\n                    'epoch': MyLogger.best_instance_epoch,\n                    'instance_acc': 
MyLogger.best_instance_acc,\n                    'best_class_acc': MyLogger.best_class_acc,\n                    'best_class_epoch': MyLogger.best_class_epoch,\n                    'model_state_dict': classifier.state_dict(),\n                    'optimizer_state_dict': optimizer.state_dict(),\n                }\n                torch.save(state, MyLogger.savepath)\n\n        scheduler.step()\n        if args.scheduler == 'step':\n            for param_group in optimizer.param_groups:\n                if optimizer.param_groups[0]['lr'] < LEARNING_RATE_CLIP:\n                    param_group['lr'] = LEARNING_RATE_CLIP\n\n    MyLogger.train_summary()\n\n\nif __name__ == '__main__':\n\n    args = parse_args()\n    main(args)\n"
  },
  {
    "path": "OcCo_Torch/train_completion.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/train.py\n#  Ref: https://github.com/WangYueFt/dgcnn/blob/master/tensorflow/train.py\n#  For DGCNN Encoder, We Also Use Adam + StepLR for the Unity and Simplicity\n\n\nimport os, sys, time, torch, shutil, argparse, datetime, importlib, numpy as np\nsys.path.append('utils')\nsys.path.append('models')\nfrom TrainLogger import TrainLogger\nfrom LMDB_DataFlow import lmdb_dataflow\nfrom Torch_Utility import copy_parameters\n# from torch.optim.lr_scheduler import StepLR\nfrom Visu_Utility import plot_pcd_three_views\nfrom torch.utils.tensorboard import SummaryWriter\n\n\ndef parse_args():\n    parser = argparse.ArgumentParser('Point Cloud Completion')\n\n    ''' === Training Setting === '''\n    parser.add_argument('--log_dir', type=str, help='log folder [default: ]')\n    parser.add_argument('--gpu', type=str, default='0', help='GPU [default: 0]')\n    parser.add_argument('--batch_size', type=int, default=32, help='batch size [default: 32]')\n    parser.add_argument('--epoch', type=int, default=50, help='number of epoch [default: 50]')\n    parser.add_argument('--lr', type=float, default=0.0001, help='learning rate [default: 1e-4]')\n    parser.add_argument('--lr_decay', type=float, default=0.7, help='lr decay rate [default: 0.7]')\n    parser.add_argument('--step_size', type=int, default=20, help='lr decay step [default: 20 epoch]')\n    parser.add_argument('--dataset', type=str, default='modelnet', help='dataset [default: modelnet]')\n    parser.add_argument('--restore', action='store_true', help='loaded from restore [default: False]')\n    parser.add_argument('--restore_path', type=str, help='path to saved pre-trained model [default: ]')\n    parser.add_argument('--steps_print', type=int, default=100, help='# steps to print [default: 100]')\n    parser.add_argument('--steps_visu', type=int, default=3456, help='# steps to visual [default: 
3456]')\n    parser.add_argument('--steps_eval', type=int, default=1000, help='# steps to evaluate [default: 1e3]')\n    parser.add_argument('--epochs_save', type=int, default=5, help='# epochs to save [default: 5 epochs]')\n\n    ''' === Model Setting === '''\n    parser.add_argument('--model', type=str, default='pcn_occo', help='model [pcn_occo]')\n    parser.add_argument('--k', type=int, default=20, help='# nearest neighbors in DGCNN [20]')\n    parser.add_argument('--grid_size', type=int, default=4, help='edge length of the 2D grid [4]')\n    parser.add_argument('--grid_scale', type=float, default=0.5, help='scale of the 2D grid [0.5]')\n    parser.add_argument('--num_coarse', type=int, default=1024, help='# points in coarse gt [1024]')\n    parser.add_argument('--emb_dims', type=int, default=1024, help='# dimension of DGCNN encoder [1024]')\n    parser.add_argument('--input_pts', type=int, default=1024, help='# points of occluded inputs [1024]')\n    parser.add_argument('--gt_pts', type=int, default=16384, help='# points of ground truth inputs [16384]')\n\n    return parser.parse_args()\n\n\ndef main(args):\n    os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n    os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu\n\n    ''' === Set up Loggers and Load Data === '''\n    MyLogger = TrainLogger(args, name=args.model.upper(), subfold='completion')\n    os.makedirs(os.path.join(MyLogger.experiment_dir, 'plots'), exist_ok=True)\n    writer = SummaryWriter(os.path.join(MyLogger.experiment_dir, 'runs'))\n\n    MyLogger.logger.info('Load dataset %s' % args.dataset)\n    if args.dataset == 'modelnet':\n        lmdb_train = './data/modelnet/train.lmdb'\n        lmdb_valid = './data/modelnet/test.lmdb'\n    elif args.dataset == 'shapenet':\n        lmdb_train = 'data/shapenet/train.lmdb'\n        lmdb_valid = 'data/shapenet/valid.lmdb'\n    else:\n        raise ValueError(\"Dataset is not available, it should be either ModelNet or ShapeNet\")\n\n    assert (args.gt_pts 
== args.grid_size ** 2 * args.num_coarse)\n    df_train, num_train = lmdb_dataflow(\n        lmdb_train, args.batch_size, args.input_pts, args.gt_pts, is_training=True)\n    df_valid, num_valid = lmdb_dataflow(\n        lmdb_valid, args.batch_size, args.input_pts, args.gt_pts, is_training=False)\n    train_gen, valid_gen = df_train.get_data(), df_valid.get_data()\n    total_steps = num_train // args.batch_size * args.epoch\n\n    ''' === Load Model and Backup Scripts === '''\n    MODEL = importlib.import_module(args.model)\n    shutil.copy(os.path.abspath(__file__), MyLogger.log_dir)\n    shutil.copy('./models/%s.py' % args.model, MyLogger.log_dir)\n\n    # multiple GPUs usage\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    completer = MODEL.get_model(args=args, grid_size=args.grid_size,\n                                grid_scale=args.grid_scale, num_coarse=args.num_coarse).to(device)\n    criterion = MODEL.get_loss().to(device)\n    completer = torch.nn.DataParallel(completer)\n    # nn.DataParallel has its own issues (slow, memory expensive), bearable\n    # some optional advanced solutions: https://zhuanlan.zhihu.com/p/145427849\n    print('=' * 33)\n    print('Using %d GPU,' % torch.cuda.device_count(), 'Indices are: %s' % args.gpu)\n    print('=' * 33)\n\n    ''' === Restore Model from Checkpoints, If there is any === '''\n    if args.restore:\n        checkpoint = torch.load(args.restore_path)\n        completer = copy_parameters(completer, checkpoint, verbose=True)\n        MyLogger.logger.info('Use pre-trained model from %s' % args.restore_path)\n        MyLogger.step, MyLogger.epoch = checkpoint['step'], checkpoint['epoch']\n\n    else:\n        MyLogger.logger.info('No pre-trained model, start training from scratch...')\n\n    ''' IMPORTANT: for completion, no weight decay in Adam, no batch norm in decoder!'''\n    optimizer = torch.optim.Adam(\n        completer.parameters(),\n        lr=args.lr,\n        betas=(0.9, 
0.999),\n        eps=1e-08,\n        weight_decay=0)\n    # weight_decay=1e-4)\n\n    # For the sake of simplicity, we save the momentum decay in the batch norm\n    # scheduler = StepLR(optimizer, step_size=20, gamma=0.7) -> instead we define these manually\n    LEARNING_RATE_CLIP = 0.01 * args.lr\n\n    def vary2fix(inputs, npts, batch_size=args.batch_size, num_point=args.input_pts):\n        \"\"\"upsample/downsample varied input points into fixed length\n        :param inputs: input points cloud\n        :param npts: describe how many points of each input object\n        :param batch_size: training batch size\n        :param num_point: number of points of per occluded object\n        :return: fixed length of points of each object\n        \"\"\"\n\n        inputs_ls = np.split(inputs[0], npts.cumsum())\n        ret_inputs = np.zeros((1, batch_size * num_point, 3))\n        ret_npts = npts.copy()\n\n        for idx, obj in enumerate(inputs_ls[:-1]):\n\n            if len(obj) <= num_point:\n                select_idx = np.concatenate([\n                    np.arange(len(obj)), np.random.choice(len(obj), num_point - len(obj))])\n            else:\n                select_idx = np.arange(len(obj))\n                np.random.shuffle(select_idx)\n\n            ret_inputs[0][idx * num_point:(idx + 1) * num_point] = obj[select_idx].copy()\n            ret_npts[idx] = num_point\n\n        return ret_inputs, ret_npts\n\n    def piecewise_constant(global_step, boundaries, values):\n        \"\"\"substitute for tf.train.piecewise_constant:\n        https://www.tensorflow.org/api_docs/python/tf/compat/v1/train/piecewise_constant\n        global_step can be either training epoch or training step\n        \"\"\"\n        if len(boundaries) != len(values) - 1:\n            raise ValueError(\n                \"The length of boundaries should be 1 less than the length of values\")\n\n        if global_step <= boundaries[0]:\n            return values[0]\n        elif global_step 
> boundaries[-1]:\n            return values[-1]\n        else:\n            for low, high, v in zip(boundaries[:-1], boundaries[1:], values[1:-1]):\n                if (global_step > low) & (global_step <= high):\n                    return v\n\n    total_time, train_start = 0, time.time()\n    for step in range(MyLogger.step + 1, total_steps + 1):\n\n        ''' === Training === '''\n        start = time.time()\n        epoch = step * args.batch_size // num_train + 1\n        lr = max(args.lr * (args.lr_decay ** (epoch // args.step_size)), LEARNING_RATE_CLIP)\n        for param_group in optimizer.param_groups:\n            param_group['lr'] = lr\n        # follow the original alpha setting for ShapeNet Dataset in PCN paper:\n        alpha = piecewise_constant(step, [10000, 20000, 50000], [0.01, 0.1, 0.5, 1.0])\n        writer.add_scalar('Learning Rate', lr, step)\n        writer.add_scalar('Alpha', alpha, step)\n\n        ids, inputs, npts, gt = next(train_gen)\n        if args.dataset == 'shapenet':\n            inputs, _ = vary2fix(inputs, npts)\n\n        completer.train()\n        optimizer.zero_grad()\n        inputs = inputs.reshape(args.batch_size, args.input_pts, 3)\n        inputs, gt = torch.Tensor(inputs).transpose(2, 1).cuda(), torch.Tensor(gt).cuda()\n        pred_coarse, pred_fine = completer(inputs)\n        loss = criterion(pred_coarse, pred_fine, gt, alpha)\n        loss.backward()\n        optimizer.step()\n        total_time += time.time() - start\n        writer.add_scalar('Loss', loss, step)\n\n        if step % args.steps_print == 0:\n            MyLogger.logger.info('epoch %d  step %d  alpha %.2f  loss %.8f  time per step %.2f s' %\n                                 (epoch, step, alpha, loss, total_time / args.steps_print))\n            total_time = 0\n\n        ''' === Validating === '''\n        if step % args.steps_eval == 0:\n\n            with torch.no_grad():\n                completer.eval()\n                
MyLogger.logger.info('Testing...')\n                num_eval_steps, eval_loss, eval_time = num_valid // args.batch_size, 0, 0\n\n                for eval_step in range(num_eval_steps):\n                    start = time.time()\n                    _, inputs, npts, gt = next(valid_gen)\n                    if args.dataset == 'shapenet':\n                        inputs, _ = vary2fix(inputs, npts)\n\n                    inputs = inputs.reshape(args.batch_size, args.input_pts, 3)\n                    inputs, gt = torch.Tensor(inputs).transpose(2, 1).cuda(), torch.Tensor(gt).cuda()\n\n                    pred_coarse, pred_fine = completer(inputs)\n                    loss = criterion(pred_coarse, pred_fine, gt, alpha)\n                    eval_loss += loss\n                    eval_time += time.time() - start\n\n                MyLogger.logger.info('epoch %d  step %d  validation  loss %.8f  time per step %.2f s' %\n                                     (epoch, step, eval_loss / num_eval_steps, eval_time / num_eval_steps))\n\n        ''' === Visualisation === '''\n        if step % args.steps_visu == 0:\n            all_pcds = [item.detach().cpu().numpy() for item in [\n                inputs.transpose(2, 1), pred_coarse, pred_fine, gt]]\n            for i in range(args.batch_size):\n                plot_path = os.path.join(MyLogger.experiment_dir, 'plots',\n                                         'epoch_%d_step_%d_%s.png' % (epoch, step, ids[i]))\n                pcds = [x[i] for x in all_pcds]\n                plot_pcd_three_views(plot_path, pcds,\n                                     ['input', 'coarse output', 'fine output', 'ground truth'])\n\n        trained_epoch = epoch - 1\n        if (trained_epoch % args.epochs_save == 0) and (trained_epoch != 0) and \\\n                not os.path.exists(os.path.join(MyLogger.checkpoints_dir,\n                                                'model_epoch_%d.pth' % trained_epoch)):\n            state = {\n                'step': 
step,\n                'epoch': epoch,\n                'model_state_dict': completer.state_dict(),\n                'optimizer_state_dict': optimizer.state_dict(),\n            }\n            torch.save(state, os.path.join(MyLogger.checkpoints_dir,\n                                           \"model_epoch_%d.pth\" % trained_epoch))\n            MyLogger.logger.info('Model saved at %s/model_epoch_%d.pth\\n'\n                                 % (MyLogger.checkpoints_dir, trained_epoch))\n\n    MyLogger.logger.info('Training Finished, Total Time: ' +\n                         str(datetime.timedelta(seconds=time.time() - train_start)))\n\n\nif __name__ == '__main__':\n    args = parse_args()\n    main(args)\n"
  },
  {
    "path": "OcCo_Torch/train_jigsaw.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  ref: https://github.com/AnTao97/dgcnn.pytorch/blob/master/main_semseg.py\n#  ref: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/train_semseg.py\n\nimport os, sys, torch, argparse, importlib, shutil\nsys.path.append('models')\nsys.path.append('utils')\nfrom torch.optim.lr_scheduler import CosineAnnealingLR, StepLR\nfrom Torch_Utility import weights_init, bn_momentum_adjust\nfrom ModelNetDataLoader import ModelNetJigsawDataLoader\nfrom torch.utils.tensorboard import SummaryWriter\nfrom torch.utils.data import DataLoader\nfrom TrainLogger import TrainLogger\nfrom tqdm import tqdm\n\n\ndef parse_args():\n    parser = argparse.ArgumentParser('3D Point Cloud Jigsaw Puzzles')\n\n    ''' === Training === '''\n    parser.add_argument('--log_dir', type=str, help='log folder [default: ]')\n    parser.add_argument('--gpu', type=str, default='0', help='GPU [default: 0]')\n    parser.add_argument('--batch_size', type=int, default=32, help='batch size [default: 32]')\n    parser.add_argument('--epoch', default=200, type=int, help='training epochs [default: 200]')\n    parser.add_argument('--lr', default=0.0001, type=float, help='learning rate [default: 1e-4]')\n    parser.add_argument('--optimizer', type=str, default='Adam', help='optimiser [default: Adam]')\n    parser.add_argument('--momentum', type=float, default=0.9, help='SGD momentum [default: 0.9]')\n    parser.add_argument('--lr_decay', type=float, default=0.7, help='lr decay rate [default: 0.7]')\n    parser.add_argument('--bn_decay', action='store_true', help='BN Momentum Decay [default: False]')\n    parser.add_argument('--xavier_init', action='store_true', help='Xavier weight init [default: False]')\n    parser.add_argument('--scheduler', type=str, default='step', help='lr decay scheduler [default: step]')\n    parser.add_argument('--model', type=str, default='pointnet_jigsaw', help='model [default: pointnet_jigsaw]')\n    
parser.add_argument('--step_size', type=int, default=20, help='decay steps for lr [default: every 20 epochs]')\n\n    ''' === Model === '''\n    parser.add_argument('--k', type=int, default=20, help='num of nearest neighbors to use [default: 20]')\n    parser.add_argument('--emb_dims', type=int, default=1024, help='dimension of embeddings [default: 1024]')\n    parser.add_argument('--num_point', type=int, default=1024, help='number of points per object [default: 1024]')\n\n    return parser.parse_args()\n\n\ndef main(args):\n\n    os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n    os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu\n\n    NUM_CLASSES = 3 ** 3\n    DATA_PATH = 'data/modelnet40_ply_hdf5_2048/jigsaw'\n    TRAIN_DATASET = ModelNetJigsawDataLoader(DATA_PATH, split='train', n_points=args.num_point, k=3)\n    TEST_DATASET = ModelNetJigsawDataLoader(DATA_PATH, split='test', n_points=args.num_point, k=3)\n    trainDataLoader = DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4)\n    testDataLoader = DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=4)\n    MyLogger = TrainLogger(args, name=args.model.upper(), subfold='jigsaw')\n\n    ''' === MODEL LOADING === '''\n    MODEL = importlib.import_module(args.model)\n    shutil.copy(os.path.abspath(__file__), MyLogger.log_dir)\n    shutil.copy('./models/%s.py' % args.model, MyLogger.log_dir)\n    writer = SummaryWriter(os.path.join(MyLogger.experiment_dir, 'runs'))\n\n    # allow multiple GPUs\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    classifier = MODEL.get_model(args=args, num_class=NUM_CLASSES, num_channel=3).to(device)\n    criterion = MODEL.get_loss().to(device)\n    classifier = torch.nn.DataParallel(classifier)\n    print('=' * 33)\n    print('Using %d GPU,' % torch.cuda.device_count(), 'Indices: %s' % args.gpu)\n    print('=' * 33)\n\n    if args.xavier_init:\n        classifier = 
classifier.apply(weights_init)\n        MyLogger.logger.info(\"Using Xavier Weight Initialisation\")\n\n    if args.optimizer == 'Adam':\n        optimizer = torch.optim.Adam(\n            classifier.parameters(),\n            lr=args.lr,\n            betas=(0.9, 0.999),\n            eps=1e-08,\n            weight_decay=1e-4)\n        MyLogger.logger.info(\"Using Adam Optimiser\")\n    else:\n        optimizer = torch.optim.SGD(\n            classifier.parameters(),\n            lr=1000 * args.lr,\n            momentum=args.momentum)\n        MyLogger.logger.info(\"Using SGD Optimiser\")\n\n    LEARNING_RATE_CLIP = 1e-5\n    MOMENTUM_ORIGINAL = 0.1\n    MOMENTUM_DECAY = 0.5\n    MOMENTUM_DECAY_STEP = args.step_size\n\n    if args.scheduler == 'cos':\n        scheduler = CosineAnnealingLR(optimizer, T_max=args.epoch, eta_min=1e-3)\n    else:\n        scheduler = StepLR(optimizer, step_size=args.step_size, gamma=0.7)\n\n    for epoch in range(MyLogger.epoch, args.epoch + 1):\n\n        ''' === Training === '''\n        MyLogger.epoch_init(training=True)\n\n        for points, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n            points, target = points.transpose(2, 1).float().cuda(), target.view(-1, 1)[:, 0].long().cuda()\n            classifier.train()\n            optimizer.zero_grad()\n\n            if args.model == 'pointnet_jigsaw':\n                pred, trans_feat = classifier(points)\n                pred = pred.contiguous().view(-1, NUM_CLASSES)\n                loss = criterion(pred, target, trans_feat)\n            else:\n                pred = classifier(points)\n                pred = pred.contiguous().view(-1, NUM_CLASSES)\n                loss = criterion(pred, target)\n\n            loss.backward()\n            optimizer.step()\n            # pdb.set_trace()\n            MyLogger.step_update(pred.data.max(1)[1].cpu().numpy(),\n                                 target.long().cpu().numpy(),\n                             
    loss.cpu().detach().numpy())\n        MyLogger.epoch_summary(writer=writer, training=True)\n\n        ''' === Evaluation === '''\n        with torch.no_grad():\n            classifier.eval()\n            MyLogger.epoch_init(training=False)\n\n            for points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n                points, target = points.transpose(2, 1).float().cuda(), target.view(-1, 1)[:, 0].long().cuda()\n                if args.model == 'pointnet_jigsaw':\n                    pred, trans_feat = classifier(points)\n                    pred = pred.contiguous().view(-1, NUM_CLASSES)\n                    loss = criterion(pred, target, trans_feat)\n                else:\n                    pred = classifier(points)\n                    pred = pred.contiguous().view(-1, NUM_CLASSES)\n                    loss = criterion(pred, target)\n                MyLogger.step_update(pred.data.max(1)[1].cpu().numpy(),\n                                     target.long().cpu().numpy(),\n                                     loss.cpu().detach().numpy())\n            MyLogger.epoch_summary(writer=writer, training=False)\n\n            if MyLogger.save_model:\n                state = {\n                    'step': MyLogger.step,\n                    'epoch': MyLogger.best_instance_epoch,\n                    'instance_acc': MyLogger.best_instance_acc,\n                    'model_state_dict': classifier.state_dict(),\n                    'optimizer_state_dict': optimizer.state_dict(),\n                }\n                torch.save(state, MyLogger.savepath)\n\n        scheduler.step()\n        if args.scheduler == 'step':\n            for param_group in optimizer.param_groups:\n                if optimizer.param_groups[0]['lr'] < LEARNING_RATE_CLIP:\n                    param_group['lr'] = LEARNING_RATE_CLIP\n        if args.bn_decay:\n            momentum = MOMENTUM_ORIGINAL * (MOMENTUM_DECAY ** (epoch // MOMENTUM_DECAY_STEP))\n            
if momentum < 0.01:\n                momentum = 0.01\n            print('BN momentum updated to: %f' % momentum)\n            classifier = classifier.apply(lambda x: bn_momentum_adjust(x, momentum))\n\n    MyLogger.train_summary()\n\n\nif __name__ == '__main__':\n\n    args = parse_args()\n    main(args)\n\n"
  },
  {
    "path": "OcCo_Torch/train_partseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/train_partseg.py\n\nimport os, sys, torch, shutil, importlib, argparse, numpy as np\nsys.path.append('utils')\nsys.path.append('models')\nfrom PC_Augmentation import random_scale_point_cloud, random_shift_point_cloud\nfrom Torch_Utility import copy_parameters, weights_init, bn_momentum_adjust\nfrom torch.optim.lr_scheduler import StepLR, CosineAnnealingLR\nfrom torch.utils.tensorboard import SummaryWriter\nfrom ShapeNetDataLoader import PartNormalDataset\nfrom torch.utils.data import DataLoader\nfrom TrainLogger import TrainLogger\nfrom tqdm import tqdm\n\n\nseg_classes = {'Earphone':   [16, 17, 18],\n               'Motorbike':  [30, 31, 32, 33, 34, 35],\n               'Rocket':     [41, 42, 43],\n               'Car':        [8, 9, 10, 11],\n               'Laptop':     [28, 29],\n               'Cap':        [6, 7],\n               'Skateboard': [44, 45, 46],\n               'Mug':        [36, 37],\n               'Guitar':     [19, 20, 21],\n               'Bag':        [4, 5],\n               'Lamp':       [24, 25, 26, 27],\n               'Table':      [47, 48, 49],\n               'Airplane':   [0, 1, 2, 3],\n               'Pistol':     [38, 39, 40],\n               'Chair':      [12, 13, 14, 15],\n               'Knife':      [22, 23]}\nseg_label_to_cat = {}  # {0:Airplane, 1:Airplane, ..., 49:Table}\nfor cat in seg_classes.keys():\n    for label in seg_classes[cat]:\n        seg_label_to_cat[label] = cat\n\n\ndef parse_args():\n    parser = argparse.ArgumentParser('Model')\n    parser.add_argument('--log_dir', type=str, help='log folder [default: ]')\n    parser.add_argument('--gpu', type=str, default='0', help='GPU [default: 0]')\n    parser.add_argument('--mode', type=str, default='train', help='train or test')\n    parser.add_argument('--epoch', default=250, type=int, help=' epochs [default: 250]')\n    
parser.add_argument('--batch_size', type=int, default=16, help='batch size [default: 16]')\n    parser.add_argument('--lr', default=0.001, type=float, help='learning rate [default: 0.001]')\n    parser.add_argument('--momentum', type=float, default=0.9, help='SGD momentum [default: 0.9]')\n    parser.add_argument('--restore_path', type=str, help='path to pretrained weights [default: ]')\n    parser.add_argument('--lr_decay', type=float, default=0.5, help='lr decay rate [default: 0.5]')\n    parser.add_argument('--num_point', type=int, default=2048, help='point number [default: 2048]')\n    parser.add_argument('--restore', action='store_true', help='using pre-trained [default: False]')\n    parser.add_argument('--use_sgd', action='store_true', help='use SGD optimiser [default: False]')\n    parser.add_argument('--data_aug', action='store_true', help='data augmentation [default: False]')\n    parser.add_argument('--scheduler', default='step', help='learning rate scheduler [default: step]')\n    parser.add_argument('--model', default='pointnet_partseg', help='model [default: pointnet_partseg]')\n    parser.add_argument('--dropout', type=float, default=0.5, help='dropout rate in FCs [default: 0.5]')\n    parser.add_argument('--bn_decay', action='store_true', help='use BN nomentum decay [default: False]')\n    parser.add_argument('--xavier_init', action='store_true', help='Xavier weight init [default: False]')\n    parser.add_argument('--emb_dims', type=int, default=1024, help='embedding dimensions [default: 1024]')\n    parser.add_argument('--k', type=int, default=20, help='num of nearest neighbors to use [default: 20]')\n    parser.add_argument('--normal', action='store_true', default=False, help='use normal [default: False]')\n    parser.add_argument('--step_size', type=int, default=20, help='lr decay step [default: every 20 epochs]')\n    parser.add_argument('--num_votes', type=int, default=3, help='aggregate test predictions via vote [default: 3]')\n\n    return 
parser.parse_args()\n\n\ndef main(args):\n    os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n    os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu\n\n    def to_categorical(y, num_class):\n        \"\"\" 1-hot encodes a tensor \"\"\"\n        new_y = torch.eye(num_class)[y.cpu().data.numpy(), ]\n        if y.is_cuda:\n            return new_y.cuda()\n        return new_y\n\n    ''' === Set up Loggers and Load Data === '''\n    MyLogger = TrainLogger(args, name=args.model.upper(), subfold='partseg',\n                           filename=args.mode + '_log', cls2name=seg_label_to_cat)\n    writer = SummaryWriter(os.path.join(MyLogger.experiment_dir, 'runs'))\n    root = 'data/shapenetcore_partanno_segmentation_benchmark_v0_normal/'\n\n    TRAIN_DATASET = PartNormalDataset(root=root, num_point=args.num_point, split='trainval', use_normal=args.normal)\n    TEST_DATASET = PartNormalDataset(root=root, num_point=args.num_point, split='test', use_normal=args.normal)\n    trainDataLoader = DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4)\n    testDataLoader = DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=4)\n\n    num_classes, num_part = 16, 50\n\n    ''' === Load Model and Backup Scripts === '''\n    channel_num = 6 if args.normal else 3\n    MODEL = importlib.import_module(args.model)\n    shutil.copy(os.path.abspath(__file__), MyLogger.log_dir)\n    shutil.copy('./models/%s.py' % args.model, MyLogger.log_dir)\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    classifier = MODEL.get_model(part_num=num_part, num_channel=channel_num, args=args).cuda().to(device)\n    criterion = MODEL.get_loss().to(device)\n    classifier = torch.nn.DataParallel(classifier)\n\n    if args.restore:\n        checkpoint = torch.load(args.restore_path)\n        classifier = copy_parameters(classifier, checkpoint, verbose=True)\n        MyLogger.logger.info('Use pre-trained weights from %s' % 
args.restore_path)\n    else:\n        MyLogger.logger.info('No pre-trained weights, start training from scratch...')\n        if args.xavier_init:\n            classifier = classifier.apply(weights_init)\n            MyLogger.logger.info(\"Using Xavier weight initialisation\")\n\n    if args.mode == 'test':\n        MyLogger.logger.info('\\n\\n')\n        MyLogger.logger.info('=' * 33)\n        MyLogger.logger.info('load parrameters from %s' % args.restore_path)\n        with torch.no_grad():\n            test_metrics = {}\n            total_correct, total_seen = 0, 0\n            total_seen_class = [0 for _ in range(num_part)]\n            total_correct_class = [0 for _ in range(num_part)]\n            shape_ious = {cat: [] for cat in seg_classes.keys()}  # {shape: []}\n\n            for points, label, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n                classifier.eval()\n                cur_batch_size, num_point, _ = points.size()\n                vote_pool = torch.zeros(cur_batch_size, num_point, num_part).cuda()  # (batch, num point, num part)\n                points, label, target = points.transpose(2, 1).float().cuda(), label.long().cuda(), target.numpy()\n                \n                ''' === generate predictions from raw output (multiple via voting) === '''\n                for _ in range(args.num_votes):\n                    if args.model == 'pointnet_partseg':\n                        seg_pred, _ = classifier(points, to_categorical(label, num_classes))\n                    else:\n                        seg_pred = classifier(points, to_categorical(label, num_classes))\n                    vote_pool += seg_pred  # added on probability\n                \n                seg_pred = vote_pool / args.num_votes\n                cur_pred_val_logits = seg_pred.cpu().data.numpy()\n                cur_pred_val = np.zeros((cur_batch_size, num_point)).astype(np.int32)\n\n                for i in range(cur_batch_size):\n     
               cat = seg_label_to_cat[target[i, 0]]  # str, shape name\n                    logits = cur_pred_val_logits[i, :, :]  # array, (num point, num part)\n                    cur_pred_val[i, :] = np.argmax(logits[:, seg_classes[cat]], 1) + seg_classes[cat][0] \n                    # only consider parts from that shape\n\n                ''' === calculate accuracy === '''\n                total_correct += np.sum(cur_pred_val == target)\n                total_seen += (cur_batch_size * num_point)\n\n                for l in range(num_part):\n                    total_seen_class[l] += np.sum(target == l)\n                    total_correct_class[l] += (np.sum((cur_pred_val == l) & (target == l)))\n\n                ''' === calculate iou === '''\n                for i in range(cur_batch_size):\n                    segl = target[i, :]  # array, (num point, )\n                    segp = cur_pred_val[i, :]  # array, (num point, )\n                    cat = seg_label_to_cat[segl[0]]  # str, shape name\n                    part_ious = [0. 
for _ in range(len(seg_classes[cat]))]  # parts belong to that shape\n                    \n                    for l in seg_classes[cat]:\n                        if (np.sum(segl == l) == 0) and (np.sum(segp == l) == 0):  # no prediction or gt\n                            part_ious[l - seg_classes[cat][0]] = 1.0\n                        else:\n                            iou = np.sum((segl == l) & (segp == l)) / float(np.sum((segl == l) | (segp == l)))\n                            part_ious[l - seg_classes[cat][0]] = iou\n                    shape_ious[cat].append(np.mean(part_ious))\n\n        all_shape_ious = []\n        for cat in shape_ious.keys():\n            for iou in shape_ious[cat]:\n                all_shape_ious.append(iou)\n            shape_ious[cat] = np.mean(shape_ious[cat])\n            \n        mean_shape_ious = np.mean(list(shape_ious.values()))\n        test_metrics['class_avg_iou'] = mean_shape_ious\n        test_metrics['instance_avg_iou'] = np.mean(all_shape_ious)\n        test_metrics['accuracy'] = total_correct / float(total_seen)\n        test_metrics['class_avg_accuracy'] = np.mean(\n            np.array(total_correct_class) / np.array(total_seen_class, dtype=np.float64))\n        for cat in sorted(shape_ious.keys()):\n            MyLogger.logger.info('test mIoU of %-14s %f' % (cat, shape_ious[cat]))\n\n        MyLogger.logger.info('Accuracy is: %.5f' % test_metrics['accuracy'])\n        MyLogger.logger.info('Class avg accuracy is: %.5f' % test_metrics['class_avg_accuracy'])\n        MyLogger.logger.info('Class avg mIoU is: %.5f' % test_metrics['class_avg_iou'])\n        MyLogger.logger.info('Instance avg mIoU is: %.5f' % test_metrics['instance_avg_iou'])\n        sys.exit(\"Test Finished\")\n\n    if not args.use_sgd:\n        optimizer = torch.optim.Adam(\n            classifier.parameters(),\n            lr=args.lr,\n            betas=(0.9, 0.999),\n            eps=1e-08,\n            weight_decay=1e-4)\n    else:\n        optimizer = 
torch.optim.SGD(classifier.parameters(),\n                                    lr=args.lr * 100,\n                                    momentum=args.momentum,\n                                    weight_decay=1e-4)\n    if args.scheduler == 'step':\n        scheduler = StepLR(optimizer, step_size=args.step_size, gamma=args.lr_decay)\n    else:\n        scheduler = CosineAnnealingLR(optimizer, T_max=args.epoch, eta_min=1e-3)\n\n    LEARNING_RATE_CLIP = 1e-5\n    MOMENTUM_ORIGINAL = 0.1\n    MOMENTUM_DECAY = 0.5\n    MOMENTUM_DECAY_STEP = args.step_size\n\n    for epoch in range(MyLogger.epoch, args.epoch + 1):\n\n        MyLogger.epoch_init()\n\n        for points, label, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n\n            if args.data_aug:\n                points = points.data.numpy()\n                points[:, :, :3] = random_scale_point_cloud(points[:, :, 0:3])\n                points[:, :, :3] = random_shift_point_cloud(points[:, :, 0:3])\n                points = torch.Tensor(points)\n\n            points, label, target = points.transpose(2, 1).float().cuda(), label.long().cuda(), \\\n                                    target.view(-1, 1)[:, 0].long().cuda()\n            classifier.train()\n            optimizer.zero_grad()\n            if args.model == 'pointnet_partseg':\n                seg_pred, trans_feat = classifier(points, to_categorical(label, num_classes))\n                seg_pred = seg_pred.contiguous().view(-1, num_part)\n                loss = criterion(seg_pred, target, trans_feat)\n            else:\n                seg_pred = classifier(points, to_categorical(label, num_classes))\n                seg_pred = seg_pred.contiguous().view(-1, num_part)\n                loss = criterion(seg_pred, target)\n\n            loss.backward()\n            optimizer.step()\n            MyLogger.step_update(seg_pred.data.max(1)[1].cpu().numpy(),\n                                 target.long().cpu().numpy(),\n                
                 loss.cpu().detach().numpy())\n        MyLogger.epoch_summary(writer=writer, training=True, mode='partseg')\n\n        '''=== Evaluating ==='''\n        with torch.no_grad():\n\n            classifier.eval()\n            MyLogger.epoch_init(training=False)\n\n            for points, label, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n                cur_batch_size, NUM_POINT, _ = points.size()\n                points, label, target = points.transpose(2, 1).float().cuda(), label.long().cuda(), \\\n                                        target.view(-1, 1)[:, 0].long().cuda()\n                if args.model == 'pointnet_partseg':\n                    seg_pred, trans_feat = classifier(points, to_categorical(label, num_classes))\n                    seg_pred = seg_pred.contiguous().view(-1, num_part)\n                    loss = criterion(seg_pred, target, trans_feat)\n                else:\n                    seg_pred = classifier(points, to_categorical(label, num_classes))\n                    seg_pred = seg_pred.contiguous().view(-1, num_part)\n                    loss = criterion(seg_pred, target)\n\n                MyLogger.step_update(seg_pred.data.max(1)[1].cpu().numpy(),\n                                     target.long().cpu().numpy(),\n                                     loss.cpu().detach().numpy())\n            \n            MyLogger.epoch_summary(writer=writer, training=False, mode='partseg')\n\n            if MyLogger.save_model:\n                state = {\n                    'step': MyLogger.step,\n                    'miou': MyLogger.best_miou,\n                    'epoch': MyLogger.best_miou_epoch,\n                    'model_state_dict': classifier.state_dict(),\n                    'optimizer_state_dict': optimizer.state_dict()}\n                torch.save(state, MyLogger.savepath)\n\n            if epoch % 5 == 0:\n                state = {\n                    'model_state_dict': 
classifier.state_dict(),\n                    'optimizer_state_dict': optimizer.state_dict()}\n                torch.save(state, MyLogger.savepath.replace('best_model', 'model_ep%d' % epoch))\n\n            scheduler.step()\n            if args.scheduler == 'step':\n                for param_group in optimizer.param_groups:\n                    if optimizer.param_groups[0]['lr'] < LEARNING_RATE_CLIP:\n                        param_group['lr'] = LEARNING_RATE_CLIP\n            if args.bn_decay:\n                momentum = MOMENTUM_ORIGINAL * (MOMENTUM_DECAY ** (epoch // MOMENTUM_DECAY_STEP))\n                if momentum < 0.01:\n                    momentum = 0.01\n                print('BN momentum updated to: %f' % momentum)\n                classifier = classifier.apply(lambda x: bn_momentum_adjust(x, momentum))\n\n\nif __name__ == '__main__':\n    args = parse_args()\n    main(args)\n"
  },
  {
    "path": "OcCo_Torch/train_semseg.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  ref: https://github.com/charlesq34/pointnet/blob/master/sem_seg/train.py\n#  ref: https://github.com/AnTao97/dgcnn.pytorch/blob/master/main_semseg.py\n#  ref: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/train_semseg.py\n\nimport os, sys, torch, shutil, argparse, importlib\nsys.path.append('utils')\nsys.path.append('models')\nfrom Torch_Utility import copy_parameters, weights_init, bn_momentum_adjust\nfrom torch.optim.lr_scheduler import CosineAnnealingLR, StepLR\nfrom torch.utils.tensorboard import SummaryWriter\nfrom S3DISDataLoader import S3DISDataset_HDF5\nfrom torch.utils.data import DataLoader\nfrom TrainLogger import TrainLogger\nfrom tqdm import tqdm\n\n\nclasses = ['ceiling', 'floor', 'wall', 'beam', 'column',\n           'window', 'door', 'table', 'chair', 'sofa',\n           'bookcase', 'board', 'clutter']\nclass2label = {cls: i for i, cls in enumerate(classes)}\nseg_classes = class2label\nseg_label_to_cat = {}\nfor i, cat in enumerate(seg_classes.keys()):\n    seg_label_to_cat[i] = cat\n\n\ndef parse_args():\n    parser = argparse.ArgumentParser(description='Point Cloud Semantic Segmentation')\n\n    parser.add_argument('--log_dir', type=str, help='log path [default: ]')\n    parser.add_argument('--gpu', type=str, default='0', help='GPU [default: 0]')\n    parser.add_argument('--mode', type=str, default='train', help='train or test')\n    parser.add_argument('--batch_size', type=int, default=24, help='batch size [default: 24]')\n    parser.add_argument('--test_area', type=int, default=5, help='test area, 1-6 [default: 5]')\n    parser.add_argument('--epoch', default=100, type=int, help='training epochs [default: 100]')\n    parser.add_argument('--lr', type=float, default=0.001, help='learning rate [default: 0.001]')\n    parser.add_argument('--momentum', type=float, default=0.9, help='SGD momentum [default: 0.9]')\n    parser.add_argument('--lr_decay', 
type=float, default=0.5, help='lr decay rate [default: 0.5]')\n    parser.add_argument('--restore', action='store_true', help='restore the weights [default: False]')\n    parser.add_argument('--restore_path', type=str, help='path to pre-saved model weights [default: ]')\n    parser.add_argument('--dropout', type=float, default=0.5, help='dropout rate in FCs [default: 0.5]')\n    parser.add_argument('--bn_decay', action='store_true', help='use BN Momentum Decay [default: False]')\n    parser.add_argument('--xavier_init', action='store_true', help='Xavier weight init [default: False]')\n    parser.add_argument('--emb_dims', type=int, default=1024, help='embedding dimensions [default: 1024]')\n    parser.add_argument('--k', type=int, default=20, help='num of nearest neighbors to use [default: 20]')\n    parser.add_argument('--step_size', type=int, default=40, help='lr decay steps [default: every 40 epochs]')\n    parser.add_argument('--scheduler', type=str, default='cos', help='lr decay scheduler [default: cos, step]')\n    parser.add_argument('--model', type=str, default='pointnet_semseg', help='model [default: pointnet_semseg]')\n    parser.add_argument('--optimizer', type=str, default='adam', help='optimiser [default: adam, otherwise sgd]')\n\n    return parser.parse_args()\n\n\ndef main(args):\n\n    os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n    os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu\n\n    root = 'data/indoor3d_sem_seg_hdf5_data'\n    NUM_CLASSES = len(seg_label_to_cat)\n\n    TRAIN_DATASET = S3DISDataset_HDF5(root=root, split='train', test_area=args.test_area)\n    TEST_DATASET = S3DISDataset_HDF5(root=root, split='test', test_area=args.test_area)\n    trainDataLoader = DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4)\n    testDataLoader = DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=4)\n\n    MyLogger = TrainLogger(args, name=args.model.upper(), subfold='semseg',\n           
                cls2name=class2label, filename=args.mode + '_log')\n    MyLogger.logger.info(\"The number of training data is: %d\" % len(TRAIN_DATASET))\n    MyLogger.logger.info(\"The number of testing data is: %d\" % len(TEST_DATASET))\n\n    ''' === Model Loading === '''\n    MODEL = importlib.import_module(args.model)\n    shutil.copy(os.path.abspath(__file__), MyLogger.log_dir)\n    shutil.copy('./models/%s.py' % args.model, MyLogger.log_dir)\n    writer = SummaryWriter(os.path.join(MyLogger.experiment_dir, 'runs'))\n\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    classifier = MODEL.get_model(num_class=NUM_CLASSES, num_channel=9, args=args).to(device)\n    criterion = MODEL.get_loss().to(device)\n    classifier = torch.nn.DataParallel(classifier)\n    print('=' * 27)\n    print('Using %d GPU,' % torch.cuda.device_count(), 'Indices: %s' % args.gpu)\n    print('=' * 27)\n\n    if args.restore:\n        checkpoint = torch.load(args.restore_path)\n        classifier = copy_parameters(classifier, checkpoint, verbose=True)\n        MyLogger.logger.info('Use pre-trained weights from %s' % args.restore_path)\n    else:\n        MyLogger.logger.info('No pre-trained weights, start training from scratch...')\n        if args.xavier_init:\n            classifier = classifier.apply(weights_init)\n            MyLogger.logger.info(\"Using Xavier weight initialisation\")\n\n    if args.optimizer == 'adam':\n        optimizer = torch.optim.Adam(\n            classifier.parameters(),\n            lr=args.lr,\n            betas=(0.9, 0.999),\n            eps=1e-08,\n            weight_decay=1e-4)\n        MyLogger.logger.info(\"Using Adam optimiser\")\n    else:\n        optimizer = torch.optim.SGD(\n            classifier.parameters(),\n            lr=args.lr * 100,\n            momentum=args.momentum)\n        MyLogger.logger.info(\"Using SGD optimiser\")\n    # using the similar lr decay setting from\n    # 
https://github.com/charlesq34/pointnet/blob/master/sem_seg/train.py\n    # scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=40, gamma=0.5)\n\n    if args.scheduler == 'cos':\n        scheduler = CosineAnnealingLR(optimizer, T_max=args.epoch, eta_min=1e-3)\n    else:\n        scheduler = StepLR(optimizer, step_size=args.step_size, gamma=args.lr_decay)\n\n    LEARNING_RATE_CLIP = 1e-5\n    MOMENTUM_ORIGINAL = 0.1\n    MOMENTUM_DECAY = 0.5\n    MOMENTUM_DECAY_STEP = args.step_size\n\n    ''' === Testing then Exit === '''\n    if args.mode == 'test':\n        with torch.no_grad():\n            classifier.eval()\n            MyLogger.epoch_init(training=False)\n\n            for points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n                points, target = points.transpose(2, 1).float().cuda(), target.view(-1, 1)[:, 0].long().cuda()\n                if args.model == 'pointnet_semseg':\n                    seg_pred, trans_feat = classifier(points)\n                    seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)\n                    loss = criterion(seg_pred, target, trans_feat)\n                else:\n                    seg_pred = classifier(points)\n                    seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)\n                    loss = criterion(seg_pred, target)\n                MyLogger.step_update(seg_pred.data.max(1)[1].cpu().numpy(),\n                                     target.long().cpu().numpy(),\n                                     loss.cpu().detach().numpy())\n\n            MyLogger.epoch_summary(writer=writer, training=False, mode='semseg')\n        sys.exit(\"Test Finished\")\n\n    for epoch in range(MyLogger.epoch, args.epoch + 1):\n\n        ''' === Training === '''\n        # scheduler.step()\n        MyLogger.epoch_init()\n\n        for points, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n            writer.add_scalar('learning rate', 
scheduler.get_lr()[-1], MyLogger.step)\n            points, target = points.float().transpose(2, 1).cuda(), target.view(-1, 1)[:, 0].long().cuda()\n\n            classifier.train()\n            optimizer.zero_grad()\n            # pdb.set_trace()\n            if args.model == 'pointnet_semseg':\n                seg_pred, trans_feat = classifier(points)\n                seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)\n                loss = criterion(seg_pred, target, trans_feat)\n            else:\n                seg_pred = classifier(points)\n                seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)\n                loss = criterion(seg_pred, target)\n\n            loss.backward()\n            optimizer.step()\n\n            MyLogger.step_update(seg_pred.data.max(1)[1].cpu().numpy(),\n                                 target.long().cpu().numpy(),\n                                 loss.cpu().detach().numpy())\n        MyLogger.epoch_summary(writer=writer, training=True, mode='semseg')\n\n        '''=== Evaluating ==='''\n        with torch.no_grad():\n            classifier.eval()\n            MyLogger.epoch_init(training=False)\n\n            for points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n                points, target = points.transpose(2, 1).float().cuda(), target.view(-1, 1)[:, 0].long().cuda()\n                if args.model == 'pointnet_semseg':\n                    seg_pred, trans_feat = classifier(points)\n                    seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)\n                    loss = criterion(seg_pred, target, trans_feat)\n                else:\n                    seg_pred = classifier(points)\n                    seg_pred = seg_pred.contiguous().view(-1, NUM_CLASSES)\n                    loss = criterion(seg_pred, target)\n                MyLogger.step_update(seg_pred.data.max(1)[1].cpu().numpy(),\n                                     target.long().cpu().numpy(),\n         
                            loss.cpu().detach().numpy())\n\n            MyLogger.epoch_summary(writer=writer, training=False, mode='semseg')\n            if MyLogger.save_model:\n                state = {\n                    'step': MyLogger.step,\n                    'miou': MyLogger.best_miou,\n                    'epoch': MyLogger.best_miou_epoch,\n                    'model_state_dict': classifier.state_dict(),\n                    'optimizer_state_dict': optimizer.state_dict(),\n                }\n                torch.save(state, MyLogger.savepath)\n\n        scheduler.step()\n        if args.scheduler == 'step':\n            for param_group in optimizer.param_groups:\n                if optimizer.param_groups[0]['lr'] < LEARNING_RATE_CLIP:\n                    param_group['lr'] = LEARNING_RATE_CLIP\n        if args.bn_decay:\n            momentum = MOMENTUM_ORIGINAL * (MOMENTUM_DECAY ** (epoch // MOMENTUM_DECAY_STEP))\n            if momentum < 0.01:\n                momentum = 0.01\n            print('BN momentum updated to: %f' % momentum)\n            classifier = classifier.apply(lambda x: bn_momentum_adjust(x, momentum))\n\n    MyLogger.train_summary(mode='semseg')\n\n\nif __name__ == '__main__':\n    args = parse_args()\n    main(args)\n"
  },
  {
    "path": "OcCo_Torch/train_svm.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://scikit-learn.org/stable/modules/svm.html\n#  Ref: https://scikit-learn.org/stable/modules/classes.html#module-sklearn.model_selection\n\nimport os, sys, torch, argparse, datetime, importlib, numpy as np\nsys.path.append('utils')\nsys.path.append('models')\nfrom sklearn.model_selection import GridSearchCV, RandomizedSearchCV\nfrom ModelNetDataLoader import General_CLSDataLoader_HDF5\nfrom Torch_Utility import copy_parameters\n# from sklearn.preprocessing import scale\nfrom torch.utils.data import DataLoader\nfrom Dataset_Loc import Dataset_Loc\nfrom sklearn import svm, metrics\nfrom tqdm import tqdm\n\n\ndef parse_args():\n    parser = argparse.ArgumentParser('SVM on Point Cloud Classification')\n\n    ''' === Network Model === '''\n    parser.add_argument('--gpu', type=str, default='0', help='GPU [default: 0]')\n    parser.add_argument('--model', default='pcn_util', help='model [default: pcn_util]')\n    parser.add_argument('--batch_size', type=int, default=24, help='batch size [default: 24]')\n    parser.add_argument('--restore_path', type=str, help=\"path to pre-trained weights [default: None]\")\n    parser.add_argument('--grid_search', action='store_true', help='opt parameters via Grid Search [default: False]')\n\n    ''' === Dataset === '''\n    parser.add_argument('--partial', action='store_true', help='partial objects [default: False]')\n    parser.add_argument('--bn', action='store_true', help='with background noise [default: False]')\n    parser.add_argument('--dataset', type=str, default='modelnet40', help='dataset [default: modelnet40]')\n    parser.add_argument('--fname', type=str, default=\"\", help='filename, used in ScanObjectNN [default: ]')\n\n    return parser.parse_args()\n\n\nif __name__ == \"__main__\":\n    args = parse_args()\n\n    os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n    os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu\n\n    _, TRAIN_FILES, 
TEST_FILES = Dataset_Loc(dataset=args.dataset, fname=args.fname,\n                                             partial=args.partial, bn=args.bn)\n    TRAIN_DATASET = General_CLSDataLoader_HDF5(file_list=TRAIN_FILES)\n    TEST_DATASET = General_CLSDataLoader_HDF5(file_list=TEST_FILES)\n    trainDataLoader = DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4)\n    testDataLoader = DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=False, num_workers=4)\n\n    MODEL = importlib.import_module(args.model)\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    encoder = MODEL.encoder(args=args, num_channel=3).to(device)\n    encoder = torch.nn.DataParallel(encoder)\n\n    checkpoint = torch.load(args.restore_path)\n    encoder = copy_parameters(encoder, checkpoint, verbose=True)\n\n    X_train, y_train, X_test, y_test = [], [], [], []\n    with torch.no_grad():\n        encoder.eval()\n\n        for points, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n            points, target = points.float().transpose(2, 1).cuda(), target.long().cuda()\n            feats = encoder(points)\n            X_train.append(feats.cpu().numpy())\n            y_train.append(target.cpu().numpy())\n\n        for points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n            points, target = points.float().transpose(2, 1).cuda(), target.long().cuda()\n            feats = encoder(points)\n            X_test.append(feats.cpu().numpy())\n            y_test.append(target.cpu().numpy())\n\n    X_train, y_train = np.concatenate(X_train), np.concatenate(y_train)\n    X_test, y_test = np.concatenate(X_test), np.concatenate(y_test)\n\n    # Optional: Standardize the Feature Space\n    # X_train, X_test = scale(X_train), scale(X_test)\n\n    ''' === Simple Trial === '''\n    linear_svm = svm.SVC(kernel='linear')\n    linear_svm.fit(X_train, y_train)\n    y_pred = 
linear_svm.predict(X_test)\n    print(\"\\n\", \"Simple Linear SVC accuracy:\", metrics.accuracy_score(y_test, y_pred), \"\\n\")\n\n    rbf_svm = svm.SVC(kernel='rbf')\n    rbf_svm.fit(X_train, y_train)\n    y_pred = rbf_svm.predict(X_test)\n    print(\"Simple RBF SVC accuracy:\", metrics.accuracy_score(y_test, y_pred), \"\\n\")\n\n    ''' === Grid Search for SVM with RBF Kernel === '''\n    if not args.grid_search:\n        sys.exit()\n    print(\"Now we use Grid Search to opt the parameters for SVM RBF kernel\")\n    # [1e-3, 5e-3, 1e-2, ..., 5e1]\n    gamma_range = np.outer(np.logspace(-3, 1, 5), np.array([1, 5])).flatten()\n    # [1e-1, 5e-1, 1e0, ..., 5e1]\n    C_range = np.outer(np.logspace(-1, 1, 3), np.array([1, 5])).flatten()\n    parameters = {'kernel': ['rbf'], 'C': C_range, 'gamma': gamma_range}\n\n    svm_clsf = svm.SVC()\n    grid_clsf = GridSearchCV(estimator=svm_clsf, param_grid=parameters, n_jobs=8, verbose=1)\n\n    start_time = datetime.datetime.now()\n    print('Start Param Searching at {}'.format(str(start_time)))\n    grid_clsf.fit(X_train, y_train)\n    print('Elapsed time, param searching {}'.format(str(datetime.datetime.now() - start_time)))\n    sorted(grid_clsf.cv_results_.keys())\n\n    # scores = grid_clsf.cv_results_['mean_test_score'].reshape(len(C_range), len(gamma_range))\n    y_pred = grid_clsf.best_estimator_.predict(X_test)\n    print(\"\\n\\n\")\n    print(\"=\"*37)\n    print(\"Best Params via Grid Search Cross Validation on Train Split is: \", grid_clsf.best_params_)\n    print(\"Best Model's Accuracy on Test Dataset: {}\".format(metrics.accuracy_score(y_test, y_pred)))\n"
  },
  {
    "path": "OcCo_Torch/utils/3DPC_Data_Gen.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n#  Generating Training Data of 3D Point Cloud for 3D Jigsaw Puzzles\n\nimport os, h5py, numpy as np\n\n'''\nThe 3D object/block is split into voxels along axes, \neach point is assigned with a voxel label.\n'''\n\n\ndef pc_ssl_3djigsaw_gen(pc_xyz, k=2, edge_len=1):\n    \"\"\"\n    :param pc_xyz: point cloud, (n_point, 3 + additional feature)\n    :param k: number of voxels along each axis\n    :param edge_len: length of voxel (cube) edge\n    :return: permuted pc, labels\n    \"\"\"\n    intervals = [edge_len*2 / k * x - edge_len for x in np.arange(k + 1)]\n    assert edge_len >= pc_xyz.__abs__().max()\n    indices = np.searchsorted(intervals, pc_xyz, side='left') - 1\n    label = indices[:, 0] * k ** 2 + indices[:, 1] * k + indices[:, 2]\n\n    shuffle_indices = np.arange(k ** 3)\n    np.random.shuffle(shuffle_indices)\n    shuffled_dict = dict()\n    for i, d in enumerate(shuffle_indices):\n        shuffled_dict[i] = d\n\n    def numberToBase(n, base=k):\n        if n == 0:\n            return [0]\n        digits = []\n        while n:\n            digits.append(str(int(n % base)))\n            n //= base\n        return int(\"\".join(digits[::-1]))\n\n    for voxel_id in range(k ** 3):\n        selected_points = (label == voxel_id)\n        permutated_places = shuffled_dict[voxel_id]\n        loc = permutated_places\n        center_diff = np.array([(loc // k ** 2) - (voxel_id // k ** 2),\n                                (loc // k ** 2) // k - (voxel_id // k ** 2) // k,\n                                loc % k - voxel_id % k]) * (2 * edge_len)/k  # + const - edge_len\n        pc_xyz[selected_points] = pc_xyz[selected_points] + center_diff\n\n    return pc_xyz, label\n\n\nif __name__ == \"__main__\":\n    root_dir = r'./data/modelnet40_ply_hdf5_2048'\n    dir_path = r'./data/modelnet40_ply_hdf5_2048/jigsaw/k2'\n    os.mkdir(dir_path) if not os.path.exists(dir_path) else None\n\n    
TRAIN_FILES = [item.strip() for item in open(os.path.join(root_dir, 'train_files.txt')).readlines()]\n    VALID_FILES = [item.strip() for item in open(os.path.join(root_dir, 'test_files.txt')).readlines()]\n\n\n    def loadh5DataFile(PathtoFile):\n        f = h5py.File(PathtoFile, 'r')\n        return f['data'][:], f['label'][:]\n\n\n    def reduce2fix(pc, n_points=1024):\n        indices = np.arange(len(pc))\n        np.random.shuffle(indices)\n        return pc[indices[:n_points]]\n\n\n    for file_ in VALID_FILES:\n        filename = file_.split('/')[-1]\n        print(filename)\n        data, _ = loadh5DataFile(file_)\n        # subsample all point clouds into 1024 points of each\n        data = np.apply_along_axis(reduce2fix, axis=1, arr=data)\n        shuffled_data = np.zeros_like(data)\n        shuffled_label = np.zeros((data.shape[0], data.shape[1]))\n        for idx, pc_xyz in enumerate(data):\n            pc_xyz, label = pc_ssl_3djigsaw_gen(pc_xyz, k=2, edge_len=1)\n            shuffled_data[idx] = pc_xyz\n            shuffled_label[idx] = label\n        hf = h5py.File(os.path.join(dir_path, filename), 'w')\n\n        hf.create_dataset('label', data=shuffled_label)\n        hf.create_dataset('data', data=shuffled_data)\n        hf.close()\n"
  },
  {
    "path": "OcCo_Torch/utils/Dataset_Loc.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hc.wang96@gmail.com\n#  Modify the path w.r.t your own settings\n\n\ndef Dataset_Loc(dataset, fname, partial=True, bn=False, few_shot=False):\n    def fetch_files(filelist):\n        return [item.strip() for item in open(filelist).readlines()]\n\n    dataset = dataset.lower()\n\n    if dataset == 'shapenet8':\n        NUM_CLASSES = 8\n        if partial:\n            TRAIN_FILES = fetch_files('./data/shapenet/hdf5_partial_1024/train_file.txt')\n            VALID_FILES = fetch_files('./data/shapenet/hdf5_partial_1024/valid_file.txt')\n        else:\n            raise ValueError(\"For ShapeNet we are only interested in the partial objects recognition\")\n\n    elif dataset == 'shapenet10':\n        NUM_CLASSES = 10\n        TRAIN_FILES = fetch_files('./data/ShapeNet10/Cleaned/train_file.txt')\n        VALID_FILES = fetch_files('./data/ShapeNet10/Cleaned/test_file.txt')\n\n    # elif dataset == 'modelnet10':\n    # \tNUM_CLASSES = 10\n    # \tTRAIN_FILES = fetch_files('./data/ModelNet10/Cleaned/train_file.txt')\n    # \tVALID_FILES = fetch_files('./data/ModelNet10/Cleaned/test_file.txt')\n\n    elif dataset == 'modelnet40':\n        '''Actually we find that using data from PointNet++: '''\n        NUM_CLASSES = 40\n        if partial:\n            TRAIN_FILES = fetch_files('./data/modelnet40_pcn/hdf5_partial_1024/train_file.txt')\n            VALID_FILES = fetch_files('./data/modelnet40_pcn/hdf5_partial_1024/test_file.txt')\n        else:\n            VALID_FILES = fetch_files('./data/modelnet40_ply_hdf5_2048/test_files.txt')\n            if few_shot:\n                TRAIN_FILES = fetch_files('./data/modelnet40_ply_hdf5_2048/few_labels/%s.h5' % fname)\n            else:\n                TRAIN_FILES = fetch_files('./data/modelnet40_ply_hdf5_2048/train_files.txt')\n\n    elif dataset == 'scannet10':\n        NUM_CLASSES = 10\n        TRAIN_FILES = 
fetch_files('./data/ScanNet10/ScanNet_Cleaned/train_file.txt')\n        VALID_FILES = fetch_files('./data/ScanNet10/ScanNet_Cleaned/test_file.txt')\n\n    elif dataset == 'scanobjectnn':\n        NUM_CLASSES = 15\n        if bn:\n            TRAIN_FILES = ['./data/ScanNetObjectNN/h5_files/main_split/training_objectdataset' + fname + '_1024.h5']\n            VALID_FILES = ['./data/ScanNetObjectNN/h5_files/main_split/test_objectdataset' + fname + '_1024.h5']\n\n        else:\n            TRAIN_FILES = ['./data/ScanNetObjectNN/h5_files/main_split_nobg/training_objectdataset' + fname + '_1024.h5']\n            VALID_FILES = ['./data/ScanNetObjectNN/h5_files/main_split_nobg/test_objectdataset' + fname + '_1024.h5']\n\n    else:\n        raise ValueError('dataset not exists')\n\n    return NUM_CLASSES, TRAIN_FILES, VALID_FILES\n"
  },
  {
    "path": "OcCo_Torch/utils/Inference_Timer.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, torch, time, numpy as np\n\nclass Inference_Timer:\n    def __init__(self, args):\n        self.args = args\n        self.est_total = []\n        self.use_cpu = True if (self.args.gpu == 'None') else False\n        self.device = 'CPU' if self.use_cpu else 'GPU'\n        if self.use_cpu:\n            os.environ['OMP_NUM_THREADS'] = \"1\"\n            os.environ['MKL_NUM_THREADS'] = \"1\"\n            print('Now we calculate the inference time on a single CPU')\n        else:\n            print('Now we calculate the inference time on a single GPU')\n        self.args.batch_size, self.args.epoch = 2, 1\n    #  1D BatchNorm requires more than 1 sample to compute std\n    #  ref: https://github.com/pytorch/pytorch/issues/7716\n    #  otherwise remove the 1D BatchNorm,\n    #  since its contribution to the inference is negligible\n    #  ref:\n\n    def update_args(self):\n        return self.args\n\n    def single_step(self, model, data):\n        if not self.use_cpu:\n            torch.cuda.synchronize()\n        start = time.time()\n        output = model(data)\n        if not self.use_cpu:\n            torch.cuda.synchronize()\n        end = time.time()\n        self.est_total.append(end - start)\n        return output\n\n    def update_single_epoch(self, logger):\n        logger.info(\"Model: {}\".format(self.args.model))\n        logger.info(\"Average Inference Time Per Example on Single {}: {:.3f} milliseconds\".format(\n            self.device, np.mean(self.est_total)*1000))\n"
  },
  {
    "path": "OcCo_Torch/utils/LMDB_DataFlow.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/data_util.py\n\n\nimport numpy as np\nfrom tensorpack import dataflow\n\n\ndef resample_pcd(pcd, n):\n    \"\"\"drop or duplicate points so that input of each object has exactly n points\"\"\"\n    idx = np.random.permutation(pcd.shape[0])\n    if idx.shape[0] < n:\n        idx = np.concatenate([idx, np.random.randint(pcd.shape[0], size=n-pcd.shape[0])])\n    return pcd[idx[:n]]\n\n\nclass PreprocessData(dataflow.ProxyDataFlow):\n\n    def __init__(self, ds, input_size, output_size):\n        super(PreprocessData, self).__init__(ds)\n        self.input_size = input_size\n        self.output_size = output_size\n\n    def get_data(self):\n        for id, input, gt in self.ds.get_data():\n            input = resample_pcd(input, self.input_size)\n            gt = resample_pcd(gt, self.output_size)\n            yield id, input, gt\n\n\nclass BatchData(dataflow.ProxyDataFlow):\n    def __init__(self, ds, batch_size, input_size, gt_size, remainder=False, use_list=False):\n        super(BatchData, self).__init__(ds)\n        self.batch_size = batch_size\n        self.input_size = input_size\n        self.gt_size = gt_size\n        self.remainder = remainder\n        self.use_list = use_list\n\n    def __len__(self):\n        \"\"\"get the number of batches\"\"\"\n        ds_size = len(self.ds)\n        div = ds_size // self.batch_size\n        rem = ds_size % self.batch_size\n        if rem == 0:\n            return div\n        return div + int(self.remainder)  # int(False) == 0\n\n    def __iter__(self):\n        \"\"\"generating data in batches\"\"\"\n        holder = []\n        for data in self.ds:\n            holder.append(data)\n            if len(holder) == self.batch_size:\n                yield self._aggregate_batch(holder, self.use_list)\n                del holder[:]  # reset holder as empty list => holder = []\n        if 
self.remainder and len(holder) > 0:\n            yield self._aggregate_batch(holder, self.use_list)\n\n    def _aggregate_batch(self, data_holder, use_list=False):\n        \"\"\"\n        Concatenate input points along the 0-th dimension\n            Stack all other data along the 0-th dimension\n        \"\"\"\n        ids = np.stack([x[0] for x in data_holder])\n        inputs = [resample_pcd(x[1], self.input_size) if x[1].shape[0] > self.input_size else x[1]\n                  for x in data_holder]\n        inputs = np.expand_dims(np.concatenate([x for x in inputs]), 0).astype(np.float32)\n        npts = np.stack([x[1].shape[0] if x[1].shape[0] < self.input_size else self.input_size\n                         for x in data_holder]).astype(np.int32)\n        gts = np.stack([resample_pcd(x[2], self.gt_size) for x in data_holder]).astype(np.float32)\n        return ids, inputs, npts, gts\n\n\ndef lmdb_dataflow(lmdb_path, batch_size, input_size, output_size, is_training, test_speed=False):\n    \"\"\" Load LMDB Files, then Generate Training Batches\"\"\"\n    df = dataflow.LMDBSerializer.load(lmdb_path, shuffle=False)\n    size = df.size()\n    if is_training:\n        df = dataflow.LocallyShuffleData(df, buffer_size=2000)  # buffer_size\n        df = dataflow.PrefetchData(df, num_prefetch=500, num_proc=1)  # multiprocess the data\n    df = BatchData(df, batch_size, input_size, output_size)\n    if is_training:\n        df = dataflow.PrefetchDataZMQ(df, num_proc=8)\n    df = dataflow.RepeatedData(df, -1)\n    if test_speed:\n        dataflow.TestDataSpeed(df, size=1000).start()\n    df.reset_state()\n    return df, size\n"
  },
  {
    "path": "OcCo_Torch/utils/LMDB_Writer.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hw501@cam.ac.uk\n\nimport os, argparse, numpy as np\nfrom tensorpack import DataFlow, dataflow\nfrom open3d.open3d.io import read_triangle_mesh, read_point_cloud\n\n\ndef sample_from_mesh(filename, num_samples=16384):\n    pcd = read_triangle_mesh(filename).sample_points_uniformly(number_of_points=num_samples)\n    return np.array(pcd.points)\n\n\nclass pcd_df(DataFlow):\n    def __init__(self, model_list, num_scans, partial_dir, complete_dir, num_partial_points=1024):\n        self.model_list = [_file for _file in model_list if 'train' in _file]\n        self.num_scans = num_scans\n        self.partial_dir = partial_dir\n        self.complete_dir = complete_dir\n        self.num_ppoints = num_partial_points\n\n    def size(self):\n        return len(self.model_list) * self.num_scans\n\n    @staticmethod\n    def read_pcd(filename):\n        pcd = read_point_cloud(filename)\n        return np.array(pcd.points)\n\n    def get_data(self):\n        for model_id in self.model_list:\n            complete = sample_from_mesh(os.path.join(self.complete_dir, '%s.obj' % model_id))\n            for i in range(self.num_scans):\n                partial = self.read_pcd(os.path.join(self.partial_dir, model_id + '_%d.pcd' % i))\n                partial = partial[np.random.choice(len(partial), self.num_ppoints)]\n                yield model_id.replace('/', '_'), partial, complete\n\n\nif __name__ == '__main__':\n    parser = argparse.ArgumentParser()\n    parser.add_argument('--list_path', default=r'../render/ModelNet_flist_normalised.txt')\n    parser.add_argument('--num_scans', type=int, default=10)\n    parser.add_argument('--partial_dir', default=r'../render/dump_modelnet_normalised_supercoarse/pcd')\n    parser.add_argument('--complete_dir', default=r'../data/ModelNet40')\n    parser.add_argument('--output_file', default=r'../data/ModelNet40_train_1024_supercoarse.lmdb')\n    args = parser.parse_args()\n\n    with 
open(args.list_path) as file:\n        model_list = file.read().splitlines()\n    df = pcd_df(model_list, args.num_scans, args.partial_dir, args.complete_dir)\n    if os.path.exists(args.output_file):\n        os.system('rm %s' % args.output_file)\n    dataflow.LMDBSerializer.save(df, args.output_file)\n"
  },
  {
    "path": "OcCo_Torch/utils/ModelNetDataLoader.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\nimport os, torch, h5py, warnings, numpy as np\nfrom torch.utils.data import Dataset\n\nwarnings.filterwarnings('ignore')\n\n\ndef pc_normalize(pc):\n    centroid = np.mean(pc, axis=0)\n    pc -= centroid\n    m = np.max(np.sqrt(np.sum(pc ** 2, axis=1)))\n    pc = pc / m\n    return pc\n\n\ndef farthest_point_sample(point, npoint):\n    \"\"\"\n    Input:\n        xyz: point cloud data, [N, D]\n        npoint: number of samples\n    Return:\n        centroids: sampled point cloud index, [npoint, D]\n    \"\"\"\n    N, D = point.shape\n    xyz = point[:, :3]\n    centroids = np.zeros((npoint,))\n    distance = np.ones((N,)) * 1e10\n    farthest = np.random.randint(0, N)\n    for i in range(npoint):\n        centroids[i] = farthest\n        centroid = xyz[farthest, :]\n        dist = np.sum((xyz - centroid) ** 2, -1)\n        mask = dist < distance\n        distance[mask] = dist[mask]\n        farthest = np.argmax(distance, -1)\n    point = point[centroids.astype(np.int32)]\n    return point\n\n\nclass ModelNetDataLoader(Dataset):\n    def __init__(self, root, npoint=1024, split='train', uniform=False, normal_channel=True, cache_size=15000):\n        self.root = root\n        self.npoints = npoint\n        self.uniform = uniform\n        self.catfile = os.path.join(self.root, 'modelnet40_shape_names.txt')\n\n        self.cat = [line.rstrip() for line in open(self.catfile)]\n        self.classes = dict(zip(self.cat, range(len(self.cat))))\n        self.normal_channel = normal_channel\n\n        shape_ids = {'train': [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_train.txt'))],\n                     'test': [line.rstrip() for line in open(os.path.join(self.root, 'modelnet40_test.txt'))]}\n\n        assert (split == 'train' or split == 'test')\n        shape_names = ['_'.join(x.split('_')[0:-1]) for x in shape_ids[split]]\n        self.datapath = [(shape_names[i], 
os.path.join(self.root, shape_names[i], shape_ids[split][i]) + '.txt') for i\n                         in range(len(shape_ids[split]))]\n        print('The size of %s data is %d' % (split, len(self.datapath)))\n\n        self.cache_size = cache_size  # how many data points to cache in memory\n        self.cache = {}  # from index to (point_set, cls) tuple\n\n    def __len__(self):\n        return len(self.datapath)\n\n    def _get_item(self, index):\n        if index in self.cache:\n            point_set, cls = self.cache[index]\n        else:\n            fn = self.datapath[index]\n            cls = self.classes[self.datapath[index][0]]\n            cls = np.array([cls]).astype(np.int32)\n            point_set = np.loadtxt(fn[1], delimiter=',').astype(np.float32)\n            if self.uniform:\n                point_set = farthest_point_sample(point_set, self.npoints)\n            else:\n                point_set = point_set[0:self.npoints, :]\n\n            point_set[:, 0:3] = pc_normalize(point_set[:, 0:3])\n\n            if not self.normal_channel:\n                point_set = point_set[:, 0:3]\n\n            if len(self.cache) < self.cache_size:\n                self.cache[index] = (point_set, cls)\n\n        return point_set, cls\n\n    def __getitem__(self, index):\n        return self._get_item(index)\n\n\nclass General_CLSDataLoader_HDF5(Dataset):\n    def __init__(self, file_list, num_point=1024):\n        self.num_point = num_point\n        self.file_list = file_list\n        self.points_list = np.zeros((1, num_point, 3))\n        self.labels_list = np.zeros((1,))\n\n        for file in self.file_list:\n            data, label = self.loadh5DataFile(file)\n            self.points_list = np.concatenate(\n                [self.points_list, data[:, :self.num_point, :]], axis=0)\n            self.labels_list = np.concatenate([self.labels_list, label.ravel()], axis=0)\n\n        self.points_list = self.points_list[1:]\n        self.labels_list = 
self.labels_list[1:]\n        assert len(self.points_list) == len(self.labels_list)\n        print('Number of Objects: ', len(self.labels_list))\n\n    @staticmethod\n    def loadh5DataFile(PathtoFile):\n        f = h5py.File(PathtoFile, 'r')\n        return f['data'][:], f['label'][:]\n\n    def __len__(self):\n        return len(self.points_list)\n\n    def __getitem__(self, index):\n        point_xyz = self.points_list[index][:, 0:3]\n        point_label = self.labels_list[index].astype(np.int32)\n        return point_xyz, point_label\n\n\nclass ModelNetJigsawDataLoader(Dataset):\n    def __init__(self, root=r'./data/modelnet40_ply_hdf5_2048/jigsaw',\n                 n_points=1024, split='train', k=3):\n        self.npoints = n_points\n        self.root = root\n        self.split = split\n        assert split in ['train', 'test']\n        if self.split == 'train':\n            self.file_list = [d for d in os.listdir(root) if d.find('train') is not -1]\n        else:\n            self.file_list = [d for d in os.listdir(root) if d.find('test') is not -1]\n        self.points_list = np.zeros((1, n_points, 3))\n        self.labels_list = np.zeros((1, n_points))\n\n        for file in self.file_list:\n            file = os.path.join(root, file)\n            data, label = self.loadh5DataFile(file)\n            self.points_list = np.concatenate([self.points_list, data], axis=0)  # .append(data)\n            self.labels_list = np.concatenate([self.labels_list, label], axis=0)\n        # self.labels_list.append(label)\n\n        self.points_list = self.points_list[1:]\n        self.labels_list = self.labels_list[1:]\n        assert len(self.points_list) == len(self.labels_list)\n        print('Number of %s Objects: '%self.split, len(self.labels_list))\n\n        # just use the simple weights\n        self.labelweights = np.ones(k ** 3)\n\n\n    @staticmethod\n    def loadh5DataFile(PathtoFile):\n        f = h5py.File(PathtoFile, 'r')\n        return f['data'][:], 
f['label'][:]\n\n    def __getitem__(self, index):\n        point_set = self.points_list[index][:, 0:3]\n        semantic_seg = self.labels_list[index].astype(np.int32)\n        return point_set, semantic_seg\n\n    def __len__(self):\n        return len(self.points_list)\n\n\nif __name__ == '__main__':\n\n    data = ModelNetDataLoader('/data/modelnet40_normal_resampled/', split='train', uniform=False, normal_channel=True, )\n    DataLoader = torch.utils.data.DataLoader(data, batch_size=12, shuffle=True)\n    for point, label in DataLoader:\n        print(point.shape)\n        print(label.shape)\n"
  },
  {
    "path": "OcCo_Torch/utils/PC_Augmentation.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport numpy as np\n\n\"\"\"\n\t================================================\n\t=== Library for Point Cloud Utility Function ===\n\t================================================\n\"\"\"\n\n\ndef pc_normalize(pc):\n    \"\"\" Normalise the Input Point Cloud into a Unit Sphere \"\"\"\n    centroid = np.mean(pc, axis=0)\n    pc = pc - centroid\n    m = np.max(np.sqrt(np.sum(pc ** 2, axis=1)))\n    pc = pc / m\n    return pc\n\n\ndef farthest_point_sample(point, npoint):\n    \"\"\" A Simple Yet Inefficient Farthest Point Sampling on Point Cloud \"\"\"\n    N, D = point.shape\n    xyz = point[:, :3]\n    centroids = np.zeros((npoint,))\n    distance = np.ones((N,)) * 1e10\n    farthest = np.random.randint(0, N)\n    for i in range(npoint):\n        centroids[i] = farthest\n        centroid = xyz[farthest, :]\n        dist = np.sum((xyz - centroid) ** 2, -1)\n        mask = dist < distance\n        distance[mask] = dist[mask]\n        farthest = np.argmax(distance, -1)\n    point = point[centroids.astype(np.int32)]\n    return point\n\n\ndef random_shift_point_cloud(batch_data, shift_range=0.1):\n    \"\"\" Shift the Point Cloud along the XYZ axis, magnitude is randomly sampled from [-0.1, 0.1] \"\"\"\n    B, N, C = batch_data.shape\n    shifts = np.random.uniform(-shift_range, shift_range, (B, 3))\n    for batch_index in range(B):\n        batch_data[batch_index, :, :] += shifts[batch_index, :]\n    return batch_data\n\n\ndef random_scale_point_cloud(batch_data, scale_low=0.8, scale_high=1.25):\n    \"\"\" Scale the Point Cloud Objects into a Random Magnitude between [0.8, 1.25] \"\"\"\n    B, N, C = batch_data.shape\n    scales = np.random.uniform(scale_low, scale_high, B)\n    for batch_index in range(B):\n        batch_data[batch_index, :, :] *= scales[batch_index]\n    return batch_data\n\n\ndef random_point_dropout(batch_pc, max_dropout_ratio=0.875):\n    \"\"\" Randomly Dropout out a 
Portion of Points, Ratio is Randomly Selected between [0, 0.875]\t\"\"\"\n    for b in range(batch_pc.shape[0]):\n        dropout_ratio = np.random.random() * max_dropout_ratio\n        drop_idx = np.where(np.random.random((batch_pc.shape[1])) <= dropout_ratio)[0]\n        if len(drop_idx) > 0:\n            batch_pc[b, drop_idx, :] = batch_pc[b, 0, :]  # set the rest as the first point\n    return batch_pc\n\n\ndef translate_pointcloud_dgcnn(pointcloud):\n    \"\"\" Random Scaling + Translation, Deprecated \"\"\"\n    xyz1 = np.random.uniform(low=2. / 3., high=3. / 2., size=[3])\n    xyz2 = np.random.uniform(low=-0.2, high=0.2, size=[3])\n    translated_pointcloud = np.add(np.multiply(pointcloud, xyz1), xyz2).astype('float32')\n    return translated_pointcloud\n\n\ndef jitter_pointcloud_dgcnn(pointcloud, sigma=0.01, clip=0.02):\n    \"\"\" Random Jittering, Deprecated \"\"\"\n    N, C = pointcloud.shape\n    pointcloud += np.clip(sigma * np.random.randn(N, C), -1 * clip, clip)\n    return pointcloud\n"
  },
  {
    "path": "OcCo_Torch/utils/S3DISDataLoader.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, sys, h5py, numpy as np\nfrom torch.utils.data import Dataset\n\nBASE_DIR = os.path.dirname(os.path.abspath(__file__))\nROOT_DIR = os.path.dirname(BASE_DIR)\nsys.path.append(BASE_DIR)\nsys.path.append(ROOT_DIR)\nroot = '../data/stanford_indoor3d/'\n\n# 13 classes, as noted in the meta/s3dis/class_names.txt\nnum_per_class = np.array([3370714, 2856755, 4919229, 318158, 375640, 478001, 974733,\n                          650464, 791496, 88727, 1284130, 229758, 2272837], dtype=np.int32)\nnum_per_class_dict = {}\nfor cls, num_cls in enumerate(num_per_class):\n    num_per_class_dict[cls] = num_cls\n\n\nclass S3DISDataset_HDF5(Dataset):\n    \"\"\"Chopped Scene\"\"\"\n\n    def __init__(self, root='data/indoor3d_sem_seg_hdf5_data', split='train', test_area=5):\n        self.root = root\n        self.all_files = self.getDataFiles(os.path.join(self.root, 'all_files.txt'))\n        self.room_filelist = self.getDataFiles(os.path.join(self.root, 'room_filelist.txt'))\n        self.scene_points_list = []\n        self.semantic_labels_list = []\n\n        for h5_filename in self.all_files:\n            data_batch, label_batch = self.loadh5DataFile(h5_filename)\n            self.scene_points_list.append(data_batch)\n            self.semantic_labels_list.append(label_batch)\n\n        self.data_batches = np.concatenate(self.scene_points_list, 0)\n        self.label_batches = np.concatenate(self.semantic_labels_list, 0)\n\n        test_area = 'Area_' + str(test_area)\n        train_idxs, test_idxs = [], []\n\n        for i, room_name in enumerate(self.room_filelist):\n            if test_area in room_name:\n                test_idxs.append(i)\n            else:\n                train_idxs.append(i)\n\n        assert split in ['train', 'test']\n        if split == 'train':\n            self.data_batches = self.data_batches[train_idxs, ...]\n            self.label_batches = 
self.label_batches[train_idxs]\n        else:\n            self.data_batches = self.data_batches[test_idxs, ...]\n            self.label_batches = self.label_batches[test_idxs]\n\n    @staticmethod\n    def getDataFiles(list_filename):\n        return [line.rstrip() for line in open(list_filename)]\n\n    @staticmethod\n    def loadh5DataFile(PathtoFile):\n        f = h5py.File(PathtoFile, 'r')\n        return f['data'][:], f['label'][:]\n\n    def __getitem__(self, index):\n        points = self.data_batches[index, :]\n        labels = self.label_batches[index].astype(np.int32)\n\n        return points, labels\n\n    def __len__(self):\n        return len(self.data_batches)\n\n\nclass S3DISDataset(Dataset):\n    \"\"\"Chopped Scene\"\"\"\n    def __init__(self, root, block_points=4096, split='train', test_area=5, with_rgb=True, use_weight=True,\n                 block_size=1.5, padding=0.001):\n        self.npoints = block_points\n        self.block_size = block_size\n        self.padding = padding\n        self.root = root\n        self.with_rgb = with_rgb\n        self.split = split\n        assert split in ['train', 'test']\n        if self.split == 'train':\n            self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is -1]\n        else:\n            self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is not -1]\n        self.scene_points_list = []\n        self.semantic_labels_list = []\n\n        for file in self.file_list:\n            data = np.load(root + file)\n            self.scene_points_list.append(data[:, :6])  # (num_points, 6), xyz + rgb\n            self.semantic_labels_list.append(data[:, 6])  # (num_points, )\n\n        assert len(self.scene_points_list) == len(self.semantic_labels_list)\n        print('Number of scene: ', len(self.scene_points_list))\n\n        if split == 'train' and use_weight:\n            labelweights = np.zeros(13)\n            for seg in 
self.semantic_labels_list:\n                tmp, _ = np.histogram(seg, range(14))\n                labelweights += tmp\n            labelweights = labelweights.astype(np.float32)\n            labelweights = labelweights / np.sum(labelweights)\n            self.labelweights = np.power(np.amax(labelweights) / labelweights, 1 / 3.0)\n\n            # reciprocal of the # of class\n            ce_label_weight = 1 / (labelweights + 0.02)\n            self.labelweights = ce_label_weight\n\n        else:\n            self.labelweights = np.ones(13)\n\n        # just use the average weights\n        self.labelweights = np.ones(13)\n        print(self.labelweights)\n\n    def __getitem__(self, index):\n        if self.with_rgb:\n            point_set = self.scene_points_list[index]\n            point_set[:, 3:] = 2 * point_set[:, 3:] / 255.0 - 1  # normalised rgb into [-1, 1]\n        else:\n            point_set = self.scene_points_list[index][:, 0:3]\n        semantic_seg = self.semantic_labels_list[index].astype(np.int32)\n        coordmax = np.max(point_set[:, 0:3], axis=0)\n        coordmin = np.min(point_set[:, 0:3], axis=0)\n\n        isvalid = False\n        for _ in range(10):\n            curcenter = point_set[np.random.choice(len(semantic_seg), 1)[0], 0:3]\n            curmin = curcenter - [self.block_size / 2, self.block_size / 2, 1.5]\n            curmax = curcenter + [self.block_size / 2, self.block_size / 2, 1.5]\n            curmin[2], curmax[2] = coordmin[2], coordmax[2]\n            curchoice = np.sum((point_set[:, 0:3] >= (curmin - 0.2)) * (point_set[:, 0:3] <= (curmax + 0.2)),\n                               axis=1) == 3\n            cur_point_set = point_set[curchoice, 0:3]\n            cur_point_full = point_set[curchoice, :]\n            cur_semantic_seg = semantic_seg[curchoice]\n            if len(cur_semantic_seg) == 0:\n                continue\n            mask = np.sum((cur_point_set >= (curmin - self.padding)) * (cur_point_set <= (curmax + 
self.padding)),\n                          axis=1) == 3\n            vidx = np.ceil((cur_point_set[mask, :] - curmin) / (curmax - curmin) * [31.0, 31.0, 62.0])\n            vidx = np.unique(vidx[:, 0] * 31.0 * 62.0 + vidx[:, 1] * 62.0 + vidx[:, 2])\n            isvalid = len(vidx) / 31.0 / 31.0 / 62.0 >= 0.02\n            if isvalid:\n                break\n        choice = np.random.choice(len(cur_semantic_seg), self.npoints, replace=True)\n        point_set = cur_point_full[choice, :]\n        semantic_seg = cur_semantic_seg[choice]\n        mask = mask[choice]\n        sample_weight = self.labelweights[semantic_seg]\n        sample_weight *= mask\n        return point_set, semantic_seg, sample_weight\n\n    def __len__(self):\n        return len(self.scene_points_list)\n\n\nclass S3DISDatasetWholeScene:\n    def __init__(self, root, block_points=8192, split='val', test_area=5, with_rgb=True, use_weight=True,\n                 block_size=1.5, stride=1.5, padding=0.001):\n        self.npoints = block_points\n        self.block_size = block_size\n        self.padding = padding\n        self.stride = stride\n        self.root = root\n        self.with_rgb = with_rgb\n        self.split = split\n        assert split in ['train', 'test']\n        if self.split == 'train':\n            self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is -1]\n        else:\n            self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is not -1]\n        self.scene_points_list = []\n        self.semantic_labels_list = []\n        for file in self.file_list:\n            data = np.load(root + file)\n            self.scene_points_list.append(data[:, :6])\n            self.semantic_labels_list.append(data[:, 6])\n        assert len(self.scene_points_list) == len(self.semantic_labels_list)\n        print('Number of scene: ', len(self.scene_points_list))\n        if split == 'train' and use_weight:\n            labelweights = 
np.zeros(13)\n            for seg in self.semantic_labels_list:\n                tmp, _ = np.histogram(seg, range(14))\n                labelweights += tmp\n            labelweights = labelweights.astype(np.float32)\n            labelweights = labelweights / np.sum(labelweights)\n            self.labelweights = np.power(np.amax(labelweights) / labelweights, 1 / 3.0)\n        else:\n            self.labelweights = np.ones(13)\n\n        print(self.labelweights)\n\n    def __getitem__(self, index):\n        if self.with_rgb:\n            point_set_ini = self.scene_points_list[index]\n            point_set_ini[:, 3:] = 2 * point_set_ini[:, 3:] / 255.0 - 1\n        else:\n            point_set_ini = self.scene_points_list[index][:, 0:3]\n        semantic_seg_ini = self.semantic_labels_list[index].astype(np.int32)\n        coordmax = np.max(point_set_ini[:, 0:3], axis=0)\n        coordmin = np.min(point_set_ini[:, 0:3], axis=0)\n        nsubvolume_x = np.ceil((coordmax[0] - coordmin[0]) / self.block_size).astype(np.int32)\n        nsubvolume_y = np.ceil((coordmax[1] - coordmin[1]) / self.block_size).astype(np.int32)\n        point_sets = list()\n        semantic_segs = list()\n        sample_weights = list()\n        for i in range(nsubvolume_x):\n            for j in range(nsubvolume_y):\n                curmin = coordmin + [i * self.block_size, j * self.block_size, 0]\n                curmax = coordmin + [(i + 1) * self.block_size, (j + 1) * self.block_size, coordmax[2] - coordmin[2]]\n                curchoice = np.sum(\n                    (point_set_ini[:, 0:3] >= (curmin - 0.2)) * (point_set_ini[:, 0:3] <= (curmax + 0.2)), axis=1) == 3\n                cur_point_set = point_set_ini[curchoice, 0:3]\n                cur_point_full = point_set_ini[curchoice, :]\n                cur_semantic_seg = semantic_seg_ini[curchoice]\n                if len(cur_semantic_seg) == 0:\n                    continue\n                mask = np.sum((cur_point_set >= (curmin - 
self.padding)) * (cur_point_set <= (curmax + self.padding)),\n                              axis=1) == 3\n                choice = np.random.choice(len(cur_semantic_seg), self.npoints, replace=True)\n                point_set = cur_point_full[choice, :]  # Nx3/6\n                semantic_seg = cur_semantic_seg[choice]  # N\n                mask = mask[choice]\n\n                sample_weight = self.labelweights[semantic_seg]\n                sample_weight *= mask  # N\n                point_sets.append(np.expand_dims(point_set, 0))  # 1xNx3\n                semantic_segs.append(np.expand_dims(semantic_seg, 0))  # 1xN\n                sample_weights.append(np.expand_dims(sample_weight, 0))  # 1xN\n        point_sets = np.concatenate(tuple(point_sets), axis=0)\n        semantic_segs = np.concatenate(tuple(semantic_segs), axis=0)\n        sample_weights = np.concatenate(tuple(sample_weights), axis=0)\n        return point_sets, semantic_segs, sample_weights\n\n    def __len__(self):\n        return len(self.scene_points_list)\n\n\nclass ScannetDatasetWholeScene_evaluation:\n    # prepare to give prediction on each points\n    def __init__(self, root=root, block_points=8192, split='test', test_area=5, with_rgb=True, use_weight=True,\n                 stride=0.5, block_size=1.5, padding=0.001):\n        self.block_points = block_points\n        self.block_size = block_size\n        self.padding = padding\n        self.root = root\n        self.with_rgb = with_rgb\n        self.split = split\n        self.stride = stride\n        self.scene_points_num = []\n        assert split in ['train', 'test']\n        if self.split == 'train':\n            self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is -1]\n        else:\n            self.file_list = [d for d in os.listdir(root) if d.find('Area_%d' % test_area) is not -1]\n        self.scene_points_list = []\n        self.semantic_labels_list = []\n        for file in self.file_list:\n            
data = np.load(root + file)\n            self.scene_points_list.append(data[:, :6])\n            self.semantic_labels_list.append(data[:, 6])\n        assert len(self.scene_points_list) == len(self.semantic_labels_list)\n        print('Number of scene: ', len(self.scene_points_list))\n        if split == 'train' and use_weight:\n            labelweights = np.zeros(13)\n            for seg in self.semantic_labels_list:\n                tmp, _ = np.histogram(seg, range(14))\n                self.scene_points_num.append(seg.shape[0])\n                labelweights += tmp\n            labelweights = labelweights.astype(np.float32)\n            labelweights = labelweights / np.sum(labelweights)\n            self.labelweights = np.power(np.amax(labelweights) / labelweights, 1 / 3.0)\n        else:\n            self.labelweights = np.ones(13)\n            for seg in self.semantic_labels_list:\n                self.scene_points_num.append(seg.shape[0])\n\n        print(self.labelweights)\n\n    @staticmethod\n    def chunks(l, n):\n        \"\"\"Yield successive n-sized chunks from l.\"\"\"\n        for i in range(0, len(l), n):\n            yield l[i:i + n]\n\n    @staticmethod\n    def split_data(data, idx):\n        new_data = []\n        for i in range(len(idx)):\n            new_data += [np.expand_dims(data[idx[i]], axis=0)]\n        return new_data\n\n    @staticmethod\n    def nearest_dist(block_center, block_center_list):\n        num_blocks = len(block_center_list)\n        dist = np.zeros(num_blocks)\n        for i in range(num_blocks):\n            dist[i] = np.linalg.norm(block_center_list[i] - block_center, ord=2)  # i->j\n        return np.argsort(dist)[0]\n\n    def __getitem__(self, index):\n        delta = self.stride\n        if self.with_rgb:\n            point_set_ini = self.scene_points_list[index]\n            point_set_ini[:, 3:] = 2 * point_set_ini[:, 3:] / 255.0 - 1\n        else:\n            point_set_ini = self.scene_points_list[index][:, 0:3]\n  
      semantic_seg_ini = self.semantic_labels_list[index].astype(np.int32)\n        coordmax = np.max(point_set_ini[:, 0:3], axis=0)\n        coordmin = np.min(point_set_ini[:, 0:3], axis=0)\n        nsubvolume_x = np.ceil((coordmax[0] - coordmin[0]) / delta).astype(np.int32)\n        nsubvolume_y = np.ceil((coordmax[1] - coordmin[1]) / delta).astype(np.int32)\n\n        point_sets, semantic_segs, sample_weights, point_idxs, block_center = [], [], [], [], []\n        for i in range(nsubvolume_x):\n            for j in range(nsubvolume_y):\n                curmin = coordmin + [i * delta, j * delta, 0]\n                curmax = curmin + [self.block_size, self.block_size, coordmax[2] - coordmin[2]]\n                curchoice = np.sum(\n                    (point_set_ini[:, 0:3] >= (curmin - 0.2)) * (point_set_ini[:, 0:3] <= (curmax + 0.2)), axis=1) == 3\n                curchoice_idx = np.where(curchoice)[0]\n                cur_point_set = point_set_ini[curchoice, :]\n                cur_semantic_seg = semantic_seg_ini[curchoice]\n                if len(cur_semantic_seg) == 0:\n                    continue\n                mask = np.sum((cur_point_set[:, 0:3] >= (curmin - self.padding)) * (\n                        cur_point_set[:, 0:3] <= (curmax + self.padding)), axis=1) == 3\n                sample_weight = self.labelweights[cur_semantic_seg]\n                sample_weight *= mask  # N\n                point_sets.append(cur_point_set)  # 1xNx3/6\n                semantic_segs.append(cur_semantic_seg)  # 1xN\n                sample_weights.append(sample_weight)  # 1xN\n                point_idxs.append(curchoice_idx)  # 1xN\n                block_center.append((curmin[0:2] + curmax[0:2]) / 2.0)\n\n        # merge small blocks\n        num_blocks = len(point_sets)\n        block_idx = 0\n        while block_idx < num_blocks:\n            if point_sets[block_idx].shape[0] > self.block_points / 2:\n                block_idx += 1\n                continue\n\n           
 small_block_data = point_sets[block_idx].copy()\n            small_block_seg = semantic_segs[block_idx].copy()\n            small_block_smpw = sample_weights[block_idx].copy()\n            small_block_idxs = point_idxs[block_idx].copy()\n            small_block_center = block_center[block_idx].copy()\n            point_sets.pop(block_idx)\n            semantic_segs.pop(block_idx)\n            sample_weights.pop(block_idx)\n            point_idxs.pop(block_idx)\n            block_center.pop(block_idx)\n\n            nearest_block_idx = self.nearest_dist(small_block_center, block_center)\n            point_sets[nearest_block_idx] = np.concatenate(\n                (point_sets[nearest_block_idx], small_block_data), axis=0)\n            semantic_segs[nearest_block_idx] = np.concatenate(\n                (semantic_segs[nearest_block_idx], small_block_seg), axis=0)\n            sample_weights[nearest_block_idx] = np.concatenate(\n                (sample_weights[nearest_block_idx], small_block_smpw), axis=0)\n            point_idxs[nearest_block_idx] = np.concatenate((point_idxs[nearest_block_idx], small_block_idxs), axis=0)\n            num_blocks = len(point_sets)\n\n        # divide large blocks\n        num_blocks = len(point_sets)\n        div_blocks = []\n        div_blocks_seg = []\n        div_blocks_smpw = []\n        div_blocks_idxs = []\n        div_blocks_center = []\n        for block_idx in range(num_blocks):\n            cur_num_pts = point_sets[block_idx].shape[0]\n\n            point_idx_block = np.array([x for x in range(cur_num_pts)])\n            if point_idx_block.shape[0] % self.block_points != 0:\n                makeup_num = self.block_points - point_idx_block.shape[0] % self.block_points\n                np.random.shuffle(point_idx_block)\n                point_idx_block = np.concatenate((point_idx_block, point_idx_block[0:makeup_num].copy()))\n\n            np.random.shuffle(point_idx_block)\n\n            sub_blocks = 
list(self.chunks(point_idx_block, self.block_points))\n\n            div_blocks += self.split_data(point_sets[block_idx], sub_blocks)\n            div_blocks_seg += self.split_data(semantic_segs[block_idx], sub_blocks)\n            div_blocks_smpw += self.split_data(sample_weights[block_idx], sub_blocks)\n            div_blocks_idxs += self.split_data(point_idxs[block_idx], sub_blocks)\n            div_blocks_center += [block_center[block_idx].copy() for _ in range(len(sub_blocks))]\n        div_blocks = np.concatenate(tuple(div_blocks), axis=0)\n        div_blocks_seg = np.concatenate(tuple(div_blocks_seg), axis=0)\n        div_blocks_smpw = np.concatenate(tuple(div_blocks_smpw), axis=0)\n        div_blocks_idxs = np.concatenate(tuple(div_blocks_idxs), axis=0)\n        return div_blocks, div_blocks_seg, div_blocks_smpw, div_blocks_idxs\n\n    def __len__(self):\n        return len(self.scene_points_list)\n\n\nif __name__ == '__main__':\n    data = S3DISDataset_HDF5()\n    for i in range(10):\n        points, labels = data[i]\n        print(points.shape)\n        print(labels.shape)\n\n"
  },
  {
    "path": "OcCo_Torch/utils/ShapeNetDataLoader.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/yanx27/Pointnet_Pointnet2_pytorch/blob/master/data_utils/ShapeNetDataLoader.py\nimport os, json, torch, warnings, numpy as np\nfrom PC_Augmentation import pc_normalize\nfrom torch.utils.data import Dataset\nwarnings.filterwarnings('ignore')\n\n\nclass PartNormalDataset(Dataset):\n    \"\"\"\n    Data Source: https://shapenet.cs.stanford.edu/media/shapenetcore_partanno_segmentation_benchmark_v0_normal.zip\n    \"\"\"\n    def __init__(self, root, num_point=2048, split='train', use_normal=False):\n        self.catfile = os.path.join(root, 'synsetoffset2category.txt')\n        self.use_normal = use_normal\n        self.num_point = num_point\n        self.cache_size = 20000\n        self.datapath = []\n        self.root = root\n        self.cache = {}\n        self.meta = {}\n        self.cat = {}\n\n        with open(self.catfile, 'r') as f:\n            for line in f:\n                ls = line.strip().split()\n                self.cat[ls[0]] = ls[1]\n        # self.cat -> {'class name': syn_id, ...}\n        # self.meta -> {'class name': file list, ...}\n        # self.classes -> {'class name': class id, ...}\n        # self.datapath -> [('class name', single file) , ...]\n        self.classes = dict(zip(self.cat, range(len(self.cat))))\n\n        train_ids = self.read_fns(os.path.join(self.root, 'train_test_split', 'shuffled_train_file_list.json'))\n        test_ids = self.read_fns(os.path.join(self.root, 'train_test_split', 'shuffled_test_file_list.json'))\n        val_ids = self.read_fns(os.path.join(self.root, 'train_test_split', 'shuffled_val_file_list.json'))\n        \n        for item in self.cat:\n            dir_point = os.path.join(self.root, self.cat[item])\n            fns = sorted(os.listdir(dir_point))\n            self.meta[item] = []\n\n            if split == 'trainval':\n                fns = [fn for fn in fns if ((fn[0:-4] in train_ids) or 
(fn[0:-4] in val_ids))]\n            elif split == 'test':\n                fns = [fn for fn in fns if fn[0:-4] in test_ids]\n            else:\n                print('Unknown split: %s [Option: ]. Exiting...' % split)\n                exit(-1)\n\n            for fn in fns:\n                token = (os.path.splitext(os.path.basename(fn))[0])\n                self.meta[item].append(os.path.join(dir_point, token + '.txt'))\n\n        for item in self.cat:\n            for fn in self.meta[item]:\n                self.datapath.append((item, fn))\n\n        self.seg_classes = {'Earphone': [16, 17, 18], 'Motorbike': [30, 31, 32, 33, 34, 35],\n                            'Rocket': [41, 42, 43], 'Car': [8, 9, 10, 11], 'Laptop': [28, 29],\n                            'Cap': [6, 7], 'Skateboard': [44, 45, 46], 'Lamp': [24, 25, 26, 27],\n                            'Mug': [36, 37], 'Guitar': [19, 20, 21], 'Bag': [4, 5], 'Knife': [22, 23],\n                            'Table': [47, 48, 49], 'Airplane': [0, 1, 2, 3], 'Pistol': [38, 39, 40], \n                            'Chair': [12, 13, 14, 15]}\n\n    @staticmethod\n    def read_fns(path):\n        with open(path, 'r') as file:\n            ids = set([str(d.split('/')[2]) for d in json.load(file)])\n        return ids\n\n    def __getitem__(self, index):\n        if index in self.cache:\n            pts, cls, seg = self.cache[index]\n        else:\n            fn = self.datapath[index]\n            cat, pt = fn[0], np.loadtxt(fn[1]).astype(np.float32)\n            cls = np.array([self.classes[cat]]).astype(np.int32)\n            pts = pt[:, :6] if self.use_normal else pt[:, :3]\n            seg = pt[:, -1].astype(np.int32)\n            if len(self.cache) < self.cache_size:\n                self.cache[index] = (pts, cls, seg)\n\n        choice = np.random.choice(len(seg), self.num_point, replace=True)\n        pts[:, 0:3] = pc_normalize(pts[:, 0:3])\n        pts, seg = pts[choice, :], seg[choice]\n\n        return pts, cls, 
seg\n\n    def __len__(self):\n        return len(self.datapath)\n\n\nif __name__ == \"__main__\":\n\n    root = '../data/shapenetcore_partanno_segmentation_benchmark_v0_normal/'\n    TRAIN_DATASET = PartNormalDataset(root=root, num_point=2048, split='trainval', use_normal=False)\n    trainDataLoader = torch.utils.data.DataLoader(TRAIN_DATASET, batch_size=24, shuffle=True, num_workers=4)\n\n    for i, data in enumerate(trainDataLoader):\n        points, label, target = data\n\n"
  },
  {
    "path": "OcCo_Torch/utils/TSNE_Visu.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://scikit-learn.org/stable/modules/generated/sklearn.manifold.TSNE.html\n\nimport os, sys, torch, argparse, importlib, numpy as np, matplotlib.pyplot as plt\nsys.path.append('../')\nsys.path.append('../models')\nfrom ModelNetDataLoader import General_CLSDataLoader_HDF5\nfrom Torch_Utility import copy_parameters\nfrom torch.utils.data import DataLoader\nfrom Dataset_Loc import Dataset_Loc\nfrom sklearn.manifold import TSNE\nfrom tqdm import tqdm\n\n\ndef parse_args():\n    parser = argparse.ArgumentParser('SVM on Point Cloud Classification')\n\n    ''' === Network Model === '''\n    parser.add_argument('--gpu', type=str, default='0', help='GPU [default: 0]')\n    parser.add_argument('--model', default='pcn_util', help='model [default: pcn_util]')\n    parser.add_argument('--batch_size', type=int, default=24, help='batch size [default: 24]')\n    parser.add_argument('--restore_path', type=str, help=\"path to pretrained weights [default: None]\")\n\n    ''' === Dataset === '''\n    parser.add_argument('--partial', action='store_true', help='partial objects [default: False]')\n    parser.add_argument('--bn', action='store_true', help='with background noise [default: False]')\n    parser.add_argument('--dataset', type=str, default='modelnet40', help='dataset [default: modelnet40]')\n    parser.add_argument('--fname', type=str, help='filename, used in ScanObjectNN or fewer data [default:]')\n\n    return parser.parse_args()\n\n\nif __name__ == \"__main__\":\n    args = parse_args()\n\n    os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\n    os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.gpu\n\n    NUM_CLASSES, TRAIN_FILES, TEST_FILES = Dataset_Loc(dataset=args.dataset, fname=args.fname,\n                                                       partial=args.partial, bn=args.bn)\n    TRAIN_DATASET = General_CLSDataLoader_HDF5(file_list=TRAIN_FILES)\n    # TEST_DATASET = 
General_CLSDataLoader_HDF5(file_list=TEST_FILES)\n    trainDataLoader = DataLoader(TRAIN_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4)\n    # testDataLoader = DataLoader(TEST_DATASET, batch_size=args.batch_size, shuffle=True, num_workers=4)\n\n    MODEL = importlib.import_module(args.model)\n    device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n    encoder = MODEL.encoder(args=args, num_channel=3).to(device)\n    encoder = torch.nn.DataParallel(encoder)\n\n    checkpoint = torch.load(args.restore_path)\n    encoder = copy_parameters(encoder, checkpoint, verbose=True)\n\n    X_train, y_train, X_test, y_test = [], [], [], []\n    with torch.no_grad():\n        encoder.eval()\n\n        for points, target in tqdm(trainDataLoader, total=len(trainDataLoader), smoothing=0.9):\n            points, target = points.float().transpose(2, 1).cuda(), target.long().cuda()\n            feats = encoder(points)\n            X_train.append(feats.cpu().numpy())\n            y_train.append(target.cpu().numpy())\n\n        # for points, target in tqdm(testDataLoader, total=len(testDataLoader), smoothing=0.9):\n        #     points, target = points.float().transpose(2, 1).cuda(), target.long().cuda()\n        #     feats = encoder(points)\n        #     X_test.append(feats.cpu().numpy())\n        #     y_test.append(target.cpu().numpy())\n\n    X_train, y_train = np.concatenate(X_train), np.concatenate(y_train)\n    # X_test, y_test = np.concatenate(X_test), np.concatenate(y_test)\n\n    # In general, larger dataset/num of class require larger perplexity\n    X_embedded = TSNE(n_components=2, perplexity=100).fit_transform(X_train)\n\n    plt.figure(figsize=(16, 16))\n    plt.scatter(X_embedded[:, 0], X_embedded[:, 1], c=y_train, cmap=plt.cm.get_cmap(\"jet\", NUM_CLASSES))\n    plt.colorbar(ticks=range(1, NUM_CLASSES + 1))\n    plt.clim(0.5, NUM_CLASSES + 0.5)\n    # plt.savefig('log/tsne/tsne_shapenet10_pcn.pdf')\n    plt.show()\n\n"
  },
  {
    "path": "OcCo_Torch/utils/Torch_Utility.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/pytorch/pytorch/issues/7068#issuecomment-487907668\nimport torch, os, random, numpy as np\n\n\ndef seed_torch(seed=1029):\n    random.seed(seed)\n    os.environ['PYTHONHASHSEED'] = str(seed)\n    np.random.seed(seed)\n    torch.manual_seed(seed)\n    torch.cuda.manual_seed(seed)\n    torch.cuda.manual_seed_all(seed)  # for multi-GPU Usage\n    torch.backends.cudnn.benchmark = False\n    torch.backends.cudnn.deterministic = True\n\n\ndef copy_parameters(model, pretrained, verbose=True):\n    # ref: https://discuss.pytorch.org/t/how-to-load-part-of-pre-trained-model/1113/3\n\n    model_dict = model.state_dict()\n    pretrained_dict = pretrained['model_state_dict']\n    pretrained_dict = {k: v for k, v in pretrained_dict.items() if\n                       k in model_dict and pretrained_dict[k].size() == model_dict[k].size()}\n\n    if verbose:\n        print('=' * 27)\n        print('Restored Params and Shapes:')\n        for k, v in pretrained_dict.items():\n            print(k, ': ', v.size())\n        print('=' * 68)\n    model_dict.update(pretrained_dict)\n    model.load_state_dict(model_dict)\n    return model\n\n\ndef weights_init(m):\n    \"\"\"\n    Xavier normal initialisation for weights and zero bias,\n    find especially useful for completion and segmentation Tasks\n    \"\"\"\n    classname = m.__class__.__name__\n    if (classname.find('Conv1d') != -1) or (classname.find('Conv2d') != -1) or (classname.find('Linear') != -1):\n        torch.nn.init.xavier_normal_(m.weight.data)\n        if m.bias is not None:\n            torch.nn.init.constant_(m.bias.data, 0.0)\n\n\ndef bn_momentum_adjust(m, momentum):\n    if isinstance(m, torch.nn.BatchNorm2d) or isinstance(m, torch.nn.BatchNorm1d):\n        m.momentum = momentum\n"
  },
  {
    "path": "OcCo_Torch/utils/TrainLogger.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, logging, datetime, numpy as np, sklearn.metrics as metrics\nfrom pathlib import Path\n\n\nclass TrainLogger:\n\n    def __init__(self, args, name='model', subfold='cls', filename='train_log', cls2name=None):\n        self.step = 1\n        self.epoch = 1\n        self.args = args\n        self.name = name\n        self.sf = subfold\n        self.mkdir()\n        self.setup(filename=filename)\n        self.epoch_init()\n        self.save_model = False\n        self.cls2name = cls2name\n        self.best_instance_acc, self.best_class_acc, self.best_miou = 0., 0., 0.\n        self.best_instance_epoch, self.best_class_epoch, self.best_miou_epoch = 0, 0, 0\n        self.savepath = str(self.checkpoints_dir) + '/best_model.pth'\n\n    def setup(self, filename='train_log'):\n        self.logger = logging.getLogger(self.name)\n        self.logger.setLevel(logging.INFO)\n        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n        file_handler = logging.FileHandler(os.path.join(self.log_dir, filename + '.txt'))\n        file_handler.setLevel(logging.INFO)\n        file_handler.setFormatter(formatter)\n        # ref: https://stackoverflow.com/a/53496263/12525201\n        # define a Handler which writes INFO messages or higher to the sys.stderr\n        console = logging.StreamHandler()\n        console.setLevel(logging.INFO)\n        # logging.getLogger('').addHandler(console) # this is root logger\n        self.logger.addHandler(console)\n        self.logger.addHandler(file_handler)\n        self.logger.info('PARAMETER ...')\n        self.logger.info(self.args)\n        self.logger.removeHandler(console)\n\n    def mkdir(self):\n        timestr = str(datetime.datetime.now().strftime('%Y-%m-%d_%H-%M'))\n        experiment_dir = Path('./log/')\n        experiment_dir.mkdir(exist_ok=True)\n        experiment_dir = experiment_dir.joinpath(self.sf)\n        
experiment_dir.mkdir(exist_ok=True)\n\n        if self.args.log_dir is None:\n            self.experiment_dir = experiment_dir.joinpath(timestr)\n        else:\n            self.experiment_dir = experiment_dir.joinpath(self.args.log_dir)\n\n        self.experiment_dir.mkdir(exist_ok=True)\n        self.checkpoints_dir = self.experiment_dir.joinpath('checkpoints/')\n        self.checkpoints_dir.mkdir(exist_ok=True)\n        self.log_dir = self.experiment_dir.joinpath('logs/')\n        self.log_dir.mkdir(exist_ok=True)\n\n    # @property.setter\n    def epoch_init(self, training=True):\n        self.loss, self.count, self.pred, self.gt = 0., 0., [], []\n        if training:\n            self.logger.info('Epoch %d/%d:' % (self.epoch, self.args.epoch))\n\n    def step_update(self, pred, gt, loss, training=True):\n        if training:\n            self.step += 1  # Use TensorFlow way to count training steps\n        self.gt.append(gt)\n        self.pred.append(pred)\n        batch_size = len(pred)\n        self.count += batch_size\n        self.loss += loss * batch_size\n\n    def epoch_update(self, training=True, mode='cls'):\n        self.save_model = False\n        self.gt = np.concatenate(self.gt)\n        self.pred = np.concatenate(self.pred)\n\n        instance_acc = metrics.accuracy_score(self.gt, self.pred)\n        if instance_acc > self.best_instance_acc and not training:\n            self.save_model = True if mode == 'cls' else False\n            self.best_instance_acc = instance_acc\n            self.best_instance_epoch = self.epoch\n\n        if mode == 'cls':\n            class_acc = metrics.balanced_accuracy_score(self.gt, self.pred)\n            if class_acc > self.best_class_acc and not training:\n                self.best_class_epoch = self.epoch\n                self.best_class_acc = class_acc\n            return instance_acc, class_acc\n        elif mode == 'semseg':\n            miou = self.calculate_IoU().mean()\n            if miou > 
self.best_miou and not training:\n                self.best_miou_epoch = self.epoch\n                self.save_model = True\n                self.best_miou = miou\n            return instance_acc, miou\n        else:\n            raise ValueError('Mode is not Supported by TrainLogger')\n\n    def epoch_summary(self, writer=None, training=True, mode='cls'):\n        criteria = 'Class Accuracy' if mode == 'cls' else 'mIoU'\n        instance_acc, class_acc = self.epoch_update(training=training, mode=mode)\n        if training:\n            if writer is not None:\n                writer.add_scalar('Train Instance Accuracy', instance_acc, self.step)\n                writer.add_scalar('Train %s' % criteria, class_acc, self.step)\n            self.logger.info('Train Instance Accuracy: %.3f' % instance_acc)\n            self.logger.info('Train %s: %.3f' % (criteria, class_acc))\n        else:\n            if writer is not None:\n                writer.add_scalar('Test Instance Accuracy', instance_acc, self.step)\n                writer.add_scalar('Test %s' % criteria, class_acc, self.step)\n            self.logger.info('Test Instance Accuracy: %.3f' % instance_acc)\n            self.logger.info('Test %s: %.3f' % (criteria, class_acc))\n            self.logger.info('Best Instance Accuracy: %.3f at Epoch %d ' % (\n                self.best_instance_acc, self.best_instance_epoch))\n            if self.best_class_acc > .1:\n                self.logger.info('Best Class Accuracy: %.3f at Epoch %d' % (\n                    self.best_class_acc, self.best_class_epoch))\n            if self.best_miou > .1:\n                self.logger.info('Best mIoU: %.3f at Epoch %d' % (\n                    self.best_miou, self.best_miou_epoch))\n\n        self.epoch += 1 if not training else 0\n        if self.save_model:\n            self.logger.info('Saving the Model Params to %s' % self.savepath)\n\n    def calculate_IoU(self):\n        num_class = len(self.cls2name)\n        Intersection = 
np.zeros(num_class)\n        Union = Intersection.copy()\n        # self.pred -> numpy.ndarray (total predictions, )\n\n        for sem_idx in range(num_class):\n            Intersection[sem_idx] = np.sum(np.logical_and(self.pred == sem_idx, self.gt == sem_idx))\n            Union[sem_idx] = np.sum(np.logical_or(self.pred == sem_idx, self.gt == sem_idx))\n        return Intersection / Union\n\n    def train_summary(self, mode='cls'):\n        self.logger.info('\\n\\nEnd of Training...')\n        self.logger.info('Best Instance Accuracy: %.3f at Epoch %d ' % (\n            self.best_instance_acc, self.best_instance_epoch))\n        if mode == 'cls':\n            self.logger.info('Best Class Accuracy: %.3f at Epoch %d' % (\n                self.best_class_acc, self.best_class_epoch))\n        elif mode == 'semseg':\n            self.logger.info('Best mIoU: %.3f at Epoch %d' % (\n                self.best_miou, self.best_miou_epoch))\n\n    def update_from_checkpoints(self, checkpoint):\n        self.logger.info('Use Pre-Trained Weights')\n        self.step = checkpoint['step']\n        self.epoch = checkpoint['epoch']\n        self.best_instance_epoch, self.best_instance_acc = checkpoint['epoch'], checkpoint['instance_acc']\n        self.best_class_epoch, self.best_class_acc = checkpoint['best_class_epoch'], checkpoint['best_class_acc']\n        self.logger.info('Best Class Acc {:.3f} at Epoch {}'.format(self.best_class_acc, self.best_class_epoch))\n        self.logger.info('Best Instance Acc {:.3f} at Epoch {}'.format(self.best_instance_acc, self.best_instance_epoch))\n"
  },
  {
    "path": "OcCo_Torch/utils/Visu_Utility.py",
    "content": "#  Copyright (c) 2020. Author: Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/visu_util.py\n\n# uncomment following commands if you have saving issues\n# ref: https://stackoverflow.com/questions/13336823/matplotlib-python-error\n# import matplotlib\n# matplotlib.use('Agg')\nfrom matplotlib import pyplot as plt\nfrom mpl_toolkits.mplot3d import Axes3D\n\n\ndef plot_pcd_three_views(filename, pcds, titles, suptitle='', sizes=None, cmap='viridis', zdir='y',\n                         xlim=(-0.3, 0.3), ylim=(-0.3, 0.3), zlim=(-0.3, 0.3)):\n    if sizes is None:\n        sizes = [0.5 for _ in range(len(pcds))]\n    fig = plt.figure(figsize=(len(pcds) * 3, 9))\n    for i in range(3):\n        elev = 30\n        azim = -45 + 90 * i\n        for j, (pcd, size) in enumerate(zip(pcds, sizes)):\n            color = pcd[:, 0]\n            ax = fig.add_subplot(3, len(pcds), i * len(pcds) + j + 1, projection='3d')\n            ax.view_init(elev, azim)\n            ax.scatter(pcd[:, 0], pcd[:, 1], pcd[:, 2], zdir=zdir, c=color, s=size, cmap=cmap, vmin=-1, vmax=0.5)\n            ax.set_title(titles[j])\n            ax.set_axis_off()\n            ax.set_xlim(xlim)\n            ax.set_ylim(ylim)\n            ax.set_zlim(zlim)\n    plt.subplots_adjust(left=0.05, right=0.95, bottom=0.05, top=0.9, wspace=0.1, hspace=0.1)\n    plt.suptitle(suptitle)\n    fig.savefig(filename)\n    plt.close(fig)\n\n\nif __name__ == \"__main__\":\n    pass\n    # filenames = ['airplane.pcd', 'car.pcd', 'chair.pcd', 'lamp.pcd']  # '../demo_data'\n    # for file in filenames:\n    # \tfilename = file.replace('.pcd', '')\n    # \tpcds = [np.asarray(read_point_cloud('../demo_data/' + file).points)]\n    # \t# pdb.set_trace()\n    # \ttitles = ['viewpoint 1', 'viewpoint 2', 'viewpoint 3']\n    # \tplot_pcd_three_views(s\n    # \t\tfilename, pcds, titles, suptitle=filename, sizes=None, cmap='viridis', zdir='y',\n    # \t\txlim=(-0.3, 0.3), ylim=(-0.3, 0.3), 
zlim=(-0.3, 0.3))\n"
  },
  {
    "path": "OcCo_Torch/utils/__init__.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\n"
  },
  {
    "path": "OcCo_Torch/utils/collect_indoor3d_data.py",
    "content": "#  Ref: https://github.com/charlesq34/pointnet/blob/master/sem_seg/collect_indoor3d_data.py\n\nimport os, sys, indoor3d_util\nBASE_DIR = os.path.dirname(os.path.abspath(__file__))\nROOT_DIR = os.path.dirname(BASE_DIR)\nsys.path.append(BASE_DIR)\n\nanno_paths = [line.rstrip() for line in open(os.path.join(BASE_DIR, 'meta/anno_paths.txt'))]\nanno_paths = [os.path.join(indoor3d_util.DATA_PATH, p) for p in anno_paths]\n\noutput_folder = os.path.join(ROOT_DIR, 'data/stanford_indoor3d')\n# output_folder = os.path.join('../data/stanford_indoor3d')\nif not os.path.exists(output_folder):\n    os.mkdir(output_folder)\n\n# Note: there is an extra character in the v1.2 data in Area_5/hallway_6. It's fixed manually.\n# Ref: https://github.com/charlesq34/pointnet/issues/45\nfor anno_path in anno_paths:\n    print(anno_path)\n    try:\n        elements = anno_path.split('/')\n        out_filename = elements[-3]+'_'+elements[-2]+'.npy'  # e.g., Area_1_hallway_1.npy\n        indoor3d_util.collect_point_label(\n            anno_path, os.path.join(output_folder, out_filename), 'numpy')\n    except:\n        print(anno_path, 'ERROR!!')\n"
  },
  {
    "path": "OcCo_Torch/utils/gen_indoor3d_h5.py",
    "content": "#  Ref: https://github.com/charlesq34/pointnet/blob/master/sem_seg/gen_indoor3d_h5.py\n\nimport os, sys, h5py, indoor3d_util, numpy as np\n\nBASE_DIR = os.path.dirname(os.path.abspath(__file__))\nROOT_DIR = os.path.dirname(BASE_DIR)\nsys.path.append(BASE_DIR)\nsys.path.append(os.path.join(ROOT_DIR, 'utils'))\ndata_dir = os.path.join(ROOT_DIR, 'data')\nindoor3d_data_dir = os.path.join(data_dir, 'stanford_indoor3d')\nNUM_POINT = 4096\nH5_BATCH_SIZE = 1000\ndata_dim = [NUM_POINT, 9]\nlabel_dim = [NUM_POINT]\ndata_dtype = 'float32'\nlabel_dtype = 'uint8'\n\n# Set paths\nfilelist = os.path.join(BASE_DIR, 'meta/all_data_label.txt')\ndata_label_files = [os.path.join(indoor3d_data_dir, line.rstrip()) for line in open(filelist)]\noutput_dir = os.path.join(data_dir, 'indoor3d_sem_seg_hdf5_data')\nif not os.path.exists(output_dir):\n    os.mkdir(output_dir)\noutput_filename_prefix = os.path.join(output_dir, 'ply_data_all')\noutput_room_filelist = os.path.join(output_dir, 'room_filelist.txt')\nfout_room = open(output_room_filelist, 'w')\n\n# --------------------------------------\n# ----- BATCH WRITE TO HDF5 -----\n# --------------------------------------\nbatch_data_dim = [H5_BATCH_SIZE] + data_dim\nbatch_label_dim = [H5_BATCH_SIZE] + label_dim\nh5_batch_data = np.zeros(batch_data_dim, dtype=np.float32)\nh5_batch_label = np.zeros(batch_label_dim, dtype=np.uint8)\nbuffer_size = 0  # state: record how many samples are currently in buffer\nh5_index = 0  # state: the next h5 file to save\n\n\ndef insert_batch(data, label, last_batch=False):\n    global h5_batch_data, h5_batch_label\n    global buffer_size, h5_index\n\n    def save_h5(h5_filename, data, label, data_dtype='uint8', label_dtype='uint8'):\n        h5_fout = h5py.File(h5_filename, 'w')\n        h5_fout.create_dataset(\n            name='data', data=data,\n            compression='gzip', compression_opts=4,\n            dtype=data_dtype)\n        h5_fout.create_dataset(\n            name='label', 
data=label,\n            compression='gzip', compression_opts=1,\n            dtype=label_dtype)\n        h5_fout.close()\n\n    data_size = data.shape[0]\n    # If there is enough space, just insert\n    if buffer_size + data_size <= h5_batch_data.shape[0]:\n        h5_batch_data[buffer_size:buffer_size + data_size, ...] = data\n        h5_batch_label[buffer_size:buffer_size + data_size] = label\n        buffer_size += data_size\n    else:  # not enough space\n        capacity = h5_batch_data.shape[0] - buffer_size\n        assert (capacity >= 0)\n        if capacity > 0:\n            h5_batch_data[buffer_size:buffer_size + capacity, ...] = data[0:capacity, ...]\n            h5_batch_label[buffer_size:buffer_size + capacity, ...] = label[0:capacity, ...]\n        # Save batch data and label to h5 file, reset buffer_size\n        h5_filename = output_filename_prefix + '_' + str(h5_index) + '.h5'\n        save_h5(h5_filename, h5_batch_data, h5_batch_label, data_dtype, label_dtype)\n        print('Stored {0} with size {1}'.format(h5_filename, h5_batch_data.shape[0]))\n        h5_index += 1\n        buffer_size = 0\n        # recursive call\n        insert_batch(data[capacity:, ...], label[capacity:, ...], last_batch)\n    if last_batch and buffer_size > 0:\n        h5_filename = output_filename_prefix + '_' + str(h5_index) + '.h5'\n        save_h5(h5_filename, h5_batch_data[0:buffer_size, ...],\n                h5_batch_label[0:buffer_size, ...], data_dtype, label_dtype)\n        print('Stored {0} with size {1}'.format(h5_filename, buffer_size))\n        h5_index += 1\n        buffer_size = 0\n    return\n\n\nsample_cnt = 0\nfor i, data_label_filename in enumerate(data_label_files):\n    print(data_label_filename)\n    data, label = indoor3d_util.room2blocks_wrapper_normalized(\n        data_label_filename, NUM_POINT, block_size=1.0, stride=0.5, random_sample=False, sample_num=None)\n    print('{0}, {1}'.format(data.shape, label.shape))\n    for _ in 
range(data.shape[0]):\n        fout_room.write(os.path.basename(data_label_filename)[0:-4] + '\\n')\n\n    sample_cnt += data.shape[0]\n    insert_batch(data, label, i == len(data_label_files) - 1)\n\nfout_room.close()\nprint(\"Total samples: {0}\".format(sample_cnt))\n"
  },
  {
    "path": "OcCo_Torch/utils/indoor3d_util.py",
    "content": "#  Ref: https://github.com/charlesq34/pointnet/blob/master/sem_seg/indoor3d_util.py\nimport os, sys, glob, numpy as np\n\nBASE_DIR = os.path.dirname(os.path.abspath(__file__))\nROOT_DIR = os.path.dirname(BASE_DIR)\nsys.path.append(BASE_DIR)\n\n# -----------------------------------------------------------------------------\n# CONSTANTS\n# -----------------------------------------------------------------------------\n\nDATA_PATH = os.path.join(ROOT_DIR, 'data', 'Stanford3dDataset_v1.2_Aligned_Version')\ng_classes = [x.rstrip() for x in open(os.path.join(BASE_DIR, 'meta/s3dis/class_names.txt'))]\ng_class2label = {cls: i for i, cls in enumerate(g_classes)}\ng_class2color = {'ceiling':  [0, 255, 0],\n                 'floor':    [0, 0, 255],\n                 'wall':     [0, 255, 255],\n                 'beam':     [255, 255, 0],\n                 'column':   [255, 0, 255],\n                 'window':   [100, 100, 255],\n                 'door':     [200, 200, 100],\n                 'table':    [170, 120, 200],\n                 'chair':    [255, 0, 0],\n                 'sofa':     [200, 100, 100],\n                 'bookcase': [10, 200, 100],\n                 'board':    [200, 200, 200],\n                 'clutter':  [50, 50, 50]}\ng_easy_view_labels = [7, 8, 9, 10, 11, 1]\ng_label2color = {g_classes.index(cls): g_class2color[cls] for cls in g_classes}\n\n\n# -----------------------------------------------------------------------------\n# CONVERT ORIGINAL DATA TO OUR DATA_LABEL FILES\n# -----------------------------------------------------------------------------\n\ndef collect_point_label(anno_path, out_filename, file_format='txt'):\n    \"\"\" Convert original dataset files to data_label file (each line is XYZRGBL).\n        We aggregated all the points from each instance in the room.\n\n    Args:\n        anno_path: path to annotations. e.g. 
Area_1/office_2/Annotations/\n        out_filename: path to save collected points and labels (each line is XYZRGBL)\n        file_format: txt or numpy, determines what file format to save.\n    Returns:\n        None\n    Note:\n        the points are shifted before save, the most negative point is now at origin.\n    \"\"\"\n    points_list = []\n    for f in glob.glob(os.path.join(anno_path, '*.txt')):\n        cls = os.path.basename(f).split('_')[0]\n        # print(f)\n        if cls not in g_classes:  # note: in some room there is 'staris' class..\n            cls = 'clutter'\n\n        points = np.loadtxt(f)\n        labels = np.ones((points.shape[0], 1)) * g_class2label[cls]\n        points_list.append(np.concatenate([points, labels], 1))  # Nx7\n\n    data_label = np.concatenate(points_list, 0)\n    xyz_min = np.amin(data_label, axis=0)[0:3]\n    data_label[:, 0:3] -= xyz_min\n\n    if file_format == 'txt':\n        fout = open(out_filename, 'w')\n        for i in range(data_label.shape[0]):\n            fout.write('%f %f %f %d %d %d %d\\n' %\n                       (data_label[i, 0], data_label[i, 1], data_label[i, 2],\n                        data_label[i, 3], data_label[i, 4], data_label[i, 5],\n                        data_label[i, 6]))\n        fout.close()\n    elif file_format == 'numpy':\n        np.save(out_filename, data_label)\n    else:\n        print('ERROR!! Unknown file format: %s, please use txt or numpy.' 
% file_format)\n        exit()\n\n\ndef data_to_obj(data, name='example.obj', no_wall=True):\n    fout = open(name, 'w')\n    label = data[:, -1].astype(int)\n    for i in range(data.shape[0]):\n        if no_wall and ((label[i] == 2) or (label[i] == 0)):\n            continue\n        fout.write('v %f %f %f %d %d %d\\n' % \\\n                   (data[i, 0], data[i, 1], data[i, 2], data[i, 3], data[i, 4], data[i, 5]))\n    fout.close()\n\n\ndef point_label_to_obj(input_filename, out_filename, label_color=True, easy_view=False, no_wall=False):\n    \"\"\" For visualization of a room from data_label file,\n    input_filename: each line is X Y Z R G B L\n    out_filename: OBJ filename,\n            visualize input file by coloring point with label color\n        easy_view: only visualize furnitures and floor\n    \"\"\"\n    data_label = np.loadtxt(input_filename)\n    data = data_label[:, 0:6]\n    label = data_label[:, -1].astype(int)\n    fout = open(out_filename, 'w')\n    for i in range(data.shape[0]):\n        color = g_label2color[label[i]]\n        if easy_view and (label[i] not in g_easy_view_labels):\n            continue\n        if no_wall and ((label[i] == 2) or (label[i] == 0)):\n            continue\n        if label_color:\n            fout.write('v %f %f %f %d %d %d\\n' % \\\n                       (data[i, 0], data[i, 1], data[i, 2], color[0], color[1], color[2]))\n        else:\n            fout.write('v %f %f %f %d %d %d\\n' % \\\n                       (data[i, 0], data[i, 1], data[i, 2], data[i, 3], data[i, 4], data[i, 5]))\n    fout.close()\n\n\n# -----------------------------------------------------------------------------\n# PREPARE BLOCK DATA FOR NETWORK TRAINING/TESTING\n# -----------------------------------------------------------------------------\n\ndef sample_data(data, num_sample):\n    \"\"\" data is in N x ...\n        we want to keep (num_sample, C) of them.\n        if N > num_sample, we will randomly keep num_sample of them.\n      
  if N < num_sample, we will randomly duplicate samples.\n    \"\"\"\n    N = data.shape[0]\n    if N == num_sample:\n        return data, range(N)\n    elif N > num_sample:\n        sample = np.random.choice(N, num_sample)\n        return data[sample, ...], sample\n    else:\n        sample = np.random.choice(N, num_sample - N)\n        dup_data = data[sample, ...]\n        return np.concatenate([data, dup_data], 0), list(range(N)) + list(sample)\n\n\ndef sample_data_label(data, label, num_sample):\n    # randomly sub select or duplicate for up-sampling\n    new_data, sample_indices = sample_data(data, num_sample)\n    new_label = label[sample_indices]\n    return new_data, new_label\n\n\ndef room2blocks(data, label, num_point, block_size=1.0, stride=1.0,\n                random_sample=False, sample_num=None, sample_aug=1):\n    \"\"\" Prepare block training data.\n    Args:\n        data: N x 6 numpy array, 012 are XYZ in meters, 345 are RGB in [0,1]\n            assumes the data is shifted (min point is origin) and aligned\n            (aligned with XYZ axis)\n        label: N size uint8 numpy array from 0-12\n        num_point: int, how many points to sample in each block\n        block_size: float, physical size of the block in meters\n        stride: float, stride for block sweeping\n        random_sample: bool, if True, we will randomly sample blocks in the room\n        sample_num: int, if random sample, how many blocks to sample\n            [default: room area]\n        sample_aug: if random sample, how much aug\n    Returns:\n        block_datas: K x num_point x 6 np array of XYZRGB, RGB is in [0,1]\n        block_labels: K x num_point x 1 np array of uint8 labels\n\n    TODO: for this version, blocking is in fixed, non-overlapping pattern.\n    \"\"\"\n    assert (stride <= block_size)\n\n    limit = np.amax(data, 0)[0:3]\n\n    # Get the corner location for our sampling blocks\n    xbeg_list = []\n    ybeg_list = []\n    if not random_sample:\n        
num_block_x = int(np.ceil((limit[0] - block_size) / stride)) + 1\n        num_block_y = int(np.ceil((limit[1] - block_size) / stride)) + 1\n        for i in range(num_block_x):\n            for j in range(num_block_y):\n                xbeg_list.append(i * stride)\n                ybeg_list.append(j * stride)\n    else:  # random sample blocks from the room, not used in gen_indoor3d_h5.py\n        num_block_x = int(np.ceil(limit[0] / block_size))\n        num_block_y = int(np.ceil(limit[1] / block_size))\n        if sample_num is None:\n            sample_num = num_block_x * num_block_y * sample_aug\n        for _ in range(sample_num):\n            xbeg = np.random.uniform(-block_size, limit[0])\n            ybeg = np.random.uniform(-block_size, limit[1])\n            xbeg_list.append(xbeg)\n            ybeg_list.append(ybeg)\n\n    # Collect blocks\n    block_data_list = []\n    block_label_list = []\n    for idx in range(len(xbeg_list)):\n        xbeg = xbeg_list[idx]\n        ybeg = ybeg_list[idx]\n        # xcond -> bool array with a shape of (Num_Total_Points, )\n        xcond = (data[:, 0] <= xbeg + block_size) & (data[:, 0] >= xbeg)\n        ycond = (data[:, 1] <= ybeg + block_size) & (data[:, 1] >= ybeg)\n        cond = xcond & ycond\n        if np.sum(cond) < 100:  # discard block if there are less than 100 pts.\n            continue\n\n        block_data = data[cond, :]\n        block_label = label[cond]\n\n        # randomly subsample data\n        block_data_sampled, block_label_sampled = \\\n            sample_data_label(block_data, block_label, num_point)\n        block_data_list.append(np.expand_dims(block_data_sampled, 0))\n        block_label_list.append(np.expand_dims(block_label_sampled, 0))\n\n    return np.concatenate(block_data_list, 0), np.concatenate(block_label_list, 0)\n\n\ndef room2blocks_plus(data_label, num_point, block_size, stride,\n                     random_sample, sample_num, sample_aug):\n    \"\"\" room2block 
with input filename and RGB pre-processing.\n    \"\"\"\n    data = data_label[:, 0:6]\n    data[:, 3:6] /= 255.0\n    label = data_label[:, -1].astype(np.uint8)\n\n    return room2blocks(data, label, num_point, block_size, stride,\n                       random_sample, sample_num, sample_aug)\n\n\ndef room2blocks_wrapper(data_label_filename, num_point, block_size=1.0, stride=1.0,\n                        random_sample=False, sample_num=None, sample_aug=1):\n    if data_label_filename[-3:] == 'txt':\n        data_label = np.loadtxt(data_label_filename)\n    elif data_label_filename[-3:] == 'npy':\n        data_label = np.load(data_label_filename)\n    else:\n        print('Unknown file type! exiting.')\n        exit()\n    return room2blocks_plus(data_label, num_point, block_size, stride,\n                            random_sample, sample_num, sample_aug)\n\n\ndef room2blocks_plus_normalized(data_label, num_point, block_size, stride,\n                                random_sample, sample_num, sample_aug):\n    \"\"\" room2block, with input filename and RGB preprocessing.\n        for each block centralize XYZ, add normalized XYZ as 678 channels\n    \"\"\"\n    data = data_label[:, 0:6]\n    data[:, 3:6] /= 255.0\n    label = data_label[:, -1].astype(np.uint8)\n    max_room_x = max(data[:, 0])\n    max_room_y = max(data[:, 1])\n    max_room_z = max(data[:, 2])\n\n    data_batch, label_batch = room2blocks(data, label, num_point, block_size, stride,\n                                          random_sample, sample_num, sample_aug)\n    new_data_batch = np.zeros((data_batch.shape[0], num_point, 9))\n    for b in range(data_batch.shape[0]):\n        new_data_batch[b, :, 6] = data_batch[b, :, 0] / max_room_x\n        new_data_batch[b, :, 7] = data_batch[b, :, 1] / max_room_y\n        new_data_batch[b, :, 8] = data_batch[b, :, 2] / max_room_z\n        minx = min(data_batch[b, :, 0])\n        miny = min(data_batch[b, :, 1])\n        data_batch[b, :, 0] -= (minx + 
block_size / 2)\n        data_batch[b, :, 1] -= (miny + block_size / 2)\n    new_data_batch[:, :, 0:6] = data_batch\n    return new_data_batch, label_batch\n\n\ndef room2blocks_wrapper_normalized(data_label_filename, num_point, block_size=1.0, stride=1.0,\n                                   random_sample=False, sample_num=None, sample_aug=1):\n    if data_label_filename[-3:] == 'txt':\n        data_label = np.loadtxt(data_label_filename)\n    elif data_label_filename[-3:] == 'npy':\n        data_label = np.load(data_label_filename)\n    else:\n        print('Unknown file type! exiting.')\n        exit()\n    return room2blocks_plus_normalized(data_label, num_point, block_size, stride,\n                                       random_sample, sample_num, sample_aug)\n\n\ndef room2samples(data, label, sample_num_point):\n    \"\"\" Prepare whole room samples.\n\n    Args:\n        data: N x 6 numpy array, 012 are XYZ in meters, 345 are RGB in [0,1]\n            assumes the data is shifted (min point is origin) and\n            aligned (aligned with XYZ axis)\n        label: N size uint8 numpy array from 0-12\n        sample_num_point: int, how many points to sample in each sample\n    Returns:\n        sample_datas: K x sample_num_point x 9\n                     numpy array of XYZRGBX'Y'Z', RGB is in [0,1]\n        sample_labels: K x sample_num_point x 1 np array of uint8 labels\n    \"\"\"\n    N = data.shape[0]\n    order = np.arange(N)\n    np.random.shuffle(order)\n    data = data[order, :]\n    label = label[order]\n\n    batch_num = int(np.ceil(N / float(sample_num_point)))\n    sample_datas = np.zeros((batch_num, sample_num_point, 6))\n    sample_labels = np.zeros((batch_num, sample_num_point, 1))\n\n    for i in range(batch_num):\n        beg_idx = i * sample_num_point\n        end_idx = min((i + 1) * sample_num_point, N)\n        num = end_idx - beg_idx\n        sample_datas[i, 0:num, :] = data[beg_idx:end_idx, :]\n        sample_labels[i, 0:num, 0] = 
label[beg_idx:end_idx]\n        if num < sample_num_point:\n            makeup_indices = np.random.choice(N, sample_num_point - num)\n            sample_datas[i, num:, :] = data[makeup_indices, :]\n            sample_labels[i, num:, 0] = label[makeup_indices]\n    return sample_datas, sample_labels\n\n\ndef room2samples_plus_normalized(data_label, num_point):\n    \"\"\" room2sample, with input filename and RGB preprocessing.\n        for each block centralize XYZ, add normalized XYZ as 678 channels\n    \"\"\"\n    data = data_label[:, 0:6]\n    data[:, 3:6] /= 255.0\n    label = data_label[:, -1].astype(np.uint8)\n    max_room_x = max(data[:, 0])\n    max_room_y = max(data[:, 1])\n    max_room_z = max(data[:, 2])\n    # print(max_room_x, max_room_y, max_room_z)\n\n    data_batch, label_batch = room2samples(data, label, num_point)\n    new_data_batch = np.zeros((data_batch.shape[0], num_point, 9))\n    for b in range(data_batch.shape[0]):\n        new_data_batch[b, :, 6] = data_batch[b, :, 0] / max_room_x\n        new_data_batch[b, :, 7] = data_batch[b, :, 1] / max_room_y\n        new_data_batch[b, :, 8] = data_batch[b, :, 2] / max_room_z\n    # minx = min(data_batch[b, :, 0])\n    # miny = min(data_batch[b, :, 1])\n    # data_batch[b, :, 0] -= (minx+block_size/2)\n    # data_batch[b, :, 1] -= (miny+block_size/2)\n    new_data_batch[:, :, 0:6] = data_batch\n    return new_data_batch, label_batch\n\n\ndef room2samples_wrapper_normalized(data_label_filename, num_point):\n    if data_label_filename[-3:] == 'txt':\n        data_label = np.loadtxt(data_label_filename)\n    elif data_label_filename[-3:] == 'npy':\n        data_label = np.load(data_label_filename)\n    else:\n        print('Unknown file type! 
exiting.')\n        exit()\n    return room2samples_plus_normalized(data_label, num_point)\n\n\n# -----------------------------------------------------------------------------\n# EXTRACT INSTANCE BBOX FROM ORIGINAL DATA (for detection evaluation)\n# -----------------------------------------------------------------------------\n\ndef collect_bounding_box(anno_path, out_filename):\n    \"\"\" Compute bounding boxes from each instance in original dataset files on\n        one room. **We assume the bbox is aligned with XYZ coordinate.**\n    \n    Args:\n        anno_path: path to annotations. e.g. Area_1/office_2/Annotations/\n        out_filename: path to save instance bounding boxes for that room.\n            each line is x1 y1 z1 x2 y2 z2 label,\n            where (x1,y1,z1) is the point on the diagonal closer to origin\n    Returns:\n        None\n    Note:\n        room points are shifted, the most negative point is now at origin.\n    \"\"\"\n    bbox_label_list = []\n\n    for f in glob.glob(os.path.join(anno_path, '*.txt')):\n        cls = os.path.basename(f).split('_')[0]\n        if cls not in g_classes:  # note: in some room there is 'staris' class..\n            cls = 'clutter'\n        points = np.loadtxt(f)\n        label = g_class2label[cls]\n        # Compute tightest axis aligned bounding box\n        xyz_min = np.amin(points[:, 0:3], axis=0)\n        xyz_max = np.amax(points[:, 0:3], axis=0)\n        ins_bbox_label = np.expand_dims(\n            np.concatenate([xyz_min, xyz_max, np.array([label])], 0), 0)\n        bbox_label_list.append(ins_bbox_label)\n\n    bbox_label = np.concatenate(bbox_label_list, 0)\n    room_xyz_min = np.amin(bbox_label[:, 0:3], axis=0)\n    bbox_label[:, 0:3] -= room_xyz_min\n    bbox_label[:, 3:6] -= room_xyz_min\n\n    fout = open(out_filename, 'w')\n    for i in range(bbox_label.shape[0]):\n        fout.write('%f %f %f %f %f %f %d\\n' % \\\n                   (bbox_label[i, 0], bbox_label[i, 1], bbox_label[i, 2],\n       
             bbox_label[i, 3], bbox_label[i, 4], bbox_label[i, 5],\n                    bbox_label[i, 6]))\n    fout.close()\n\n\ndef bbox_label_to_obj(input_filename, out_filename_prefix, easy_view=False):\n    \"\"\" Visualization of bounding boxes.\n    \n    Args:\n        input_filename: each line is x1 y1 z1 x2 y2 z2 label\n        out_filename_prefix: OBJ filename prefix,\n            visualize object by g_label2color\n        easy_view: if True, only visualize furniture and floor\n    Returns:\n        output a list of OBJ file and MTL files with the same prefix\n    \"\"\"\n    bbox_label = np.loadtxt(input_filename)\n    bbox = bbox_label[:, 0:6]\n    label = bbox_label[:, -1].astype(int)\n    v_cnt = 0  # count vertex\n    ins_cnt = 0  # count instance\n    for i in range(bbox.shape[0]):\n        if easy_view and (label[i] not in g_easy_view_labels):\n            continue\n        obj_filename = out_filename_prefix + '_' + g_classes[label[i]] + '_' + str(ins_cnt) + '.obj'\n        mtl_filename = out_filename_prefix + '_' + g_classes[label[i]] + '_' + str(ins_cnt) + '.mtl'\n        fout_obj = open(obj_filename, 'w')\n        fout_mtl = open(mtl_filename, 'w')\n        fout_obj.write('mtllib %s\\n' % (os.path.basename(mtl_filename)))\n\n        length = bbox[i, 3:6] - bbox[i, 0:3]\n        a = length[0]\n        b = length[1]\n        c = length[2]\n        x = bbox[i, 0]\n        y = bbox[i, 1]\n        z = bbox[i, 2]\n        color = np.array(g_label2color[label[i]], dtype=float) / 255.0\n\n        material = 'material%d' % ins_cnt\n        fout_obj.write('usemtl %s\\n' % material)\n        fout_obj.write('v %f %f %f\\n' % (x, y, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x, y + b, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x + a, y + b, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x + a, y, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x, y, z))\n        fout_obj.write('v %f %f %f\\n' % (x, y + b, z))\n        
fout_obj.write('v %f %f %f\\n' % (x + a, y + b, z))\n        fout_obj.write('v %f %f %f\\n' % (x + a, y, z))\n        fout_obj.write('g default\\n')\n        v_cnt = 0  # for individual box\n        fout_obj.write('f %d %d %d %d\\n' % (4 + v_cnt, 3 + v_cnt, 2 + v_cnt, 1 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (1 + v_cnt, 2 + v_cnt, 6 + v_cnt, 5 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (7 + v_cnt, 6 + v_cnt, 2 + v_cnt, 3 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (4 + v_cnt, 8 + v_cnt, 7 + v_cnt, 3 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (5 + v_cnt, 8 + v_cnt, 4 + v_cnt, 1 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (5 + v_cnt, 6 + v_cnt, 7 + v_cnt, 8 + v_cnt))\n        fout_obj.write('\\n')\n\n        fout_mtl.write('newmtl %s\\n' % material)\n        fout_mtl.write('Kd %f %f %f\\n' % (color[0], color[1], color[2]))\n        fout_mtl.write('\\n')\n        fout_obj.close()\n        fout_mtl.close()\n\n        v_cnt += 8\n        ins_cnt += 1\n\n\ndef bbox_label_to_obj_room(input_filename, out_filename_prefix, easy_view=False,\n                           permute=None, center=False, exclude_table=False):\n    \"\"\" Visualization of bounding boxes.\n    \n    Args:\n        input_filename: each line is x1 y1 z1 x2 y2 z2 label\n        out_filename_prefix: OBJ filename prefix,\n            visualize object by g_label2color\n        easy_view: if True, only visualize furniture and floor\n        permute: if not None, permute XYZ for rendering, e.g. 
[0 2 1]\n        center: if True, move obj to have zero origin\n    Returns:\n        output a list of OBJ file and MTL files with the same prefix\n    \"\"\"\n    bbox_label = np.loadtxt(input_filename)\n    bbox = bbox_label[:, 0:6]\n    if permute is not None:\n        assert (len(permute) == 3)\n        permute = np.array(permute)\n        bbox[:, 0:3] = bbox[:, permute]\n        bbox[:, 3:6] = bbox[:, permute + 3]\n    if center:\n        xyz_max = np.amax(bbox[:, 3:6], 0)\n        bbox[:, 0:3] -= (xyz_max / 2.0)\n        bbox[:, 3:6] -= (xyz_max / 2.0)\n        bbox /= np.max(xyz_max / 2.0)\n    label = bbox_label[:, -1].astype(int)\n    obj_filename = out_filename_prefix + '.obj'\n    mtl_filename = out_filename_prefix + '.mtl'\n\n    fout_obj = open(obj_filename, 'w')\n    fout_mtl = open(mtl_filename, 'w')\n    fout_obj.write('mtllib %s\\n' % (os.path.basename(mtl_filename)))\n    v_cnt = 0  # count vertex\n    ins_cnt = 0  # count instance\n    for i in range(bbox.shape[0]):\n        if easy_view and (label[i] not in g_easy_view_labels):\n            continue\n        if exclude_table and label[i] == g_classes.index('table'):\n            continue\n\n        length = bbox[i, 3:6] - bbox[i, 0:3]\n        a = length[0]\n        b = length[1]\n        c = length[2]\n        x = bbox[i, 0]\n        y = bbox[i, 1]\n        z = bbox[i, 2]\n        color = np.array(g_label2color[label[i]], dtype=float) / 255.0\n\n        material = 'material%d' % ins_cnt\n        fout_obj.write('usemtl %s\\n' % material)\n        fout_obj.write('v %f %f %f\\n' % (x, y, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x, y + b, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x + a, y + b, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x + a, y, z + c))\n        fout_obj.write('v %f %f %f\\n' % (x, y, z))\n        fout_obj.write('v %f %f %f\\n' % (x, y + b, z))\n        fout_obj.write('v %f %f %f\\n' % (x + a, y + b, z))\n        fout_obj.write('v %f %f %f\\n' % (x + a, 
y, z))\n        fout_obj.write('g default\\n')\n        fout_obj.write('f %d %d %d %d\\n' % (4 + v_cnt, 3 + v_cnt, 2 + v_cnt, 1 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (1 + v_cnt, 2 + v_cnt, 6 + v_cnt, 5 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (7 + v_cnt, 6 + v_cnt, 2 + v_cnt, 3 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (4 + v_cnt, 8 + v_cnt, 7 + v_cnt, 3 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (5 + v_cnt, 8 + v_cnt, 4 + v_cnt, 1 + v_cnt))\n        fout_obj.write('f %d %d %d %d\\n' % (5 + v_cnt, 6 + v_cnt, 7 + v_cnt, 8 + v_cnt))\n        fout_obj.write('\\n')\n\n        fout_mtl.write('newmtl %s\\n' % material)\n        fout_mtl.write('Kd %f %f %f\\n' % (color[0], color[1], color[2]))\n        fout_mtl.write('\\n')\n\n        v_cnt += 8\n        ins_cnt += 1\n\n    fout_obj.close()\n    fout_mtl.close()\n\n\ndef collect_point_bounding_box(anno_path, out_filename, file_format):\n    \"\"\" Compute bounding boxes from each instance in original dataset files on\n        one room. **We assume the bbox is aligned with XYZ coordinate.**\n        Save both the point XYZRGB and the bounding box for the point's\n        parent element.\n \n    Args:\n        anno_path: path to annotations. e.g. Area_1/office_2/Annotations/\n        out_filename: path to save instance bounding boxes for each point,\n            plus the point's XYZRGBL\n            each line is XYZRGBL offsetX offsetY offsetZ a b c,\n            where cx = X+offsetX, cy=X+offsetY, cz=Z+offsetZ\n            where (cx,cy,cz) is center of the box, a,b,c are distances from center\n            to the surfaces of the box, i.e. 
x1 = cx-a, x2 = cx+a, y1=cy-b etc.\n        file_format: output file format, txt or numpy\n    Returns:\n        None\n\n    Note:\n        room points are shifted, the most negative point is now at origin.\n    \"\"\"\n    point_bbox_list = []\n\n    for f in glob.glob(os.path.join(anno_path, '*.txt')):\n        cls = os.path.basename(f).split('_')[0]\n        if cls not in g_classes:  # note: in some room there is 'stairs' class..\n            cls = 'clutter'\n        points = np.loadtxt(f)  # Nx6\n        label = g_class2label[cls]  # N,\n        # Compute tightest axis aligned bounding box\n        xyz_min = np.amin(points[:, 0:3], axis=0)  # 3,\n        xyz_max = np.amax(points[:, 0:3], axis=0)  # 3,\n        xyz_center = (xyz_min + xyz_max) / 2\n        dimension = (xyz_max - xyz_min) / 2\n\n        xyz_offsets = xyz_center - points[:, 0:3]  # Nx3\n        dimensions = np.ones((points.shape[0], 3)) * dimension  # Nx3\n        labels = np.ones((points.shape[0], 1)) * label  # N\n        point_bbox_list.append(np.concatenate([points, labels,\n                                               xyz_offsets, dimensions], 1))  # Nx13\n\n    point_bbox = np.concatenate(point_bbox_list, 0)  # KxNx13\n    room_xyz_min = np.amin(point_bbox[:, 0:3], axis=0)\n    point_bbox[:, 0:3] -= room_xyz_min\n\n    if file_format == 'txt':\n        fout = open(out_filename, 'w')\n        for i in range(point_bbox.shape[0]):\n            fout.write('%f %f %f %d %d %d %d %f %f %f %f %f %f\\n' %\n                       (point_bbox[i, 0], point_bbox[i, 1], point_bbox[i, 2],\n                        point_bbox[i, 3], point_bbox[i, 4], point_bbox[i, 5],\n                        point_bbox[i, 6],\n                        point_bbox[i, 7], point_bbox[i, 8], point_bbox[i, 9],\n                        point_bbox[i, 10], point_bbox[i, 11], point_bbox[i, 12]))\n\n        fout.close()\n    elif file_format == 'numpy':\n        np.save(out_filename, point_bbox)\n    else:\n        print('ERROR!! 
Unknown file format: %s, please use txt or numpy.' % file_format)\n        exit()\n"
  },
  {
    "path": "OcCo_Torch/utils/lmdb2hdf5.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, h5py, json, argparse, numpy as np\nfrom LMDB_DataFlow import lmdb_dataflow\nfrom tqdm import tqdm\n\n\ndef fix2len(point_cloud, fix_length):\n    if len(point_cloud) >= fix_length:\n        point_cloud = point_cloud[np.random.choice(len(point_cloud), fix_length)]\n    else:\n        point_cloud = np.concatenate(\n            [point_cloud, point_cloud[np.random.choice(len(point_cloud), fix_length - len(point_cloud))]], axis=0)\n    return point_cloud\n\n\nif __name__ == \"__main__\":\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--f_name\", type=str, default='train')\n    parser.add_argument(\"--lmdb_path\", type=str, default=r'../data/modelnet40_pcn/')\n    parser.add_argument(\"--hdf5_path\", type=str, default=r'../data/modelnet40_pcn/hdf5_partial_1024')\n    parser.add_argument(\"--partial\", action='store_true', help='store partial scan or not')\n    parser.add_argument('--num_per_obj', type=int, default=1024)\n    parser.add_argument('--num_scan', type=int, default=10)\n\n    args = parser.parse_args()\n\n    lmdb_file = os.path.join(args.lmdb_path, args.f_name + '.lmdb')\n    os.system('mkdir -p %s' % args.hdf5_path)\n    df_train, num_train = lmdb_dataflow(\n        lmdb_path=lmdb_file, batch_size=1, input_size=args.num_per_obj,\n        output_size=args.num_per_obj, is_training=False)\n\n    if args.partial:\n        print('Now we generate point cloud from partial observed objects.')\n\n    file_per_h5 = 2048 * 4  # of objects within each hdf5 file\n    data_gen = df_train.get_data()\n\n    idx = 0\n    data_np = np.zeros((file_per_h5, args.num_per_obj, 3))\n    label_np = np.zeros((file_per_h5,), dtype=np.int32)\n    ids_np = np.chararray((file_per_h5,), itemsize=32)\n\n    # convert label string to integers\n    hash_label = json.load(open('../data/shapenet_names.json'))\n    f_open = open(os.path.join(args.hdf5_path, '%s_file.txt' % args.f_name), 
'a+')\n\n    for i in tqdm(range(num_train)):\n        '''each object has eight different views'''\n\n        ids, inputs, npts, gt = next(data_gen)\n        object_pc = inputs[0] if args.partial else gt[0]\n\n        if len(object_pc) != args.num_per_obj:\n            object_pc = fix2len(object_pc, args.num_per_obj)\n        if args.partial:\n            data_np[i % file_per_h5, :, :] = object_pc\n            label_np[i % file_per_h5] = int(hash_label[(ids[0].split('_')[0])])\n            ids_np[i % file_per_h5] = ids[0]  # .split('_')[1]\n\n        else:\n            if i % args.num_scan != 0:\n                continue\n            data_np[(i // args.num_scan) % file_per_h5, :, :] = object_pc\n            label_np[(i // args.num_scan) % file_per_h5] = int(hash_label[(ids[0].split('_')[0])])\n            ids_np[(i // args.num_scan) % file_per_h5] = ids[0].split('_')[1]\n\n        num_obj_ = i if args.partial else i // args.num_scan\n\n        if num_obj_ - idx * file_per_h5 >= file_per_h5:\n            h5_file = os.path.join(args.hdf5_path, '%s%d.h5' % (args.f_name, idx))\n            print('the last two objects coordinates, labels and ids:')\n            print(data_np[-2:])\n            print(label_np[-2:])\n            print(ids_np[-2:])\n            print('\\n')\n\n            hf = h5py.File(h5_file, 'w')\n            hf.create_dataset('data', data=data_np)\n            hf.create_dataset('label', data=label_np)\n            hf.create_dataset('id', data=ids_np)\n            hf.close()\n\n            f_open.writelines(h5_file.replace('../', './') + '\\n')\n            print('%s_%s.h5 has been saved' % (args.f_name, idx))\n            print('====================\\n\\n')\n            idx += 1\n\n    '''to deal with the remaining in the end'''\n    h5_file = os.path.join(args.hdf5_path, '%s%d.h5' % (args.f_name, idx))\n    hf = h5py.File(h5_file, 'w')\n\n    if args.partial:\n        label_res = label_np[:num_train % file_per_h5]\n        data_res = 
data_np[:num_train % file_per_h5]\n        id_res = ids_np[:num_train % file_per_h5]\n\n    else:\n        label_res = label_np[:(num_train // args.num_scan) % file_per_h5]\n        data_res = data_np[:(num_train // args.num_scan) % file_per_h5]\n        id_res = ids_np[:(num_train // args.num_scan) % file_per_h5]\n\n    print('the remaining  objects coordinates, labels and ids:')\n    print(data_res[-2:], '\\n', label_res[-2:], '\\n', id_res[-2:], '\\n\\n')\n\n    hf.create_dataset('label', data=label_res)\n    hf.create_dataset('data', data=data_res)\n    hf.create_dataset('id', data=id_res)\n    hf.close()\n    print('the last part has been saved into %s_%s.h5' % (args.f_name, idx))\n\n    f_open.writelines(h5_file.replace('../', './'))\n    f_open.close()\n\n    print('convert from lmdb to hdf5 has finished')\n"
  },
  {
    "path": "readme.md",
    "content": "## OcCo: Unsupervised Point Cloud Pre-training via Occlusion Completion\nThis repository is the official implementation of paper: \"Unsupervised Point Cloud Pre-training via Occlusion Completion\"\n\n[[Paper](https://arxiv.org/abs/2010.01089)] [[Project Page](https://hansen7.github.io/OcCo/)]\n\n### Intro\n\n![image](assets/teaser.png)\n\nIn this work, we train a completion model that learns how to reconstruct the occluded points, given the partial observations. In this way, our method learns a pre-trained encoder that can identify the visual constraints inherently embedded in real-world point clouds.\n\nWe call our method **Occlusion Completion (OcCo)**. We demonstrate that OcCo learns representations that: improve generalization on downstream tasks over prior pre-training methods, transfer to different datasets, reduce training time, and improve labeled sample efficiency.\n\n\n### Citation\nOur paper is preprinted on arxiv:\n\n```\n@inproceedings{OcCo,\n\ttitle = {Unsupervised Point Cloud Pre-Training via Occlusion Completion},\n\tauthor = {Hanchen Wang and Qi Liu and Xiangyu Yue and Joan Lasenby and Matthew J. Kusner},\n\tyear = 2021,\n\tbooktitle = {International Conference on Computer Vision, ICCV}\n}\n```\n\n### Usage\n\nWe provide codes in both PyTorch (1.3): <a href=\"OcCo_Torch\">OcCo_Torch</a> and TensorFlow (1.13-1.15): <a href=\"OcCo_TF\">OcCo_TF</a>. We also provide with docker configuration <a href=\"OcCo_Torch/docker\">docker</a>. 
Our recommended development environment PyTorch + docker, the following descriptions are based on  <a href=\"OcCo_Torch\">OcCo_Torch</a>, we refer the readme in the  <a href=\"OcCo_TF\">OcCo_TF</a> for the details of TensorFlow implementation.\n\n\n\n#### 1) Prerequisite\n\n##### Docker\n\nIn the <a href=\"OcCo_Torch/docker\">docker</a> folder, we provide the build, configuration and launch scripts:\n\n```\ndocker\n| - Dockerfile_Torch  # configuration\n| - build_docker_torch.sh  # scripts for building up from the docker images\n| - launch_docker_torch.sh  # launch from the built image\n| - .dockerignore  # ignore the log and data folder while building up \n```\n\nwhich can be automatically set up as following:\n\n```bash\n# build up from docker images\ncd OcCo_Torch/docker\nsh build_docker_torch.sh\n\n# launch the docker image, conduct completion/classification/segmentation experiments\ncd OcCo_Torch/docker\nsh launch_docker_torch.sh\n```\n\n##### Non-Docker Setup\n\nJust go with `pip install -r Requirements_Torch.txt` with the `PyTorch 1.3.0, CUDA 10.1, CUDNN 7`  (otherwise you may encounter errors while building the C++ extension <a href=\"OcCo_Torch/chamfer_distance\">chamfer_distance</a> for calculating the Chamfer Distance), my development environment besides docker is `Ubuntu 16.04.6 LTS, gcc/g++ 5.4.0, cuda10.1, CUDNN 7`.\n\n\n\n#### 2) Pre-Training via Occlusion Completion (OcCo)\n\n##### Data Usage:\n\nFor the details in the data setup, please see <a href=\"OcCo_Torch/data/readme.md\">data/readme.md</a>.\n\n##### Training Scripts:\n\nWe unify the training of all three models (`PointNet`, `PCN` and `DGCNN`) in <a href=\"OcCo_Torch/train_completion.py\">train_completion.py</a> as well as the bash templates, see <a href=\"OcCo_Torch/bash_template/train_completion_template.sh\">bash_template/train_completion_template.sh</a> for details:\n\n```bash\n#!/usr/bin/env bash\n\ncd ../\n\n# train pointnet-occo model on ModelNet, from scratch\npython 
train_completion.py \\\n\t--gpu 0,1 \\\n\t--dataset modelnet \\\n\t--model pointnet_occo \\\n\t--log_dir modelnet_pointnet_vanilla ;\n\n# train dgcnn-occo model on ShapeNet, from scratch\npython train_completion.py \\\n\t--gpu 0,1 \\\n\t--batch_size 16 \\\n\t--dataset shapenet \\\n\t--model dgcnn_occo \\\n\t--log_dir shapenet_dgcnn_vanilla ;\n```\n\n##### Pre-Trained Weights\n\nWe will provide the OcCo pre-trained models for all the three models [here](https://drive.google.com/drive/folders/15H1JH9oTfp_sVkj9nwgnThZHRI9ef2bT?usp=sharing), you can use them for visualization of completing self-occluded point cloud, fine tuning on classification, scene semantic and object part segmentation tasks.\n\n\n\n#### 3) Sanity Check on Pre-Training\n\nWe use single channel values as well as the t-SNE for dimensionality reduction to visualize the learned object embeddings on objects from the ShapeNet10, while the encoders are pre-trained on the ModelNet40 dataset, see <a href=\"OcCo_Torch/utils/TSNE_Visu.py\">utils/TSNE_Visu.py</a> for details.\n\nWe also train a Support Vector Machine (SVM) based on the learned embeddings object recognition. It is in <a href=\"OcCo_Torch/train_svm.py\">train_svm.py</a>. 
We also provide the bash template for this, see <a href=\"OcCo_Torch/bash_template/train_svm_template.sh\">bash_template/train_svm_template.sh</a> for details:\n\n```bash\n#!/usr/bin/env bash\n\ncd ../\n\n# fit a simple linear SVM on ModelNet40 with OcCo PCN\npython train_svm.py \\\n\t--gpu 0 \\\n\t--model pcn_util \\\n\t--dataset modelnet40 \\\n\t--restore_path log/completion/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\n\n# grid search the best svm parameters with rbf kernel on ScanObjectNN(OBJ_BG) with OcCo DGCNN\npython train_svm.py \\\n\t--gpu 0 \\\n\t--grid_search \\\n\t--batch_size 8 \\\n\t--model dgcnn_util \\\n\t--dataset scanobjectnn \\\n\t--bn \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\n```\n\n\n\n#### 4) Fine Tuning Task - Classification\n\n##### Data Usage:\n\nFor the details in the data setup, please see <a href=\"OcCo_Torch/data/readme.md\">data/readme.md</a>.\n\n##### Training/Testing Scripts:\n\nWe unify the training and testing of all three models (`PointNet`, `PCN` and `DGCNN`) in <a href=\"OcCo_Torch/train_cls.py\">train_cls.py</a>. 
We also provide the bash template for training each models from scratch, JigSaw/OcCo pre-trained checkpoints, see <a href=\"OcCo_Torch/bash_template/train_cls_template.sh\">bash_template/train_cls_template.sh</a> for details:\n\n```bash\n#!/usr/bin/env bash\n\ncd ../\n\n# training pointnet on ModelNet40, from scratch\npython train_cls.py \\\n\t--gpu 0 \\\n\t--model pointnet_cls \\\n\t--dataset modelnet40 \\\n\t--log_dir modelnet40_pointnet_scratch ;\n\n# fine tuning pcn on ScanNet10, using jigsaw pre-trained checkpoints\npython train_cls.py \\\n\t--gpu 0 \\\n\t--model pcn_cls \\\n\t--dataset scannet10 \\\n\t--log_dir scannet10_pcn_jigsaw \\\n\t--restore \\\n\t--restore_path log/completion/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\n\n# fine tuning dgcnn on ScanObjectNN(OBJ_BG), using jigsaw pre-trained checkpoints\npython train_cls.py \\\n\t--gpu 0,1 \\\n\t--epoch 250 \\\n\t--use_sgd \\\n\t--scheduler cos \\\n\t--model dgcnn_cls \\\n\t--dataset scanobjectnn \\\n\t--bn \\\n\t--log_dir scanobjectnn_dgcnn_occo \\\n\t--restore \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\n\n# test pointnet on ModelNet40 from pre-trained checkpoints\npython train_cls.py \\\n\t--gpu 1 \\\n\t--mode test \\\n\t--model pointnet_cls \\\n\t--dataset modelnet40 \\\n\t--log_dir modelnet40_pointnet_scratch \\\n\t--restore \\\n\t--restore_path log/cls/modelnet40_pointnet_scratch/checkpoints/best_model.pth ;\n```\n\n\n\n#### 5) Fine Tuning Task - Semantic Segmentation\n\n##### Data Usage:\n\nFor the details in the data setup, please see <a href=\"OcCo_Torch/data/readme.md\">data/readme.md</a>.\n\n##### Training/Testing Scripts:\n\nWe unify the training and testing of all three models (PointNet, PCN and DGCNN) in <a href=\"OcCo_Torch/train_semseg.py\">train_semseg.py</a>. 
We also provide the bash template for training each models from scratch, JigSaw/OcCo pre-trained checkpoints, see <a href=\"OcCo_Torch/bash_template/train_semseg_template.sh\">bash_template/train_semseg_template.sh</a> for details:\n\n```bash\n#!/usr/bin/env bash\n\ncd ../\n\n# train pointnet_semseg on 6-fold cv of S3DIS, from scratch\nfor area in $(seq 1 1 6)\ndo\npython train_semseg.py \\\n\t--gpu 0,1 \\\n\t--model pointnet_semseg \\\n\t--bn_decay \\\n\t--xavier_init \\\n\t--test_area ${area} \\\n\t--scheduler step \\\n\t--log_dir pointnet_area${area}_scratch ;\ndone\n\n# fine tune pcn_semseg on 6-fold cv of S3DIS, using jigsaw pre-trained weights\nfor area in $(seq 1 1 6)\ndo\npython train_semseg.py \\\n\t--gpu 0,1 \\\n\t--model pcn_semseg \\\n\t--bn_decay \\\n\t--test_area ${area} \\\n\t--log_dir pcn_area${area}_jigsaw \\\n\t--restore \\\n\t--restore_path log/jigsaw/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\ndone\n\n# fine tune dgcnn_semseg on 6-fold cv of S3DIS, using occo pre-trained weights\nfor area in $(seq 1 1 6)\ndo\npython train_semseg.py \\\n\t--gpu 0,1 \\\n\t--test_area ${area} \\\n\t--optimizer sgd \\\n\t--scheduler cos \\\n\t--model dgcnn_semseg \\\n\t--log_dir dgcnn_area${area}_occo \\\n\t--restore \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\ndone\n\n# test pointnet_semseg on 6-fold cv of S3DIS, from saved checkpoints\nfor area in $(seq 1 1 6)\ndo\npython train_semseg.py \\\n\t--gpu 0,1 \\\n\t--mode test \\\n\t--model pointnet_semseg \\\n\t--test_area ${area} \\\n\t--scheduler step \\\n\t--log_dir pointnet_area${area}_scratch \\\n\t--restore \\\n\t--restore_path log/semseg/pointnet_area${area}_scratch/checkpoints/best_model.pth ;\ndone\n```\n\n\n\n##### Visualization:\n\nWe recommended using relevant code snippets in [RandLA-Net](https://github.com/QingyongHu/RandLA-Net) for visualization.\n\n\n\n#### 6) Fine Tuning Task - Part Segmentation\n\n##### Data Usage:\n\nFor the details in the data 
setup, please see <a href=\"OcCo_Torch/data/readme.md\">data/readme.md</a>.\n\n##### Training/Testing Scripts:\n\nWe unify the training and testing of all three models (PointNet, PCN and DGCNN) in <a href=\"OcCo_Torch/train_partseg.py\">train_partseg.py</a>. We also provide the bash template for training each models from scratch, JigSaw/OcCo pre-trained checkpoints, see <a href=\"OcCo_Torch/bash_template/train_partseg_template.sh\">bash_template/train_partseg_template.sh</a> for details:\n\n```bash\n#!/usr/bin/env bash\n\ncd ../\n\n# training pointnet on ShapeNetPart, from scratch\npython train_partseg.py \\\n\t--gpu 0 \\\n\t--normal \\\n\t--bn_decay \\\n\t--xavier_init \\\n\t--model pointnet_partseg \\\n    --log_dir pointnet_scratch ;\n\n\n# fine tuning pcn on ShapeNetPart, using jigsaw pre-trained checkpoints\npython train_partseg.py \\\n\t--gpu 0 \\\n\t--normal \\\n\t--bn_decay \\\n\t--xavier_init \\\n\t--model pcn_partseg \\\n\t--log_dir pcn_jigsaw \\\n\t--restore \\\n\t--restore_path log/jigsaw/modelnet_pcn_vanilla/checkpoints/best_model.pth ;\n\n\n# fine tuning dgcnn on ShapeNetPart, using occo pre-trained checkpoints\npython train_partseg.py \\\n\t--gpu 0,1 \\\n\t--normal \\\n\t--use_sgd \\\n\t--xavier_init \\\n\t--scheduler cos \\\n\t--model dgcnn_partseg \\\n\t--log_dir dgcnn_occo \\\n\t--restore \\\n\t--restore_path log/completion/modelnet_dgcnn_vanilla/checkpoints/best_model.pth ;\n\n\n# test fine tuned pointnet on ShapeNetPart, using multiple votes\npython train_partseg.py \\\n\t--gpu 1 \\\n\t--epoch 1 \\\n\t--mode test \\\n\t--num_votes 3 \\\n\t--model pointnet_partseg \\\n\t--log_dir pointnet_scratch \\\n\t--restore \\\n\t--restore_path log/partseg/pointnet_occo/checkpoints/best_model.pth ;\n```\n\n\n\n#### 6) OcCo Data Generation (Create Your Own Dataset for OcCo Pre-Training)\n\nFor the details in the self-occluded point cloud generation, please see <a href=\"render/readme.md\">render/readme.md</a>.\n\n\n\n#### 7) Just Completion (Complete Your Own 
Data with Pre-Trained Model)\n\nYou can use it for completing your occluded point cloud data with our provided OcCo checkpoints.\n\n\n\n#### 8) Jigsaw Puzzle\n\nWe also provide our implementation (developed from scratch) of pre-training point cloud models via solving 3D jigsaw puzzle tasks as well as data generation; the method is described in this [paper](https://papers.nips.cc/paper/9455-self-supervised-deep-learning-on-point-clouds-by-reconstructing-space.pdf), while the authors did not respond to our code request. The details of our implementation are reported in our paper appendix.\n\nFor the details of our implementation, please refer to the description in the appendix of our paper and relevant code snippets, i.e.,  <a href=\"OcCo_Torch/train_jigsaw.py\">train_jigsaw.py</a>,  <a href=\"OcCo_Torch/utils/3DPC_Data_Gen.py\">utils/3DPC_Data_Gen.py</a> and <a href=\"OcCo_Torch/bash_template/train_jigsaw_template.sh\">train_jigsaw_template.sh</a>.\n\n\n\n### Results\n\n##### Generated Dataset:\n\n<p align='center'>\n\t<img src=\"assets/data_overview_new.png\" alt=\"image\" class=\"center\" width=\"650\"/>\n</p>\n\n##### Completed Occluded Point Cloud:\n\n-- PointNet:\n\n<p align='center'>\n    <img src=\"assets/pointnet_combine.png\" alt=\"image\"/>\n</p>\n\n\n-- PCN:\n\n<p align='center'>\n    <img src=\"assets/pcn_combine.png\" alt=\"image\"/>\n</p>\n-- DGCNN:\n\n<p align='center'>\n    <img src=\"assets/dgcnn_combine.png\" alt=\"image\"/>\n</p>\n\n-- Failure Examples:\n\n<p align='center'>\n    <img src=\"assets/failure_combine.png\" alt=\"image\"/>\n</p>\n\n##### Visualization of learned features:\n\n<p align='center'>\n    <img src=\"assets/tsne.png\" alt=\"image\" class=\"center\" width=\"700\"/>\n</p>\n\n##### Classification (linear SVM):\n\n<p align='center'>\n\t<img src=\"assets/svm.png\" alt=\"image\" class=\"center\" width=\"700\"/>\n</p>\n\n\n##### Classification:\n\n<p align='center'>\n\t<img src=\"assets/cls.png\" alt=\"image\" class=\"center\" 
width=\"700\"/>\n</p>\n##### Semantic Segmentation:\n\n<p align='center'>\n\t<img src=\"assets/semseg.png\" alt=\"image\" class=\"center\" width=\"700\"/>\n</p>\n##### Part Segmentation:\n\n<p align='center'>\n\t<img src=\"assets/partseg.png\" alt=\"image\" class=\"center\" width=\"700\"/>\n</p>\n\n\n##### Sample Efficiency:\n\n<p align='center'>\n\t<img src=\"assets/eff.png\" alt=\"image\" class=\"center\" width=\"700\" />\n</p>\n\n\n##### Learning Efficiency:\n\n<p align='center'>\n\t<img src=\"assets/lr.png\" alt=\"image\" class=\"center\" width=\"700\" />\n</p>\n\nFor the description and discussion of the results, please refer to our paper, thanks :)\n\n\n\n### Contributing\n\nThe code of this project is released under the MIT License. \n\nWe would like to thank and acknowledge referenced codes from the following repositories:\n\nhttps://github.com/wentaoyuan/pcn\n\nhttps://github.com/hansen7/NRS_3D\n\nhttps://github.com/WangYueFt/dgcnn\n\nhttps://github.com/charlesq34/pointnet\n\nhttps://github.com/charlesq34/pointnet2\n\nhttps://github.com/PointCloudLibrary/pcl\n\nhttps://github.com/AnTao97/dgcnn.pytorch\n\nhttps://github.com/HuguesTHOMAS/KPConv\n\nhttps://github.com/QingyongHu/RandLA-Net\n\nhttps://github.com/chrdiller/pyTorchChamferDistance\n\nhttps://github.com/yanx27/Pointnet_Pointnet2_pytorch\n\nhttps://github.com/AnTao97/UnsupervisedPointCloudReconstruction\n\nWe appreciate the help from the supportive technicians, Peter and Raf, from Cambridge Engineering :)\n"
  },
  {
    "path": "render/Depth_Renderer.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/render/render_depth.py\n# Usage: blender -b -P Depth_Renderer.py [ShapeNet directory] [model list] [output directory] [num scans per model]\n\nimport os, sys, bpy, time, mathutils, numpy as np\n\n\ndef random_pose():\n    \"\"\"generate a random camera pose\"\"\"\n    angle_x = np.random.uniform() * 2 * np.pi\n    angle_y = np.random.uniform() * 2 * np.pi\n    angle_z = np.random.uniform() * 2 * np.pi\n    Rx = np.array([[1, 0, 0],\n                   [0, np.cos(angle_x), -np.sin(angle_x)],\n                   [0, np.sin(angle_x), np.cos(angle_x)]])\n    Ry = np.array([[np.cos(angle_y), 0, np.sin(angle_y)],\n                   [0, 1, 0],\n                   [-np.sin(angle_y), 0, np.cos(angle_y)]])\n    Rz = np.array([[np.cos(angle_z), -np.sin(angle_z), 0],\n                   [np.sin(angle_z), np.cos(angle_z), 0],\n                   [0, 0, 1]])\n    R = np.dot(Rz, np.dot(Ry, Rx))\n    # a rotation matrix with arbitrarily chosen yaw, pitch, roll\n    # Set camera pointing to the origin and 1 unit away from the origin\n    t = np.expand_dims(R[:, 2], 1)  # select the third column, reshape into (3, 1)-vector\n\n    # pose -> 4 * 4\n    pose = np.concatenate([np.concatenate([R, t], 1), [[0, 0, 0, 1]]], 0)\n    return pose\n\n\ndef setup_blender(width, height, focal_length):\n    \"\"\"using blender to rendering a scene\"\"\"\n    # camera, a class in the bpy\n    camera = bpy.data.objects['Camera']\n    camera.data.angle = np.arctan(width / 2 / focal_length) * 2\n\n    # render layer\n    scene = bpy.context.scene\n    scene.render.filepath = 'buffer'\n    scene.render.image_settings.color_depth = '16'\n    scene.render.resolution_percentage = 100\n    scene.render.resolution_x = width\n    scene.render.resolution_y = height\n\n    # compositor nodes\n    scene.use_nodes = True\n    tree = scene.node_tree\n    rl = 
tree.nodes.new('CompositorNodeRLayers')\n    output = tree.nodes.new('CompositorNodeOutputFile')\n    output.base_path = ''\n    output.format.file_format = 'OPEN_EXR'\n    # tree.links.new(rl.outputs['Depth'], output.inputs[0])\n    tree.links.new(rl.outputs['Z'], output.inputs[0])\n    # ref: https://github.com/panmari/stanford-shapenet-renderer/issues/8\n\n    # remove default cube\n    bpy.data.objects['Cube'].select = True\n    bpy.ops.object.delete()\n\n    return scene, camera, output\n\n\nif __name__ == '__main__':\n    # Usage: blender -b -P Depth_Renderer.py [ShapeNet directory] [model list] [output directory] [num scans per model]\n    model_dir = sys.argv[-4]\n    list_path = sys.argv[-3]\n    output_dir = sys.argv[-2]\n    num_scans = int(sys.argv[-1])\n\n    '''Generate Intrinsic Camera Matrix'''\n    # High Resolution: width = 1600,\n    # Middle Resolution: width = 1600//4,\n    # Coarse Resolution: width = 1600//10,\n\n    width = 1600//4\n    height = 1200//4\n    focal = 1000//4\n    scene, camera, output = setup_blender(width, height, focal)\n    # offset is the center of images, the unit of focal here is the pixels(on the image)\n    intrinsics = np.array([[focal, 0, width / 2], [0, focal, height / 2], [0, 0, 1]])\n\n    # os.system('rm -rf %s' % output_dir)\n    if not os.path.exists(output_dir):\n        os.makedirs(output_dir)\n    with open(os.path.join(list_path)) as file:\n        model_list = [line.strip() for line in file]\n    open(os.path.join(output_dir, 'blender.log'), 'w+').close()\n    np.savetxt(os.path.join(output_dir, 'intrinsics.txt'), intrinsics, '%f')\n    # camera-referenced system\n\n    num_total_f = len(model_list)\n\n    start = time.time()\n    '''rendering from the mesh to 2.5D depth images'''\n    for idx, model_id in enumerate(model_list):\n        # start = time.time()\n        exr_dir = os.path.join(output_dir, 'exr', model_id)\n        pose_dir = os.path.join(output_dir, 'pose', model_id)\n        
os.makedirs(exr_dir)\n        os.makedirs(pose_dir)\n        # os.removedirs(exr_dir)\n        # os.removedirs(pose_dir)\n\n        # Redirect output to log file\n        old_os_out = os.dup(1)\n        os.close(1)\n        os.open(os.path.join(output_dir, 'blender.log'), os.O_WRONLY)\n\n        # Import mesh model\n        # model_path = os.path.join(model_dir, model_id, 'models/model_normalized.obj')\n        # bpy.ops.import_scene.obj(filepath=model_path)\n\n        model_path = os.path.join(model_dir, model_id + '.obj')\n        bpy.ops.import_scene.obj(filepath=model_path)\n\n        # Rotate model by 90 degrees around x-axis (z-up => y-up) to match ShapeNet's coordinates\n        bpy.ops.transform.rotate(value=-np.pi / 2, axis=(1, 0, 0))\n\n        # Render\n        for i in range(num_scans):\n            scene.frame_set(i)\n            pose = random_pose()\n            camera.matrix_world = mathutils.Matrix(pose)\n            # output.file_slots[0].path = os.path.join(exr_dir, '#.exr')\n            output.file_slots[0].path = exr_dir + '_#.exr'\n            bpy.ops.render.render(write_still=True)\n            # np.savetxt(os.path.join(pose_dir, '%d.txt' % i), pose, '%f')\n            np.savetxt(pose_dir + '_%d.txt' % i, pose, '%f')\n\n        # Clean up\n        bpy.ops.object.delete()\n        for m in bpy.data.meshes:\n            bpy.data.meshes.remove(m)\n        for m in bpy.data.materials:\n            m.user_clear()\n            bpy.data.materials.remove(m)\n\n        # Print used time\n        os.close(1)\n        os.dup(old_os_out)\n        os.close(old_os_out)\n        print('%d/%d: %s done, time=%.4f sec' % (idx + 1, num_total_f, model_id, time.time() - start))\n        os.removedirs(exr_dir)\n        os.removedirs(pose_dir)\n"
  },
  {
    "path": "render/EXR_Process.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n#  Ref: https://github.com/wentaoyuan/pcn/blob/master/render/process_exr.py\n\nimport os, array, Imath, OpenEXR, argparse, numpy as np, matplotlib.pyplot as plt\nfrom open3d.open3d.geometry import PointCloud\nfrom open3d.open3d.utility import Vector3dVector\nfrom open3d.open3d.io import write_point_cloud\nfrom tqdm import tqdm\n\n\ndef read_exr(exr_path, height, width):\n    \"\"\"from EXR files to extract depth information\"\"\"\n    file = OpenEXR.InputFile(exr_path)\n    depth_arr = array.array('f', file.channel('R', Imath.PixelType(Imath.PixelType.FLOAT)))\n    depth = np.array(depth_arr).reshape((height, width))\n    depth[depth < 0] = 0\n    depth[np.isinf(depth)] = 0\n    return depth\n\n\ndef depth2pcd(depth, intrinsics, pose):\n    \"\"\"backproject to points cloud from 2.5D depth images\"\"\"\n    inv_K = np.linalg.inv(intrinsics)\n    inv_K[2, 2] = -1\n    depth = np.flipud(depth)  # upside-down\n\n    y, x = np.where(depth > 0)\n    # image coordinates -> camera coordinates\n    points = np.dot(inv_K, np.stack([x, y, np.ones_like(x)] * depth[y, x], 0))\n    # camera coordinates -> world coordinates\n    points = np.dot(pose, np.concatenate([points, np.ones((1, points.shape[1]))], 0)).T[:, :3]\n    return points\n\n\nif __name__ == '__main__':\n    parser = argparse.ArgumentParser()\n    parser.add_argument('--list_file', default=r'./ModelNet_Flist.txt')\n    parser.add_argument('--intrinsics', default=r'./intrinsics.txt')\n    parser.add_argument('--output_dir', default=r'./dump')\n    parser.add_argument('--num_scans', default=10)\n    args = parser.parse_args()\n\n    with open(args.list_file) as file:\n        model_list = file.read().splitlines()\n    intrinsics = np.loadtxt(args.intrinsics_file)\n    width = int(intrinsics[0, 2] * 2)\n    height = int(intrinsics[1, 2] * 2)\n\n    counter = 0\n\n    for model_id in tqdm(model_list):\n        depth_dir = 
os.path.join(args.output_dir, 'depth')\n        pcd_dir = os.path.join(args.output_dir, 'pcd', model_id)\n        os.makedirs(depth_dir, exist_ok=True)\n        os.makedirs(pcd_dir, exist_ok=True)\n        for i in range(args.num_scans):\n            counter += 1\n\n            exr_path = os.path.join(args.output_dir, 'exr', model_id + '_%d.exr' % i)\n            pose_path = os.path.join(args.output_dir, 'pose', model_id + '_%d.txt' % i)\n            depth_path = os.path.join(args.output_dir, 'depth', model_id + '_%d.npy' % i)\n            depth = read_exr(exr_path, height, width)\n            np.save(depth_path, np.array(depth))\n            # depth_img = Image(np.uint16(depth * 1000))\n            # write_image(os.path.join(depth_dir, '%s_%d.png' % (model_id,  i)), depth_img)\n\n            if counter % 1 == 0:\n                counter = 1\n                plt.figure(figsize=(16, 10))\n                plt.imshow(np.array(depth), cmap='inferno')\n                plt.colorbar(label='Normalised Distance to Camera')\n                plt.title('Depth image')\n                plt.xlabel('X Pixel')\n                plt.ylabel('Y Pixel')\n                plt.tight_layout()\n                plt.savefig(os.path.join(depth_dir, model_id.split('/')[-1] + '_%d.png' % i), dpi=200)\n                plt.close()\n\n            pose = np.loadtxt(pose_path)\n            points = depth2pcd(depth, intrinsics, pose)\n            try:\n                normalised_points = points/((points**2).sum(axis=1).max())\n                pcd = PointCloud()\n\n            except:\n                print('there is an exception in the partial normalised process: ', model_id, i)\n\n            # if there is something wrong with the normalisation process, it will automatically save\n            # the previous normalised point cloud for the current objects\n            # pcd.points = Vector3dVector(normalised_points)\n\n            pcd.points = Vector3dVector(points)\n            
write_point_cloud(pcd_dir + '_%d.pcd' % i, pcd)\n\n        # os.removedirs(depth_dir)\n        os.removedirs(pcd_dir)\n"
  },
  {
    "path": "render/ModelNet_Flist.txt",
    "content": "mantel/train/mantel_0029_normalised\nmantel/train/mantel_0037_normalised\nmantel/train/mantel_0210_normalised\nmantel/train/mantel_0284_normalised\nmantel/train/mantel_0241_normalised\nmantel/train/mantel_0208_normalised\nmantel/train/mantel_0202_normalised\nmantel/train/mantel_0039_normalised\nmantel/train/mantel_0006_normalised\nmantel/train/mantel_0252_normalised\nmantel/train/mantel_0074_normalised\nmantel/train/mantel_0015_normalised\nmantel/train/mantel_0267_normalised\nmantel/train/mantel_0181_normalised\nmantel/train/mantel_0212_normalised\nmantel/train/mantel_0004_normalised\nmantel/train/mantel_0114_normalised\nmantel/train/mantel_0155_normalised\nmantel/train/mantel_0274_normalised\nmantel/train/mantel_0223_normalised\nmantel/train/mantel_0135_normalised\nmantel/train/mantel_0083_normalised\nmantel/train/mantel_0028_normalised\nmantel/train/mantel_0229_normalised\nmantel/train/mantel_0185_normalised\nmantel/train/mantel_0234_normalised\nmantel/train/mantel_0244_normalised\nmantel/train/mantel_0151_normalised\nmantel/train/mantel_0128_normalised\nmantel/train/mantel_0113_normalised\nmantel/train/mantel_0157_normalised\nmantel/train/mantel_0044_normalised\nmantel/train/mantel_0097_normalised\nmantel/train/mantel_0048_normalised\nmantel/train/mantel_0127_normalised\nmantel/train/mantel_0054_normalised\nmantel/train/mantel_0136_normalised\nmantel/train/mantel_0250_normalised\nmantel/train/mantel_0279_normalised\nmantel/train/mantel_0012_normalised\nmantel/train/mantel_0261_normalised\nmantel/train/mantel_0056_normalised\nmantel/train/mantel_0163_normalised\nmantel/train/mantel_0175_normalised\nmantel/train/mantel_0154_normalised\nmantel/train/mantel_0152_normalised\nmantel/train/mantel_0222_normalised\nmantel/train/mantel_0003_normalised\nmantel/train/mantel_0017_normalised\nmantel/train/mantel_0071_normalised\nmantel/train/mantel_0139_normalised\nmantel/train/mantel_0211_normalised\nmantel/train/mantel_0068_normalised\nmantel/train/mantel_023
0_normalised\nmantel/train/mantel_0183_normalised\nmantel/train/mantel_0069_normalised\nmantel/train/mantel_0180_normalised\nmantel/train/mantel_0059_normalised\nmantel/train/mantel_0182_normalised\nmantel/train/mantel_0138_normalised\nmantel/train/mantel_0132_normalised\nmantel/train/mantel_0087_normalised\nmantel/train/mantel_0032_normalised\nmantel/train/mantel_0100_normalised\nmantel/train/mantel_0049_normalised\nmantel/train/mantel_0115_normalised\nmantel/train/mantel_0177_normalised\nmantel/train/mantel_0111_normalised\nmantel/train/mantel_0118_normalised\nmantel/train/mantel_0081_normalised\nmantel/train/mantel_0067_normalised\nmantel/train/mantel_0107_normalised\nmantel/train/mantel_0153_normalised\nmantel/train/mantel_0243_normalised\nmantel/train/mantel_0103_normalised\nmantel/train/mantel_0207_normalised\nmantel/train/mantel_0131_normalised\nmantel/train/mantel_0173_normalised\nmantel/train/mantel_0036_normalised\nmantel/train/mantel_0001_normalised\nmantel/train/mantel_0174_normalised\nmantel/train/mantel_0248_normalised\nmantel/train/mantel_0188_normalised\nmantel/train/mantel_0079_normalised\nmantel/train/mantel_0200_normalised\nmantel/train/mantel_0240_normalised\nmantel/train/mantel_0228_normalised\nmantel/train/mantel_0171_normalised\nmantel/train/mantel_0193_normalised\nmantel/train/mantel_0213_normalised\nmantel/train/mantel_0220_normalised\nmantel/train/mantel_0053_normalised\nmantel/train/mantel_0122_normalised\nmantel/train/mantel_0051_normalised\nmantel/train/mantel_0046_normalised\nmantel/train/mantel_0216_normalised\nmantel/train/mantel_0224_normalised\nmantel/train/mantel_0101_normalised\nmantel/train/mantel_0253_normalised\nmantel/train/mantel_0278_normalised\nmantel/train/mantel_0055_normalised\nmantel/train/mantel_0194_normalised\nmantel/train/mantel_0242_normalised\nmantel/train/mantel_0260_normalised\nmantel/train/mantel_0206_normalised\nmantel/train/mantel_0257_normalised\nmantel/train/mantel_0050_normalised\nmantel/train/mantel_0172_
normalised\nmantel/train/mantel_0130_normalised\nmantel/train/mantel_0057_normalised\nmantel/train/mantel_0007_normalised\nmantel/train/mantel_0259_normalised\nmantel/train/mantel_0245_normalised\nmantel/train/mantel_0145_normalised\nmantel/train/mantel_0099_normalised\nmantel/train/mantel_0226_normalised\nmantel/train/mantel_0108_normalised\nmantel/train/mantel_0179_normalised\nmantel/train/mantel_0043_normalised\nmantel/train/mantel_0065_normalised\nmantel/train/mantel_0110_normalised\nmantel/train/mantel_0192_normalised\nmantel/train/mantel_0156_normalised\nmantel/train/mantel_0196_normalised\nmantel/train/mantel_0124_normalised\nmantel/train/mantel_0137_normalised\nmantel/train/mantel_0021_normalised\nmantel/train/mantel_0013_normalised\nmantel/train/mantel_0016_normalised\nmantel/train/mantel_0060_normalised\nmantel/train/mantel_0249_normalised\nmantel/train/mantel_0121_normalised\nmantel/train/mantel_0197_normalised\nmantel/train/mantel_0275_normalised\nmantel/train/mantel_0277_normalised\nmantel/train/mantel_0010_normalised\nmantel/train/mantel_0143_normalised\nmantel/train/mantel_0041_normalised\nmantel/train/mantel_0063_normalised\nmantel/train/mantel_0201_normalised\nmantel/train/mantel_0062_normalised\nmantel/train/mantel_0042_normalised\nmantel/train/mantel_0119_normalised\nmantel/train/mantel_0027_normalised\nmantel/train/mantel_0026_normalised\nmantel/train/mantel_0178_normalised\nmantel/train/mantel_0129_normalised\nmantel/train/mantel_0120_normalised\nmantel/train/mantel_0160_normalised\nmantel/train/mantel_0203_normalised\nmantel/train/mantel_0058_normalised\nmantel/train/mantel_0025_normalised\nmantel/train/mantel_0272_normalised\nmantel/train/mantel_0251_normalised\nmantel/train/mantel_0162_normalised\nmantel/train/mantel_0147_normalised\nmantel/train/mantel_0247_normalised\nmantel/train/mantel_0077_normalised\nmantel/train/mantel_0022_normalised\nmantel/train/mantel_0190_normalised\nmantel/train/mantel_0011_normalised\nmantel/train/mantel_0024_no
rmalised\nmantel/train/mantel_0076_normalised\nmantel/train/mantel_0187_normalised\nmantel/train/mantel_0283_normalised\nmantel/train/mantel_0085_normalised\nmantel/train/mantel_0094_normalised\nmantel/train/mantel_0064_normalised\nmantel/train/mantel_0199_normalised\nmantel/train/mantel_0106_normalised\nmantel/train/mantel_0084_normalised\nmantel/train/mantel_0019_normalised\nmantel/train/mantel_0080_normalised\nmantel/train/mantel_0186_normalised\nmantel/train/mantel_0133_normalised\nmantel/train/mantel_0070_normalised\nmantel/train/mantel_0263_normalised\nmantel/train/mantel_0045_normalised\nmantel/train/mantel_0142_normalised\nmantel/train/mantel_0170_normalised\nmantel/train/mantel_0198_normalised\nmantel/train/mantel_0165_normalised\nmantel/train/mantel_0093_normalised\nmantel/train/mantel_0146_normalised\nmantel/train/mantel_0235_normalised\nmantel/train/mantel_0237_normalised\nmantel/train/mantel_0112_normalised\nmantel/train/mantel_0219_normalised\nmantel/train/mantel_0169_normalised\nmantel/train/mantel_0281_normalised\nmantel/train/mantel_0141_normalised\nmantel/train/mantel_0109_normalised\nmantel/train/mantel_0030_normalised\nmantel/train/mantel_0218_normalised\nmantel/train/mantel_0276_normalised\nmantel/train/mantel_0238_normalised\nmantel/train/mantel_0092_normalised\nmantel/train/mantel_0023_normalised\nmantel/train/mantel_0227_normalised\nmantel/train/mantel_0159_normalised\nmantel/train/mantel_0005_normalised\nmantel/train/mantel_0232_normalised\nmantel/train/mantel_0255_normalised\nmantel/train/mantel_0282_normalised\nmantel/train/mantel_0018_normalised\nmantel/train/mantel_0038_normalised\nmantel/train/mantel_0280_normalised\nmantel/train/mantel_0231_normalised\nmantel/train/mantel_0268_normalised\nmantel/train/mantel_0221_normalised\nmantel/train/mantel_0040_normalised\nmantel/train/mantel_0166_normalised\nmantel/train/mantel_0265_normalised\nmantel/train/mantel_0184_normalised\nmantel/train/mantel_0073_normalised\nmantel/train/mantel_0066_norm
alised\nmantel/train/mantel_0140_normalised\nmantel/train/mantel_0176_normalised\nmantel/train/mantel_0236_normalised\nmantel/train/mantel_0264_normalised\nmantel/train/mantel_0191_normalised\nmantel/train/mantel_0266_normalised\nmantel/train/mantel_0104_normalised\nmantel/train/mantel_0271_normalised\nmantel/train/mantel_0089_normalised\nmantel/train/mantel_0189_normalised\nmantel/train/mantel_0150_normalised\nmantel/train/mantel_0033_normalised\nmantel/train/mantel_0158_normalised\nmantel/train/mantel_0075_normalised\nmantel/train/mantel_0144_normalised\nmantel/train/mantel_0078_normalised\nmantel/train/mantel_0034_normalised\nmantel/train/mantel_0125_normalised\nmantel/train/mantel_0215_normalised\nmantel/train/mantel_0082_normalised\nmantel/train/mantel_0273_normalised\nmantel/train/mantel_0047_normalised\nmantel/train/mantel_0009_normalised\nmantel/train/mantel_0086_normalised\nmantel/train/mantel_0262_normalised\nmantel/train/mantel_0002_normalised\nmantel/train/mantel_0052_normalised\nmantel/train/mantel_0091_normalised\nmantel/train/mantel_0088_normalised\nmantel/train/mantel_0204_normalised\nmantel/train/mantel_0225_normalised\nmantel/train/mantel_0134_normalised\nmantel/train/mantel_0126_normalised\nmantel/train/mantel_0031_normalised\nmantel/train/mantel_0246_normalised\nmantel/train/mantel_0008_normalised\nmantel/train/mantel_0148_normalised\nmantel/train/mantel_0217_normalised\nmantel/train/mantel_0205_normalised\nmantel/train/mantel_0102_normalised\nmantel/train/mantel_0072_normalised\nmantel/train/mantel_0149_normalised\nmantel/train/mantel_0105_normalised\nmantel/train/mantel_0090_normalised\nmantel/train/mantel_0168_normalised\nmantel/train/mantel_0239_normalised\nmantel/train/mantel_0269_normalised\nmantel/train/mantel_0014_normalised\nmantel/train/mantel_0161_normalised\nmantel/train/mantel_0096_normalised\nmantel/train/mantel_0117_normalised\nmantel/train/mantel_0061_normalised\nmantel/train/mantel_0258_normalised\nmantel/train/mantel_0256_normal
ised\nmantel/train/mantel_0123_normalised\nmantel/train/mantel_0164_normalised\nmantel/train/mantel_0095_normalised\nmantel/train/mantel_0214_normalised\nmantel/train/mantel_0167_normalised\nmantel/train/mantel_0254_normalised\nmantel/train/mantel_0209_normalised\nmantel/train/mantel_0020_normalised\nmantel/train/mantel_0116_normalised\nmantel/train/mantel_0035_normalised\nmantel/train/mantel_0195_normalised\nmantel/train/mantel_0270_normalised\nmantel/train/mantel_0098_normalised\nmantel/train/mantel_0233_normalised\nmantel/test/mantel_0336_normalised\nmantel/test/mantel_0290_normalised\nmantel/test/mantel_0285_normalised\nmantel/test/mantel_0346_normalised\nmantel/test/mantel_0358_normalised\nmantel/test/mantel_0339_normalised\nmantel/test/mantel_0337_normalised\nmantel/test/mantel_0303_normalised\nmantel/test/mantel_0371_normalised\nmantel/test/mantel_0342_normalised\nmantel/test/mantel_0354_normalised\nmantel/test/mantel_0310_normalised\nmantel/test/mantel_0348_normalised\nmantel/test/mantel_0316_normalised\nmantel/test/mantel_0345_normalised\nmantel/test/mantel_0326_normalised\nmantel/test/mantel_0318_normalised\nmantel/test/mantel_0364_normalised\nmantel/test/mantel_0309_normalised\nmantel/test/mantel_0287_normalised\nmantel/test/mantel_0300_normalised\nmantel/test/mantel_0296_normalised\nmantel/test/mantel_0352_normalised\nmantel/test/mantel_0320_normalised\nmantel/test/mantel_0294_normalised\nmantel/test/mantel_0297_normalised\nmantel/test/mantel_0363_normalised\nmantel/test/mantel_0332_normalised\nmantel/test/mantel_0341_normalised\nmantel/test/mantel_0302_normalised\nmantel/test/mantel_0323_normalised\nmantel/test/mantel_0292_normalised\nmantel/test/mantel_0305_normalised\nmantel/test/mantel_0286_normalised\nmantel/test/mantel_0291_normalised\nmantel/test/mantel_0353_normalised\nmantel/test/mantel_0334_normalised\nmantel/test/mantel_0373_normalised\nmantel/test/mantel_0382_normalised\nmantel/test/mantel_0370_normalised\nmantel/test/mantel_0333_normalised\n
mantel/test/mantel_0331_normalised\nmantel/test/mantel_0375_normalised\nmantel/test/mantel_0338_normalised\nmantel/test/mantel_0301_normalised\nmantel/test/mantel_0325_normalised\nmantel/test/mantel_0374_normalised\nmantel/test/mantel_0369_normalised\nmantel/test/mantel_0367_normalised\nmantel/test/mantel_0384_normalised\nmantel/test/mantel_0335_normalised\nmantel/test/mantel_0299_normalised\nmantel/test/mantel_0365_normalised\nmantel/test/mantel_0314_normalised\nmantel/test/mantel_0308_normalised\nmantel/test/mantel_0307_normalised\nmantel/test/mantel_0313_normalised\nmantel/test/mantel_0315_normalised\nmantel/test/mantel_0343_normalised\nmantel/test/mantel_0368_normalised\nmantel/test/mantel_0360_normalised\nmantel/test/mantel_0324_normalised\nmantel/test/mantel_0383_normalised\nmantel/test/mantel_0340_normalised\nmantel/test/mantel_0361_normalised\nmantel/test/mantel_0351_normalised\nmantel/test/mantel_0322_normalised\nmantel/test/mantel_0372_normalised\nmantel/test/mantel_0349_normalised\nmantel/test/mantel_0357_normalised\nmantel/test/mantel_0312_normalised\nmantel/test/mantel_0329_normalised\nmantel/test/mantel_0379_normalised\nmantel/test/mantel_0295_normalised\nmantel/test/mantel_0311_normalised\nmantel/test/mantel_0347_normalised\nmantel/test/mantel_0304_normalised\nmantel/test/mantel_0377_normalised\nmantel/test/mantel_0319_normalised\nmantel/test/mantel_0330_normalised\nmantel/test/mantel_0317_normalised\nmantel/test/mantel_0288_normalised\nmantel/test/mantel_0350_normalised\nmantel/test/mantel_0328_normalised\nmantel/test/mantel_0366_normalised\nmantel/test/mantel_0344_normalised\nmantel/test/mantel_0381_normalised\nmantel/test/mantel_0376_normalised\nmantel/test/mantel_0321_normalised\nmantel/test/mantel_0293_normalised\nmantel/test/mantel_0362_normalised\nmantel/test/mantel_0306_normalised\nmantel/test/mantel_0356_normalised\nmantel/test/mantel_0327_normalised\nmantel/test/mantel_0380_normalised\nmantel/test/mantel_0378_normalised\nmantel/test/mantel_0
359_normalised\nmantel/test/mantel_0355_normalised\nmantel/test/mantel_0289_normalised\nmantel/test/mantel_0298_normalised\nvase/train/vase_0198_normalised\nvase/train/vase_0104_normalised\nvase/train/vase_0377_normalised\nvase/train/vase_0064_normalised\nvase/train/vase_0405_normalised\nvase/train/vase_0328_normalised\nvase/train/vase_0384_normalised\nvase/train/vase_0299_normalised\nvase/train/vase_0375_normalised\nvase/train/vase_0036_normalised\nvase/train/vase_0286_normalised\nvase/train/vase_0325_normalised\nvase/train/vase_0181_normalised\nvase/train/vase_0318_normalised\nvase/train/vase_0100_normalised\nvase/train/vase_0461_normalised\nvase/train/vase_0334_normalised\nvase/train/vase_0057_normalised\nvase/train/vase_0374_normalised\nvase/train/vase_0469_normalised\nvase/train/vase_0454_normalised\nvase/train/vase_0090_normalised\nvase/train/vase_0209_normalised\nvase/train/vase_0452_normalised\nvase/train/vase_0330_normalised\nvase/train/vase_0457_normalised\nvase/train/vase_0004_normalised\nvase/train/vase_0133_normalised\nvase/train/vase_0431_normalised\nvase/train/vase_0031_normalised\nvase/train/vase_0037_normalised\nvase/train/vase_0034_normalised\nvase/train/vase_0308_normalised\nvase/train/vase_0368_normalised\nvase/train/vase_0144_normalised\nvase/train/vase_0310_normalised\nvase/train/vase_0053_normalised\nvase/train/vase_0391_normalised\nvase/train/vase_0143_normalised\nvase/train/vase_0432_normalised\nvase/train/vase_0108_normalised\nvase/train/vase_0091_normalised\nvase/train/vase_0386_normalised\nvase/train/vase_0438_normalised\nvase/train/vase_0361_normalised\nvase/train/vase_0217_normalised\nvase/train/vase_0351_normalised\nvase/train/vase_0146_normalised\nvase/train/vase_0092_normalised\nvase/train/vase_0124_normalised\nvase/train/vase_0178_normalised\nvase/train/vase_0215_normalised\nvase/train/vase_0032_normalised\nvase/train/vase_0392_normalised\nvase/train/vase_0060_normalised\nvase/train/vase_0383_normalised\nvase/train/vase_0376_normali
sed\nvase/train/vase_0131_normalised\nvase/train/vase_0419_normalised\nvase/train/vase_0190_normalised\nvase/train/vase_0281_normalised\nvase/train/vase_0398_normalised\nvase/train/vase_0362_normalised\nvase/train/vase_0357_normalised\nvase/train/vase_0358_normalised\nvase/train/vase_0474_normalised\nvase/train/vase_0331_normalised\nvase/train/vase_0121_normalised\nvase/train/vase_0232_normalised\nvase/train/vase_0022_normalised\nvase/train/vase_0068_normalised\nvase/train/vase_0157_normalised\nvase/train/vase_0088_normalised\nvase/train/vase_0338_normalised\nvase/train/vase_0254_normalised\nvase/train/vase_0188_normalised\nvase/train/vase_0109_normalised\nvase/train/vase_0071_normalised\nvase/train/vase_0026_normalised\nvase/train/vase_0235_normalised\nvase/train/vase_0213_normalised\nvase/train/vase_0344_normalised\nvase/train/vase_0139_normalised\nvase/train/vase_0055_normalised\nvase/train/vase_0447_normalised\nvase/train/vase_0283_normalised\nvase/train/vase_0291_normalised\nvase/train/vase_0424_normalised\nvase/train/vase_0007_normalised\nvase/train/vase_0009_normalised\nvase/train/vase_0025_normalised\nvase/train/vase_0394_normalised\nvase/train/vase_0095_normalised\nvase/train/vase_0161_normalised\nvase/train/vase_0065_normalised\nvase/train/vase_0173_normalised\nvase/train/vase_0113_normalised\nvase/train/vase_0341_normalised\nvase/train/vase_0002_normalised\nvase/train/vase_0409_normalised\nvase/train/vase_0335_normalised\nvase/train/vase_0048_normalised\nvase/train/vase_0446_normalised\nvase/train/vase_0155_normalised\nvase/train/vase_0411_normalised\nvase/train/vase_0142_normalised\nvase/train/vase_0029_normalised\nvase/train/vase_0140_normalised\nvase/train/vase_0244_normalised\nvase/train/vase_0003_normalised\nvase/train/vase_0249_normalised\nvase/train/vase_0099_normalised\nvase/train/vase_0404_normalised\nvase/train/vase_0159_normalised\nvase/train/vase_0132_normalised\nvase/train/vase_0280_normalised\nvase/train/vase_0129_normalised\nvase/train/vase
_0225_normalised\nvase/train/vase_0282_normalised\nvase/train/vase_0214_normalised\nvase/train/vase_0207_normalised\nvase/train/vase_0073_normalised\nvase/train/vase_0083_normalised\nvase/train/vase_0464_normalised\nvase/train/vase_0472_normalised\nvase/train/vase_0201_normalised\nvase/train/vase_0303_normalised\nvase/train/vase_0006_normalised\nvase/train/vase_0082_normalised\nvase/train/vase_0302_normalised\nvase/train/vase_0089_normalised\nvase/train/vase_0018_normalised\nvase/train/vase_0439_normalised\nvase/train/vase_0059_normalised\nvase/train/vase_0112_normalised\nvase/train/vase_0203_normalised\nvase/train/vase_0301_normalised\nvase/train/vase_0345_normalised\nvase/train/vase_0183_normalised\nvase/train/vase_0047_normalised\nvase/train/vase_0240_normalised\nvase/train/vase_0196_normalised\nvase/train/vase_0321_normalised\nvase/train/vase_0125_normalised\nvase/train/vase_0243_normalised\nvase/train/vase_0237_normalised\nvase/train/vase_0016_normalised\nvase/train/vase_0193_normalised\nvase/train/vase_0260_normalised\nvase/train/vase_0304_normalised\nvase/train/vase_0288_normalised\nvase/train/vase_0128_normalised\nvase/train/vase_0256_normalised\nvase/train/vase_0365_normalised\nvase/train/vase_0150_normalised\nvase/train/vase_0422_normalised\nvase/train/vase_0255_normalised\nvase/train/vase_0218_normalised\nvase/train/vase_0040_normalised\nvase/train/vase_0312_normalised\nvase/train/vase_0176_normalised\nvase/train/vase_0185_normalised\nvase/train/vase_0168_normalised\nvase/train/vase_0182_normalised\nvase/train/vase_0242_normalised\nvase/train/vase_0363_normalised\nvase/train/vase_0019_normalised\nvase/train/vase_0426_normalised\nvase/train/vase_0101_normalised\nvase/train/vase_0410_normalised\nvase/train/vase_0115_normalised\nvase/train/vase_0028_normalised\nvase/train/vase_0013_normalised\nvase/train/vase_0466_normalised\nvase/train/vase_0440_normalised\nvase/train/vase_0075_normalised\nvase/train/vase_0450_normalised\nvase/train/vase_0154_normalised\nva
se/train/vase_0292_normalised\nvase/train/vase_0094_normalised\nvase/train/vase_0449_normalised\nvase/train/vase_0023_normalised\nvase/train/vase_0030_normalised\nvase/train/vase_0223_normalised\nvase/train/vase_0056_normalised\nvase/train/vase_0274_normalised\nvase/train/vase_0180_normalised\nvase/train/vase_0211_normalised\nvase/train/vase_0074_normalised\nvase/train/vase_0306_normalised\nvase/train/vase_0273_normalised\nvase/train/vase_0259_normalised\nvase/train/vase_0020_normalised\nvase/train/vase_0387_normalised\nvase/train/vase_0241_normalised\nvase/train/vase_0434_normalised\nvase/train/vase_0420_normalised\nvase/train/vase_0169_normalised\nvase/train/vase_0197_normalised\nvase/train/vase_0443_normalised\nvase/train/vase_0250_normalised\nvase/train/vase_0228_normalised\nvase/train/vase_0370_normalised\nvase/train/vase_0134_normalised\nvase/train/vase_0277_normalised\nvase/train/vase_0289_normalised\nvase/train/vase_0378_normalised\nvase/train/vase_0295_normalised\nvase/train/vase_0135_normalised\nvase/train/vase_0152_normalised\nvase/train/vase_0336_normalised\nvase/train/vase_0216_normalised\nvase/train/vase_0402_normalised\nvase/train/vase_0147_normalised\nvase/train/vase_0296_normalised\nvase/train/vase_0118_normalised\nvase/train/vase_0268_normalised\nvase/train/vase_0267_normalised\nvase/train/vase_0078_normalised\nvase/train/vase_0421_normalised\nvase/train/vase_0165_normalised\nvase/train/vase_0266_normalised\nvase/train/vase_0110_normalised\nvase/train/vase_0427_normalised\nvase/train/vase_0340_normalised\nvase/train/vase_0212_normalised\nvase/train/vase_0046_normalised\nvase/train/vase_0204_normalised\nvase/train/vase_0412_normalised\nvase/train/vase_0024_normalised\nvase/train/vase_0261_normalised\nvase/train/vase_0322_normalised\nvase/train/vase_0263_normalised\nvase/train/vase_0167_normalised\nvase/train/vase_0415_normalised\nvase/train/vase_0317_normalised\nvase/train/vase_0395_normalised\nvase/train/vase_0151_normalised\nvase/train/vase_0067_n
ormalised\nvase/train/vase_0236_normalised\nvase/train/vase_0248_normalised\nvase/train/vase_0414_normalised\nvase/train/vase_0311_normalised\nvase/train/vase_0435_normalised\nvase/train/vase_0364_normalised\nvase/train/vase_0195_normalised\nvase/train/vase_0329_normalised\nvase/train/vase_0130_normalised\nvase/train/vase_0367_normalised\nvase/train/vase_0272_normalised\nvase/train/vase_0061_normalised\nvase/train/vase_0170_normalised\nvase/train/vase_0298_normalised\nvase/train/vase_0284_normalised\nvase/train/vase_0307_normalised\nvase/train/vase_0471_normalised\nvase/train/vase_0077_normalised\nvase/train/vase_0390_normalised\nvase/train/vase_0111_normalised\nvase/train/vase_0175_normalised\nvase/train/vase_0371_normalised\nvase/train/vase_0136_normalised\nvase/train/vase_0238_normalised\nvase/train/vase_0072_normalised\nvase/train/vase_0166_normalised\nvase/train/vase_0049_normalised\nvase/train/vase_0062_normalised\nvase/train/vase_0418_normalised\nvase/train/vase_0008_normalised\nvase/train/vase_0403_normalised\nvase/train/vase_0275_normalised\nvase/train/vase_0231_normalised\nvase/train/vase_0187_normalised\nvase/train/vase_0206_normalised\nvase/train/vase_0285_normalised\nvase/train/vase_0085_normalised\nvase/train/vase_0096_normalised\nvase/train/vase_0429_normalised\nvase/train/vase_0246_normalised\nvase/train/vase_0337_normalised\nvase/train/vase_0448_normalised\nvase/train/vase_0356_normalised\nvase/train/vase_0359_normalised\nvase/train/vase_0010_normalised\nvase/train/vase_0380_normalised\nvase/train/vase_0117_normalised\nvase/train/vase_0262_normalised\nvase/train/vase_0202_normalised\nvase/train/vase_0051_normalised\nvase/train/vase_0093_normalised\nvase/train/vase_0222_normalised\nvase/train/vase_0313_normalised\nvase/train/vase_0189_normalised\nvase/train/vase_0360_normalised\nvase/train/vase_0297_normalised\nvase/train/vase_0171_normalised\nvase/train/vase_0014_normalised\nvase/train/vase_0320_normalised\nvase/train/vase_0253_normalised\nvase/trai
n/vase_0270_normalised\nvase/train/vase_0063_normalised\nvase/train/vase_0191_normalised\nvase/train/vase_0366_normalised\nvase/train/vase_0417_normalised\nvase/train/vase_0208_normalised\nvase/train/vase_0126_normalised\nvase/train/vase_0323_normalised\nvase/train/vase_0005_normalised\nvase/train/vase_0430_normalised\nvase/train/vase_0388_normalised\nvase/train/vase_0379_normalised\nvase/train/vase_0423_normalised\nvase/train/vase_0319_normalised\nvase/train/vase_0470_normalised\nvase/train/vase_0247_normalised\nvase/train/vase_0199_normalised\nvase/train/vase_0086_normalised\nvase/train/vase_0389_normalised\nvase/train/vase_0343_normalised\nvase/train/vase_0251_normalised\nvase/train/vase_0327_normalised\nvase/train/vase_0070_normalised\nvase/train/vase_0107_normalised\nvase/train/vase_0456_normalised\nvase/train/vase_0172_normalised\nvase/train/vase_0407_normalised\nvase/train/vase_0038_normalised\nvase/train/vase_0332_normalised\nvase/train/vase_0138_normalised\nvase/train/vase_0473_normalised\nvase/train/vase_0265_normalised\nvase/train/vase_0179_normalised\nvase/train/vase_0084_normalised\nvase/train/vase_0314_normalised\nvase/train/vase_0373_normalised\nvase/train/vase_0347_normalised\nvase/train/vase_0052_normalised\nvase/train/vase_0044_normalised\nvase/train/vase_0210_normalised\nvase/train/vase_0354_normalised\nvase/train/vase_0369_normalised\nvase/train/vase_0050_normalised\nvase/train/vase_0186_normalised\nvase/train/vase_0425_normalised\nvase/train/vase_0163_normalised\nvase/train/vase_0433_normalised\nvase/train/vase_0252_normalised\nvase/train/vase_0413_normalised\nvase/train/vase_0416_normalised\nvase/train/vase_0160_normalised\nvase/train/vase_0290_normalised\nvase/train/vase_0475_normalised\nvase/train/vase_0227_normalised\nvase/train/vase_0042_normalised\nvase/train/vase_0045_normalised\nvase/train/vase_0396_normalised\nvase/train/vase_0069_normalised\nvase/train/vase_0462_normalised\nvase/train/vase_0385_normalised\nvase/train/vase_0239_normalis
ed\nvase/train/vase_0453_normalised\nvase/train/vase_0279_normalised\nvase/train/vase_0399_normalised\nvase/train/vase_0401_normalised\nvase/train/vase_0164_normalised\nvase/train/vase_0041_normalised\nvase/train/vase_0153_normalised\nvase/train/vase_0120_normalised\nvase/train/vase_0226_normalised\nvase/train/vase_0428_normalised\nvase/train/vase_0350_normalised\nvase/train/vase_0123_normalised\nvase/train/vase_0349_normalised\nvase/train/vase_0406_normalised\nvase/train/vase_0097_normalised\nvase/train/vase_0141_normalised\nvase/train/vase_0076_normalised\nvase/train/vase_0293_normalised\nvase/train/vase_0194_normalised\nvase/train/vase_0393_normalised\nvase/train/vase_0033_normalised\nvase/train/vase_0264_normalised\nvase/train/vase_0114_normalised\nvase/train/vase_0105_normalised\nvase/train/vase_0116_normalised\nvase/train/vase_0400_normalised\nvase/train/vase_0397_normalised\nvase/train/vase_0408_normalised\nvase/train/vase_0200_normalised\nvase/train/vase_0381_normalised\nvase/train/vase_0087_normalised\nvase/train/vase_0445_normalised\nvase/train/vase_0122_normalised\nvase/train/vase_0305_normalised\nvase/train/vase_0355_normalised\nvase/train/vase_0230_normalised\nvase/train/vase_0326_normalised\nvase/train/vase_0027_normalised\nvase/train/vase_0278_normalised\nvase/train/vase_0177_normalised\nvase/train/vase_0333_normalised\nvase/train/vase_0234_normalised\nvase/train/vase_0066_normalised\nvase/train/vase_0309_normalised\nvase/train/vase_0058_normalised\nvase/train/vase_0459_normalised\nvase/train/vase_0352_normalised\nvase/train/vase_0017_normalised\nvase/train/vase_0012_normalised\nvase/train/vase_0316_normalised\nvase/train/vase_0441_normalised\nvase/train/vase_0346_normalised\nvase/train/vase_0148_normalised\nvase/train/vase_0184_normalised\nvase/train/vase_0382_normalised\nvase/train/vase_0081_normalised\nvase/train/vase_0276_normalised\nvase/train/vase_0103_normalised\nvase/train/vase_0451_normalised\nvase/train/vase_0271_normalised\nvase/train/vase_
0098_normalised\nvase/train/vase_0463_normalised\nvase/train/vase_0220_normalised\nvase/train/vase_0233_normalised\nvase/train/vase_0149_normalised\nvase/train/vase_0035_normalised\nvase/train/vase_0245_normalised\nvase/train/vase_0043_normalised\nvase/train/vase_0158_normalised\nvase/train/vase_0015_normalised\nvase/train/vase_0080_normalised\nvase/train/vase_0054_normalised\nvase/train/vase_0039_normalised\nvase/train/vase_0455_normalised\nvase/train/vase_0442_normalised\nvase/train/vase_0221_normalised\nvase/train/vase_0339_normalised\nvase/train/vase_0294_normalised\nvase/train/vase_0145_normalised\nvase/train/vase_0079_normalised\nvase/train/vase_0342_normalised\nvase/train/vase_0219_normalised\nvase/train/vase_0372_normalised\nvase/train/vase_0300_normalised\nvase/train/vase_0162_normalised\nvase/train/vase_0436_normalised\nvase/train/vase_0001_normalised\nvase/train/vase_0437_normalised\nvase/train/vase_0106_normalised\nvase/train/vase_0205_normalised\nvase/train/vase_0021_normalised\nvase/train/vase_0315_normalised\nvase/train/vase_0324_normalised\nvase/train/vase_0258_normalised\nvase/train/vase_0119_normalised\nvase/train/vase_0174_normalised\nvase/train/vase_0127_normalised\nvase/train/vase_0353_normalised\nvase/train/vase_0460_normalised\nvase/train/vase_0444_normalised\nvase/train/vase_0458_normalised\nvase/train/vase_0465_normalised\nvase/train/vase_0229_normalised\nvase/train/vase_0287_normalised\nvase/train/vase_0257_normalised\nvase/train/vase_0156_normalised\nvase/train/vase_0467_normalised\nvase/train/vase_0011_normalised\nvase/train/vase_0269_normalised\nvase/train/vase_0102_normalised\nvase/train/vase_0224_normalised\nvase/train/vase_0348_normalised\nvase/train/vase_0137_normalised\nvase/train/vase_0468_normalised\nvase/train/vase_0192_normalised\nvase/test/vase_0522_normalised\nvase/test/vase_0498_normalised\nvase/test/vase_0536_normalised\nvase/test/vase_0532_normalised\nvase/test/vase_0495_normalised\nvase/test/vase_0548_normalised\nvase/test
/vase_0551_normalised\nvase/test/vase_0557_normalised\nvase/test/vase_0567_normalised\nvase/test/vase_0496_normalised\nvase/test/vase_0483_normalised\nvase/test/vase_0513_normalised\nvase/test/vase_0499_normalised\nvase/test/vase_0506_normalised\nvase/test/vase_0540_normalised\nvase/test/vase_0528_normalised\nvase/test/vase_0542_normalised\nvase/test/vase_0569_normalised\nvase/test/vase_0534_normalised\nvase/test/vase_0480_normalised\nvase/test/vase_0510_normalised\nvase/test/vase_0556_normalised\nvase/test/vase_0554_normalised\nvase/test/vase_0501_normalised\nvase/test/vase_0508_normalised\nvase/test/vase_0549_normalised\nvase/test/vase_0547_normalised\nvase/test/vase_0544_normalised\nvase/test/vase_0486_normalised\nvase/test/vase_0565_normalised\nvase/test/vase_0538_normalised\nvase/test/vase_0491_normalised\nvase/test/vase_0481_normalised\nvase/test/vase_0482_normalised\nvase/test/vase_0509_normalised\nvase/test/vase_0511_normalised\nvase/test/vase_0559_normalised\nvase/test/vase_0535_normalised\nvase/test/vase_0516_normalised\nvase/test/vase_0537_normalised\nvase/test/vase_0489_normalised\nvase/test/vase_0566_normalised\nvase/test/vase_0525_normalised\nvase/test/vase_0493_normalised\nvase/test/vase_0477_normalised\nvase/test/vase_0572_normalised\nvase/test/vase_0505_normalised\nvase/test/vase_0517_normalised\nvase/test/vase_0520_normalised\nvase/test/vase_0497_normalised\nvase/test/vase_0503_normalised\nvase/test/vase_0539_normalised\nvase/test/vase_0545_normalised\nvase/test/vase_0563_normalised\nvase/test/vase_0543_normalised\nvase/test/vase_0530_normalised\nvase/test/vase_0526_normalised\nvase/test/vase_0523_normalised\nvase/test/vase_0524_normalised\nvase/test/vase_0570_normalised\nvase/test/vase_0529_normalised\nvase/test/vase_0560_normalised\nvase/test/vase_0507_normalised\nvase/test/vase_0558_normalised\nvase/test/vase_0514_normalised\nvase/test/vase_0552_normalised\nvase/test/vase_0512_normalised\nvase/test/vase_0521_normalised\nvase/test/vase_0574_norma
lised\nvase/test/vase_0575_normalised\nvase/test/vase_0553_normalised\nvase/test/vase_0571_normalised\nvase/test/vase_0533_normalised\nvase/test/vase_0562_normalised\nvase/test/vase_0564_normalised\nvase/test/vase_0519_normalised\nvase/test/vase_0504_normalised\nvase/test/vase_0541_normalised\nvase/test/vase_0531_normalised\nvase/test/vase_0478_normalised\nvase/test/vase_0550_normalised\nvase/test/vase_0492_normalised\nvase/test/vase_0490_normalised\nvase/test/vase_0476_normalised\nvase/test/vase_0561_normalised\nvase/test/vase_0502_normalised\nvase/test/vase_0546_normalised\nvase/test/vase_0488_normalised\nvase/test/vase_0527_normalised\nvase/test/vase_0555_normalised\nvase/test/vase_0500_normalised\nvase/test/vase_0485_normalised\nvase/test/vase_0515_normalised\nvase/test/vase_0573_normalised\nvase/test/vase_0568_normalised\nvase/test/vase_0479_normalised\nvase/test/vase_0518_normalised\nvase/test/vase_0494_normalised\nvase/test/vase_0484_normalised\nvase/test/vase_0487_normalised\nbowl/train/bowl_0050_normalised\nbowl/train/bowl_0031_normalised\nbowl/train/bowl_0038_normalised\nbowl/train/bowl_0051_normalised\nbowl/train/bowl_0026_normalised\nbowl/train/bowl_0019_normalised\nbowl/train/bowl_0005_normalised\nbowl/train/bowl_0054_normalised\nbowl/train/bowl_0048_normalised\nbowl/train/bowl_0013_normalised\nbowl/train/bowl_0044_normalised\nbowl/train/bowl_0025_normalised\nbowl/train/bowl_0027_normalised\nbowl/train/bowl_0022_normalised\nbowl/train/bowl_0035_normalised\nbowl/train/bowl_0001_normalised\nbowl/train/bowl_0017_normalised\nbowl/train/bowl_0029_normalised\nbowl/train/bowl_0015_normalised\nbowl/train/bowl_0011_normalised\nbowl/train/bowl_0018_normalised\nbowl/train/bowl_0043_normalised\nbowl/train/bowl_0049_normalised\nbowl/train/bowl_0042_normalised\nbowl/train/bowl_0004_normalised\nbowl/train/bowl_0041_normalised\nbowl/train/bowl_0009_normalised\nbowl/train/bowl_0062_normalised\nbowl/train/bowl_0060_normalised\nbowl/train/bowl_0055_normalised\nbowl/train/
bowl_0003_normalised\nbowl/train/bowl_0030_normalised\nbowl/train/bowl_0039_normalised\nbowl/train/bowl_0052_normalised\nbowl/train/bowl_0059_normalised\nbowl/train/bowl_0028_normalised\nbowl/train/bowl_0064_normalised\nbowl/train/bowl_0016_normalised\nbowl/train/bowl_0061_normalised\nbowl/train/bowl_0057_normalised\nbowl/train/bowl_0007_normalised\nbowl/train/bowl_0053_normalised\nbowl/train/bowl_0058_normalised\nbowl/train/bowl_0021_normalised\nbowl/train/bowl_0056_normalised\nbowl/train/bowl_0037_normalised\nbowl/train/bowl_0023_normalised\nbowl/train/bowl_0032_normalised\nbowl/train/bowl_0033_normalised\nbowl/train/bowl_0036_normalised\nbowl/train/bowl_0040_normalised\nbowl/train/bowl_0008_normalised\nbowl/train/bowl_0046_normalised\nbowl/train/bowl_0010_normalised\nbowl/train/bowl_0012_normalised\nbowl/train/bowl_0020_normalised\nbowl/train/bowl_0014_normalised\nbowl/train/bowl_0045_normalised\nbowl/train/bowl_0047_normalised\nbowl/train/bowl_0024_normalised\nbowl/train/bowl_0002_normalised\nbowl/train/bowl_0063_normalised\nbowl/train/bowl_0034_normalised\nbowl/train/bowl_0006_normalised\nbowl/test/bowl_0082_normalised\nbowl/test/bowl_0073_normalised\nbowl/test/bowl_0067_normalised\nbowl/test/bowl_0069_normalised\nbowl/test/bowl_0080_normalised\nbowl/test/bowl_0070_normalised\nbowl/test/bowl_0084_normalised\nbowl/test/bowl_0072_normalised\nbowl/test/bowl_0081_normalised\nbowl/test/bowl_0065_normalised\nbowl/test/bowl_0066_normalised\nbowl/test/bowl_0083_normalised\nbowl/test/bowl_0078_normalised\nbowl/test/bowl_0076_normalised\nbowl/test/bowl_0075_normalised\nbowl/test/bowl_0074_normalised\nbowl/test/bowl_0068_normalised\nbowl/test/bowl_0079_normalised\nbowl/test/bowl_0071_normalised\nbowl/test/bowl_0077_normalised\nmonitor/train/monitor_0153_normalised\nmonitor/train/monitor_0333_normalised\nmonitor/train/monitor_0204_normalised\nmonitor/train/monitor_0053_normalised\nmonitor/train/monitor_0141_normalised\nmonitor/train/monitor_0279_normalised\nmonitor/train/m
onitor_0159_normalised\nmonitor/train/monitor_0158_normalised\nmonitor/train/monitor_0250_normalised\nmonitor/train/monitor_0205_normalised\nmonitor/train/monitor_0020_normalised\nmonitor/train/monitor_0316_normalised\nmonitor/train/monitor_0126_normalised\nmonitor/train/monitor_0339_normalised\nmonitor/train/monitor_0086_normalised\nmonitor/train/monitor_0219_normalised\nmonitor/train/monitor_0226_normalised\nmonitor/train/monitor_0001_normalised\nmonitor/train/monitor_0256_normalised\nmonitor/train/monitor_0409_normalised\nmonitor/train/monitor_0125_normalised\nmonitor/train/monitor_0380_normalised\nmonitor/train/monitor_0277_normalised\nmonitor/train/monitor_0064_normalised\nmonitor/train/monitor_0340_normalised\nmonitor/train/monitor_0173_normalised\nmonitor/train/monitor_0444_normalised\nmonitor/train/monitor_0140_normalised\nmonitor/train/monitor_0131_normalised\nmonitor/train/monitor_0358_normalised\nmonitor/train/monitor_0424_normalised\nmonitor/train/monitor_0451_normalised\nmonitor/train/monitor_0022_normalised\nmonitor/train/monitor_0080_normalised\nmonitor/train/monitor_0148_normalised\nmonitor/train/monitor_0386_normalised\nmonitor/train/monitor_0324_normalised\nmonitor/train/monitor_0088_normalised\nmonitor/train/monitor_0076_normalised\nmonitor/train/monitor_0273_normalised\nmonitor/train/monitor_0121_normalised\nmonitor/train/monitor_0100_normalised\nmonitor/train/monitor_0046_normalised\nmonitor/train/monitor_0310_normalised\nmonitor/train/monitor_0332_normalised\nmonitor/train/monitor_0172_normalised\nmonitor/train/monitor_0374_normalised\nmonitor/train/monitor_0317_normalised\nmonitor/train/monitor_0354_normalised\nmonitor/train/monitor_0342_normalised\nmonitor/train/monitor_0166_normalised\nmonitor/train/monitor_0388_normalised\nmonitor/train/monitor_0190_normalised\nmonitor/train/monitor_0106_normalised\nmonitor/train/monitor_0315_normalised\nmonitor/train/monitor_0055_normalised\nmonitor/train/monitor_0338_normalised\nmonitor/train/monitor_0066
_normalised\nmonitor/train/monitor_0248_normalised\nmonitor/train/monitor_0102_normalised\nmonitor/train/monitor_0459_normalised\nmonitor/train/monitor_0132_normalised\nmonitor/train/monitor_0156_normalised\nmonitor/train/monitor_0160_normalised\nmonitor/train/monitor_0075_normalised\nmonitor/train/monitor_0063_normalised\nmonitor/train/monitor_0382_normalised\nmonitor/train/monitor_0208_normalised\nmonitor/train/monitor_0413_normalised\nmonitor/train/monitor_0124_normalised\nmonitor/train/monitor_0214_normalised\nmonitor/train/monitor_0133_normalised\nmonitor/train/monitor_0081_normalised\nmonitor/train/monitor_0411_normalised\nmonitor/train/monitor_0312_normalised\nmonitor/train/monitor_0187_normalised\nmonitor/train/monitor_0201_normalised\nmonitor/train/monitor_0300_normalised\nmonitor/train/monitor_0280_normalised\nmonitor/train/monitor_0128_normalised\nmonitor/train/monitor_0334_normalised\nmonitor/train/monitor_0243_normalised\nmonitor/train/monitor_0067_normalised\nmonitor/train/monitor_0435_normalised\nmonitor/train/monitor_0393_normalised\nmonitor/train/monitor_0186_normalised\nmonitor/train/monitor_0321_normalised\nmonitor/train/monitor_0034_normalised\nmonitor/train/monitor_0272_normalised\nmonitor/train/monitor_0271_normalised\nmonitor/train/monitor_0403_normalised\nmonitor/train/monitor_0419_normalised\nmonitor/train/monitor_0050_normalised\nmonitor/train/monitor_0071_normalised\nmonitor/train/monitor_0406_normalised\nmonitor/train/monitor_0239_normalised\nmonitor/train/monitor_0116_normalised\nmonitor/train/monitor_0290_normalised\nmonitor/train/monitor_0045_normalised\nmonitor/train/monitor_0335_normalised\nmonitor/train/monitor_0263_normalised\nmonitor/train/monitor_0010_normalised\nmonitor/train/monitor_0288_normalised\nmonitor/train/monitor_0068_normalised\nmonitor/train/monitor_0301_normalised\nmonitor/train/monitor_0123_normalised\nmonitor/train/monitor_0412_normalised\nmonitor/train/monitor_0448_normalised\nmonitor/train/monitor_0089_normalised
\nmonitor/train/monitor_0287_normalised\nmonitor/train/monitor_0258_normalised\nmonitor/train/monitor_0161_normalised\nmonitor/train/monitor_0165_normalised\nmonitor/train/monitor_0259_normalised\nmonitor/train/monitor_0097_normalised\nmonitor/train/monitor_0069_normalised\nmonitor/train/monitor_0306_normalised\nmonitor/train/monitor_0142_normalised\nmonitor/train/monitor_0058_normalised\nmonitor/train/monitor_0127_normalised\nmonitor/train/monitor_0139_normalised\nmonitor/train/monitor_0215_normalised\nmonitor/train/monitor_0041_normalised\nmonitor/train/monitor_0117_normalised\nmonitor/train/monitor_0200_normalised\nmonitor/train/monitor_0442_normalised\nmonitor/train/monitor_0177_normalised\nmonitor/train/monitor_0202_normalised\nmonitor/train/monitor_0307_normalised\nmonitor/train/monitor_0237_normalised\nmonitor/train/monitor_0426_normalised\nmonitor/train/monitor_0213_normalised\nmonitor/train/monitor_0061_normalised\nmonitor/train/monitor_0225_normalised\nmonitor/train/monitor_0051_normalised\nmonitor/train/monitor_0114_normalised\nmonitor/train/monitor_0152_normalised\nmonitor/train/monitor_0236_normalised\nmonitor/train/monitor_0144_normalised\nmonitor/train/monitor_0189_normalised\nmonitor/train/monitor_0281_normalised\nmonitor/train/monitor_0349_normalised\nmonitor/train/monitor_0129_normalised\nmonitor/train/monitor_0107_normalised\nmonitor/train/monitor_0112_normalised\nmonitor/train/monitor_0005_normalised\nmonitor/train/monitor_0381_normalised\nmonitor/train/monitor_0452_normalised\nmonitor/train/monitor_0291_normalised\nmonitor/train/monitor_0014_normalised\nmonitor/train/monitor_0085_normalised\nmonitor/train/monitor_0458_normalised\nmonitor/train/monitor_0143_normalised\nmonitor/train/monitor_0035_normalised\nmonitor/train/monitor_0249_normalised\nmonitor/train/monitor_0325_normalised\nmonitor/train/monitor_0233_normalised\nmonitor/train/monitor_0318_normalised\nmonitor/train/monitor_0352_normalised\nmonitor/train/monitor_0293_normalised\nmonitor/t
rain/monitor_0115_normalised\nmonitor/train/monitor_0350_normalised\nmonitor/train/monitor_0345_normalised\nmonitor/train/monitor_0429_normalised\nmonitor/train/monitor_0244_normalised\nmonitor/train/monitor_0257_normalised\nmonitor/train/monitor_0235_normalised\nmonitor/train/monitor_0037_normalised\nmonitor/train/monitor_0042_normalised\nmonitor/train/monitor_0024_normalised\nmonitor/train/monitor_0136_normalised\nmonitor/train/monitor_0163_normalised\nmonitor/train/monitor_0157_normalised\nmonitor/train/monitor_0240_normalised\nmonitor/train/monitor_0169_normalised\nmonitor/train/monitor_0004_normalised\nmonitor/train/monitor_0427_normalised\nmonitor/train/monitor_0049_normalised\nmonitor/train/monitor_0191_normalised\nmonitor/train/monitor_0074_normalised\nmonitor/train/monitor_0054_normalised\nmonitor/train/monitor_0432_normalised\nmonitor/train/monitor_0445_normalised\nmonitor/train/monitor_0031_normalised\nmonitor/train/monitor_0434_normalised\nmonitor/train/monitor_0286_normalised\nmonitor/train/monitor_0164_normalised\nmonitor/train/monitor_0137_normalised\nmonitor/train/monitor_0180_normalised\nmonitor/train/monitor_0439_normalised\nmonitor/train/monitor_0373_normalised\nmonitor/train/monitor_0234_normalised\nmonitor/train/monitor_0360_normalised\nmonitor/train/monitor_0062_normalised\nmonitor/train/monitor_0328_normalised\nmonitor/train/monitor_0383_normalised\nmonitor/train/monitor_0405_normalised\nmonitor/train/monitor_0028_normalised\nmonitor/train/monitor_0443_normalised\nmonitor/train/monitor_0422_normalised\nmonitor/train/monitor_0021_normalised\nmonitor/train/monitor_0449_normalised\nmonitor/train/monitor_0267_normalised\nmonitor/train/monitor_0260_normalised\nmonitor/train/monitor_0297_normalised\nmonitor/train/monitor_0430_normalised\nmonitor/train/monitor_0111_normalised\nmonitor/train/monitor_0105_normalised\nmonitor/train/monitor_0222_normalised\nmonitor/train/monitor_0029_normalised\nmonitor/train/monitor_0417_normalised\nmonitor/train/monito
r_0149_normalised\nmonitor/train/monitor_0103_normalised\nmonitor/train/monitor_0032_normalised\nmonitor/train/monitor_0266_normalised\nmonitor/train/monitor_0145_normalised\nmonitor/train/monitor_0018_normalised\nmonitor/train/monitor_0090_normalised\nmonitor/train/monitor_0351_normalised\nmonitor/train/monitor_0241_normalised\nmonitor/train/monitor_0418_normalised\nmonitor/train/monitor_0231_normalised\nmonitor/train/monitor_0199_normalised\nmonitor/train/monitor_0092_normalised\nmonitor/train/monitor_0387_normalised\nmonitor/train/monitor_0450_normalised\nmonitor/train/monitor_0059_normalised\nmonitor/train/monitor_0461_normalised\nmonitor/train/monitor_0394_normalised\nmonitor/train/monitor_0320_normalised\nmonitor/train/monitor_0094_normalised\nmonitor/train/monitor_0275_normalised\nmonitor/train/monitor_0282_normalised\nmonitor/train/monitor_0184_normalised\nmonitor/train/monitor_0456_normalised\nmonitor/train/monitor_0247_normalised\nmonitor/train/monitor_0463_normalised\nmonitor/train/monitor_0082_normalised\nmonitor/train/monitor_0436_normalised\nmonitor/train/monitor_0308_normalised\nmonitor/train/monitor_0192_normalised\nmonitor/train/monitor_0401_normalised\nmonitor/train/monitor_0361_normalised\nmonitor/train/monitor_0416_normalised\nmonitor/train/monitor_0353_normalised\nmonitor/train/monitor_0400_normalised\nmonitor/train/monitor_0385_normalised\nmonitor/train/monitor_0396_normalised\nmonitor/train/monitor_0438_normalised\nmonitor/train/monitor_0437_normalised\nmonitor/train/monitor_0196_normalised\nmonitor/train/monitor_0220_normalised\nmonitor/train/monitor_0255_normalised\nmonitor/train/monitor_0254_normalised\nmonitor/train/monitor_0043_normalised\nmonitor/train/monitor_0295_normalised\nmonitor/train/monitor_0276_normalised\nmonitor/train/monitor_0211_normalised\nmonitor/train/monitor_0404_normalised\nmonitor/train/monitor_0346_normalised\nmonitor/train/monitor_0232_normalised\nmonitor/train/monitor_0091_normalised\nmonitor/train/monitor_0217_norm
alised\nmonitor/train/monitor_0392_normalised\nmonitor/train/monitor_0078_normalised\nmonitor/train/monitor_0284_normalised\nmonitor/train/monitor_0195_normalised\nmonitor/train/monitor_0104_normalised\nmonitor/train/monitor_0384_normalised\nmonitor/train/monitor_0083_normalised\nmonitor/train/monitor_0056_normalised\nmonitor/train/monitor_0303_normalised\nmonitor/train/monitor_0407_normalised\nmonitor/train/monitor_0134_normalised\nmonitor/train/monitor_0040_normalised\nmonitor/train/monitor_0030_normalised\nmonitor/train/monitor_0012_normalised\nmonitor/train/monitor_0329_normalised\nmonitor/train/monitor_0414_normalised\nmonitor/train/monitor_0181_normalised\nmonitor/train/monitor_0278_normalised\nmonitor/train/monitor_0355_normalised\nmonitor/train/monitor_0228_normalised\nmonitor/train/monitor_0343_normalised\nmonitor/train/monitor_0326_normalised\nmonitor/train/monitor_0203_normalised\nmonitor/train/monitor_0313_normalised\nmonitor/train/monitor_0065_normalised\nmonitor/train/monitor_0370_normalised\nmonitor/train/monitor_0130_normalised\nmonitor/train/monitor_0299_normalised\nmonitor/train/monitor_0175_normalised\nmonitor/train/monitor_0221_normalised\nmonitor/train/monitor_0268_normalised\nmonitor/train/monitor_0072_normalised\nmonitor/train/monitor_0344_normalised\nmonitor/train/monitor_0229_normalised\nmonitor/train/monitor_0264_normalised\nmonitor/train/monitor_0147_normalised\nmonitor/train/monitor_0011_normalised\nmonitor/train/monitor_0242_normalised\nmonitor/train/monitor_0391_normalised\nmonitor/train/monitor_0162_normalised\nmonitor/train/monitor_0212_normalised\nmonitor/train/monitor_0182_normalised\nmonitor/train/monitor_0033_normalised\nmonitor/train/monitor_0441_normalised\nmonitor/train/monitor_0410_normalised\nmonitor/train/monitor_0025_normalised\nmonitor/train/monitor_0003_normalised\nmonitor/train/monitor_0425_normalised\nmonitor/train/monitor_0305_normalised\nmonitor/train/monitor_0099_normalised\nmonitor/train/monitor_0178_normalised\nmon
itor/train/monitor_0108_normalised\nmonitor/train/monitor_0095_normalised\nmonitor/train/monitor_0079_normalised\nmonitor/train/monitor_0357_normalised\nmonitor/train/monitor_0398_normalised\nmonitor/train/monitor_0183_normalised\nmonitor/train/monitor_0465_normalised\nmonitor/train/monitor_0216_normalised\nmonitor/train/monitor_0423_normalised\nmonitor/train/monitor_0209_normalised\nmonitor/train/monitor_0375_normalised\nmonitor/train/monitor_0016_normalised\nmonitor/train/monitor_0348_normalised\nmonitor/train/monitor_0265_normalised\nmonitor/train/monitor_0337_normalised\nmonitor/train/monitor_0397_normalised\nmonitor/train/monitor_0341_normalised\nmonitor/train/monitor_0019_normalised\nmonitor/train/monitor_0188_normalised\nmonitor/train/monitor_0390_normalised\nmonitor/train/monitor_0365_normalised\nmonitor/train/monitor_0057_normalised\nmonitor/train/monitor_0372_normalised\nmonitor/train/monitor_0283_normalised\nmonitor/train/monitor_0210_normalised\nmonitor/train/monitor_0376_normalised\nmonitor/train/monitor_0093_normalised\nmonitor/train/monitor_0378_normalised\nmonitor/train/monitor_0023_normalised\nmonitor/train/monitor_0262_normalised\nmonitor/train/monitor_0302_normalised\nmonitor/train/monitor_0098_normalised\nmonitor/train/monitor_0431_normalised\nmonitor/train/monitor_0389_normalised\nmonitor/train/monitor_0207_normalised\nmonitor/train/monitor_0120_normalised\nmonitor/train/monitor_0013_normalised\nmonitor/train/monitor_0176_normalised\nmonitor/train/monitor_0377_normalised\nmonitor/train/monitor_0296_normalised\nmonitor/train/monitor_0138_normalised\nmonitor/train/monitor_0185_normalised\nmonitor/train/monitor_0002_normalised\nmonitor/train/monitor_0027_normalised\nmonitor/train/monitor_0322_normalised\nmonitor/train/monitor_0292_normalised\nmonitor/train/monitor_0110_normalised\nmonitor/train/monitor_0109_normalised\nmonitor/train/monitor_0174_normalised\nmonitor/train/monitor_0238_normalised\nmonitor/train/monitor_0285_normalised\nmonitor/train/
monitor_0330_normalised\nmonitor/train/monitor_0052_normalised\nmonitor/train/monitor_0227_normalised\nmonitor/train/monitor_0015_normalised\nmonitor/train/monitor_0077_normalised\nmonitor/train/monitor_0245_normalised\nmonitor/train/monitor_0289_normalised\nmonitor/train/monitor_0167_normalised\nmonitor/train/monitor_0269_normalised\nmonitor/train/monitor_0362_normalised\nmonitor/train/monitor_0155_normalised\nmonitor/train/monitor_0154_normalised\nmonitor/train/monitor_0359_normalised\nmonitor/train/monitor_0457_normalised\nmonitor/train/monitor_0428_normalised\nmonitor/train/monitor_0179_normalised\nmonitor/train/monitor_0415_normalised\nmonitor/train/monitor_0347_normalised\nmonitor/train/monitor_0395_normalised\nmonitor/train/monitor_0399_normalised\nmonitor/train/monitor_0170_normalised\nmonitor/train/monitor_0206_normalised\nmonitor/train/monitor_0270_normalised\nmonitor/train/monitor_0460_normalised\nmonitor/train/monitor_0084_normalised\nmonitor/train/monitor_0274_normalised\nmonitor/train/monitor_0446_normalised\nmonitor/train/monitor_0151_normalised\nmonitor/train/monitor_0369_normalised\nmonitor/train/monitor_0294_normalised\nmonitor/train/monitor_0252_normalised\nmonitor/train/monitor_0440_normalised\nmonitor/train/monitor_0168_normalised\nmonitor/train/monitor_0246_normalised\nmonitor/train/monitor_0314_normalised\nmonitor/train/monitor_0309_normalised\nmonitor/train/monitor_0038_normalised\nmonitor/train/monitor_0367_normalised\nmonitor/train/monitor_0454_normalised\nmonitor/train/monitor_0366_normalised\nmonitor/train/monitor_0146_normalised\nmonitor/train/monitor_0113_normalised\nmonitor/train/monitor_0135_normalised\nmonitor/train/monitor_0253_normalised\nmonitor/train/monitor_0371_normalised\nmonitor/train/monitor_0224_normalised\nmonitor/train/monitor_0311_normalised\nmonitor/train/monitor_0363_normalised\nmonitor/train/monitor_0402_normalised\nmonitor/train/monitor_0039_normalised\nmonitor/train/monitor_0251_normalised\nmonitor/train/monitor_045
3_normalised\nmonitor/train/monitor_0304_normalised\nmonitor/train/monitor_0319_normalised\nmonitor/train/monitor_0197_normalised\nmonitor/train/monitor_0017_normalised\nmonitor/train/monitor_0408_normalised\nmonitor/train/monitor_0087_normalised\nmonitor/train/monitor_0048_normalised\nmonitor/train/monitor_0026_normalised\nmonitor/train/monitor_0462_normalised\nmonitor/train/monitor_0331_normalised\nmonitor/train/monitor_0101_normalised\nmonitor/train/monitor_0070_normalised\nmonitor/train/monitor_0218_normalised\nmonitor/train/monitor_0364_normalised\nmonitor/train/monitor_0150_normalised\nmonitor/train/monitor_0096_normalised\nmonitor/train/monitor_0421_normalised\nmonitor/train/monitor_0194_normalised\nmonitor/train/monitor_0261_normalised\nmonitor/train/monitor_0455_normalised\nmonitor/train/monitor_0230_normalised\nmonitor/train/monitor_0447_normalised\nmonitor/train/monitor_0122_normalised\nmonitor/train/monitor_0223_normalised\nmonitor/train/monitor_0379_normalised\nmonitor/train/monitor_0036_normalised\nmonitor/train/monitor_0006_normalised\nmonitor/train/monitor_0171_normalised\nmonitor/train/monitor_0044_normalised\nmonitor/train/monitor_0298_normalised\nmonitor/train/monitor_0464_normalised\nmonitor/train/monitor_0336_normalised\nmonitor/train/monitor_0007_normalised\nmonitor/train/monitor_0356_normalised\nmonitor/train/monitor_0118_normalised\nmonitor/train/monitor_0420_normalised\nmonitor/train/monitor_0433_normalised\nmonitor/train/monitor_0009_normalised\nmonitor/train/monitor_0047_normalised\nmonitor/train/monitor_0323_normalised\nmonitor/train/monitor_0193_normalised\nmonitor/train/monitor_0060_normalised\nmonitor/train/monitor_0198_normalised\nmonitor/train/monitor_0368_normalised\nmonitor/train/monitor_0327_normalised\nmonitor/train/monitor_0073_normalised\nmonitor/train/monitor_0119_normalised\nmonitor/train/monitor_0008_normalised\nmonitor/test/monitor_0536_normalised\nmonitor/test/monitor_0506_normalised\nmonitor/test/monitor_0559_normalised\n
monitor/test/monitor_0542_normalised\nmonitor/test/monitor_0513_normalised\nmonitor/test/monitor_0473_normalised\nmonitor/test/monitor_0550_normalised\nmonitor/test/monitor_0512_normalised\nmonitor/test/monitor_0507_normalised\nmonitor/test/monitor_0498_normalised\nmonitor/test/monitor_0471_normalised\nmonitor/test/monitor_0477_normalised\nmonitor/test/monitor_0468_normalised\nmonitor/test/monitor_0546_normalised\nmonitor/test/monitor_0472_normalised\nmonitor/test/monitor_0499_normalised\nmonitor/test/monitor_0562_normalised\nmonitor/test/monitor_0531_normalised\nmonitor/test/monitor_0530_normalised\nmonitor/test/monitor_0484_normalised\nmonitor/test/monitor_0482_normalised\nmonitor/test/monitor_0523_normalised\nmonitor/test/monitor_0485_normalised\nmonitor/test/monitor_0526_normalised\nmonitor/test/monitor_0491_normalised\nmonitor/test/monitor_0509_normalised\nmonitor/test/monitor_0493_normalised\nmonitor/test/monitor_0552_normalised\nmonitor/test/monitor_0527_normalised\nmonitor/test/monitor_0556_normalised\nmonitor/test/monitor_0502_normalised\nmonitor/test/monitor_0554_normalised\nmonitor/test/monitor_0541_normalised\nmonitor/test/monitor_0483_normalised\nmonitor/test/monitor_0545_normalised\nmonitor/test/monitor_0497_normalised\nmonitor/test/monitor_0480_normalised\nmonitor/test/monitor_0489_normalised\nmonitor/test/monitor_0476_normalised\nmonitor/test/monitor_0516_normalised\nmonitor/test/monitor_0486_normalised\nmonitor/test/monitor_0514_normalised\nmonitor/test/monitor_0520_normalised\nmonitor/test/monitor_0470_normalised\nmonitor/test/monitor_0558_normalised\nmonitor/test/monitor_0535_normalised\nmonitor/test/monitor_0495_normalised\nmonitor/test/monitor_0519_normalised\nmonitor/test/monitor_0511_normalised\nmonitor/test/monitor_0565_normalised\nmonitor/test/monitor_0518_normalised\nmonitor/test/monitor_0543_normalised\nmonitor/test/monitor_0479_normalised\nmonitor/test/monitor_0492_normalised\nmonitor/test/monitor_0553_normalised\nmonitor/test/monitor_052
5_normalised\nmonitor/test/monitor_0533_normalised\nmonitor/test/monitor_0515_normalised\nmonitor/test/monitor_0538_normalised\nmonitor/test/monitor_0517_normalised\nmonitor/test/monitor_0487_normalised\nmonitor/test/monitor_0474_normalised\nmonitor/test/monitor_0557_normalised\nmonitor/test/monitor_0528_normalised\nmonitor/test/monitor_0547_normalised\nmonitor/test/monitor_0510_normalised\nmonitor/test/monitor_0539_normalised\nmonitor/test/monitor_0551_normalised\nmonitor/test/monitor_0540_normalised\nmonitor/test/monitor_0503_normalised\nmonitor/test/monitor_0488_normalised\nmonitor/test/monitor_0500_normalised\nmonitor/test/monitor_0544_normalised\nmonitor/test/monitor_0537_normalised\nmonitor/test/monitor_0501_normalised\nmonitor/test/monitor_0564_normalised\nmonitor/test/monitor_0522_normalised\nmonitor/test/monitor_0521_normalised\nmonitor/test/monitor_0548_normalised\nmonitor/test/monitor_0549_normalised\nmonitor/test/monitor_0475_normalised\nmonitor/test/monitor_0504_normalised\nmonitor/test/monitor_0524_normalised\nmonitor/test/monitor_0529_normalised\nmonitor/test/monitor_0469_normalised\nmonitor/test/monitor_0467_normalised\nmonitor/test/monitor_0560_normalised\nmonitor/test/monitor_0490_normalised\nmonitor/test/monitor_0505_normalised\nmonitor/test/monitor_0494_normalised\nmonitor/test/monitor_0466_normalised\nmonitor/test/monitor_0478_normalised\nmonitor/test/monitor_0496_normalised\nmonitor/test/monitor_0561_normalised\nmonitor/test/monitor_0481_normalised\nmonitor/test/monitor_0508_normalised\nmonitor/test/monitor_0555_normalised\nmonitor/test/monitor_0534_normalised\nmonitor/test/monitor_0532_normalised\nmonitor/test/monitor_0563_normalised\ncone/train/cone_0084_normalised\ncone/train/cone_0121_normalised\ncone/train/cone_0017_normalised\ncone/train/cone_0118_normalised\ncone/train/cone_0147_normalised\ncone/train/cone_0162_normalised\ncone/train/cone_0141_normalised\ncone/train/cone_0112_normalised\ncone/train/cone_0167_normalised\ncone/train/cone_0
166_normalised\ncone/train/cone_0119_normalised\ncone/train/cone_0128_normalised\ncone/train/cone_0050_normalised\ncone/train/cone_0061_normalised\ncone/train/cone_0014_normalised\ncone/train/cone_0132_normalised\ncone/train/cone_0103_normalised\ncone/train/cone_0148_normalised\ncone/train/cone_0060_normalised\ncone/train/cone_0107_normalised\ncone/train/cone_0042_normalised\ncone/train/cone_0057_normalised\ncone/train/cone_0041_normalised\ncone/train/cone_0037_normalised\ncone/train/cone_0055_normalised\ncone/train/cone_0140_normalised\ncone/train/cone_0159_normalised\ncone/train/cone_0083_normalised\ncone/train/cone_0086_normalised\ncone/train/cone_0163_normalised\ncone/train/cone_0016_normalised\ncone/train/cone_0080_normalised\ncone/train/cone_0040_normalised\ncone/train/cone_0032_normalised\ncone/train/cone_0097_normalised\ncone/train/cone_0105_normalised\ncone/train/cone_0150_normalised\ncone/train/cone_0092_normalised\ncone/train/cone_0048_normalised\ncone/train/cone_0054_normalised\ncone/train/cone_0106_normalised\ncone/train/cone_0018_normalised\ncone/train/cone_0089_normalised\ncone/train/cone_0123_normalised\ncone/train/cone_0051_normalised\ncone/train/cone_0052_normalised\ncone/train/cone_0033_normalised\ncone/train/cone_0088_normalised\ncone/train/cone_0028_normalised\ncone/train/cone_0063_normalised\ncone/train/cone_0143_normalised\ncone/train/cone_0029_normalised\ncone/train/cone_0144_normalised\ncone/train/cone_0049_normalised\ncone/train/cone_0134_normalised\ncone/train/cone_0023_normalised\ncone/train/cone_0066_normalised\ncone/train/cone_0153_normalised\ncone/train/cone_0021_normalised\ncone/train/cone_0013_normalised\ncone/train/cone_0098_normalised\ncone/train/cone_0031_normalised\ncone/train/cone_0120_normalised\ncone/train/cone_0034_normalised\ncone/train/cone_0043_normalised\ncone/train/cone_0056_normalised\ncone/train/cone_0068_normalised\ncone/train/cone_0087_normalised\ncone/train/cone_0161_normalised\ncone/train/cone_0038_normalised\ncone
/train/cone_0093_normalised\ncone/train/cone_0067_normalised\ncone/train/cone_0076_normalised\ncone/train/cone_0094_normalised\ncone/train/cone_0136_normalised\ncone/train/cone_0065_normalised\ncone/train/cone_0026_normalised\ncone/train/cone_0126_normalised\ncone/train/cone_0149_normalised\ncone/train/cone_0025_normalised\ncone/train/cone_0122_normalised\ncone/train/cone_0027_normalised\ncone/train/cone_0006_normalised\ncone/train/cone_0130_normalised\ncone/train/cone_0059_normalised\ncone/train/cone_0045_normalised\ncone/train/cone_0079_normalised\ncone/train/cone_0090_normalised\ncone/train/cone_0104_normalised\ncone/train/cone_0078_normalised\ncone/train/cone_0070_normalised\ncone/train/cone_0002_normalised\ncone/train/cone_0010_normalised\ncone/train/cone_0001_normalised\ncone/train/cone_0091_normalised\ncone/train/cone_0146_normalised\ncone/train/cone_0101_normalised\ncone/train/cone_0073_normalised\ncone/train/cone_0155_normalised\ncone/train/cone_0009_normalised\ncone/train/cone_0062_normalised\ncone/train/cone_0137_normalised\ncone/train/cone_0005_normalised\ncone/train/cone_0109_normalised\ncone/train/cone_0110_normalised\ncone/train/cone_0154_normalised\ncone/train/cone_0053_normalised\ncone/train/cone_0003_normalised\ncone/train/cone_0035_normalised\ncone/train/cone_0111_normalised\ncone/train/cone_0139_normalised\ncone/train/cone_0047_normalised\ncone/train/cone_0125_normalised\ncone/train/cone_0058_normalised\ncone/train/cone_0075_normalised\ncone/train/cone_0015_normalised\ncone/train/cone_0072_normalised\ncone/train/cone_0133_normalised\ncone/train/cone_0007_normalised\ncone/train/cone_0165_normalised\ncone/train/cone_0036_normalised\ncone/train/cone_0004_normalised\ncone/train/cone_0164_normalised\ncone/train/cone_0113_normalised\ncone/train/cone_0082_normalised\ncone/train/cone_0030_normalised\ncone/train/cone_0145_normalised\ncone/train/cone_0069_normalised\ncone/train/cone_0129_normalised\ncone/train/cone_0074_normalised\ncone/train/cone_0081_nor
malised\ncone/train/cone_0135_normalised\ncone/train/cone_0046_normalised\ncone/train/cone_0127_normalised\ncone/train/cone_0100_normalised\ncone/train/cone_0124_normalised\ncone/train/cone_0108_normalised\ncone/train/cone_0039_normalised\ncone/train/cone_0115_normalised\ncone/train/cone_0116_normalised\ncone/train/cone_0102_normalised\ncone/train/cone_0096_normalised\ncone/train/cone_0085_normalised\ncone/train/cone_0008_normalised\ncone/train/cone_0114_normalised\ncone/train/cone_0160_normalised\ncone/train/cone_0019_normalised\ncone/train/cone_0020_normalised\ncone/train/cone_0152_normalised\ncone/train/cone_0117_normalised\ncone/train/cone_0158_normalised\ncone/train/cone_0077_normalised\ncone/train/cone_0131_normalised\ncone/train/cone_0138_normalised\ncone/train/cone_0151_normalised\ncone/train/cone_0012_normalised\ncone/train/cone_0044_normalised\ncone/train/cone_0099_normalised\ncone/train/cone_0022_normalised\ncone/train/cone_0064_normalised\ncone/train/cone_0157_normalised\ncone/train/cone_0011_normalised\ncone/train/cone_0142_normalised\ncone/train/cone_0024_normalised\ncone/train/cone_0095_normalised\ncone/train/cone_0071_normalised\ncone/train/cone_0156_normalised\ncone/test/cone_0178_normalised\ncone/test/cone_0172_normalised\ncone/test/cone_0173_normalised\ncone/test/cone_0171_normalised\ncone/test/cone_0185_normalised\ncone/test/cone_0168_normalised\ncone/test/cone_0186_normalised\ncone/test/cone_0175_normalised\ncone/test/cone_0187_normalised\ncone/test/cone_0174_normalised\ncone/test/cone_0177_normalised\ncone/test/cone_0183_normalised\ncone/test/cone_0179_normalised\ncone/test/cone_0181_normalised\ncone/test/cone_0180_normalised\ncone/test/cone_0176_normalised\ncone/test/cone_0184_normalised\ncone/test/cone_0182_normalised\ncone/test/cone_0169_normalised\ncone/test/cone_0170_normalised\npiano/train/piano_0139_normalised\npiano/train/piano_0122_normalised\npiano/train/piano_0004_normalised\npiano/train/piano_0003_normalised\npiano/train/piano_0115_
normalised\npiano/train/piano_0117_normalised\npiano/train/piano_0184_normalised\npiano/train/piano_0062_normalised\npiano/train/piano_0098_normalised\npiano/train/piano_0045_normalised\npiano/train/piano_0221_normalised\npiano/train/piano_0227_normalised\npiano/train/piano_0224_normalised\npiano/train/piano_0130_normalised\npiano/train/piano_0136_normalised\npiano/train/piano_0075_normalised\npiano/train/piano_0015_normalised\npiano/train/piano_0168_normalised\npiano/train/piano_0048_normalised\npiano/train/piano_0101_normalised\npiano/train/piano_0171_normalised\npiano/train/piano_0203_normalised\npiano/train/piano_0135_normalised\npiano/train/piano_0215_normalised\npiano/train/piano_0206_normalised\npiano/train/piano_0167_normalised\npiano/train/piano_0088_normalised\npiano/train/piano_0005_normalised\npiano/train/piano_0137_normalised\npiano/train/piano_0060_normalised\npiano/train/piano_0047_normalised\npiano/train/piano_0163_normalised\npiano/train/piano_0159_normalised\npiano/train/piano_0046_normalised\npiano/train/piano_0185_normalised\npiano/train/piano_0176_normalised\npiano/train/piano_0020_normalised\npiano/train/piano_0077_normalised\npiano/train/piano_0129_normalised\npiano/train/piano_0002_normalised\npiano/train/piano_0140_normalised\npiano/train/piano_0051_normalised\npiano/train/piano_0052_normalised\npiano/train/piano_0170_normalised\npiano/train/piano_0123_normalised\npiano/train/piano_0065_normalised\npiano/train/piano_0126_normalised\npiano/train/piano_0009_normalised\npiano/train/piano_0111_normalised\npiano/train/piano_0012_normalised\npiano/train/piano_0109_normalised\npiano/train/piano_0008_normalised\npiano/train/piano_0090_normalised\npiano/train/piano_0013_normalised\npiano/train/piano_0225_normalised\npiano/train/piano_0084_normalised\npiano/train/piano_0202_normalised\npiano/train/piano_0162_normalised\npiano/train/piano_0076_normalised\npiano/train/piano_0174_normalised\npiano/train/piano_0128_normalised\npiano/train/piano_0106_norma
lised\npiano/train/piano_0169_normalised\npiano/train/piano_0161_normalised\npiano/train/piano_0016_normalised\npiano/train/piano_0138_normalised\npiano/train/piano_0056_normalised\npiano/train/piano_0131_normalised\npiano/train/piano_0007_normalised\npiano/train/piano_0041_normalised\npiano/train/piano_0053_normalised\npiano/train/piano_0193_normalised\npiano/train/piano_0034_normalised\npiano/train/piano_0160_normalised\npiano/train/piano_0144_normalised\npiano/train/piano_0067_normalised\npiano/train/piano_0023_normalised\npiano/train/piano_0081_normalised\npiano/train/piano_0156_normalised\npiano/train/piano_0042_normalised\npiano/train/piano_0145_normalised\npiano/train/piano_0212_normalised\npiano/train/piano_0173_normalised\npiano/train/piano_0142_normalised\npiano/train/piano_0116_normalised\npiano/train/piano_0022_normalised\npiano/train/piano_0208_normalised\npiano/train/piano_0014_normalised\npiano/train/piano_0229_normalised\npiano/train/piano_0124_normalised\npiano/train/piano_0112_normalised\npiano/train/piano_0228_normalised\npiano/train/piano_0019_normalised\npiano/train/piano_0057_normalised\npiano/train/piano_0149_normalised\npiano/train/piano_0061_normalised\npiano/train/piano_0150_normalised\npiano/train/piano_0165_normalised\npiano/train/piano_0092_normalised\npiano/train/piano_0134_normalised\npiano/train/piano_0172_normalised\npiano/train/piano_0018_normalised\npiano/train/piano_0213_normalised\npiano/train/piano_0113_normalised\npiano/train/piano_0079_normalised\npiano/train/piano_0209_normalised\npiano/train/piano_0082_normalised\npiano/train/piano_0189_normalised\npiano/train/piano_0103_normalised\npiano/train/piano_0100_normalised\npiano/train/piano_0155_normalised\npiano/train/piano_0063_normalised\npiano/train/piano_0073_normalised\npiano/train/piano_0039_normalised\npiano/train/piano_0044_normalised\npiano/train/piano_0205_normalised\npiano/train/piano_0070_normalised\npiano/train/piano_0151_normalised\npiano/train/piano_0217_normalised
\npiano/train/piano_0141_normalised\npiano/train/piano_0231_normalised\npiano/train/piano_0078_normalised\npiano/train/piano_0099_normalised\npiano/train/piano_0180_normalised\npiano/train/piano_0119_normalised\npiano/train/piano_0219_normalised\npiano/train/piano_0154_normalised\npiano/train/piano_0210_normalised\npiano/train/piano_0001_normalised\npiano/train/piano_0181_normalised\npiano/train/piano_0083_normalised\npiano/train/piano_0146_normalised\npiano/train/piano_0031_normalised\npiano/train/piano_0199_normalised\npiano/train/piano_0021_normalised\npiano/train/piano_0096_normalised\npiano/train/piano_0069_normalised\npiano/train/piano_0035_normalised\npiano/train/piano_0179_normalised\npiano/train/piano_0214_normalised\npiano/train/piano_0158_normalised\npiano/train/piano_0108_normalised\npiano/train/piano_0166_normalised\npiano/train/piano_0105_normalised\npiano/train/piano_0107_normalised\npiano/train/piano_0094_normalised\npiano/train/piano_0091_normalised\npiano/train/piano_0192_normalised\npiano/train/piano_0133_normalised\npiano/train/piano_0074_normalised\npiano/train/piano_0049_normalised\npiano/train/piano_0072_normalised\npiano/train/piano_0071_normalised\npiano/train/piano_0147_normalised\npiano/train/piano_0029_normalised\npiano/train/piano_0152_normalised\npiano/train/piano_0037_normalised\npiano/train/piano_0043_normalised\npiano/train/piano_0087_normalised\npiano/train/piano_0204_normalised\npiano/train/piano_0207_normalised\npiano/train/piano_0085_normalised\npiano/train/piano_0038_normalised\npiano/train/piano_0095_normalised\npiano/train/piano_0006_normalised\npiano/train/piano_0068_normalised\npiano/train/piano_0177_normalised\npiano/train/piano_0183_normalised\npiano/train/piano_0190_normalised\npiano/train/piano_0157_normalised\npiano/train/piano_0032_normalised\npiano/train/piano_0194_normalised\npiano/train/piano_0050_normalised\npiano/train/piano_0110_normalised\npiano/train/piano_0196_normalised\npiano/train/piano_0195_normalised\npia
no/train/piano_0027_normalised\npiano/train/piano_0187_normalised\npiano/train/piano_0223_normalised\npiano/train/piano_0030_normalised\npiano/train/piano_0114_normalised\npiano/train/piano_0127_normalised\npiano/train/piano_0143_normalised\npiano/train/piano_0188_normalised\npiano/train/piano_0200_normalised\npiano/train/piano_0125_normalised\npiano/train/piano_0104_normalised\npiano/train/piano_0132_normalised\npiano/train/piano_0024_normalised\npiano/train/piano_0121_normalised\npiano/train/piano_0080_normalised\npiano/train/piano_0097_normalised\npiano/train/piano_0026_normalised\npiano/train/piano_0040_normalised\npiano/train/piano_0201_normalised\npiano/train/piano_0182_normalised\npiano/train/piano_0175_normalised\npiano/train/piano_0153_normalised\npiano/train/piano_0059_normalised\npiano/train/piano_0120_normalised\npiano/train/piano_0033_normalised\npiano/train/piano_0064_normalised\npiano/train/piano_0066_normalised\npiano/train/piano_0028_normalised\npiano/train/piano_0010_normalised\npiano/train/piano_0178_normalised\npiano/train/piano_0191_normalised\npiano/train/piano_0054_normalised\npiano/train/piano_0102_normalised\npiano/train/piano_0055_normalised\npiano/train/piano_0025_normalised\npiano/train/piano_0220_normalised\npiano/train/piano_0211_normalised\npiano/train/piano_0086_normalised\npiano/train/piano_0148_normalised\npiano/train/piano_0222_normalised\npiano/train/piano_0198_normalised\npiano/train/piano_0230_normalised\npiano/train/piano_0216_normalised\npiano/train/piano_0186_normalised\npiano/train/piano_0118_normalised\npiano/train/piano_0036_normalised\npiano/train/piano_0011_normalised\npiano/train/piano_0089_normalised\npiano/train/piano_0197_normalised\npiano/train/piano_0017_normalised\npiano/train/piano_0093_normalised\npiano/train/piano_0226_normalised\npiano/train/piano_0058_normalised\npiano/train/piano_0218_normalised\npiano/train/piano_0164_normalised\npiano/test/piano_0282_normalised\npiano/test/piano_0328_normalised\npiano/test
/piano_0234_normalised\npiano/test/piano_0317_normalised\npiano/test/piano_0295_normalised\npiano/test/piano_0281_normalised\npiano/test/piano_0302_normalised\npiano/test/piano_0258_normalised\npiano/test/piano_0287_normalised\npiano/test/piano_0267_normalised\npiano/test/piano_0293_normalised\npiano/test/piano_0253_normalised\npiano/test/piano_0313_normalised\npiano/test/piano_0326_normalised\npiano/test/piano_0299_normalised\npiano/test/piano_0278_normalised\npiano/test/piano_0312_normalised\npiano/test/piano_0248_normalised\npiano/test/piano_0314_normalised\npiano/test/piano_0305_normalised\npiano/test/piano_0320_normalised\npiano/test/piano_0310_normalised\npiano/test/piano_0241_normalised\npiano/test/piano_0296_normalised\npiano/test/piano_0284_normalised\npiano/test/piano_0270_normalised\npiano/test/piano_0297_normalised\npiano/test/piano_0307_normalised\npiano/test/piano_0294_normalised\npiano/test/piano_0243_normalised\npiano/test/piano_0247_normalised\npiano/test/piano_0285_normalised\npiano/test/piano_0286_normalised\npiano/test/piano_0323_normalised\npiano/test/piano_0275_normalised\npiano/test/piano_0260_normalised\npiano/test/piano_0252_normalised\npiano/test/piano_0259_normalised\npiano/test/piano_0311_normalised\npiano/test/piano_0239_normalised\npiano/test/piano_0290_normalised\npiano/test/piano_0322_normalised\npiano/test/piano_0262_normalised\npiano/test/piano_0318_normalised\npiano/test/piano_0265_normalised\npiano/test/piano_0233_normalised\npiano/test/piano_0232_normalised\npiano/test/piano_0254_normalised\npiano/test/piano_0246_normalised\npiano/test/piano_0292_normalised\npiano/test/piano_0288_normalised\npiano/test/piano_0303_normalised\npiano/test/piano_0263_normalised\npiano/test/piano_0236_normalised\npiano/test/piano_0264_normalised\npiano/test/piano_0316_normalised\npiano/test/piano_0325_normalised\npiano/test/piano_0250_normalised\npiano/test/piano_0283_normalised\npiano/test/piano_0272_normalised\npiano/test/piano_0315_normalised\npian
o/test/piano_0245_normalised\npiano/test/piano_0321_normalised\npiano/test/piano_0319_normalised\npiano/test/piano_0255_normalised\npiano/test/piano_0276_normalised\npiano/test/piano_0331_normalised\npiano/test/piano_0309_normalised\npiano/test/piano_0327_normalised\npiano/test/piano_0257_normalised\npiano/test/piano_0242_normalised\npiano/test/piano_0240_normalised\npiano/test/piano_0269_normalised\npiano/test/piano_0268_normalised\npiano/test/piano_0280_normalised\npiano/test/piano_0244_normalised\npiano/test/piano_0289_normalised\npiano/test/piano_0300_normalised\npiano/test/piano_0279_normalised\npiano/test/piano_0251_normalised\npiano/test/piano_0256_normalised\npiano/test/piano_0274_normalised\npiano/test/piano_0249_normalised\npiano/test/piano_0324_normalised\npiano/test/piano_0237_normalised\npiano/test/piano_0261_normalised\npiano/test/piano_0238_normalised\npiano/test/piano_0291_normalised\npiano/test/piano_0330_normalised\npiano/test/piano_0298_normalised\npiano/test/piano_0277_normalised\npiano/test/piano_0271_normalised\npiano/test/piano_0273_normalised\npiano/test/piano_0266_normalised\npiano/test/piano_0304_normalised\npiano/test/piano_0306_normalised\npiano/test/piano_0301_normalised\npiano/test/piano_0308_normalised\npiano/test/piano_0329_normalised\npiano/test/piano_0235_normalised\nkeyboard/train/keyboard_0122_normalised\nkeyboard/train/keyboard_0144_normalised\nkeyboard/train/keyboard_0143_normalised\nkeyboard/train/keyboard_0130_normalised\nkeyboard/train/keyboard_0071_normalised\nkeyboard/train/keyboard_0014_normalised\nkeyboard/train/keyboard_0021_normalised\nkeyboard/train/keyboard_0125_normalised\nkeyboard/train/keyboard_0103_normalised\nkeyboard/train/keyboard_0032_normalised\nkeyboard/train/keyboard_0061_normalised\nkeyboard/train/keyboard_0015_normalised\nkeyboard/train/keyboard_0054_normalised\nkeyboard/train/keyboard_0006_normalised\nkeyboard/train/keyboard_0069_normalised\nkeyboard/train/keyboard_0104_normalised\nkeyboard/train/keyboar
d_0093_normalised\nkeyboard/train/keyboard_0034_normalised\nkeyboard/train/keyboard_0051_normalised\nkeyboard/train/keyboard_0057_normalised\nkeyboard/train/keyboard_0120_normalised\nkeyboard/train/keyboard_0058_normalised\nkeyboard/train/keyboard_0074_normalised\nkeyboard/train/keyboard_0090_normalised\nkeyboard/train/keyboard_0095_normalised\nkeyboard/train/keyboard_0113_normalised\nkeyboard/train/keyboard_0097_normalised\nkeyboard/train/keyboard_0042_normalised\nkeyboard/train/keyboard_0003_normalised\nkeyboard/train/keyboard_0001_normalised\nkeyboard/train/keyboard_0128_normalised\nkeyboard/train/keyboard_0108_normalised\nkeyboard/train/keyboard_0009_normalised\nkeyboard/train/keyboard_0065_normalised\nkeyboard/train/keyboard_0138_normalised\nkeyboard/train/keyboard_0029_normalised\nkeyboard/train/keyboard_0088_normalised\nkeyboard/train/keyboard_0127_normalised\nkeyboard/train/keyboard_0012_normalised\nkeyboard/train/keyboard_0041_normalised\nkeyboard/train/keyboard_0081_normalised\nkeyboard/train/keyboard_0022_normalised\nkeyboard/train/keyboard_0089_normalised\nkeyboard/train/keyboard_0115_normalised\nkeyboard/train/keyboard_0075_normalised\nkeyboard/train/keyboard_0076_normalised\nkeyboard/train/keyboard_0017_normalised\nkeyboard/train/keyboard_0100_normalised\nkeyboard/train/keyboard_0101_normalised\nkeyboard/train/keyboard_0141_normalised\nkeyboard/train/keyboard_0142_normalised\nkeyboard/train/keyboard_0023_normalised\nkeyboard/train/keyboard_0027_normalised\nkeyboard/train/keyboard_0136_normalised\nkeyboard/train/keyboard_0094_normalised\nkeyboard/train/keyboard_0039_normalised\nkeyboard/train/keyboard_0018_normalised\nkeyboard/train/keyboard_0082_normalised\nkeyboard/train/keyboard_0020_normalised\nkeyboard/train/keyboard_0109_normalised\nkeyboard/train/keyboard_0112_normalised\nkeyboard/train/keyboard_0077_normalised\nkeyboard/train/keyboard_0055_normalised\nkeyboard/train/keyboard_0086_normalised\nkeyboard/train/keyboard_0121_normalised\nkeyboard/trai
n/keyboard_0035_normalised\nkeyboard/train/keyboard_0028_normalised\nkeyboard/train/keyboard_0053_normalised\nkeyboard/train/keyboard_0005_normalised\nkeyboard/train/keyboard_0080_normalised\nkeyboard/train/keyboard_0126_normalised\nkeyboard/train/keyboard_0016_normalised\nkeyboard/train/keyboard_0117_normalised\nkeyboard/train/keyboard_0132_normalised\nkeyboard/train/keyboard_0083_normalised\nkeyboard/train/keyboard_0114_normalised\nkeyboard/train/keyboard_0040_normalised\nkeyboard/train/keyboard_0107_normalised\nkeyboard/train/keyboard_0106_normalised\nkeyboard/train/keyboard_0131_normalised\nkeyboard/train/keyboard_0079_normalised\nkeyboard/train/keyboard_0102_normalised\nkeyboard/train/keyboard_0073_normalised\nkeyboard/train/keyboard_0048_normalised\nkeyboard/train/keyboard_0066_normalised\nkeyboard/train/keyboard_0026_normalised\nkeyboard/train/keyboard_0092_normalised\nkeyboard/train/keyboard_0031_normalised\nkeyboard/train/keyboard_0099_normalised\nkeyboard/train/keyboard_0024_normalised\nkeyboard/train/keyboard_0110_normalised\nkeyboard/train/keyboard_0011_normalised\nkeyboard/train/keyboard_0137_normalised\nkeyboard/train/keyboard_0105_normalised\nkeyboard/train/keyboard_0134_normalised\nkeyboard/train/keyboard_0067_normalised\nkeyboard/train/keyboard_0129_normalised\nkeyboard/train/keyboard_0052_normalised\nkeyboard/train/keyboard_0133_normalised\nkeyboard/train/keyboard_0119_normalised\nkeyboard/train/keyboard_0004_normalised\nkeyboard/train/keyboard_0084_normalised\nkeyboard/train/keyboard_0013_normalised\nkeyboard/train/keyboard_0118_normalised\nkeyboard/train/keyboard_0096_normalised\nkeyboard/train/keyboard_0025_normalised\nkeyboard/train/keyboard_0038_normalised\nkeyboard/train/keyboard_0043_normalised\nkeyboard/train/keyboard_0145_normalised\nkeyboard/train/keyboard_0068_normalised\nkeyboard/train/keyboard_0036_normalised\nkeyboard/train/keyboard_0087_normalised\nkeyboard/train/keyboard_0008_normalised\nkeyboard/train/keyboard_0123_normalised\nkeyb
oard/train/keyboard_0046_normalised\nkeyboard/train/keyboard_0030_normalised\nkeyboard/train/keyboard_0019_normalised\nkeyboard/train/keyboard_0060_normalised\nkeyboard/train/keyboard_0072_normalised\nkeyboard/train/keyboard_0085_normalised\nkeyboard/train/keyboard_0010_normalised\nkeyboard/train/keyboard_0135_normalised\nkeyboard/train/keyboard_0063_normalised\nkeyboard/train/keyboard_0007_normalised\nkeyboard/train/keyboard_0056_normalised\nkeyboard/train/keyboard_0064_normalised\nkeyboard/train/keyboard_0091_normalised\nkeyboard/train/keyboard_0033_normalised\nkeyboard/train/keyboard_0044_normalised\nkeyboard/train/keyboard_0070_normalised\nkeyboard/train/keyboard_0111_normalised\nkeyboard/train/keyboard_0050_normalised\nkeyboard/train/keyboard_0047_normalised\nkeyboard/train/keyboard_0140_normalised\nkeyboard/train/keyboard_0078_normalised\nkeyboard/train/keyboard_0059_normalised\nkeyboard/train/keyboard_0139_normalised\nkeyboard/train/keyboard_0037_normalised\nkeyboard/train/keyboard_0062_normalised\nkeyboard/train/keyboard_0116_normalised\nkeyboard/train/keyboard_0049_normalised\nkeyboard/train/keyboard_0045_normalised\nkeyboard/train/keyboard_0124_normalised\nkeyboard/train/keyboard_0002_normalised\nkeyboard/train/keyboard_0098_normalised\nkeyboard/test/keyboard_0158_normalised\nkeyboard/test/keyboard_0165_normalised\nkeyboard/test/keyboard_0157_normalised\nkeyboard/test/keyboard_0160_normalised\nkeyboard/test/keyboard_0150_normalised\nkeyboard/test/keyboard_0151_normalised\nkeyboard/test/keyboard_0163_normalised\nkeyboard/test/keyboard_0153_normalised\nkeyboard/test/keyboard_0162_normalised\nkeyboard/test/keyboard_0152_normalised\nkeyboard/test/keyboard_0155_normalised\nkeyboard/test/keyboard_0164_normalised\nkeyboard/test/keyboard_0146_normalised\nkeyboard/test/keyboard_0161_normalised\nkeyboard/test/keyboard_0149_normalised\nkeyboard/test/keyboard_0159_normalised\nkeyboard/test/keyboard_0156_normalised\nkeyboard/test/keyboard_0154_normalised\nkeyboard/test
/keyboard_0147_normalised\nkeyboard/test/keyboard_0148_normalised\nguitar/train/guitar_0127_normalised\nguitar/train/guitar_0073_normalised\nguitar/train/guitar_0063_normalised\nguitar/train/guitar_0016_normalised\nguitar/train/guitar_0029_normalised\nguitar/train/guitar_0109_normalised\nguitar/train/guitar_0039_normalised\nguitar/train/guitar_0065_normalised\nguitar/train/guitar_0035_normalised\nguitar/train/guitar_0096_normalised\nguitar/train/guitar_0111_normalised\nguitar/train/guitar_0137_normalised\nguitar/train/guitar_0069_normalised\nguitar/train/guitar_0125_normalised\nguitar/train/guitar_0026_normalised\nguitar/train/guitar_0116_normalised\nguitar/train/guitar_0133_normalised\nguitar/train/guitar_0086_normalised\nguitar/train/guitar_0041_normalised\nguitar/train/guitar_0151_normalised\nguitar/train/guitar_0131_normalised\nguitar/train/guitar_0130_normalised\nguitar/train/guitar_0018_normalised\nguitar/train/guitar_0019_normalised\nguitar/train/guitar_0152_normalised\nguitar/train/guitar_0036_normalised\nguitar/train/guitar_0107_normalised\nguitar/train/guitar_0059_normalised\nguitar/train/guitar_0044_normalised\nguitar/train/guitar_0033_normalised\nguitar/train/guitar_0129_normalised\nguitar/train/guitar_0141_normalised\nguitar/train/guitar_0061_normalised\nguitar/train/guitar_0022_normalised\nguitar/train/guitar_0046_normalised\nguitar/train/guitar_0034_normalised\nguitar/train/guitar_0124_normalised\nguitar/train/guitar_0020_normalised\nguitar/train/guitar_0089_normalised\nguitar/train/guitar_0082_normalised\nguitar/train/guitar_0045_normalised\nguitar/train/guitar_0012_normalised\nguitar/train/guitar_0114_normalised\nguitar/train/guitar_0076_normalised\nguitar/train/guitar_0098_normalised\nguitar/train/guitar_0083_normalised\nguitar/train/guitar_0060_normalised\nguitar/train/guitar_0055_normalised\nguitar/train/guitar_0027_normalised\nguitar/train/guitar_0110_normalised\nguitar/train/guitar_0074_normalised\nguitar/train/guitar_0101_normalised\nguitar/tr
ain/guitar_0052_normalised\nguitar/train/guitar_0150_normalised\nguitar/train/guitar_0084_normalised\nguitar/train/guitar_0139_normalised\nguitar/train/guitar_0108_normalised\nguitar/train/guitar_0138_normalised\nguitar/train/guitar_0104_normalised\nguitar/train/guitar_0062_normalised\nguitar/train/guitar_0153_normalised\nguitar/train/guitar_0068_normalised\nguitar/train/guitar_0093_normalised\nguitar/train/guitar_0066_normalised\nguitar/train/guitar_0140_normalised\nguitar/train/guitar_0117_normalised\nguitar/train/guitar_0080_normalised\nguitar/train/guitar_0067_normalised\nguitar/train/guitar_0128_normalised\nguitar/train/guitar_0103_normalised\nguitar/train/guitar_0023_normalised\nguitar/train/guitar_0113_normalised\nguitar/train/guitar_0070_normalised\nguitar/train/guitar_0054_normalised\nguitar/train/guitar_0079_normalised\nguitar/train/guitar_0006_normalised\nguitar/train/guitar_0088_normalised\nguitar/train/guitar_0031_normalised\nguitar/train/guitar_0105_normalised\nguitar/train/guitar_0040_normalised\nguitar/train/guitar_0090_normalised\nguitar/train/guitar_0102_normalised\nguitar/train/guitar_0147_normalised\nguitar/train/guitar_0038_normalised\nguitar/train/guitar_0025_normalised\nguitar/train/guitar_0043_normalised\nguitar/train/guitar_0094_normalised\nguitar/train/guitar_0028_normalised\nguitar/train/guitar_0119_normalised\nguitar/train/guitar_0011_normalised\nguitar/train/guitar_0009_normalised\nguitar/train/guitar_0072_normalised\nguitar/train/guitar_0132_normalised\nguitar/train/guitar_0053_normalised\nguitar/train/guitar_0120_normalised\nguitar/train/guitar_0142_normalised\nguitar/train/guitar_0135_normalised\nguitar/train/guitar_0097_normalised\nguitar/train/guitar_0148_normalised\nguitar/train/guitar_0099_normalised\nguitar/train/guitar_0001_normalised\nguitar/train/guitar_0030_normalised\nguitar/train/guitar_0136_normalised\nguitar/train/guitar_0056_normalised\nguitar/train/guitar_0047_normalised\nguitar/train/guitar_0021_normalised\nguitar/trai
n/guitar_0014_normalised\nguitar/train/guitar_0095_normalised\nguitar/train/guitar_0051_normalised\nguitar/train/guitar_0057_normalised\nguitar/train/guitar_0017_normalised\nguitar/train/guitar_0007_normalised\nguitar/train/guitar_0005_normalised\nguitar/train/guitar_0049_normalised\nguitar/train/guitar_0048_normalised\nguitar/train/guitar_0003_normalised\nguitar/train/guitar_0024_normalised\nguitar/train/guitar_0075_normalised\nguitar/train/guitar_0126_normalised\nguitar/train/guitar_0078_normalised\nguitar/train/guitar_0092_normalised\nguitar/train/guitar_0002_normalised\nguitar/train/guitar_0085_normalised\nguitar/train/guitar_0154_normalised\nguitar/train/guitar_0112_normalised\nguitar/train/guitar_0058_normalised\nguitar/train/guitar_0010_normalised\nguitar/train/guitar_0122_normalised\nguitar/train/guitar_0123_normalised\nguitar/train/guitar_0032_normalised\nguitar/train/guitar_0064_normalised\nguitar/train/guitar_0008_normalised\nguitar/train/guitar_0143_normalised\nguitar/train/guitar_0037_normalised\nguitar/train/guitar_0004_normalised\nguitar/train/guitar_0146_normalised\nguitar/train/guitar_0050_normalised\nguitar/train/guitar_0106_normalised\nguitar/train/guitar_0118_normalised\nguitar/train/guitar_0145_normalised\nguitar/train/guitar_0015_normalised\nguitar/train/guitar_0081_normalised\nguitar/train/guitar_0013_normalised\nguitar/train/guitar_0100_normalised\nguitar/train/guitar_0115_normalised\nguitar/train/guitar_0144_normalised\nguitar/train/guitar_0077_normalised\nguitar/train/guitar_0121_normalised\nguitar/train/guitar_0071_normalised\nguitar/train/guitar_0134_normalised\nguitar/train/guitar_0155_normalised\nguitar/train/guitar_0087_normalised\nguitar/train/guitar_0042_normalised\nguitar/train/guitar_0149_normalised\nguitar/train/guitar_0091_normalised\nguitar/test/guitar_0161_normalised\nguitar/test/guitar_0204_normalised\nguitar/test/guitar_0169_normalised\nguitar/test/guitar_0197_normalised\nguitar/test/guitar_0202_normalised\nguitar/test/guitar
_0215_normalised\nguitar/test/guitar_0242_normalised\nguitar/test/guitar_0244_normalised\nguitar/test/guitar_0222_normalised\nguitar/test/guitar_0175_normalised\nguitar/test/guitar_0214_normalised\nguitar/test/guitar_0190_normalised\nguitar/test/guitar_0201_normalised\nguitar/test/guitar_0246_normalised\nguitar/test/guitar_0164_normalised\nguitar/test/guitar_0213_normalised\nguitar/test/guitar_0228_normalised\nguitar/test/guitar_0173_normalised\nguitar/test/guitar_0207_normalised\nguitar/test/guitar_0248_normalised\nguitar/test/guitar_0189_normalised\nguitar/test/guitar_0192_normalised\nguitar/test/guitar_0255_normalised\nguitar/test/guitar_0177_normalised\nguitar/test/guitar_0251_normalised\nguitar/test/guitar_0182_normalised\nguitar/test/guitar_0188_normalised\nguitar/test/guitar_0157_normalised\nguitar/test/guitar_0212_normalised\nguitar/test/guitar_0176_normalised\nguitar/test/guitar_0162_normalised\nguitar/test/guitar_0241_normalised\nguitar/test/guitar_0236_normalised\nguitar/test/guitar_0240_normalised\nguitar/test/guitar_0219_normalised\nguitar/test/guitar_0250_normalised\nguitar/test/guitar_0171_normalised\nguitar/test/guitar_0167_normalised\nguitar/test/guitar_0184_normalised\nguitar/test/guitar_0218_normalised\nguitar/test/guitar_0245_normalised\nguitar/test/guitar_0234_normalised\nguitar/test/guitar_0174_normalised\nguitar/test/guitar_0224_normalised\nguitar/test/guitar_0205_normalised\nguitar/test/guitar_0196_normalised\nguitar/test/guitar_0217_normalised\nguitar/test/guitar_0203_normalised\nguitar/test/guitar_0230_normalised\nguitar/test/guitar_0195_normalised\nguitar/test/guitar_0249_normalised\nguitar/test/guitar_0226_normalised\nguitar/test/guitar_0183_normalised\nguitar/test/guitar_0229_normalised\nguitar/test/guitar_0252_normalised\nguitar/test/guitar_0194_normalised\nguitar/test/guitar_0238_normalised\nguitar/test/guitar_0247_normalised\nguitar/test/guitar_0199_normalised\nguitar/test/guitar_0227_normalised\nguitar/test/guitar_0209_normalised\ngu
itar/test/guitar_0186_normalised\nguitar/test/guitar_0216_normalised\nguitar/test/guitar_0159_normalised\nguitar/test/guitar_0200_normalised\nguitar/test/guitar_0232_normalised\nguitar/test/guitar_0172_normalised\nguitar/test/guitar_0233_normalised\nguitar/test/guitar_0163_normalised\nguitar/test/guitar_0225_normalised\nguitar/test/guitar_0231_normalised\nguitar/test/guitar_0243_normalised\nguitar/test/guitar_0170_normalised\nguitar/test/guitar_0156_normalised\nguitar/test/guitar_0220_normalised\nguitar/test/guitar_0179_normalised\nguitar/test/guitar_0239_normalised\nguitar/test/guitar_0191_normalised\nguitar/test/guitar_0254_normalised\nguitar/test/guitar_0168_normalised\nguitar/test/guitar_0198_normalised\nguitar/test/guitar_0158_normalised\nguitar/test/guitar_0206_normalised\nguitar/test/guitar_0210_normalised\nguitar/test/guitar_0208_normalised\nguitar/test/guitar_0160_normalised\nguitar/test/guitar_0178_normalised\nguitar/test/guitar_0193_normalised\nguitar/test/guitar_0185_normalised\nguitar/test/guitar_0165_normalised\nguitar/test/guitar_0221_normalised\nguitar/test/guitar_0235_normalised\nguitar/test/guitar_0223_normalised\nguitar/test/guitar_0253_normalised\nguitar/test/guitar_0187_normalised\nguitar/test/guitar_0180_normalised\nguitar/test/guitar_0181_normalised\nguitar/test/guitar_0211_normalised\nguitar/test/guitar_0166_normalised\nguitar/test/guitar_0237_normalised\nnight_stand/train/night_stand_0163_normalised\nnight_stand/train/night_stand_0065_normalised\nnight_stand/train/night_stand_0069_normalised\nnight_stand/train/night_stand_0091_normalised\nnight_stand/train/night_stand_0067_normalised\nnight_stand/train/night_stand_0096_normalised\nnight_stand/train/night_stand_0129_normalised\nnight_stand/train/night_stand_0119_normalised\nnight_stand/train/night_stand_0095_normalised\nnight_stand/train/night_stand_0099_normalised\nnight_stand/train/night_stand_0110_normalised\nnight_stand/train/night_stand_0032_normalised\nnight_stand/train/night_stand_0183
_normalised\nnight_stand/train/night_stand_0117_normalised\nnight_stand/train/night_stand_0134_normalised\nnight_stand/train/night_stand_0013_normalised\nnight_stand/train/night_stand_0145_normalised\nnight_stand/train/night_stand_0061_normalised\nnight_stand/train/night_stand_0177_normalised\nnight_stand/train/night_stand_0189_normalised\nnight_stand/train/night_stand_0175_normalised\nnight_stand/train/night_stand_0044_normalised\nnight_stand/train/night_stand_0004_normalised\nnight_stand/train/night_stand_0072_normalised\nnight_stand/train/night_stand_0015_normalised\nnight_stand/train/night_stand_0098_normalised\nnight_stand/train/night_stand_0086_normalised\nnight_stand/train/night_stand_0042_normalised\nnight_stand/train/night_stand_0041_normalised\nnight_stand/train/night_stand_0146_normalised\nnight_stand/train/night_stand_0006_normalised\nnight_stand/train/night_stand_0008_normalised\nnight_stand/train/night_stand_0046_normalised\nnight_stand/train/night_stand_0023_normalised\nnight_stand/train/night_stand_0172_normalised\nnight_stand/train/night_stand_0026_normalised\nnight_stand/train/night_stand_0007_normalised\nnight_stand/train/night_stand_0150_normalised\nnight_stand/train/night_stand_0100_normalised\nnight_stand/train/night_stand_0194_normalised\nnight_stand/train/night_stand_0155_normalised\nnight_stand/train/night_stand_0076_normalised\nnight_stand/train/night_stand_0154_normalised\nnight_stand/train/night_stand_0143_normalised\nnight_stand/train/night_stand_0149_normalised\nnight_stand/train/night_stand_0003_normalised\nnight_stand/train/night_stand_0055_normalised\nnight_stand/train/night_stand_0137_normalised\nnight_stand/train/night_stand_0171_normalised\nnight_stand/train/night_stand_0123_normalised\nnight_stand/train/night_stand_0002_normalised\nnight_stand/train/night_stand_0071_normalised\nnight_stand/train/night_stand_0092_normalised\nnight_stand/train/night_stand_0011_normalised\nnight_stand/train/night_stand_0084_normalised\nnight_stand/t
rain/night_stand_0197_normalised\nnight_stand/train/night_stand_0118_normalised\nnight_stand/train/night_stand_0130_normalised\nnight_stand/train/night_stand_0187_normalised\nnight_stand/train/night_stand_0186_normalised\nnight_stand/train/night_stand_0038_normalised\nnight_stand/train/night_stand_0153_normalised\nnight_stand/train/night_stand_0120_normalised\nnight_stand/train/night_stand_0102_normalised\nnight_stand/train/night_stand_0126_normalised\nnight_stand/train/night_stand_0020_normalised\nnight_stand/train/night_stand_0115_normalised\nnight_stand/train/night_stand_0090_normalised\nnight_stand/train/night_stand_0162_normalised\nnight_stand/train/night_stand_0089_normalised\nnight_stand/train/night_stand_0063_normalised\nnight_stand/train/night_stand_0082_normalised\nnight_stand/train/night_stand_0027_normalised\nnight_stand/train/night_stand_0124_normalised\nnight_stand/train/night_stand_0018_normalised\nnight_stand/train/night_stand_0059_normalised\nnight_stand/train/night_stand_0048_normalised\nnight_stand/train/night_stand_0159_normalised\nnight_stand/train/night_stand_0093_normalised\nnight_stand/train/night_stand_0019_normalised\nnight_stand/train/night_stand_0022_normalised\nnight_stand/train/night_stand_0104_normalised\nnight_stand/train/night_stand_0085_normalised\nnight_stand/train/night_stand_0035_normalised\nnight_stand/train/night_stand_0168_normalised\nnight_stand/train/night_stand_0111_normalised\nnight_stand/train/night_stand_0128_normalised\nnight_stand/train/night_stand_0049_normalised\nnight_stand/train/night_stand_0152_normalised\nnight_stand/train/night_stand_0079_normalised\nnight_stand/train/night_stand_0057_normalised\nnight_stand/train/night_stand_0005_normalised\nnight_stand/train/night_stand_0198_normalised\nnight_stand/train/night_stand_0017_normalised\nnight_stand/train/night_stand_0188_normalised\nnight_stand/train/night_stand_0101_normalised\nnight_stand/train/night_stand_0068_normalised\nnight_stand/train/night_stand_0160_norm
alised\nnight_stand/train/night_stand_0073_normalised\nnight_stand/train/night_stand_0029_normalised\nnight_stand/train/night_stand_0052_normalised\nnight_stand/train/night_stand_0108_normalised\nnight_stand/train/night_stand_0151_normalised\nnight_stand/train/night_stand_0028_normalised\nnight_stand/train/night_stand_0121_normalised\nnight_stand/train/night_stand_0136_normalised\nnight_stand/train/night_stand_0107_normalised\nnight_stand/train/night_stand_0058_normalised\nnight_stand/train/night_stand_0097_normalised\nnight_stand/train/night_stand_0165_normalised\nnight_stand/train/night_stand_0167_normalised\nnight_stand/train/night_stand_0060_normalised\nnight_stand/train/night_stand_0050_normalised\nnight_stand/train/night_stand_0181_normalised\nnight_stand/train/night_stand_0012_normalised\nnight_stand/train/night_stand_0056_normalised\nnight_stand/train/night_stand_0087_normalised\nnight_stand/train/night_stand_0192_normalised\nnight_stand/train/night_stand_0105_normalised\nnight_stand/train/night_stand_0034_normalised\nnight_stand/train/night_stand_0156_normalised\nnight_stand/train/night_stand_0021_normalised\nnight_stand/train/night_stand_0040_normalised\nnight_stand/train/night_stand_0081_normalised\nnight_stand/train/night_stand_0064_normalised\nnight_stand/train/night_stand_0031_normalised\nnight_stand/train/night_stand_0088_normalised\nnight_stand/train/night_stand_0190_normalised\nnight_stand/train/night_stand_0033_normalised\nnight_stand/train/night_stand_0199_normalised\nnight_stand/train/night_stand_0070_normalised\nnight_stand/train/night_stand_0080_normalised\nnight_stand/train/night_stand_0122_normalised\nnight_stand/train/night_stand_0135_normalised\nnight_stand/train/night_stand_0078_normalised\nnight_stand/train/night_stand_0009_normalised\nnight_stand/train/night_stand_0182_normalised\nnight_stand/train/night_stand_0147_normalised\nnight_stand/train/night_stand_0184_normalised\nnight_stand/train/night_stand_0083_normalised\nnight_stand/train/
night_stand_0170_normalised\nnight_stand/train/night_stand_0094_normalised\nnight_stand/train/night_stand_0173_normalised\nnight_stand/train/night_stand_0054_normalised\nnight_stand/train/night_stand_0045_normalised\nnight_stand/train/night_stand_0036_normalised\nnight_stand/train/night_stand_0075_normalised\nnight_stand/train/night_stand_0138_normalised\nnight_stand/train/night_stand_0142_normalised\nnight_stand/train/night_stand_0024_normalised\nnight_stand/train/night_stand_0164_normalised\nnight_stand/train/night_stand_0133_normalised\nnight_stand/train/night_stand_0010_normalised\nnight_stand/train/night_stand_0132_normalised\nnight_stand/train/night_stand_0140_normalised\nnight_stand/train/night_stand_0161_normalised\nnight_stand/train/night_stand_0109_normalised\nnight_stand/train/night_stand_0196_normalised\nnight_stand/train/night_stand_0166_normalised\nnight_stand/train/night_stand_0116_normalised\nnight_stand/train/night_stand_0174_normalised\nnight_stand/train/night_stand_0193_normalised\nnight_stand/train/night_stand_0169_normalised\nnight_stand/train/night_stand_0043_normalised\nnight_stand/train/night_stand_0176_normalised\nnight_stand/train/night_stand_0127_normalised\nnight_stand/train/night_stand_0062_normalised\nnight_stand/train/night_stand_0074_normalised\nnight_stand/train/night_stand_0039_normalised\nnight_stand/train/night_stand_0103_normalised\nnight_stand/train/night_stand_0016_normalised\nnight_stand/train/night_stand_0112_normalised\nnight_stand/train/night_stand_0053_normalised\nnight_stand/train/night_stand_0077_normalised\nnight_stand/train/night_stand_0179_normalised\nnight_stand/train/night_stand_0051_normalised\nnight_stand/train/night_stand_0191_normalised\nnight_stand/train/night_stand_0047_normalised\nnight_stand/train/night_stand_0066_normalised\nnight_stand/train/night_stand_0014_normalised\nnight_stand/train/night_stand_0157_normalised\nnight_stand/train/night_stand_0001_normalised\nnight_stand/train/night_stand_0178_normalise
d\nnight_stand/train/night_stand_0131_normalised\nnight_stand/train/night_stand_0200_normalised\nnight_stand/train/night_stand_0185_normalised\nnight_stand/train/night_stand_0158_normalised\nnight_stand/train/night_stand_0030_normalised\nnight_stand/train/night_stand_0106_normalised\nnight_stand/train/night_stand_0148_normalised\nnight_stand/train/night_stand_0037_normalised\nnight_stand/train/night_stand_0113_normalised\nnight_stand/train/night_stand_0025_normalised\nnight_stand/train/night_stand_0114_normalised\nnight_stand/train/night_stand_0139_normalised\nnight_stand/train/night_stand_0125_normalised\nnight_stand/train/night_stand_0180_normalised\nnight_stand/train/night_stand_0141_normalised\nnight_stand/train/night_stand_0144_normalised\nnight_stand/train/night_stand_0195_normalised\nnight_stand/test/night_stand_0280_normalised\nnight_stand/test/night_stand_0263_normalised\nnight_stand/test/night_stand_0256_normalised\nnight_stand/test/night_stand_0262_normalised\nnight_stand/test/night_stand_0233_normalised\nnight_stand/test/night_stand_0253_normalised\nnight_stand/test/night_stand_0279_normalised\nnight_stand/test/night_stand_0207_normalised\nnight_stand/test/night_stand_0273_normalised\nnight_stand/test/night_stand_0281_normalised\nnight_stand/test/night_stand_0252_normalised\nnight_stand/test/night_stand_0250_normalised\nnight_stand/test/night_stand_0278_normalised\nnight_stand/test/night_stand_0255_normalised\nnight_stand/test/night_stand_0204_normalised\nnight_stand/test/night_stand_0216_normalised\nnight_stand/test/night_stand_0221_normalised\nnight_stand/test/night_stand_0224_normalised\nnight_stand/test/night_stand_0213_normalised\nnight_stand/test/night_stand_0286_normalised\nnight_stand/test/night_stand_0229_normalised\nnight_stand/test/night_stand_0236_normalised\nnight_stand/test/night_stand_0235_normalised\nnight_stand/test/night_stand_0220_normalised\nnight_stand/test/night_stand_0265_normalised\nnight_stand/test/night_stand_0227_normalised\nni
ght_stand/test/night_stand_0259_normalised\nnight_stand/test/night_stand_0277_normalised\nnight_stand/test/night_stand_0247_normalised\nnight_stand/test/night_stand_0222_normalised\nnight_stand/test/night_stand_0212_normalised\nnight_stand/test/night_stand_0230_normalised\nnight_stand/test/night_stand_0269_normalised\nnight_stand/test/night_stand_0243_normalised\nnight_stand/test/night_stand_0272_normalised\nnight_stand/test/night_stand_0257_normalised\nnight_stand/test/night_stand_0223_normalised\nnight_stand/test/night_stand_0232_normalised\nnight_stand/test/night_stand_0206_normalised\nnight_stand/test/night_stand_0238_normalised\nnight_stand/test/night_stand_0264_normalised\nnight_stand/test/night_stand_0249_normalised\nnight_stand/test/night_stand_0202_normalised\nnight_stand/test/night_stand_0251_normalised\nnight_stand/test/night_stand_0248_normalised\nnight_stand/test/night_stand_0239_normalised\nnight_stand/test/night_stand_0268_normalised\nnight_stand/test/night_stand_0246_normalised\nnight_stand/test/night_stand_0258_normalised\nnight_stand/test/night_stand_0210_normalised\nnight_stand/test/night_stand_0219_normalised\nnight_stand/test/night_stand_0231_normalised\nnight_stand/test/night_stand_0242_normalised\nnight_stand/test/night_stand_0245_normalised\nnight_stand/test/night_stand_0214_normalised\nnight_stand/test/night_stand_0205_normalised\nnight_stand/test/night_stand_0201_normalised\nnight_stand/test/night_stand_0203_normalised\nnight_stand/test/night_stand_0228_normalised\nnight_stand/test/night_stand_0208_normalised\nnight_stand/test/night_stand_0282_normalised\nnight_stand/test/night_stand_0260_normalised\nnight_stand/test/night_stand_0226_normalised\nnight_stand/test/night_stand_0237_normalised\nnight_stand/test/night_stand_0240_normalised\nnight_stand/test/night_stand_0215_normalised\nnight_stand/test/night_stand_0211_normalised\nnight_stand/test/night_stand_0254_normalised\nnight_stand/test/night_stand_0276_normalised\nnight_stand/test/night_s
tand_0261_normalised\nnight_stand/test/night_stand_0270_normalised\nnight_stand/test/night_stand_0275_normalised\nnight_stand/test/night_stand_0271_normalised\nnight_stand/test/night_stand_0225_normalised\nnight_stand/test/night_stand_0267_normalised\nnight_stand/test/night_stand_0285_normalised\nnight_stand/test/night_stand_0209_normalised\nnight_stand/test/night_stand_0217_normalised\nnight_stand/test/night_stand_0266_normalised\nnight_stand/test/night_stand_0218_normalised\nnight_stand/test/night_stand_0284_normalised\nnight_stand/test/night_stand_0241_normalised\nnight_stand/test/night_stand_0244_normalised\nnight_stand/test/night_stand_0283_normalised\nnight_stand/test/night_stand_0234_normalised\nnight_stand/test/night_stand_0274_normalised\ntent/train/tent_0111_normalised\ntent/train/tent_0109_normalised\ntent/train/tent_0132_normalised\ntent/train/tent_0007_normalised\ntent/train/tent_0021_normalised\ntent/train/tent_0154_normalised\ntent/train/tent_0094_normalised\ntent/train/tent_0152_normalised\ntent/train/tent_0062_normalised\ntent/train/tent_0155_normalised\ntent/train/tent_0060_normalised\ntent/train/tent_0002_normalised\ntent/train/tent_0045_normalised\ntent/train/tent_0008_normalised\ntent/train/tent_0063_normalised\ntent/train/tent_0070_normalised\ntent/train/tent_0126_normalised\ntent/train/tent_0024_normalised\ntent/train/tent_0099_normalised\ntent/train/tent_0097_normalised\ntent/train/tent_0123_normalised\ntent/train/tent_0009_normalised\ntent/train/tent_0026_normalised\ntent/train/tent_0121_normalised\ntent/train/tent_0144_normalised\ntent/train/tent_0101_normalised\ntent/train/tent_0018_normalised\ntent/train/tent_0054_normalised\ntent/train/tent_0113_normalised\ntent/train/tent_0131_normalised\ntent/train/tent_0075_normalised\ntent/train/tent_0058_normalised\ntent/train/tent_0053_normalised\ntent/train/tent_0052_normalised\ntent/train/tent_0129_normalised\ntent/train/tent_0117_normalised\ntent/train/tent_0057_normalised\ntent/train/tent_0010_
normalised\ntent/train/tent_0017_normalised\ntent/train/tent_0064_normalised\ntent/train/tent_0087_normalised\ntent/train/tent_0056_normalised\ntent/train/tent_0134_normalised\ntent/train/tent_0013_normalised\ntent/train/tent_0098_normalised\ntent/train/tent_0015_normalised\ntent/train/tent_0051_normalised\ntent/train/tent_0014_normalised\ntent/train/tent_0140_normalised\ntent/train/tent_0148_normalised\ntent/train/tent_0022_normalised\ntent/train/tent_0102_normalised\ntent/train/tent_0158_normalised\ntent/train/tent_0125_normalised\ntent/train/tent_0030_normalised\ntent/train/tent_0033_normalised\ntent/train/tent_0074_normalised\ntent/train/tent_0083_normalised\ntent/train/tent_0104_normalised\ntent/train/tent_0078_normalised\ntent/train/tent_0037_normalised\ntent/train/tent_0065_normalised\ntent/train/tent_0044_normalised\ntent/train/tent_0150_normalised\ntent/train/tent_0080_normalised\ntent/train/tent_0115_normalised\ntent/train/tent_0141_normalised\ntent/train/tent_0055_normalised\ntent/train/tent_0027_normalised\ntent/train/tent_0029_normalised\ntent/train/tent_0124_normalised\ntent/train/tent_0035_normalised\ntent/train/tent_0163_normalised\ntent/train/tent_0128_normalised\ntent/train/tent_0120_normalised\ntent/train/tent_0042_normalised\ntent/train/tent_0041_normalised\ntent/train/tent_0047_normalised\ntent/train/tent_0116_normalised\ntent/train/tent_0095_normalised\ntent/train/tent_0077_normalised\ntent/train/tent_0133_normalised\ntent/train/tent_0046_normalised\ntent/train/tent_0146_normalised\ntent/train/tent_0032_normalised\ntent/train/tent_0040_normalised\ntent/train/tent_0157_normalised\ntent/train/tent_0036_normalised\ntent/train/tent_0107_normalised\ntent/train/tent_0118_normalised\ntent/train/tent_0138_normalised\ntent/train/tent_0081_normalised\ntent/train/tent_0130_normalised\ntent/train/tent_0023_normalised\ntent/train/tent_0153_normalised\ntent/train/tent_0159_normalised\ntent/train/tent_0137_normalised\ntent/train/tent_0112_normalised\ntent/tra
in/tent_0039_normalised\ntent/train/tent_0088_normalised\ntent/train/tent_0106_normalised\ntent/train/tent_0031_normalised\ntent/train/tent_0143_normalised\ntent/train/tent_0004_normalised\ntent/train/tent_0001_normalised\ntent/train/tent_0136_normalised\ntent/train/tent_0079_normalised\ntent/train/tent_0161_normalised\ntent/train/tent_0005_normalised\ntent/train/tent_0089_normalised\ntent/train/tent_0061_normalised\ntent/train/tent_0149_normalised\ntent/train/tent_0006_normalised\ntent/train/tent_0050_normalised\ntent/train/tent_0084_normalised\ntent/train/tent_0068_normalised\ntent/train/tent_0100_normalised\ntent/train/tent_0043_normalised\ntent/train/tent_0160_normalised\ntent/train/tent_0085_normalised\ntent/train/tent_0139_normalised\ntent/train/tent_0122_normalised\ntent/train/tent_0145_normalised\ntent/train/tent_0020_normalised\ntent/train/tent_0003_normalised\ntent/train/tent_0028_normalised\ntent/train/tent_0082_normalised\ntent/train/tent_0127_normalised\ntent/train/tent_0067_normalised\ntent/train/tent_0162_normalised\ntent/train/tent_0066_normalised\ntent/train/tent_0049_normalised\ntent/train/tent_0090_normalised\ntent/train/tent_0072_normalised\ntent/train/tent_0091_normalised\ntent/train/tent_0119_normalised\ntent/train/tent_0073_normalised\ntent/train/tent_0048_normalised\ntent/train/tent_0147_normalised\ntent/train/tent_0096_normalised\ntent/train/tent_0038_normalised\ntent/train/tent_0025_normalised\ntent/train/tent_0108_normalised\ntent/train/tent_0019_normalised\ntent/train/tent_0076_normalised\ntent/train/tent_0092_normalised\ntent/train/tent_0093_normalised\ntent/train/tent_0069_normalised\ntent/train/tent_0016_normalised\ntent/train/tent_0034_normalised\ntent/train/tent_0012_normalised\ntent/train/tent_0135_normalised\ntent/train/tent_0151_normalised\ntent/train/tent_0110_normalised\ntent/train/tent_0105_normalised\ntent/train/tent_0071_normalised\ntent/train/tent_0156_normalised\ntent/train/tent_0142_normalised\ntent/train/tent_0086_normali
sed\ntent/train/tent_0114_normalised\ntent/train/tent_0103_normalised\ntent/train/tent_0059_normalised\ntent/train/tent_0011_normalised\ntent/test/tent_0169_normalised\ntent/test/tent_0166_normalised\ntent/test/tent_0182_normalised\ntent/test/tent_0177_normalised\ntent/test/tent_0178_normalised\ntent/test/tent_0165_normalised\ntent/test/tent_0183_normalised\ntent/test/tent_0179_normalised\ntent/test/tent_0170_normalised\ntent/test/tent_0168_normalised\ntent/test/tent_0173_normalised\ntent/test/tent_0181_normalised\ntent/test/tent_0171_normalised\ntent/test/tent_0174_normalised\ntent/test/tent_0175_normalised\ntent/test/tent_0164_normalised\ntent/test/tent_0172_normalised\ntent/test/tent_0167_normalised\ntent/test/tent_0180_normalised\ntent/test/tent_0176_normalised\nbookshelf/train/bookshelf_0446_normalised\nbookshelf/train/bookshelf_0072_normalised\nbookshelf/train/bookshelf_0241_normalised\nbookshelf/train/bookshelf_0300_normalised\nbookshelf/train/bookshelf_0341_normalised\nbookshelf/train/bookshelf_0209_normalised\nbookshelf/train/bookshelf_0045_normalised\nbookshelf/train/bookshelf_0425_normalised\nbookshelf/train/bookshelf_0009_normalised\nbookshelf/train/bookshelf_0263_normalised\nbookshelf/train/bookshelf_0567_normalised\nbookshelf/train/bookshelf_0489_normalised\nbookshelf/train/bookshelf_0444_normalised\nbookshelf/train/bookshelf_0462_normalised\nbookshelf/train/bookshelf_0554_normalised\nbookshelf/train/bookshelf_0505_normalised\nbookshelf/train/bookshelf_0560_normalised\nbookshelf/train/bookshelf_0283_normalised\nbookshelf/train/bookshelf_0561_normalised\nbookshelf/train/bookshelf_0512_normalised\nbookshelf/train/bookshelf_0212_normalised\nbookshelf/train/bookshelf_0474_normalised\nbookshelf/train/bookshelf_0543_normalised\nbookshelf/train/bookshelf_0163_normalised\nbookshelf/train/bookshelf_0360_normalised\nbookshelf/train/bookshelf_0104_normalised\nbookshelf/train/bookshelf_0049_normalised\nbookshelf/train/bookshelf_0493_normalised\nbookshelf/train/boo
kshelf_0021_normalised\nbookshelf/train/bookshelf_0368_normalised\nbookshelf/train/bookshelf_0207_normalised\nbookshelf/train/bookshelf_0061_normalised\nbookshelf/train/bookshelf_0020_normalised\nbookshelf/train/bookshelf_0524_normalised\nbookshelf/train/bookshelf_0168_normalised\nbookshelf/train/bookshelf_0496_normalised\nbookshelf/train/bookshelf_0396_normalised\nbookshelf/train/bookshelf_0266_normalised\nbookshelf/train/bookshelf_0059_normalised\nbookshelf/train/bookshelf_0087_normalised\nbookshelf/train/bookshelf_0467_normalised\nbookshelf/train/bookshelf_0274_normalised\nbookshelf/train/bookshelf_0264_normalised\nbookshelf/train/bookshelf_0335_normalised\nbookshelf/train/bookshelf_0528_normalised\nbookshelf/train/bookshelf_0485_normalised\nbookshelf/train/bookshelf_0055_normalised\nbookshelf/train/bookshelf_0550_normalised\nbookshelf/train/bookshelf_0453_normalised\nbookshelf/train/bookshelf_0201_normalised\nbookshelf/train/bookshelf_0269_normalised\nbookshelf/train/bookshelf_0482_normalised\nbookshelf/train/bookshelf_0112_normalised\nbookshelf/train/bookshelf_0243_normalised\nbookshelf/train/bookshelf_0292_normalised\nbookshelf/train/bookshelf_0423_normalised\nbookshelf/train/bookshelf_0242_normalised\nbookshelf/train/bookshelf_0082_normalised\nbookshelf/train/bookshelf_0458_normalised\nbookshelf/train/bookshelf_0101_normalised\nbookshelf/train/bookshelf_0476_normalised\nbookshelf/train/bookshelf_0333_normalised\nbookshelf/train/bookshelf_0365_normalised\nbookshelf/train/bookshelf_0096_normalised\nbookshelf/train/bookshelf_0253_normalised\nbookshelf/train/bookshelf_0265_normalised\nbookshelf/train/bookshelf_0286_normalised\nbookshelf/train/bookshelf_0134_normalised\nbookshelf/train/bookshelf_0487_normalised\nbookshelf/train/bookshelf_0234_normalised\nbookshelf/train/bookshelf_0390_normalised\nbookshelf/train/bookshelf_0302_normalised\nbookshelf/train/bookshelf_0172_normalised\nbookshelf/train/bookshelf_0098_normalised\nbookshelf/train/bookshelf_0138_normalised
\nbookshelf/train/bookshelf_0053_normalised\nbookshelf/train/bookshelf_0221_normalised\nbookshelf/train/bookshelf_0136_normalised\nbookshelf/train/bookshelf_0141_normalised\nbookshelf/train/bookshelf_0073_normalised\nbookshelf/train/bookshelf_0229_normalised\nbookshelf/train/bookshelf_0030_normalised\nbookshelf/train/bookshelf_0132_normalised\nbookshelf/train/bookshelf_0314_normalised\nbookshelf/train/bookshelf_0247_normalised\nbookshelf/train/bookshelf_0256_normalised\nbookshelf/train/bookshelf_0455_normalised\nbookshelf/train/bookshelf_0413_normalised\nbookshelf/train/bookshelf_0491_normalised\nbookshelf/train/bookshelf_0410_normalised\nbookshelf/train/bookshelf_0133_normalised\nbookshelf/train/bookshelf_0532_normalised\nbookshelf/train/bookshelf_0025_normalised\nbookshelf/train/bookshelf_0051_normalised\nbookshelf/train/bookshelf_0004_normalised\nbookshelf/train/bookshelf_0116_normalised\nbookshelf/train/bookshelf_0279_normalised\nbookshelf/train/bookshelf_0366_normalised\nbookshelf/train/bookshelf_0220_normalised\nbookshelf/train/bookshelf_0572_normalised\nbookshelf/train/bookshelf_0161_normalised\nbookshelf/train/bookshelf_0432_normalised\nbookshelf/train/bookshelf_0420_normalised\nbookshelf/train/bookshelf_0215_normalised\nbookshelf/train/bookshelf_0094_normalised\nbookshelf/train/bookshelf_0529_normalised\nbookshelf/train/bookshelf_0507_normalised\nbookshelf/train/bookshelf_0131_normalised\nbookshelf/train/bookshelf_0541_normalised\nbookshelf/train/bookshelf_0454_normalised\nbookshelf/train/bookshelf_0478_normalised\nbookshelf/train/bookshelf_0411_normalised\nbookshelf/train/bookshelf_0427_normalised\nbookshelf/train/bookshelf_0565_normalised\nbookshelf/train/bookshelf_0005_normalised\nbookshelf/train/bookshelf_0296_normalised\nbookshelf/train/bookshelf_0277_normalised\nbookshelf/train/bookshelf_0237_normalised\nbookshelf/train/bookshelf_0437_normalised\nbookshelf/train/bookshelf_0210_normalised\nbookshelf/train/bookshelf_0349_normalised\nbookshelf/train/book
shelf_0352_normalised\nbookshelf/train/bookshelf_0504_normalised\nbookshelf/train/bookshelf_0515_normalised\nbookshelf/train/bookshelf_0378_normalised\nbookshelf/train/bookshelf_0447_normalised\nbookshelf/train/bookshelf_0003_normalised\nbookshelf/train/bookshelf_0522_normalised\nbookshelf/train/bookshelf_0475_normalised\nbookshelf/train/bookshelf_0316_normalised\nbookshelf/train/bookshelf_0170_normalised\nbookshelf/train/bookshelf_0223_normalised\nbookshelf/train/bookshelf_0367_normalised\nbookshelf/train/bookshelf_0436_normalised\nbookshelf/train/bookshelf_0081_normalised\nbookshelf/train/bookshelf_0569_normalised\nbookshelf/train/bookshelf_0236_normalised\nbookshelf/train/bookshelf_0033_normalised\nbookshelf/train/bookshelf_0488_normalised\nbookshelf/train/bookshelf_0211_normalised\nbookshelf/train/bookshelf_0176_normalised\nbookshelf/train/bookshelf_0304_normalised\nbookshelf/train/bookshelf_0115_normalised\nbookshelf/train/bookshelf_0065_normalised\nbookshelf/train/bookshelf_0208_normalised\nbookshelf/train/bookshelf_0329_normalised\nbookshelf/train/bookshelf_0042_normalised\nbookshelf/train/bookshelf_0240_normalised\nbookshelf/train/bookshelf_0318_normalised\nbookshelf/train/bookshelf_0060_normalised\nbookshelf/train/bookshelf_0439_normalised\nbookshelf/train/bookshelf_0026_normalised\nbookshelf/train/bookshelf_0175_normalised\nbookshelf/train/bookshelf_0158_normalised\nbookshelf/train/bookshelf_0521_normalised\nbookshelf/train/bookshelf_0202_normalised\nbookshelf/train/bookshelf_0250_normalised\nbookshelf/train/bookshelf_0492_normalised\nbookshelf/train/bookshelf_0315_normalised\nbookshelf/train/bookshelf_0469_normalised\nbookshelf/train/bookshelf_0200_normalised\nbookshelf/train/bookshelf_0525_normalised\nbookshelf/train/bookshelf_0232_normalised\nbookshelf/train/bookshelf_0058_normalised\nbookshelf/train/bookshelf_0151_normalised\nbookshelf/train/bookshelf_0537_normalised\nbookshelf/train/bookshelf_0443_normalised\nbookshelf/train/bookshelf_0120_normalised\
nbookshelf/train/bookshelf_0260_normalised\nbookshelf/train/bookshelf_0520_normalised\nbookshelf/train/bookshelf_0433_normalised\nbookshelf/train/bookshelf_0480_normalised\nbookshelf/train/bookshelf_0412_normalised\nbookshelf/train/bookshelf_0459_normalised\nbookshelf/train/bookshelf_0281_normalised\nbookshelf/train/bookshelf_0108_normalised\nbookshelf/train/bookshelf_0249_normalised\nbookshelf/train/bookshelf_0010_normalised\nbookshelf/train/bookshelf_0177_normalised\nbookshelf/train/bookshelf_0409_normalised\nbookshelf/train/bookshelf_0006_normalised\nbookshelf/train/bookshelf_0092_normalised\nbookshelf/train/bookshelf_0320_normalised\nbookshelf/train/bookshelf_0146_normalised\nbookshelf/train/bookshelf_0203_normalised\nbookshelf/train/bookshelf_0192_normalised\nbookshelf/train/bookshelf_0022_normalised\nbookshelf/train/bookshelf_0039_normalised\nbookshelf/train/bookshelf_0019_normalised\nbookshelf/train/bookshelf_0287_normalised\nbookshelf/train/bookshelf_0080_normalised\nbookshelf/train/bookshelf_0519_normalised\nbookshelf/train/bookshelf_0150_normalised\nbookshelf/train/bookshelf_0500_normalised\nbookshelf/train/bookshelf_0222_normalised\nbookshelf/train/bookshelf_0416_normalised\nbookshelf/train/bookshelf_0165_normalised\nbookshelf/train/bookshelf_0514_normalised\nbookshelf/train/bookshelf_0012_normalised\nbookshelf/train/bookshelf_0307_normalised\nbookshelf/train/bookshelf_0063_normalised\nbookshelf/train/bookshelf_0355_normalised\nbookshelf/train/bookshelf_0350_normalised\nbookshelf/train/bookshelf_0181_normalised\nbookshelf/train/bookshelf_0156_normalised\nbookshelf/train/bookshelf_0245_normalised\nbookshelf/train/bookshelf_0028_normalised\nbookshelf/train/bookshelf_0278_normalised\nbookshelf/train/bookshelf_0336_normalised\nbookshelf/train/bookshelf_0509_normalised\nbookshelf/train/bookshelf_0374_normalised\nbookshelf/train/bookshelf_0531_normalised\nbookshelf/train/bookshelf_0547_normalised\nbookshelf/train/bookshelf_0331_normalised\nbookshelf/train/books
helf_0017_normalised\nbookshelf/train/bookshelf_0357_normalised\nbookshelf/train/bookshelf_0312_normalised\nbookshelf/train/bookshelf_0205_normalised\nbookshelf/train/bookshelf_0516_normalised\nbookshelf/train/bookshelf_0145_normalised\nbookshelf/train/bookshelf_0075_normalised\nbookshelf/train/bookshelf_0308_normalised\nbookshelf/train/bookshelf_0546_normalised\nbookshelf/train/bookshelf_0299_normalised\nbookshelf/train/bookshelf_0503_normalised\nbookshelf/train/bookshelf_0470_normalised\nbookshelf/train/bookshelf_0456_normalised\nbookshelf/train/bookshelf_0190_normalised\nbookshelf/train/bookshelf_0381_normalised\nbookshelf/train/bookshelf_0291_normalised\nbookshelf/train/bookshelf_0479_normalised\nbookshelf/train/bookshelf_0068_normalised\nbookshelf/train/bookshelf_0421_normalised\nbookshelf/train/bookshelf_0323_normalised\nbookshelf/train/bookshelf_0517_normalised\nbookshelf/train/bookshelf_0035_normalised\nbookshelf/train/bookshelf_0139_normalised\nbookshelf/train/bookshelf_0113_normalised\nbookshelf/train/bookshelf_0347_normalised\nbookshelf/train/bookshelf_0261_normalised\nbookshelf/train/bookshelf_0235_normalised\nbookshelf/train/bookshelf_0346_normalised\nbookshelf/train/bookshelf_0549_normalised\nbookshelf/train/bookshelf_0442_normalised\nbookshelf/train/bookshelf_0557_normalised\nbookshelf/train/bookshelf_0555_normalised\nbookshelf/train/bookshelf_0385_normalised\nbookshelf/train/bookshelf_0067_normalised\nbookshelf/train/bookshelf_0380_normalised\nbookshelf/train/bookshelf_0166_normalised\nbookshelf/train/bookshelf_0252_normalised\nbookshelf/train/bookshelf_0193_normalised\nbookshelf/train/bookshelf_0334_normalised\nbookshelf/train/bookshelf_0226_normalised\nbookshelf/train/bookshelf_0169_normalised\nbookshelf/train/bookshelf_0371_normalised\nbookshelf/train/bookshelf_0384_normalised\nbookshelf/train/bookshelf_0182_normalised\nbookshelf/train/bookshelf_0415_normalised\nbookshelf/train/bookshelf_0428_normalised\nbookshelf/train/bookshelf_0216_normalised\n
bookshelf/train/bookshelf_0123_normalised\nbookshelf/train/bookshelf_0159_normalised\nbookshelf/train/bookshelf_0481_normalised\nbookshelf/train/bookshelf_0194_normalised\nbookshelf/train/bookshelf_0535_normalised\nbookshelf/train/bookshelf_0257_normalised\nbookshelf/train/bookshelf_0276_normalised\nbookshelf/train/bookshelf_0394_normalised\nbookshelf/train/bookshelf_0348_normalised\nbookshelf/train/bookshelf_0391_normalised\nbookshelf/train/bookshelf_0506_normalised\nbookshelf/train/bookshelf_0056_normalised\nbookshelf/train/bookshelf_0140_normalised\nbookshelf/train/bookshelf_0050_normalised\nbookshelf/train/bookshelf_0363_normalised\nbookshelf/train/bookshelf_0126_normalised\nbookshelf/train/bookshelf_0027_normalised\nbookshelf/train/bookshelf_0107_normalised\nbookshelf/train/bookshelf_0127_normalised\nbookshelf/train/bookshelf_0461_normalised\nbookshelf/train/bookshelf_0536_normalised\nbookshelf/train/bookshelf_0219_normalised\nbookshelf/train/bookshelf_0187_normalised\nbookshelf/train/bookshelf_0301_normalised\nbookshelf/train/bookshelf_0038_normalised\nbookshelf/train/bookshelf_0183_normalised\nbookshelf/train/bookshelf_0457_normalised\nbookshelf/train/bookshelf_0111_normalised\nbookshelf/train/bookshelf_0217_normalised\nbookshelf/train/bookshelf_0280_normalised\nbookshelf/train/bookshelf_0085_normalised\nbookshelf/train/bookshelf_0001_normalised\nbookshelf/train/bookshelf_0343_normalised\nbookshelf/train/bookshelf_0290_normalised\nbookshelf/train/bookshelf_0157_normalised\nbookshelf/train/bookshelf_0339_normalised\nbookshelf/train/bookshelf_0508_normalised\nbookshelf/train/bookshelf_0062_normalised\nbookshelf/train/bookshelf_0018_normalised\nbookshelf/train/bookshelf_0321_normalised\nbookshelf/train/bookshelf_0389_normalised\nbookshelf/train/bookshelf_0426_normalised\nbookshelf/train/bookshelf_0070_normalised\nbookshelf/train/bookshelf_0289_normalised\nbookshelf/train/bookshelf_0074_normalised\nbookshelf/train/bookshelf_0400_normalised\nbookshelf/train/booksh
elf_0069_normalised\nbookshelf/train/bookshelf_0117_normalised\nbookshelf/train/bookshelf_0089_normalised\nbookshelf/train/bookshelf_0495_normalised\nbookshelf/train/bookshelf_0399_normalised\nbookshelf/train/bookshelf_0306_normalised\nbookshelf/train/bookshelf_0254_normalised\nbookshelf/train/bookshelf_0570_normalised\nbookshelf/train/bookshelf_0137_normalised\nbookshelf/train/bookshelf_0023_normalised\nbookshelf/train/bookshelf_0356_normalised\nbookshelf/train/bookshelf_0171_normalised\nbookshelf/train/bookshelf_0548_normalised\nbookshelf/train/bookshelf_0066_normalised\nbookshelf/train/bookshelf_0084_normalised\nbookshelf/train/bookshelf_0558_normalised\nbookshelf/train/bookshelf_0393_normalised\nbookshelf/train/bookshelf_0523_normalised\nbookshelf/train/bookshelf_0272_normalised\nbookshelf/train/bookshelf_0011_normalised\nbookshelf/train/bookshelf_0419_normalised\nbookshelf/train/bookshelf_0077_normalised\nbookshelf/train/bookshelf_0147_normalised\nbookshelf/train/bookshelf_0527_normalised\nbookshelf/train/bookshelf_0408_normalised\nbookshelf/train/bookshelf_0486_normalised\nbookshelf/train/bookshelf_0319_normalised\nbookshelf/train/bookshelf_0228_normalised\nbookshelf/train/bookshelf_0303_normalised\nbookshelf/train/bookshelf_0484_normalised\nbookshelf/train/bookshelf_0556_normalised\nbookshelf/train/bookshelf_0358_normalised\nbookshelf/train/bookshelf_0501_normalised\nbookshelf/train/bookshelf_0465_normalised\nbookshelf/train/bookshelf_0297_normalised\nbookshelf/train/bookshelf_0040_normalised\nbookshelf/train/bookshelf_0354_normalised\nbookshelf/train/bookshelf_0559_normalised\nbookshelf/train/bookshelf_0013_normalised\nbookshelf/train/bookshelf_0430_normalised\nbookshelf/train/bookshelf_0148_normalised\nbookshelf/train/bookshelf_0054_normalised\nbookshelf/train/bookshelf_0293_normalised\nbookshelf/train/bookshelf_0414_normalised\nbookshelf/train/bookshelf_0441_normalised\nbookshelf/train/bookshelf_0083_normalised\nbookshelf/train/bookshelf_0392_normalised\nb
ookshelf/train/bookshelf_0324_normalised\nbookshelf/train/bookshelf_0328_normalised\nbookshelf/train/bookshelf_0483_normalised\nbookshelf/train/bookshelf_0539_normalised\nbookshelf/train/bookshelf_0093_normalised\nbookshelf/train/bookshelf_0449_normalised\nbookshelf/train/bookshelf_0552_normalised\nbookshelf/train/bookshelf_0450_normalised\nbookshelf/train/bookshelf_0032_normalised\nbookshelf/train/bookshelf_0251_normalised\nbookshelf/train/bookshelf_0553_normalised\nbookshelf/train/bookshelf_0499_normalised\nbookshelf/train/bookshelf_0227_normalised\nbookshelf/train/bookshelf_0340_normalised\nbookshelf/train/bookshelf_0233_normalised\nbookshelf/train/bookshelf_0370_normalised\nbookshelf/train/bookshelf_0511_normalised\nbookshelf/train/bookshelf_0402_normalised\nbookshelf/train/bookshelf_0518_normalised\nbookshelf/train/bookshelf_0199_normalised\nbookshelf/train/bookshelf_0332_normalised\nbookshelf/train/bookshelf_0317_normalised\nbookshelf/train/bookshelf_0007_normalised\nbookshelf/train/bookshelf_0102_normalised\nbookshelf/train/bookshelf_0196_normalised\nbookshelf/train/bookshelf_0387_normalised\nbookshelf/train/bookshelf_0173_normalised\nbookshelf/train/bookshelf_0064_normalised\nbookshelf/train/bookshelf_0472_normalised\nbookshelf/train/bookshelf_0397_normalised\nbookshelf/train/bookshelf_0105_normalised\nbookshelf/train/bookshelf_0036_normalised\nbookshelf/train/bookshelf_0121_normalised\nbookshelf/train/bookshelf_0154_normalised\nbookshelf/train/bookshelf_0502_normalised\nbookshelf/train/bookshelf_0031_normalised\nbookshelf/train/bookshelf_0188_normalised\nbookshelf/train/bookshelf_0533_normalised\nbookshelf/train/bookshelf_0473_normalised\nbookshelf/train/bookshelf_0498_normalised\nbookshelf/train/bookshelf_0401_normalised\nbookshelf/train/bookshelf_0534_normalised\nbookshelf/train/bookshelf_0195_normalised\nbookshelf/train/bookshelf_0398_normalised\nbookshelf/train/bookshelf_0110_normalised\nbookshelf/train/bookshelf_0130_normalised\nbookshelf/train/bookshe
lf_0015_normalised\nbookshelf/train/bookshelf_0268_normalised\nbookshelf/train/bookshelf_0191_normalised\nbookshelf/train/bookshelf_0135_normalised\nbookshelf/train/bookshelf_0149_normalised\nbookshelf/train/bookshelf_0305_normalised\nbookshelf/train/bookshelf_0353_normalised\nbookshelf/train/bookshelf_0142_normalised\nbookshelf/train/bookshelf_0174_normalised\nbookshelf/train/bookshelf_0383_normalised\nbookshelf/train/bookshelf_0034_normalised\nbookshelf/train/bookshelf_0345_normalised\nbookshelf/train/bookshelf_0424_normalised\nbookshelf/train/bookshelf_0422_normalised\nbookshelf/train/bookshelf_0282_normalised\nbookshelf/train/bookshelf_0271_normalised\nbookshelf/train/bookshelf_0494_normalised\nbookshelf/train/bookshelf_0373_normalised\nbookshelf/train/bookshelf_0184_normalised\nbookshelf/train/bookshelf_0218_normalised\nbookshelf/train/bookshelf_0571_normalised\nbookshelf/train/bookshelf_0125_normalised\nbookshelf/train/bookshelf_0285_normalised\nbookshelf/train/bookshelf_0122_normalised\nbookshelf/train/bookshelf_0545_normalised\nbookshelf/train/bookshelf_0267_normalised\nbookshelf/train/bookshelf_0406_normalised\nbookshelf/train/bookshelf_0008_normalised\nbookshelf/train/bookshelf_0259_normalised\nbookshelf/train/bookshelf_0189_normalised\nbookshelf/train/bookshelf_0284_normalised\nbookshelf/train/bookshelf_0311_normalised\nbookshelf/train/bookshelf_0014_normalised\nbookshelf/train/bookshelf_0162_normalised\nbookshelf/train/bookshelf_0468_normalised\nbookshelf/train/bookshelf_0542_normalised\nbookshelf/train/bookshelf_0448_normalised\nbookshelf/train/bookshelf_0497_normalised\nbookshelf/train/bookshelf_0326_normalised\nbookshelf/train/bookshelf_0099_normalised\nbookshelf/train/bookshelf_0566_normalised\nbookshelf/train/bookshelf_0288_normalised\nbookshelf/train/bookshelf_0100_normalised\nbookshelf/train/bookshelf_0016_normalised\nbookshelf/train/bookshelf_0563_normalised\nbookshelf/train/bookshelf_0167_normalised\nbookshelf/train/bookshelf_0185_normalised\nbo
okshelf/train/bookshelf_0452_normalised\nbookshelf/train/bookshelf_0361_normalised\nbookshelf/train/bookshelf_0344_normalised\nbookshelf/train/bookshelf_0225_normalised\nbookshelf/train/bookshelf_0197_normalised\nbookshelf/train/bookshelf_0568_normalised\nbookshelf/train/bookshelf_0434_normalised\nbookshelf/train/bookshelf_0178_normalised\nbookshelf/train/bookshelf_0337_normalised\nbookshelf/train/bookshelf_0403_normalised\nbookshelf/train/bookshelf_0180_normalised\nbookshelf/train/bookshelf_0438_normalised\nbookshelf/train/bookshelf_0128_normalised\nbookshelf/train/bookshelf_0445_normalised\nbookshelf/train/bookshelf_0231_normalised\nbookshelf/train/bookshelf_0198_normalised\nbookshelf/train/bookshelf_0179_normalised\nbookshelf/train/bookshelf_0464_normalised\nbookshelf/train/bookshelf_0510_normalised\nbookshelf/train/bookshelf_0460_normalised\nbookshelf/train/bookshelf_0230_normalised\nbookshelf/train/bookshelf_0429_normalised\nbookshelf/train/bookshelf_0244_normalised\nbookshelf/train/bookshelf_0386_normalised\nbookshelf/train/bookshelf_0466_normalised\nbookshelf/train/bookshelf_0048_normalised\nbookshelf/train/bookshelf_0041_normalised\nbookshelf/train/bookshelf_0052_normalised\nbookshelf/train/bookshelf_0129_normalised\nbookshelf/train/bookshelf_0530_normalised\nbookshelf/train/bookshelf_0024_normalised\nbookshelf/train/bookshelf_0372_normalised\nbookshelf/train/bookshelf_0239_normalised\nbookshelf/train/bookshelf_0418_normalised\nbookshelf/train/bookshelf_0351_normalised\nbookshelf/train/bookshelf_0213_normalised\nbookshelf/train/bookshelf_0327_normalised\nbookshelf/train/bookshelf_0206_normalised\nbookshelf/train/bookshelf_0310_normalised\nbookshelf/train/bookshelf_0124_normalised\nbookshelf/train/bookshelf_0238_normalised\nbookshelf/train/bookshelf_0275_normalised\nbookshelf/train/bookshelf_0204_normalised\nbookshelf/train/bookshelf_0118_normalised\nbookshelf/train/bookshelf_0143_normalised\nbookshelf/train/bookshelf_0295_normalised\nbookshelf/train/bookshel
f_0029_normalised\nbookshelf/train/bookshelf_0309_normalised\nbookshelf/train/bookshelf_0044_normalised\nbookshelf/train/bookshelf_0103_normalised\nbookshelf/train/bookshelf_0152_normalised\nbookshelf/train/bookshelf_0369_normalised\nbookshelf/train/bookshelf_0262_normalised\nbookshelf/train/bookshelf_0057_normalised\nbookshelf/train/bookshelf_0088_normalised\nbookshelf/train/bookshelf_0155_normalised\nbookshelf/train/bookshelf_0160_normalised\nbookshelf/train/bookshelf_0379_normalised\nbookshelf/train/bookshelf_0431_normalised\nbookshelf/train/bookshelf_0224_normalised\nbookshelf/train/bookshelf_0544_normalised\nbookshelf/train/bookshelf_0076_normalised\nbookshelf/train/bookshelf_0540_normalised\nbookshelf/train/bookshelf_0071_normalised\nbookshelf/train/bookshelf_0562_normalised\nbookshelf/train/bookshelf_0417_normalised\nbookshelf/train/bookshelf_0338_normalised\nbookshelf/train/bookshelf_0298_normalised\nbookshelf/train/bookshelf_0047_normalised\nbookshelf/train/bookshelf_0086_normalised\nbookshelf/train/bookshelf_0359_normalised\nbookshelf/train/bookshelf_0325_normalised\nbookshelf/train/bookshelf_0270_normalised\nbookshelf/train/bookshelf_0046_normalised\nbookshelf/train/bookshelf_0564_normalised\nbookshelf/train/bookshelf_0377_normalised\nbookshelf/train/bookshelf_0364_normalised\nbookshelf/train/bookshelf_0342_normalised\nbookshelf/train/bookshelf_0435_normalised\nbookshelf/train/bookshelf_0246_normalised\nbookshelf/train/bookshelf_0407_normalised\nbookshelf/train/bookshelf_0091_normalised\nbookshelf/train/bookshelf_0404_normalised\nbookshelf/train/bookshelf_0463_normalised\nbookshelf/train/bookshelf_0037_normalised\nbookshelf/train/bookshelf_0490_normalised\nbookshelf/train/bookshelf_0382_normalised\nbookshelf/train/bookshelf_0322_normalised\nbookshelf/train/bookshelf_0258_normalised\nbookshelf/train/bookshelf_0144_normalised\nbookshelf/train/bookshelf_0440_normalised\nbookshelf/train/bookshelf_0294_normalised\nbookshelf/train/bookshelf_0538_normalised\nboo
kshelf/train/bookshelf_0395_normalised\nbookshelf/train/bookshelf_0214_normalised\nbookshelf/train/bookshelf_0376_normalised\nbookshelf/train/bookshelf_0090_normalised\nbookshelf/train/bookshelf_0097_normalised\nbookshelf/train/bookshelf_0388_normalised\nbookshelf/train/bookshelf_0526_normalised\nbookshelf/train/bookshelf_0119_normalised\nbookshelf/train/bookshelf_0375_normalised\nbookshelf/train/bookshelf_0002_normalised\nbookshelf/train/bookshelf_0153_normalised\nbookshelf/train/bookshelf_0451_normalised\nbookshelf/train/bookshelf_0255_normalised\nbookshelf/train/bookshelf_0471_normalised\nbookshelf/train/bookshelf_0164_normalised\nbookshelf/train/bookshelf_0551_normalised\nbookshelf/train/bookshelf_0248_normalised\nbookshelf/train/bookshelf_0043_normalised\nbookshelf/train/bookshelf_0405_normalised\nbookshelf/train/bookshelf_0330_normalised\nbookshelf/train/bookshelf_0095_normalised\nbookshelf/train/bookshelf_0477_normalised\nbookshelf/train/bookshelf_0186_normalised\nbookshelf/train/bookshelf_0362_normalised\nbookshelf/train/bookshelf_0313_normalised\nbookshelf/train/bookshelf_0513_normalised\nbookshelf/train/bookshelf_0109_normalised\nbookshelf/train/bookshelf_0106_normalised\nbookshelf/train/bookshelf_0078_normalised\nbookshelf/train/bookshelf_0079_normalised\nbookshelf/train/bookshelf_0114_normalised\nbookshelf/train/bookshelf_0273_normalised\nbookshelf/test/bookshelf_0593_normalised\nbookshelf/test/bookshelf_0641_normalised\nbookshelf/test/bookshelf_0661_normalised\nbookshelf/test/bookshelf_0586_normalised\nbookshelf/test/bookshelf_0576_normalised\nbookshelf/test/bookshelf_0636_normalised\nbookshelf/test/bookshelf_0580_normalised\nbookshelf/test/bookshelf_0655_normalised\nbookshelf/test/bookshelf_0606_normalised\nbookshelf/test/bookshelf_0582_normalised\nbookshelf/test/bookshelf_0634_normalised\nbookshelf/test/bookshelf_0583_normalised\nbookshelf/test/bookshelf_0660_normalised\nbookshelf/test/bookshelf_0578_normalised\nbookshelf/test/bookshelf_0653_normalise
d\nbookshelf/test/bookshelf_0605_normalised\nbookshelf/test/bookshelf_0625_normalised\nbookshelf/test/bookshelf_0642_normalised\nbookshelf/test/bookshelf_0631_normalised\nbookshelf/test/bookshelf_0652_normalised\nbookshelf/test/bookshelf_0672_normalised\nbookshelf/test/bookshelf_0633_normalised\nbookshelf/test/bookshelf_0585_normalised\nbookshelf/test/bookshelf_0665_normalised\nbookshelf/test/bookshelf_0626_normalised\nbookshelf/test/bookshelf_0623_normalised\nbookshelf/test/bookshelf_0591_normalised\nbookshelf/test/bookshelf_0615_normalised\nbookshelf/test/bookshelf_0656_normalised\nbookshelf/test/bookshelf_0599_normalised\nbookshelf/test/bookshelf_0670_normalised\nbookshelf/test/bookshelf_0596_normalised\nbookshelf/test/bookshelf_0619_normalised\nbookshelf/test/bookshelf_0592_normalised\nbookshelf/test/bookshelf_0646_normalised\nbookshelf/test/bookshelf_0657_normalised\nbookshelf/test/bookshelf_0640_normalised\nbookshelf/test/bookshelf_0662_normalised\nbookshelf/test/bookshelf_0643_normalised\nbookshelf/test/bookshelf_0666_normalised\nbookshelf/test/bookshelf_0603_normalised\nbookshelf/test/bookshelf_0618_normalised\nbookshelf/test/bookshelf_0645_normalised\nbookshelf/test/bookshelf_0663_normalised\nbookshelf/test/bookshelf_0573_normalised\nbookshelf/test/bookshelf_0613_normalised\nbookshelf/test/bookshelf_0664_normalised\nbookshelf/test/bookshelf_0581_normalised\nbookshelf/test/bookshelf_0669_normalised\nbookshelf/test/bookshelf_0629_normalised\nbookshelf/test/bookshelf_0609_normalised\nbookshelf/test/bookshelf_0590_normalised\nbookshelf/test/bookshelf_0651_normalised\nbookshelf/test/bookshelf_0659_normalised\nbookshelf/test/bookshelf_0635_normalised\nbookshelf/test/bookshelf_0639_normalised\nbookshelf/test/bookshelf_0579_normalised\nbookshelf/test/bookshelf_0647_normalised\nbookshelf/test/bookshelf_0602_normalised\nbookshelf/test/bookshelf_0671_normalised\nbookshelf/test/bookshelf_0617_normalised\nbookshelf/test/bookshelf_0598_normalised\nbookshelf/test/bookshel
f_0614_normalised\nbookshelf/test/bookshelf_0611_normalised\nbookshelf/test/bookshelf_0607_normalised\nbookshelf/test/bookshelf_0638_normalised\nbookshelf/test/bookshelf_0616_normalised\nbookshelf/test/bookshelf_0587_normalised\nbookshelf/test/bookshelf_0589_normalised\nbookshelf/test/bookshelf_0627_normalised\nbookshelf/test/bookshelf_0600_normalised\nbookshelf/test/bookshelf_0648_normalised\nbookshelf/test/bookshelf_0575_normalised\nbookshelf/test/bookshelf_0610_normalised\nbookshelf/test/bookshelf_0637_normalised\nbookshelf/test/bookshelf_0654_normalised\nbookshelf/test/bookshelf_0630_normalised\nbookshelf/test/bookshelf_0597_normalised\nbookshelf/test/bookshelf_0612_normalised\nbookshelf/test/bookshelf_0628_normalised\nbookshelf/test/bookshelf_0667_normalised\nbookshelf/test/bookshelf_0588_normalised\nbookshelf/test/bookshelf_0604_normalised\nbookshelf/test/bookshelf_0601_normalised\nbookshelf/test/bookshelf_0649_normalised\nbookshelf/test/bookshelf_0584_normalised\nbookshelf/test/bookshelf_0658_normalised\nbookshelf/test/bookshelf_0650_normalised\nbookshelf/test/bookshelf_0632_normalised\nbookshelf/test/bookshelf_0668_normalised\nbookshelf/test/bookshelf_0621_normalised\nbookshelf/test/bookshelf_0574_normalised\nbookshelf/test/bookshelf_0622_normalised\nbookshelf/test/bookshelf_0624_normalised\nbookshelf/test/bookshelf_0608_normalised\nbookshelf/test/bookshelf_0595_normalised\nbookshelf/test/bookshelf_0644_normalised\nbookshelf/test/bookshelf_0577_normalised\nbookshelf/test/bookshelf_0620_normalised\nbookshelf/test/bookshelf_0594_normalised\nrange_hood/train/range_hood_0007_normalised\nrange_hood/train/range_hood_0004_normalised\nrange_hood/train/range_hood_0071_normalised\nrange_hood/train/range_hood_0085_normalised\nrange_hood/train/range_hood_0082_normalised\nrange_hood/train/range_hood_0062_normalised\nrange_hood/train/range_hood_0094_normalised\nrange_hood/train/range_hood_0070_normalised\nrange_hood/train/range_hood_0086_normalised\nrange_hood/train/range
_hood_0100_normalised\nrange_hood/train/range_hood_0078_normalised\nrange_hood/train/range_hood_0073_normalised\nrange_hood/train/range_hood_0002_normalised\nrange_hood/train/range_hood_0010_normalised\nrange_hood/train/range_hood_0059_normalised\nrange_hood/train/range_hood_0111_normalised\nrange_hood/train/range_hood_0009_normalised\nrange_hood/train/range_hood_0081_normalised\nrange_hood/train/range_hood_0058_normalised\nrange_hood/train/range_hood_0074_normalised\nrange_hood/train/range_hood_0075_normalised\nrange_hood/train/range_hood_0099_normalised\nrange_hood/train/range_hood_0034_normalised\nrange_hood/train/range_hood_0108_normalised\nrange_hood/train/range_hood_0106_normalised\nrange_hood/train/range_hood_0112_normalised\nrange_hood/train/range_hood_0026_normalised\nrange_hood/train/range_hood_0023_normalised\nrange_hood/train/range_hood_0088_normalised\nrange_hood/train/range_hood_0030_normalised\nrange_hood/train/range_hood_0057_normalised\nrange_hood/train/range_hood_0008_normalised\nrange_hood/train/range_hood_0063_normalised\nrange_hood/train/range_hood_0068_normalised\nrange_hood/train/range_hood_0056_normalised\nrange_hood/train/range_hood_0115_normalised\nrange_hood/train/range_hood_0084_normalised\nrange_hood/train/range_hood_0113_normalised\nrange_hood/train/range_hood_0098_normalised\nrange_hood/train/range_hood_0067_normalised\nrange_hood/train/range_hood_0079_normalised\nrange_hood/train/range_hood_0095_normalised\nrange_hood/train/range_hood_0047_normalised\nrange_hood/train/range_hood_0038_normalised\nrange_hood/train/range_hood_0083_normalised\nrange_hood/train/range_hood_0045_normalised\nrange_hood/train/range_hood_0065_normalised\nrange_hood/train/range_hood_0014_normalised\nrange_hood/train/range_hood_0043_normalised\nrange_hood/train/range_hood_0041_normalised\nrange_hood/train/range_hood_0022_normalised\nrange_hood/train/range_hood_0064_normalised\nrange_hood/train/range_hood_0114_normalised\nrange_hood/train/range_hood_0091_normalise
d\nrange_hood/train/range_hood_0087_normalised\nrange_hood/train/range_hood_0016_normalised\nrange_hood/train/range_hood_0049_normalised\nrange_hood/train/range_hood_0072_normalised\nrange_hood/train/range_hood_0066_normalised\nrange_hood/train/range_hood_0097_normalised\nrange_hood/train/range_hood_0051_normalised\nrange_hood/train/range_hood_0046_normalised\nrange_hood/train/range_hood_0040_normalised\nrange_hood/train/range_hood_0054_normalised\nrange_hood/train/range_hood_0017_normalised\nrange_hood/train/range_hood_0025_normalised\nrange_hood/train/range_hood_0048_normalised\nrange_hood/train/range_hood_0027_normalised\nrange_hood/train/range_hood_0101_normalised\nrange_hood/train/range_hood_0052_normalised\nrange_hood/train/range_hood_0102_normalised\nrange_hood/train/range_hood_0018_normalised\nrange_hood/train/range_hood_0060_normalised\nrange_hood/train/range_hood_0042_normalised\nrange_hood/train/range_hood_0105_normalised\nrange_hood/train/range_hood_0080_normalised\nrange_hood/train/range_hood_0001_normalised\nrange_hood/train/range_hood_0076_normalised\nrange_hood/train/range_hood_0107_normalised\nrange_hood/train/range_hood_0019_normalised\nrange_hood/train/range_hood_0003_normalised\nrange_hood/train/range_hood_0035_normalised\nrange_hood/train/range_hood_0044_normalised\nrange_hood/train/range_hood_0103_normalised\nrange_hood/train/range_hood_0104_normalised\nrange_hood/train/range_hood_0110_normalised\nrange_hood/train/range_hood_0055_normalised\nrange_hood/train/range_hood_0031_normalised\nrange_hood/train/range_hood_0021_normalised\nrange_hood/train/range_hood_0077_normalised\nrange_hood/train/range_hood_0037_normalised\nrange_hood/train/range_hood_0024_normalised\nrange_hood/train/range_hood_0061_normalised\nrange_hood/train/range_hood_0013_normalised\nrange_hood/train/range_hood_0036_normalised\nrange_hood/train/range_hood_0089_normalised\nrange_hood/train/range_hood_0053_normalised\nrange_hood/train/range_hood_0006_normalised\nrange_hood/train/
range_hood_0096_normalised\nrange_hood/train/range_hood_0020_normalised\nrange_hood/train/range_hood_0069_normalised\nrange_hood/train/range_hood_0050_normalised\nrange_hood/train/range_hood_0039_normalised\nrange_hood/train/range_hood_0015_normalised\nrange_hood/train/range_hood_0028_normalised\nrange_hood/train/range_hood_0011_normalised\nrange_hood/train/range_hood_0032_normalised\nrange_hood/train/range_hood_0012_normalised\nrange_hood/train/range_hood_0090_normalised\nrange_hood/train/range_hood_0092_normalised\nrange_hood/train/range_hood_0029_normalised\nrange_hood/train/range_hood_0033_normalised\nrange_hood/train/range_hood_0005_normalised\nrange_hood/train/range_hood_0109_normalised\nrange_hood/train/range_hood_0093_normalised\nrange_hood/test/range_hood_0147_normalised\nrange_hood/test/range_hood_0191_normalised\nrange_hood/test/range_hood_0177_normalised\nrange_hood/test/range_hood_0175_normalised\nrange_hood/test/range_hood_0158_normalised\nrange_hood/test/range_hood_0199_normalised\nrange_hood/test/range_hood_0197_normalised\nrange_hood/test/range_hood_0132_normalised\nrange_hood/test/range_hood_0195_normalised\nrange_hood/test/range_hood_0213_normalised\nrange_hood/test/range_hood_0176_normalised\nrange_hood/test/range_hood_0211_normalised\nrange_hood/test/range_hood_0172_normalised\nrange_hood/test/range_hood_0193_normalised\nrange_hood/test/range_hood_0187_normalised\nrange_hood/test/range_hood_0135_normalised\nrange_hood/test/range_hood_0201_normalised\nrange_hood/test/range_hood_0128_normalised\nrange_hood/test/range_hood_0168_normalised\nrange_hood/test/range_hood_0136_normalised\nrange_hood/test/range_hood_0162_normalised\nrange_hood/test/range_hood_0131_normalised\nrange_hood/test/range_hood_0138_normalised\nrange_hood/test/range_hood_0208_normalised\nrange_hood/test/range_hood_0146_normalised\nrange_hood/test/range_hood_0188_normalised\nrange_hood/test/range_hood_0167_normalised\nrange_hood/test/range_hood_0182_normalised\nrange_hood/test/rang
e_hood_0129_normalised\nrange_hood/test/range_hood_0181_normalised\nrange_hood/test/range_hood_0170_normalised\nrange_hood/test/range_hood_0203_normalised\nrange_hood/test/range_hood_0123_normalised\nrange_hood/test/range_hood_0127_normalised\nrange_hood/test/range_hood_0121_normalised\nrange_hood/test/range_hood_0154_normalised\nrange_hood/test/range_hood_0134_normalised\nrange_hood/test/range_hood_0156_normalised\nrange_hood/test/range_hood_0185_normalised\nrange_hood/test/range_hood_0133_normalised\nrange_hood/test/range_hood_0215_normalised\nrange_hood/test/range_hood_0173_normalised\nrange_hood/test/range_hood_0120_normalised\nrange_hood/test/range_hood_0184_normalised\nrange_hood/test/range_hood_0148_normalised\nrange_hood/test/range_hood_0165_normalised\nrange_hood/test/range_hood_0119_normalised\nrange_hood/test/range_hood_0166_normalised\nrange_hood/test/range_hood_0143_normalised\nrange_hood/test/range_hood_0153_normalised\nrange_hood/test/range_hood_0152_normalised\nrange_hood/test/range_hood_0212_normalised\nrange_hood/test/range_hood_0186_normalised\nrange_hood/test/range_hood_0137_normalised\nrange_hood/test/range_hood_0116_normalised\nrange_hood/test/range_hood_0125_normalised\nrange_hood/test/range_hood_0141_normalised\nrange_hood/test/range_hood_0117_normalised\nrange_hood/test/range_hood_0210_normalised\nrange_hood/test/range_hood_0163_normalised\nrange_hood/test/range_hood_0206_normalised\nrange_hood/test/range_hood_0196_normalised\nrange_hood/test/range_hood_0161_normalised\nrange_hood/test/range_hood_0160_normalised\nrange_hood/test/range_hood_0118_normalised\nrange_hood/test/range_hood_0150_normalised\nrange_hood/test/range_hood_0178_normalised\nrange_hood/test/range_hood_0207_normalised\nrange_hood/test/range_hood_0122_normalised\nrange_hood/test/range_hood_0174_normalised\nrange_hood/test/range_hood_0169_normalised\nrange_hood/test/range_hood_0204_normalised\nrange_hood/test/range_hood_0192_normalised\nrange_hood/test/range_hood_0205_normalis
ed\nrange_hood/test/range_hood_0202_normalised\nrange_hood/test/range_hood_0214_normalised\nrange_hood/test/range_hood_0126_normalised\nrange_hood/test/range_hood_0183_normalised\nrange_hood/test/range_hood_0171_normalised\nrange_hood/test/range_hood_0189_normalised\nrange_hood/test/range_hood_0124_normalised\nrange_hood/test/range_hood_0130_normalised\nrange_hood/test/range_hood_0190_normalised\nrange_hood/test/range_hood_0140_normalised\nrange_hood/test/range_hood_0149_normalised\nrange_hood/test/range_hood_0144_normalised\nrange_hood/test/range_hood_0200_normalised\nrange_hood/test/range_hood_0159_normalised\nrange_hood/test/range_hood_0151_normalised\nrange_hood/test/range_hood_0198_normalised\nrange_hood/test/range_hood_0164_normalised\nrange_hood/test/range_hood_0157_normalised\nrange_hood/test/range_hood_0142_normalised\nrange_hood/test/range_hood_0139_normalised\nrange_hood/test/range_hood_0145_normalised\nrange_hood/test/range_hood_0180_normalised\nrange_hood/test/range_hood_0209_normalised\nrange_hood/test/range_hood_0194_normalised\nrange_hood/test/range_hood_0179_normalised\nrange_hood/test/range_hood_0155_normalised\ncurtain/train/curtain_0112_normalised\ncurtain/train/curtain_0120_normalised\ncurtain/train/curtain_0017_normalised\ncurtain/train/curtain_0085_normalised\ncurtain/train/curtain_0040_normalised\ncurtain/train/curtain_0070_normalised\ncurtain/train/curtain_0035_normalised\ncurtain/train/curtain_0045_normalised\ncurtain/train/curtain_0057_normalised\ncurtain/train/curtain_0029_normalised\ncurtain/train/curtain_0062_normalised\ncurtain/train/curtain_0049_normalised\ncurtain/train/curtain_0064_normalised\ncurtain/train/curtain_0109_normalised\ncurtain/train/curtain_0126_normalised\ncurtain/train/curtain_0113_normalised\ncurtain/train/curtain_0059_normalised\ncurtain/train/curtain_0013_normalised\ncurtain/train/curtain_0079_normalised\ncurtain/train/curtain_0006_normalised\ncurtain/train/curtain_0076_normalised\ncurtain/train/curtain_0004_normal
ised\ncurtain/train/curtain_0005_normalised\ncurtain/train/curtain_0131_normalised\ncurtain/train/curtain_0106_normalised\ncurtain/train/curtain_0023_normalised\ncurtain/train/curtain_0127_normalised\ncurtain/train/curtain_0134_normalised\ncurtain/train/curtain_0010_normalised\ncurtain/train/curtain_0003_normalised\ncurtain/train/curtain_0025_normalised\ncurtain/train/curtain_0055_normalised\ncurtain/train/curtain_0038_normalised\ncurtain/train/curtain_0100_normalised\ncurtain/train/curtain_0110_normalised\ncurtain/train/curtain_0051_normalised\ncurtain/train/curtain_0119_normalised\ncurtain/train/curtain_0081_normalised\ncurtain/train/curtain_0090_normalised\ncurtain/train/curtain_0101_normalised\ncurtain/train/curtain_0033_normalised\ncurtain/train/curtain_0103_normalised\ncurtain/train/curtain_0111_normalised\ncurtain/train/curtain_0125_normalised\ncurtain/train/curtain_0044_normalised\ncurtain/train/curtain_0014_normalised\ncurtain/train/curtain_0077_normalised\ncurtain/train/curtain_0097_normalised\ncurtain/train/curtain_0030_normalised\ncurtain/train/curtain_0034_normalised\ncurtain/train/curtain_0105_normalised\ncurtain/train/curtain_0063_normalised\ncurtain/train/curtain_0130_normalised\ncurtain/train/curtain_0115_normalised\ncurtain/train/curtain_0020_normalised\ncurtain/train/curtain_0102_normalised\ncurtain/train/curtain_0080_normalised\ncurtain/train/curtain_0123_normalised\ncurtain/train/curtain_0069_normalised\ncurtain/train/curtain_0118_normalised\ncurtain/train/curtain_0091_normalised\ncurtain/train/curtain_0031_normalised\ncurtain/train/curtain_0015_normalised\ncurtain/train/curtain_0022_normalised\ncurtain/train/curtain_0032_normalised\ncurtain/train/curtain_0009_normalised\ncurtain/train/curtain_0104_normalised\ncurtain/train/curtain_0007_normalised\ncurtain/train/curtain_0067_normalised\ncurtain/train/curtain_0065_normalised\ncurtain/train/curtain_0018_normalised\ncurtain/train/curtain_0053_normalised\ncurtain/train/curtain_0066_normalised\ncurta
in/train/curtain_0050_normalised\ncurtain/train/curtain_0072_normalised\ncurtain/train/curtain_0060_normalised\ncurtain/train/curtain_0078_normalised\ncurtain/train/curtain_0089_normalised\ncurtain/train/curtain_0046_normalised\ncurtain/train/curtain_0129_normalised\ncurtain/train/curtain_0021_normalised\ncurtain/train/curtain_0073_normalised\ncurtain/train/curtain_0107_normalised\ncurtain/train/curtain_0099_normalised\ncurtain/train/curtain_0132_normalised\ncurtain/train/curtain_0094_normalised\ncurtain/train/curtain_0002_normalised\ncurtain/train/curtain_0012_normalised\ncurtain/train/curtain_0117_normalised\ncurtain/train/curtain_0086_normalised\ncurtain/train/curtain_0121_normalised\ncurtain/train/curtain_0019_normalised\ncurtain/train/curtain_0052_normalised\ncurtain/train/curtain_0028_normalised\ncurtain/train/curtain_0037_normalised\ncurtain/train/curtain_0092_normalised\ncurtain/train/curtain_0088_normalised\ncurtain/train/curtain_0137_normalised\ncurtain/train/curtain_0133_normalised\ncurtain/train/curtain_0096_normalised\ncurtain/train/curtain_0054_normalised\ncurtain/train/curtain_0047_normalised\ncurtain/train/curtain_0136_normalised\ncurtain/train/curtain_0016_normalised\ncurtain/train/curtain_0128_normalised\ncurtain/train/curtain_0042_normalised\ncurtain/train/curtain_0056_normalised\ncurtain/train/curtain_0108_normalised\ncurtain/train/curtain_0093_normalised\ncurtain/train/curtain_0026_normalised\ncurtain/train/curtain_0043_normalised\ncurtain/train/curtain_0074_normalised\ncurtain/train/curtain_0082_normalised\ncurtain/train/curtain_0061_normalised\ncurtain/train/curtain_0122_normalised\ncurtain/train/curtain_0116_normalised\ncurtain/train/curtain_0027_normalised\ncurtain/train/curtain_0084_normalised\ncurtain/train/curtain_0068_normalised\ncurtain/train/curtain_0024_normalised\ncurtain/train/curtain_0001_normalised\ncurtain/train/curtain_0058_normalised\ncurtain/train/curtain_0087_normalised\ncurtain/train/curtain_0039_normalised\ncurtain/train/cu
rtain_0008_normalised\ncurtain/train/curtain_0124_normalised\ncurtain/train/curtain_0071_normalised\ncurtain/train/curtain_0048_normalised\ncurtain/train/curtain_0011_normalised\ncurtain/train/curtain_0098_normalised\ncurtain/train/curtain_0135_normalised\ncurtain/train/curtain_0075_normalised\ncurtain/train/curtain_0041_normalised\ncurtain/train/curtain_0114_normalised\ncurtain/train/curtain_0083_normalised\ncurtain/train/curtain_0138_normalised\ncurtain/train/curtain_0095_normalised\ncurtain/train/curtain_0036_normalised\ncurtain/test/curtain_0155_normalised\ncurtain/test/curtain_0148_normalised\ncurtain/test/curtain_0147_normalised\ncurtain/test/curtain_0156_normalised\ncurtain/test/curtain_0151_normalised\ncurtain/test/curtain_0145_normalised\ncurtain/test/curtain_0140_normalised\ncurtain/test/curtain_0139_normalised\ncurtain/test/curtain_0146_normalised\ncurtain/test/curtain_0157_normalised\ncurtain/test/curtain_0149_normalised\ncurtain/test/curtain_0150_normalised\ncurtain/test/curtain_0158_normalised\ncurtain/test/curtain_0142_normalised\ncurtain/test/curtain_0143_normalised\ncurtain/test/curtain_0152_normalised\ncurtain/test/curtain_0144_normalised\ncurtain/test/curtain_0141_normalised\ncurtain/test/curtain_0153_normalised\ncurtain/test/curtain_0154_normalised\nlamp/train/lamp_0080_normalised\nlamp/train/lamp_0097_normalised\nlamp/train/lamp_0003_normalised\nlamp/train/lamp_0067_normalised\nlamp/train/lamp_0064_normalised\nlamp/train/lamp_0096_normalised\nlamp/train/lamp_0086_normalised\nlamp/train/lamp_0025_normalised\nlamp/train/lamp_0010_normalised\nlamp/train/lamp_0069_normalised\nlamp/train/lamp_0021_normalised\nlamp/train/lamp_0081_normalised\nlamp/train/lamp_0065_normalised\nlamp/train/lamp_0014_normalised\nlamp/train/lamp_0050_normalised\nlamp/train/lamp_0005_normalised\nlamp/train/lamp_0049_normalised\nlamp/train/lamp_0104_normalised\nlamp/train/lamp_0115_normalised\nlamp/train/lamp_0071_normalised\nlamp/train/lamp_0002_normalised\nlamp/train/lamp_0
092_normalised\nlamp/train/lamp_0118_normalised\nlamp/train/lamp_0026_normalised\nlamp/train/lamp_0033_normalised\nlamp/train/lamp_0121_normalised\nlamp/train/lamp_0023_normalised\nlamp/train/lamp_0112_normalised\nlamp/train/lamp_0113_normalised\nlamp/train/lamp_0108_normalised\nlamp/train/lamp_0011_normalised\nlamp/train/lamp_0109_normalised\nlamp/train/lamp_0004_normalised\nlamp/train/lamp_0106_normalised\nlamp/train/lamp_0060_normalised\nlamp/train/lamp_0123_normalised\nlamp/train/lamp_0043_normalised\nlamp/train/lamp_0099_normalised\nlamp/train/lamp_0034_normalised\nlamp/train/lamp_0012_normalised\nlamp/train/lamp_0070_normalised\nlamp/train/lamp_0039_normalised\nlamp/train/lamp_0101_normalised\nlamp/train/lamp_0015_normalised\nlamp/train/lamp_0045_normalised\nlamp/train/lamp_0020_normalised\nlamp/train/lamp_0105_normalised\nlamp/train/lamp_0051_normalised\nlamp/train/lamp_0055_normalised\nlamp/train/lamp_0124_normalised\nlamp/train/lamp_0075_normalised\nlamp/train/lamp_0040_normalised\nlamp/train/lamp_0046_normalised\nlamp/train/lamp_0114_normalised\nlamp/train/lamp_0116_normalised\nlamp/train/lamp_0052_normalised\nlamp/train/lamp_0035_normalised\nlamp/train/lamp_0077_normalised\nlamp/train/lamp_0062_normalised\nlamp/train/lamp_0042_normalised\nlamp/train/lamp_0009_normalised\nlamp/train/lamp_0074_normalised\nlamp/train/lamp_0028_normalised\nlamp/train/lamp_0054_normalised\nlamp/train/lamp_0122_normalised\nlamp/train/lamp_0044_normalised\nlamp/train/lamp_0036_normalised\nlamp/train/lamp_0102_normalised\nlamp/train/lamp_0001_normalised\nlamp/train/lamp_0037_normalised\nlamp/train/lamp_0117_normalised\nlamp/train/lamp_0018_normalised\nlamp/train/lamp_0022_normalised\nlamp/train/lamp_0017_normalised\nlamp/train/lamp_0058_normalised\nlamp/train/lamp_0119_normalised\nlamp/train/lamp_0076_normalised\nlamp/train/lamp_0082_normalised\nlamp/train/lamp_0007_normalised\nlamp/train/lamp_0029_normalised\nlamp/train/lamp_0041_normalised\nlamp/train/lamp_0024_normalised\nlamp
/train/lamp_0089_normalised\nlamp/train/lamp_0061_normalised\nlamp/train/lamp_0031_normalised\nlamp/train/lamp_0059_normalised\nlamp/train/lamp_0088_normalised\nlamp/train/lamp_0006_normalised\nlamp/train/lamp_0120_normalised\nlamp/train/lamp_0072_normalised\nlamp/train/lamp_0016_normalised\nlamp/train/lamp_0053_normalised\nlamp/train/lamp_0079_normalised\nlamp/train/lamp_0093_normalised\nlamp/train/lamp_0063_normalised\nlamp/train/lamp_0103_normalised\nlamp/train/lamp_0056_normalised\nlamp/train/lamp_0094_normalised\nlamp/train/lamp_0090_normalised\nlamp/train/lamp_0048_normalised\nlamp/train/lamp_0066_normalised\nlamp/train/lamp_0057_normalised\nlamp/train/lamp_0068_normalised\nlamp/train/lamp_0084_normalised\nlamp/train/lamp_0110_normalised\nlamp/train/lamp_0100_normalised\nlamp/train/lamp_0013_normalised\nlamp/train/lamp_0008_normalised\nlamp/train/lamp_0030_normalised\nlamp/train/lamp_0107_normalised\nlamp/train/lamp_0047_normalised\nlamp/train/lamp_0073_normalised\nlamp/train/lamp_0038_normalised\nlamp/train/lamp_0095_normalised\nlamp/train/lamp_0019_normalised\nlamp/train/lamp_0087_normalised\nlamp/train/lamp_0027_normalised\nlamp/train/lamp_0098_normalised\nlamp/train/lamp_0078_normalised\nlamp/train/lamp_0111_normalised\nlamp/train/lamp_0085_normalised\nlamp/train/lamp_0032_normalised\nlamp/train/lamp_0083_normalised\nlamp/train/lamp_0091_normalised\nlamp/test/lamp_0139_normalised\nlamp/test/lamp_0137_normalised\nlamp/test/lamp_0141_normalised\nlamp/test/lamp_0128_normalised\nlamp/test/lamp_0135_normalised\nlamp/test/lamp_0136_normalised\nlamp/test/lamp_0129_normalised\nlamp/test/lamp_0130_normalised\nlamp/test/lamp_0134_normalised\nlamp/test/lamp_0125_normalised\nlamp/test/lamp_0133_normalised\nlamp/test/lamp_0143_normalised\nlamp/test/lamp_0132_normalised\nlamp/test/lamp_0140_normalised\nlamp/test/lamp_0127_normalised\nlamp/test/lamp_0138_normalised\nlamp/test/lamp_0131_normalised\nlamp/test/lamp_0126_normalised\nlamp/test/lamp_0142_normalised\nlamp/test/
lamp_0144_normalised\nsofa/train/sofa_0524_normalised\nsofa/train/sofa_0266_normalised\nsofa/train/sofa_0231_normalised\nsofa/train/sofa_0603_normalised\nsofa/train/sofa_0213_normalised\nsofa/train/sofa_0302_normalised\nsofa/train/sofa_0363_normalised\nsofa/train/sofa_0321_normalised\nsofa/train/sofa_0250_normalised\nsofa/train/sofa_0580_normalised\nsofa/train/sofa_0500_normalised\nsofa/train/sofa_0598_normalised\nsofa/train/sofa_0254_normalised\nsofa/train/sofa_0138_normalised\nsofa/train/sofa_0563_normalised\nsofa/train/sofa_0523_normalised\nsofa/train/sofa_0463_normalised\nsofa/train/sofa_0480_normalised\nsofa/train/sofa_0495_normalised\nsofa/train/sofa_0600_normalised\nsofa/train/sofa_0605_normalised\nsofa/train/sofa_0537_normalised\nsofa/train/sofa_0064_normalised\nsofa/train/sofa_0437_normalised\nsofa/train/sofa_0140_normalised\nsofa/train/sofa_0207_normalised\nsofa/train/sofa_0271_normalised\nsofa/train/sofa_0420_normalised\nsofa/train/sofa_0583_normalised\nsofa/train/sofa_0101_normalised\nsofa/train/sofa_0335_normalised\nsofa/train/sofa_0072_normalised\nsofa/train/sofa_0385_normalised\nsofa/train/sofa_0134_normalised\nsofa/train/sofa_0499_normalised\nsofa/train/sofa_0431_normalised\nsofa/train/sofa_0505_normalised\nsofa/train/sofa_0105_normalised\nsofa/train/sofa_0085_normalised\nsofa/train/sofa_0533_normalised\nsofa/train/sofa_0285_normalised\nsofa/train/sofa_0208_normalised\nsofa/train/sofa_0453_normalised\nsofa/train/sofa_0538_normalised\nsofa/train/sofa_0375_normalised\nsofa/train/sofa_0651_normalised\nsofa/train/sofa_0123_normalised\nsofa/train/sofa_0568_normalised\nsofa/train/sofa_0345_normalised\nsofa/train/sofa_0159_normalised\nsofa/train/sofa_0104_normalised\nsofa/train/sofa_0057_normalised\nsofa/train/sofa_0676_normalised\nsofa/train/sofa_0026_normalised\nsofa/train/sofa_0680_normalised\nsofa/train/sofa_0476_normalised\nsofa/train/sofa_0395_normalised\nsofa/train/sofa_0181_normalised\nsofa/train/sofa_0392_normalised\nsofa/train/sofa_0263_normalised
\nsofa/train/sofa_0403_normalised\nsofa/train/sofa_0016_normalised\nsofa/train/sofa_0434_normalised\nsofa/train/sofa_0402_normalised\nsofa/train/sofa_0135_normalised\nsofa/train/sofa_0358_normalised\nsofa/train/sofa_0655_normalised\nsofa/train/sofa_0005_normalised\nsofa/train/sofa_0577_normalised\nsofa/train/sofa_0474_normalised\nsofa/train/sofa_0338_normalised\nsofa/train/sofa_0118_normalised\nsofa/train/sofa_0667_normalised\nsofa/train/sofa_0212_normalised\nsofa/train/sofa_0449_normalised\nsofa/train/sofa_0226_normalised\nsofa/train/sofa_0107_normalised\nsofa/train/sofa_0171_normalised\nsofa/train/sofa_0289_normalised\nsofa/train/sofa_0306_normalised\nsofa/train/sofa_0531_normalised\nsofa/train/sofa_0184_normalised\nsofa/train/sofa_0498_normalised\nsofa/train/sofa_0071_normalised\nsofa/train/sofa_0004_normalised\nsofa/train/sofa_0478_normalised\nsofa/train/sofa_0633_normalised\nsofa/train/sofa_0574_normalised\nsofa/train/sofa_0415_normalised\nsofa/train/sofa_0643_normalised\nsofa/train/sofa_0006_normalised\nsofa/train/sofa_0047_normalised\nsofa/train/sofa_0336_normalised\nsofa/train/sofa_0330_normalised\nsofa/train/sofa_0548_normalised\nsofa/train/sofa_0187_normalised\nsofa/train/sofa_0354_normalised\nsofa/train/sofa_0236_normalised\nsofa/train/sofa_0353_normalised\nsofa/train/sofa_0562_normalised\nsofa/train/sofa_0086_normalised\nsofa/train/sofa_0364_normalised\nsofa/train/sofa_0074_normalised\nsofa/train/sofa_0111_normalised\nsofa/train/sofa_0219_normalised\nsofa/train/sofa_0002_normalised\nsofa/train/sofa_0240_normalised\nsofa/train/sofa_0235_normalised\nsofa/train/sofa_0220_normalised\nsofa/train/sofa_0146_normalised\nsofa/train/sofa_0648_normalised\nsofa/train/sofa_0114_normalised\nsofa/train/sofa_0261_normalised\nsofa/train/sofa_0397_normalised\nsofa/train/sofa_0625_normalised\nsofa/train/sofa_0435_normalised\nsofa/train/sofa_0063_normalised\nsofa/train/sofa_0637_normalised\nsofa/train/sofa_0339_normalised\nsofa/train/sofa_0060_normalised\nsofa/train/sofa_03
29_normalised\nsofa/train/sofa_0148_normalised\nsofa/train/sofa_0630_normalised\nsofa/train/sofa_0645_normalised\nsofa/train/sofa_0209_normalised\nsofa/train/sofa_0416_normalised\nsofa/train/sofa_0546_normalised\nsofa/train/sofa_0445_normalised\nsofa/train/sofa_0594_normalised\nsofa/train/sofa_0305_normalised\nsofa/train/sofa_0639_normalised\nsofa/train/sofa_0507_normalised\nsofa/train/sofa_0555_normalised\nsofa/train/sofa_0422_normalised\nsofa/train/sofa_0620_normalised\nsofa/train/sofa_0539_normalised\nsofa/train/sofa_0659_normalised\nsofa/train/sofa_0334_normalised\nsofa/train/sofa_0485_normalised\nsofa/train/sofa_0188_normalised\nsofa/train/sofa_0356_normalised\nsofa/train/sofa_0095_normalised\nsofa/train/sofa_0242_normalised\nsofa/train/sofa_0526_normalised\nsofa/train/sofa_0227_normalised\nsofa/train/sofa_0357_normalised\nsofa/train/sofa_0052_normalised\nsofa/train/sofa_0039_normalised\nsofa/train/sofa_0493_normalised\nsofa/train/sofa_0458_normalised\nsofa/train/sofa_0679_normalised\nsofa/train/sofa_0650_normalised\nsofa/train/sofa_0253_normalised\nsofa/train/sofa_0588_normalised\nsofa/train/sofa_0021_normalised\nsofa/train/sofa_0670_normalised\nsofa/train/sofa_0618_normalised\nsofa/train/sofa_0328_normalised\nsofa/train/sofa_0280_normalised\nsofa/train/sofa_0319_normalised\nsofa/train/sofa_0121_normalised\nsofa/train/sofa_0178_normalised\nsofa/train/sofa_0582_normalised\nsofa/train/sofa_0668_normalised\nsofa/train/sofa_0264_normalised\nsofa/train/sofa_0126_normalised\nsofa/train/sofa_0469_normalised\nsofa/train/sofa_0077_normalised\nsofa/train/sofa_0491_normalised\nsofa/train/sofa_0003_normalised\nsofa/train/sofa_0542_normalised\nsofa/train/sofa_0438_normalised\nsofa/train/sofa_0108_normalised\nsofa/train/sofa_0520_normalised\nsofa/train/sofa_0015_normalised\nsofa/train/sofa_0406_normalised\nsofa/train/sofa_0619_normalised\nsofa/train/sofa_0366_normalised\nsofa/train/sofa_0087_normalised\nsofa/train/sofa_0565_normalised\nsofa/train/sofa_0622_normalised\nsofa/
train/sofa_0534_normalised\nsofa/train/sofa_0599_normalised\nsofa/train/sofa_0048_normalised\nsofa/train/sofa_0669_normalised\nsofa/train/sofa_0545_normalised\nsofa/train/sofa_0607_normalised\nsofa/train/sofa_0117_normalised\nsofa/train/sofa_0233_normalised\nsofa/train/sofa_0200_normalised\nsofa/train/sofa_0251_normalised\nsofa/train/sofa_0125_normalised\nsofa/train/sofa_0404_normalised\nsofa/train/sofa_0094_normalised\nsofa/train/sofa_0008_normalised\nsofa/train/sofa_0410_normalised\nsofa/train/sofa_0165_normalised\nsofa/train/sofa_0279_normalised\nsofa/train/sofa_0372_normalised\nsofa/train/sofa_0059_normalised\nsofa/train/sofa_0230_normalised\nsofa/train/sofa_0528_normalised\nsofa/train/sofa_0036_normalised\nsofa/train/sofa_0567_normalised\nsofa/train/sofa_0274_normalised\nsofa/train/sofa_0082_normalised\nsofa/train/sofa_0061_normalised\nsofa/train/sofa_0044_normalised\nsofa/train/sofa_0023_normalised\nsofa/train/sofa_0423_normalised\nsofa/train/sofa_0647_normalised\nsofa/train/sofa_0483_normalised\nsofa/train/sofa_0326_normalised\nsofa/train/sofa_0624_normalised\nsofa/train/sofa_0193_normalised\nsofa/train/sofa_0374_normalised\nsofa/train/sofa_0183_normalised\nsofa/train/sofa_0443_normalised\nsofa/train/sofa_0065_normalised\nsofa/train/sofa_0079_normalised\nsofa/train/sofa_0459_normalised\nsofa/train/sofa_0020_normalised\nsofa/train/sofa_0387_normalised\nsofa/train/sofa_0382_normalised\nsofa/train/sofa_0653_normalised\nsofa/train/sofa_0166_normalised\nsofa/train/sofa_0649_normalised\nsofa/train/sofa_0391_normalised\nsofa/train/sofa_0228_normalised\nsofa/train/sofa_0269_normalised\nsofa/train/sofa_0216_normalised\nsofa/train/sofa_0475_normalised\nsofa/train/sofa_0652_normalised\nsofa/train/sofa_0572_normalised\nsofa/train/sofa_0056_normalised\nsofa/train/sofa_0656_normalised\nsofa/train/sofa_0465_normalised\nsofa/train/sofa_0013_normalised\nsofa/train/sofa_0284_normalised\nsofa/train/sofa_0073_normalised\nsofa/train/sofa_0189_normalised\nsofa/train/sofa_0031_norm
alised\nsofa/train/sofa_0610_normalised\nsofa/train/sofa_0303_normalised\nsofa/train/sofa_0540_normalised\nsofa/train/sofa_0185_normalised\nsofa/train/sofa_0393_normalised\nsofa/train/sofa_0448_normalised\nsofa/train/sofa_0578_normalised\nsofa/train/sofa_0130_normalised\nsofa/train/sofa_0611_normalised\nsofa/train/sofa_0143_normalised\nsofa/train/sofa_0541_normalised\nsofa/train/sofa_0218_normalised\nsofa/train/sofa_0313_normalised\nsofa/train/sofa_0509_normalised\nsofa/train/sofa_0199_normalised\nsofa/train/sofa_0139_normalised\nsofa/train/sofa_0232_normalised\nsofa/train/sofa_0112_normalised\nsofa/train/sofa_0055_normalised\nsofa/train/sofa_0262_normalised\nsofa/train/sofa_0592_normalised\nsofa/train/sofa_0311_normalised\nsofa/train/sofa_0037_normalised\nsofa/train/sofa_0497_normalised\nsofa/train/sofa_0151_normalised\nsofa/train/sofa_0535_normalised\nsofa/train/sofa_0191_normalised\nsofa/train/sofa_0051_normalised\nsofa/train/sofa_0482_normalised\nsofa/train/sofa_0045_normalised\nsofa/train/sofa_0040_normalised\nsofa/train/sofa_0247_normalised\nsofa/train/sofa_0342_normalised\nsofa/train/sofa_0341_normalised\nsofa/train/sofa_0672_normalised\nsofa/train/sofa_0384_normalised\nsofa/train/sofa_0564_normalised\nsofa/train/sofa_0323_normalised\nsofa/train/sofa_0286_normalised\nsofa/train/sofa_0029_normalised\nsofa/train/sofa_0355_normalised\nsofa/train/sofa_0514_normalised\nsofa/train/sofa_0456_normalised\nsofa/train/sofa_0506_normalised\nsofa/train/sofa_0025_normalised\nsofa/train/sofa_0246_normalised\nsofa/train/sofa_0634_normalised\nsofa/train/sofa_0440_normalised\nsofa/train/sofa_0383_normalised\nsofa/train/sofa_0359_normalised\nsofa/train/sofa_0141_normalised\nsofa/train/sofa_0642_normalised\nsofa/train/sofa_0549_normalised\nsofa/train/sofa_0615_normalised\nsofa/train/sofa_0129_normalised\nsofa/train/sofa_0237_normalised\nsofa/train/sofa_0333_normalised\nsofa/train/sofa_0593_normalised\nsofa/train/sofa_0462_normalised\nsofa/train/sofa_0373_normalised\nsofa/train/s
ofa_0490_normalised\nsofa/train/sofa_0277_normalised\nsofa/train/sofa_0194_normalised\nsofa/train/sofa_0602_normalised\nsofa/train/sofa_0290_normalised\nsofa/train/sofa_0217_normalised\nsofa/train/sofa_0124_normalised\nsofa/train/sofa_0042_normalised\nsofa/train/sofa_0252_normalised\nsofa/train/sofa_0612_normalised\nsofa/train/sofa_0557_normalised\nsofa/train/sofa_0584_normalised\nsofa/train/sofa_0314_normalised\nsofa/train/sofa_0152_normalised\nsofa/train/sofa_0024_normalised\nsofa/train/sofa_0128_normalised\nsofa/train/sofa_0674_normalised\nsofa/train/sofa_0346_normalised\nsofa/train/sofa_0399_normalised\nsofa/train/sofa_0489_normalised\nsofa/train/sofa_0267_normalised\nsofa/train/sofa_0521_normalised\nsofa/train/sofa_0309_normalised\nsofa/train/sofa_0405_normalised\nsofa/train/sofa_0283_normalised\nsofa/train/sofa_0433_normalised\nsofa/train/sofa_0481_normalised\nsofa/train/sofa_0089_normalised\nsofa/train/sofa_0041_normalised\nsofa/train/sofa_0110_normalised\nsofa/train/sofa_0627_normalised\nsofa/train/sofa_0424_normalised\nsofa/train/sofa_0102_normalised\nsofa/train/sofa_0075_normalised\nsofa/train/sofa_0398_normalised\nsofa/train/sofa_0512_normalised\nsofa/train/sofa_0278_normalised\nsofa/train/sofa_0367_normalised\nsofa/train/sofa_0062_normalised\nsofa/train/sofa_0461_normalised\nsofa/train/sofa_0205_normalised\nsofa/train/sofa_0394_normalised\nsofa/train/sofa_0202_normalised\nsofa/train/sofa_0249_normalised\nsofa/train/sofa_0517_normalised\nsofa/train/sofa_0597_normalised\nsofa/train/sofa_0487_normalised\nsofa/train/sofa_0457_normalised\nsofa/train/sofa_0030_normalised\nsofa/train/sofa_0093_normalised\nsofa/train/sofa_0631_normalised\nsofa/train/sofa_0477_normalised\nsofa/train/sofa_0301_normalised\nsofa/train/sofa_0516_normalised\nsofa/train/sofa_0617_normalised\nsofa/train/sofa_0378_normalised\nsofa/train/sofa_0273_normalised\nsofa/train/sofa_0221_normalised\nsofa/train/sofa_0147_normalised\nsofa/train/sofa_0558_normalised\nsofa/train/sofa_0629_normalised\
nsofa/train/sofa_0070_normalised\nsofa/train/sofa_0590_normalised\nsofa/train/sofa_0100_normalised\nsofa/train/sofa_0408_normalised\nsofa/train/sofa_0352_normalised\nsofa/train/sofa_0197_normalised\nsofa/train/sofa_0662_normalised\nsofa/train/sofa_0310_normalised\nsofa/train/sofa_0164_normalised\nsofa/train/sofa_0362_normalised\nsofa/train/sofa_0360_normalised\nsofa/train/sofa_0451_normalised\nsofa/train/sofa_0131_normalised\nsofa/train/sofa_0376_normalised\nsofa/train/sofa_0556_normalised\nsofa/train/sofa_0587_normalised\nsofa/train/sofa_0413_normalised\nsofa/train/sofa_0348_normalised\nsofa/train/sofa_0054_normalised\nsofa/train/sofa_0017_normalised\nsofa/train/sofa_0479_normalised\nsofa/train/sofa_0460_normalised\nsofa/train/sofa_0494_normalised\nsofa/train/sofa_0179_normalised\nsofa/train/sofa_0613_normalised\nsofa/train/sofa_0419_normalised\nsofa/train/sofa_0503_normalised\nsofa/train/sofa_0007_normalised\nsofa/train/sofa_0661_normalised\nsofa/train/sofa_0340_normalised\nsofa/train/sofa_0081_normalised\nsofa/train/sofa_0349_normalised\nsofa/train/sofa_0604_normalised\nsofa/train/sofa_0043_normalised\nsofa/train/sofa_0665_normalised\nsofa/train/sofa_0069_normalised\nsofa/train/sofa_0088_normalised\nsofa/train/sofa_0400_normalised\nsofa/train/sofa_0484_normalised\nsofa/train/sofa_0522_normalised\nsofa/train/sofa_0170_normalised\nsofa/train/sofa_0255_normalised\nsofa/train/sofa_0673_normalised\nsofa/train/sofa_0272_normalised\nsofa/train/sofa_0421_normalised\nsofa/train/sofa_0259_normalised\nsofa/train/sofa_0174_normalised\nsofa/train/sofa_0244_normalised\nsofa/train/sofa_0436_normalised\nsofa/train/sofa_0426_normalised\nsofa/train/sofa_0473_normalised\nsofa/train/sofa_0163_normalised\nsofa/train/sofa_0215_normalised\nsofa/train/sofa_0245_normalised\nsofa/train/sofa_0192_normalised\nsofa/train/sofa_0257_normalised\nsofa/train/sofa_0173_normalised\nsofa/train/sofa_0229_normalised\nsofa/train/sofa_0115_normalised\nsofa/train/sofa_0103_normalised\nsofa/train/sofa_058
6_normalised\nsofa/train/sofa_0097_normalised\nsofa/train/sofa_0204_normalised\nsofa/train/sofa_0132_normalised\nsofa/train/sofa_0621_normalised\nsofa/train/sofa_0136_normalised\nsofa/train/sofa_0109_normalised\nsofa/train/sofa_0053_normalised\nsofa/train/sofa_0282_normalised\nsofa/train/sofa_0569_normalised\nsofa/train/sofa_0502_normalised\nsofa/train/sofa_0452_normalised\nsofa/train/sofa_0033_normalised\nsofa/train/sofa_0084_normalised\nsofa/train/sofa_0401_normalised\nsofa/train/sofa_0609_normalised\nsofa/train/sofa_0046_normalised\nsofa/train/sofa_0090_normalised\nsofa/train/sofa_0315_normalised\nsofa/train/sofa_0122_normalised\nsofa/train/sofa_0468_normalised\nsofa/train/sofa_0447_normalised\nsofa/train/sofa_0022_normalised\nsofa/train/sofa_0012_normalised\nsofa/train/sofa_0585_normalised\nsofa/train/sofa_0488_normalised\nsofa/train/sofa_0646_normalised\nsofa/train/sofa_0552_normalised\nsofa/train/sofa_0028_normalised\nsofa/train/sofa_0496_normalised\nsofa/train/sofa_0096_normalised\nsofa/train/sofa_0144_normalised\nsofa/train/sofa_0471_normalised\nsofa/train/sofa_0553_normalised\nsofa/train/sofa_0579_normalised\nsofa/train/sofa_0331_normalised\nsofa/train/sofa_0145_normalised\nsofa/train/sofa_0492_normalised\nsofa/train/sofa_0455_normalised\nsofa/train/sofa_0176_normalised\nsofa/train/sofa_0570_normalised\nsofa/train/sofa_0172_normalised\nsofa/train/sofa_0317_normalised\nsofa/train/sofa_0550_normalised\nsofa/train/sofa_0304_normalised\nsofa/train/sofa_0291_normalised\nsofa/train/sofa_0377_normalised\nsofa/train/sofa_0196_normalised\nsofa/train/sofa_0238_normalised\nsofa/train/sofa_0616_normalised\nsofa/train/sofa_0657_normalised\nsofa/train/sofa_0265_normalised\nsofa/train/sofa_0091_normalised\nsofa/train/sofa_0160_normalised\nsofa/train/sofa_0596_normalised\nsofa/train/sofa_0295_normalised\nsofa/train/sofa_0268_normalised\nsofa/train/sofa_0581_normalised\nsofa/train/sofa_0554_normalised\nsofa/train/sofa_0677_normalised\nsofa/train/sofa_0560_normalised\nsofa/t
rain/sofa_0472_normalised\nsofa/train/sofa_0142_normalised\nsofa/train/sofa_0664_normalised\nsofa/train/sofa_0010_normalised\nsofa/train/sofa_0425_normalised\nsofa/train/sofa_0258_normalised\nsofa/train/sofa_0429_normalised\nsofa/train/sofa_0396_normalised\nsofa/train/sofa_0066_normalised\nsofa/train/sofa_0276_normalised\nsofa/train/sofa_0660_normalised\nsofa/train/sofa_0351_normalised\nsofa/train/sofa_0466_normalised\nsofa/train/sofa_0098_normalised\nsofa/train/sofa_0544_normalised\nsofa/train/sofa_0663_normalised\nsofa/train/sofa_0626_normalised\nsofa/train/sofa_0380_normalised\nsofa/train/sofa_0379_normalised\nsofa/train/sofa_0292_normalised\nsofa/train/sofa_0195_normalised\nsofa/train/sofa_0641_normalised\nsofa/train/sofa_0049_normalised\nsofa/train/sofa_0606_normalised\nsofa/train/sofa_0644_normalised\nsofa/train/sofa_0133_normalised\nsofa/train/sofa_0322_normalised\nsofa/train/sofa_0636_normalised\nsofa/train/sofa_0337_normalised\nsofa/train/sofa_0409_normalised\nsofa/train/sofa_0654_normalised\nsofa/train/sofa_0324_normalised\nsofa/train/sofa_0318_normalised\nsofa/train/sofa_0518_normalised\nsofa/train/sofa_0470_normalised\nsofa/train/sofa_0510_normalised\nsofa/train/sofa_0666_normalised\nsofa/train/sofa_0344_normalised\nsofa/train/sofa_0224_normalised\nsofa/train/sofa_0325_normalised\nsofa/train/sofa_0464_normalised\nsofa/train/sofa_0083_normalised\nsofa/train/sofa_0559_normalised\nsofa/train/sofa_0038_normalised\nsofa/train/sofa_0206_normalised\nsofa/train/sofa_0106_normalised\nsofa/train/sofa_0001_normalised\nsofa/train/sofa_0671_normalised\nsofa/train/sofa_0361_normalised\nsofa/train/sofa_0035_normalised\nsofa/train/sofa_0256_normalised\nsofa/train/sofa_0407_normalised\nsofa/train/sofa_0180_normalised\nsofa/train/sofa_0234_normalised\nsofa/train/sofa_0628_normalised\nsofa/train/sofa_0561_normalised\nsofa/train/sofa_0508_normalised\nsofa/train/sofa_0211_normalised\nsofa/train/sofa_0511_normalised\nsofa/train/sofa_0248_normalised\nsofa/train/sofa_0299_norma
lised\nsofa/train/sofa_0504_normalised\nsofa/train/sofa_0182_normalised\nsofa/train/sofa_0446_normalised\nsofa/train/sofa_0149_normalised\nsofa/train/sofa_0225_normalised\nsofa/train/sofa_0638_normalised\nsofa/train/sofa_0412_normalised\nsofa/train/sofa_0536_normalised\nsofa/train/sofa_0177_normalised\nsofa/train/sofa_0241_normalised\nsofa/train/sofa_0127_normalised\nsofa/train/sofa_0203_normalised\nsofa/train/sofa_0614_normalised\nsofa/train/sofa_0078_normalised\nsofa/train/sofa_0427_normalised\nsofa/train/sofa_0167_normalised\nsofa/train/sofa_0566_normalised\nsofa/train/sofa_0525_normalised\nsofa/train/sofa_0486_normalised\nsofa/train/sofa_0009_normalised\nsofa/train/sofa_0386_normalised\nsofa/train/sofa_0595_normalised\nsofa/train/sofa_0067_normalised\nsofa/train/sofa_0608_normalised\nsofa/train/sofa_0370_normalised\nsofa/train/sofa_0576_normalised\nsofa/train/sofa_0307_normalised\nsofa/train/sofa_0034_normalised\nsofa/train/sofa_0371_normalised\nsofa/train/sofa_0388_normalised\nsofa/train/sofa_0210_normalised\nsofa/train/sofa_0418_normalised\nsofa/train/sofa_0092_normalised\nsofa/train/sofa_0300_normalised\nsofa/train/sofa_0260_normalised\nsofa/train/sofa_0454_normalised\nsofa/train/sofa_0365_normalised\nsofa/train/sofa_0154_normalised\nsofa/train/sofa_0529_normalised\nsofa/train/sofa_0158_normalised\nsofa/train/sofa_0501_normalised\nsofa/train/sofa_0519_normalised\nsofa/train/sofa_0411_normalised\nsofa/train/sofa_0543_normalised\nsofa/train/sofa_0186_normalised\nsofa/train/sofa_0113_normalised\nsofa/train/sofa_0287_normalised\nsofa/train/sofa_0678_normalised\nsofa/train/sofa_0120_normalised\nsofa/train/sofa_0050_normalised\nsofa/train/sofa_0417_normalised\nsofa/train/sofa_0532_normalised\nsofa/train/sofa_0281_normalised\nsofa/train/sofa_0018_normalised\nsofa/train/sofa_0161_normalised\nsofa/train/sofa_0601_normalised\nsofa/train/sofa_0442_normalised\nsofa/train/sofa_0347_normalised\nsofa/train/sofa_0156_normalised\nsofa/train/sofa_0175_normalised\nsofa/train/so
fa_0573_normalised\nsofa/train/sofa_0116_normalised\nsofa/train/sofa_0513_normalised\nsofa/train/sofa_0428_normalised\nsofa/train/sofa_0439_normalised\nsofa/train/sofa_0623_normalised\nsofa/train/sofa_0011_normalised\nsofa/train/sofa_0239_normalised\nsofa/train/sofa_0275_normalised\nsofa/train/sofa_0288_normalised\nsofa/train/sofa_0589_normalised\nsofa/train/sofa_0293_normalised\nsofa/train/sofa_0308_normalised\nsofa/train/sofa_0223_normalised\nsofa/train/sofa_0270_normalised\nsofa/train/sofa_0389_normalised\nsofa/train/sofa_0316_normalised\nsofa/train/sofa_0153_normalised\nsofa/train/sofa_0530_normalised\nsofa/train/sofa_0201_normalised\nsofa/train/sofa_0327_normalised\nsofa/train/sofa_0169_normalised\nsofa/train/sofa_0591_normalised\nsofa/train/sofa_0441_normalised\nsofa/train/sofa_0320_normalised\nsofa/train/sofa_0168_normalised\nsofa/train/sofa_0551_normalised\nsofa/train/sofa_0155_normalised\nsofa/train/sofa_0332_normalised\nsofa/train/sofa_0150_normalised\nsofa/train/sofa_0369_normalised\nsofa/train/sofa_0467_normalised\nsofa/train/sofa_0019_normalised\nsofa/train/sofa_0032_normalised\nsofa/train/sofa_0222_normalised\nsofa/train/sofa_0527_normalised\nsofa/train/sofa_0635_normalised\nsofa/train/sofa_0430_normalised\nsofa/train/sofa_0444_normalised\nsofa/train/sofa_0190_normalised\nsofa/train/sofa_0099_normalised\nsofa/train/sofa_0632_normalised\nsofa/train/sofa_0571_normalised\nsofa/train/sofa_0137_normalised\nsofa/train/sofa_0298_normalised\nsofa/train/sofa_0214_normalised\nsofa/train/sofa_0068_normalised\nsofa/train/sofa_0368_normalised\nsofa/train/sofa_0658_normalised\nsofa/train/sofa_0414_normalised\nsofa/train/sofa_0381_normalised\nsofa/train/sofa_0547_normalised\nsofa/train/sofa_0390_normalised\nsofa/train/sofa_0432_normalised\nsofa/train/sofa_0675_normalised\nsofa/train/sofa_0312_normalised\nsofa/train/sofa_0162_normalised\nsofa/train/sofa_0076_normalised\nsofa/train/sofa_0294_normalised\nsofa/train/sofa_0297_normalised\nsofa/train/sofa_0350_normalised\n
sofa/train/sofa_0243_normalised\nsofa/train/sofa_0014_normalised\nsofa/train/sofa_0080_normalised\nsofa/train/sofa_0450_normalised\nsofa/train/sofa_0575_normalised\nsofa/train/sofa_0157_normalised\nsofa/train/sofa_0027_normalised\nsofa/train/sofa_0119_normalised\nsofa/train/sofa_0296_normalised\nsofa/train/sofa_0343_normalised\nsofa/train/sofa_0198_normalised\nsofa/train/sofa_0058_normalised\nsofa/train/sofa_0515_normalised\nsofa/train/sofa_0640_normalised\nsofa/test/sofa_0772_normalised\nsofa/test/sofa_0776_normalised\nsofa/test/sofa_0716_normalised\nsofa/test/sofa_0768_normalised\nsofa/test/sofa_0748_normalised\nsofa/test/sofa_0758_normalised\nsofa/test/sofa_0727_normalised\nsofa/test/sofa_0732_normalised\nsofa/test/sofa_0715_normalised\nsofa/test/sofa_0756_normalised\nsofa/test/sofa_0746_normalised\nsofa/test/sofa_0742_normalised\nsofa/test/sofa_0702_normalised\nsofa/test/sofa_0688_normalised\nsofa/test/sofa_0769_normalised\nsofa/test/sofa_0696_normalised\nsofa/test/sofa_0744_normalised\nsofa/test/sofa_0681_normalised\nsofa/test/sofa_0767_normalised\nsofa/test/sofa_0749_normalised\nsofa/test/sofa_0694_normalised\nsofa/test/sofa_0712_normalised\nsofa/test/sofa_0736_normalised\nsofa/test/sofa_0773_normalised\nsofa/test/sofa_0684_normalised\nsofa/test/sofa_0762_normalised\nsofa/test/sofa_0697_normalised\nsofa/test/sofa_0764_normalised\nsofa/test/sofa_0738_normalised\nsofa/test/sofa_0754_normalised\nsofa/test/sofa_0775_normalised\nsofa/test/sofa_0720_normalised\nsofa/test/sofa_0745_normalised\nsofa/test/sofa_0771_normalised\nsofa/test/sofa_0719_normalised\nsofa/test/sofa_0710_normalised\nsofa/test/sofa_0774_normalised\nsofa/test/sofa_0692_normalised\nsofa/test/sofa_0778_normalised\nsofa/test/sofa_0709_normalised\nsofa/test/sofa_0760_normalised\nsofa/test/sofa_0699_normalised\nsofa/test/sofa_0714_normalised\nsofa/test/sofa_0734_normalised\nsofa/test/sofa_0777_normalised\nsofa/test/sofa_0713_normalised\nsofa/test/sofa_0698_normalised\nsofa/test/sofa_0780_normalised\nso
fa/test/sofa_0779_normalised\nsofa/test/sofa_0705_normalised\nsofa/test/sofa_0750_normalised\nsofa/test/sofa_0726_normalised\nsofa/test/sofa_0689_normalised\nsofa/test/sofa_0691_normalised\nsofa/test/sofa_0721_normalised\nsofa/test/sofa_0770_normalised\nsofa/test/sofa_0761_normalised\nsofa/test/sofa_0741_normalised\nsofa/test/sofa_0733_normalised\nsofa/test/sofa_0693_normalised\nsofa/test/sofa_0740_normalised\nsofa/test/sofa_0683_normalised\nsofa/test/sofa_0751_normalised\nsofa/test/sofa_0682_normalised\nsofa/test/sofa_0722_normalised\nsofa/test/sofa_0704_normalised\nsofa/test/sofa_0703_normalised\nsofa/test/sofa_0747_normalised\nsofa/test/sofa_0686_normalised\nsofa/test/sofa_0728_normalised\nsofa/test/sofa_0701_normalised\nsofa/test/sofa_0735_normalised\nsofa/test/sofa_0690_normalised\nsofa/test/sofa_0755_normalised\nsofa/test/sofa_0757_normalised\nsofa/test/sofa_0695_normalised\nsofa/test/sofa_0718_normalised\nsofa/test/sofa_0730_normalised\nsofa/test/sofa_0723_normalised\nsofa/test/sofa_0725_normalised\nsofa/test/sofa_0759_normalised\nsofa/test/sofa_0711_normalised\nsofa/test/sofa_0763_normalised\nsofa/test/sofa_0731_normalised\nsofa/test/sofa_0739_normalised\nsofa/test/sofa_0707_normalised\nsofa/test/sofa_0765_normalised\nsofa/test/sofa_0753_normalised\nsofa/test/sofa_0717_normalised\nsofa/test/sofa_0743_normalised\nsofa/test/sofa_0766_normalised\nsofa/test/sofa_0729_normalised\nsofa/test/sofa_0706_normalised\nsofa/test/sofa_0752_normalised\nsofa/test/sofa_0687_normalised\nsofa/test/sofa_0724_normalised\nsofa/test/sofa_0700_normalised\nsofa/test/sofa_0685_normalised\nsofa/test/sofa_0708_normalised\nsofa/test/sofa_0737_normalised\nwardrobe/train/wardrobe_0074_normalised\nwardrobe/train/wardrobe_0086_normalised\nwardrobe/train/wardrobe_0012_normalised\nwardrobe/train/wardrobe_0010_normalised\nwardrobe/train/wardrobe_0015_normalised\nwardrobe/train/wardrobe_0077_normalised\nwardrobe/train/wardrobe_0085_normalised\nwardrobe/train/wardrobe_0014_normalised\nwardrobe/t
rain/wardrobe_0019_normalised\nwardrobe/train/wardrobe_0024_normalised\nwardrobe/train/wardrobe_0007_normalised\nwardrobe/train/wardrobe_0061_normalised\nwardrobe/train/wardrobe_0072_normalised\nwardrobe/train/wardrobe_0023_normalised\nwardrobe/train/wardrobe_0049_normalised\nwardrobe/train/wardrobe_0037_normalised\nwardrobe/train/wardrobe_0082_normalised\nwardrobe/train/wardrobe_0033_normalised\nwardrobe/train/wardrobe_0016_normalised\nwardrobe/train/wardrobe_0047_normalised\nwardrobe/train/wardrobe_0050_normalised\nwardrobe/train/wardrobe_0028_normalised\nwardrobe/train/wardrobe_0025_normalised\nwardrobe/train/wardrobe_0038_normalised\nwardrobe/train/wardrobe_0079_normalised\nwardrobe/train/wardrobe_0066_normalised\nwardrobe/train/wardrobe_0045_normalised\nwardrobe/train/wardrobe_0070_normalised\nwardrobe/train/wardrobe_0055_normalised\nwardrobe/train/wardrobe_0043_normalised\nwardrobe/train/wardrobe_0059_normalised\nwardrobe/train/wardrobe_0075_normalised\nwardrobe/train/wardrobe_0040_normalised\nwardrobe/train/wardrobe_0060_normalised\nwardrobe/train/wardrobe_0056_normalised\nwardrobe/train/wardrobe_0002_normalised\nwardrobe/train/wardrobe_0036_normalised\nwardrobe/train/wardrobe_0084_normalised\nwardrobe/train/wardrobe_0022_normalised\nwardrobe/train/wardrobe_0062_normalised\nwardrobe/train/wardrobe_0065_normalised\nwardrobe/train/wardrobe_0001_normalised\nwardrobe/train/wardrobe_0013_normalised\nwardrobe/train/wardrobe_0020_normalised\nwardrobe/train/wardrobe_0021_normalised\nwardrobe/train/wardrobe_0034_normalised\nwardrobe/train/wardrobe_0017_normalised\nwardrobe/train/wardrobe_0067_normalised\nwardrobe/train/wardrobe_0026_normalised\nwardrobe/train/wardrobe_0004_normalised\nwardrobe/train/wardrobe_0054_normalised\nwardrobe/train/wardrobe_0032_normalised\nwardrobe/train/wardrobe_0081_normalised\nwardrobe/train/wardrobe_0027_normalised\nwardrobe/train/wardrobe_0078_normalised\nwardrobe/train/wardrobe_0058_normalised\nwardrobe/train/wardrobe_0057_normalised\nw
ardrobe/train/wardrobe_0046_normalised\nwardrobe/train/wardrobe_0052_normalised\nwardrobe/train/wardrobe_0005_normalised\nwardrobe/train/wardrobe_0029_normalised\nwardrobe/train/wardrobe_0053_normalised\nwardrobe/train/wardrobe_0018_normalised\nwardrobe/train/wardrobe_0006_normalised\nwardrobe/train/wardrobe_0064_normalised\nwardrobe/train/wardrobe_0030_normalised\nwardrobe/train/wardrobe_0051_normalised\nwardrobe/train/wardrobe_0073_normalised\nwardrobe/train/wardrobe_0009_normalised\nwardrobe/train/wardrobe_0003_normalised\nwardrobe/train/wardrobe_0048_normalised\nwardrobe/train/wardrobe_0044_normalised\nwardrobe/train/wardrobe_0071_normalised\nwardrobe/train/wardrobe_0031_normalised\nwardrobe/train/wardrobe_0042_normalised\nwardrobe/train/wardrobe_0035_normalised\nwardrobe/train/wardrobe_0011_normalised\nwardrobe/train/wardrobe_0080_normalised\nwardrobe/train/wardrobe_0087_normalised\nwardrobe/train/wardrobe_0039_normalised\nwardrobe/train/wardrobe_0068_normalised\nwardrobe/train/wardrobe_0041_normalised\nwardrobe/train/wardrobe_0083_normalised\nwardrobe/train/wardrobe_0008_normalised\nwardrobe/train/wardrobe_0076_normalised\nwardrobe/train/wardrobe_0069_normalised\nwardrobe/train/wardrobe_0063_normalised\nwardrobe/test/wardrobe_0091_normalised\nwardrobe/test/wardrobe_0094_normalised\nwardrobe/test/wardrobe_0099_normalised\nwardrobe/test/wardrobe_0101_normalised\nwardrobe/test/wardrobe_0096_normalised\nwardrobe/test/wardrobe_0100_normalised\nwardrobe/test/wardrobe_0104_normalised\nwardrobe/test/wardrobe_0092_normalised\nwardrobe/test/wardrobe_0090_normalised\nwardrobe/test/wardrobe_0093_normalised\nwardrobe/test/wardrobe_0097_normalised\nwardrobe/test/wardrobe_0098_normalised\nwardrobe/test/wardrobe_0088_normalised\nwardrobe/test/wardrobe_0105_normalised\nwardrobe/test/wardrobe_0095_normalised\nwardrobe/test/wardrobe_0103_normalised\nwardrobe/test/wardrobe_0106_normalised\nwardrobe/test/wardrobe_0089_normalised\nwardrobe/test/wardrobe_0107_normalised\nwardrobe/te
st/wardrobe_0102_normalised\nradio/train/radio_0022_normalised\nradio/train/radio_0072_normalised\nradio/train/radio_0042_normalised\nradio/train/radio_0060_normalised\nradio/train/radio_0078_normalised\nradio/train/radio_0047_normalised\nradio/train/radio_0013_normalised\nradio/train/radio_0057_normalised\nradio/train/radio_0053_normalised\nradio/train/radio_0011_normalised\nradio/train/radio_0069_normalised\nradio/train/radio_0050_normalised\nradio/train/radio_0081_normalised\nradio/train/radio_0071_normalised\nradio/train/radio_0064_normalised\nradio/train/radio_0087_normalised\nradio/train/radio_0076_normalised\nradio/train/radio_0009_normalised\nradio/train/radio_0018_normalised\nradio/train/radio_0096_normalised\nradio/train/radio_0049_normalised\nradio/train/radio_0093_normalised\nradio/train/radio_0017_normalised\nradio/train/radio_0027_normalised\nradio/train/radio_0070_normalised\nradio/train/radio_0051_normalised\nradio/train/radio_0065_normalised\nradio/train/radio_0073_normalised\nradio/train/radio_0041_normalised\nradio/train/radio_0068_normalised\nradio/train/radio_0037_normalised\nradio/train/radio_0010_normalised\nradio/train/radio_0089_normalised\nradio/train/radio_0092_normalised\nradio/train/radio_0094_normalised\nradio/train/radio_0025_normalised\nradio/train/radio_0036_normalised\nradio/train/radio_0062_normalised\nradio/train/radio_0035_normalised\nradio/train/radio_0032_normalised\nradio/train/radio_0012_normalised\nradio/train/radio_0067_normalised\nradio/train/radio_0052_normalised\nradio/train/radio_0014_normalised\nradio/train/radio_0034_normalised\nradio/train/radio_0088_normalised\nradio/train/radio_0048_normalised\nradio/train/radio_0005_normalised\nradio/train/radio_0100_normalised\nradio/train/radio_0055_normalised\nradio/train/radio_0075_normalised\nradio/train/radio_0084_normalised\nradio/train/radio_0097_normalised\nradio/train/radio_0061_normalised\nradio/train/radio_0024_normalised\nradio/train/radio_0045_normalised\nradio/train
/radio_0031_normalised\nradio/train/radio_0002_normalised\nradio/train/radio_0038_normalised\nradio/train/radio_0020_normalised\nradio/train/radio_0079_normalised\nradio/train/radio_0026_normalised\nradio/train/radio_0101_normalised\nradio/train/radio_0040_normalised\nradio/train/radio_0044_normalised\nradio/train/radio_0015_normalised\nradio/train/radio_0063_normalised\nradio/train/radio_0016_normalised\nradio/train/radio_0023_normalised\nradio/train/radio_0104_normalised\nradio/train/radio_0029_normalised\nradio/train/radio_0056_normalised\nradio/train/radio_0030_normalised\nradio/train/radio_0043_normalised\nradio/train/radio_0028_normalised\nradio/train/radio_0006_normalised\nradio/train/radio_0082_normalised\nradio/train/radio_0086_normalised\nradio/train/radio_0008_normalised\nradio/train/radio_0091_normalised\nradio/train/radio_0095_normalised\nradio/train/radio_0083_normalised\nradio/train/radio_0004_normalised\nradio/train/radio_0001_normalised\nradio/train/radio_0066_normalised\nradio/train/radio_0102_normalised\nradio/train/radio_0033_normalised\nradio/train/radio_0080_normalised\nradio/train/radio_0090_normalised\nradio/train/radio_0021_normalised\nradio/train/radio_0074_normalised\nradio/train/radio_0007_normalised\nradio/train/radio_0103_normalised\nradio/train/radio_0077_normalised\nradio/train/radio_0099_normalised\nradio/train/radio_0085_normalised\nradio/train/radio_0098_normalised\nradio/train/radio_0058_normalised\nradio/train/radio_0039_normalised\nradio/train/radio_0019_normalised\nradio/train/radio_0059_normalised\nradio/train/radio_0054_normalised\nradio/train/radio_0046_normalised\nradio/train/radio_0003_normalised\nradio/test/radio_0107_normalised\nradio/test/radio_0105_normalised\nradio/test/radio_0122_normalised\nradio/test/radio_0118_normalised\nradio/test/radio_0120_normalised\nradio/test/radio_0115_normalised\nradio/test/radio_0110_normalised\nradio/test/radio_0111_normalised\nradio/test/radio_0112_normalised\nradio/test/radio_0106_nor
malised\nradio/test/radio_0117_normalised\nradio/test/radio_0119_normalised\nradio/test/radio_0116_normalised\nradio/test/radio_0113_normalised\nradio/test/radio_0124_normalised\nradio/test/radio_0121_normalised\nradio/test/radio_0109_normalised\nradio/test/radio_0123_normalised\nradio/test/radio_0114_normalised\nradio/test/radio_0108_normalised\ndesk/train/desk_0186_normalised\ndesk/train/desk_0118_normalised\ndesk/train/desk_0033_normalised\ndesk/train/desk_0150_normalised\ndesk/train/desk_0134_normalised\ndesk/train/desk_0108_normalised\ndesk/train/desk_0054_normalised\ndesk/train/desk_0135_normalised\ndesk/train/desk_0085_normalised\ndesk/train/desk_0195_normalised\ndesk/train/desk_0055_normalised\ndesk/train/desk_0151_normalised\ndesk/train/desk_0167_normalised\ndesk/train/desk_0194_normalised\ndesk/train/desk_0073_normalised\ndesk/train/desk_0072_normalised\ndesk/train/desk_0007_normalised\ndesk/train/desk_0193_normalised\ndesk/train/desk_0173_normalised\ndesk/train/desk_0096_normalised\ndesk/train/desk_0030_normalised\ndesk/train/desk_0017_normalised\ndesk/train/desk_0112_normalised\ndesk/train/desk_0076_normalised\ndesk/train/desk_0180_normalised\ndesk/train/desk_0149_normalised\ndesk/train/desk_0025_normalised\ndesk/train/desk_0058_normalised\ndesk/train/desk_0046_normalised\ndesk/train/desk_0075_normalised\ndesk/train/desk_0120_normalised\ndesk/train/desk_0015_normalised\ndesk/train/desk_0115_normalised\ndesk/train/desk_0061_normalised\ndesk/train/desk_0140_normalised\ndesk/train/desk_0021_normalised\ndesk/train/desk_0121_normalised\ndesk/train/desk_0141_normalised\ndesk/train/desk_0164_normalised\ndesk/train/desk_0091_normalised\ndesk/train/desk_0142_normalised\ndesk/train/desk_0041_normalised\ndesk/train/desk_0093_normalised\ndesk/train/desk_0089_normalised\ndesk/train/desk_0138_normalised\ndesk/train/desk_0044_normalised\ndesk/train/desk_0132_normalised\ndesk/train/desk_0047_normalised\ndesk/train/desk_0152_normalised\ndesk/train/desk_0080_normalised\nd
esk/train/desk_0040_normalised\ndesk/train/desk_0029_normalised\ndesk/train/desk_0181_normalised\ndesk/train/desk_0156_normalised\ndesk/train/desk_0070_normalised\ndesk/train/desk_0069_normalised\ndesk/train/desk_0155_normalised\ndesk/train/desk_0063_normalised\ndesk/train/desk_0146_normalised\ndesk/train/desk_0147_normalised\ndesk/train/desk_0107_normalised\ndesk/train/desk_0159_normalised\ndesk/train/desk_0060_normalised\ndesk/train/desk_0162_normalised\ndesk/train/desk_0187_normalised\ndesk/train/desk_0067_normalised\ndesk/train/desk_0011_normalised\ndesk/train/desk_0090_normalised\ndesk/train/desk_0006_normalised\ndesk/train/desk_0013_normalised\ndesk/train/desk_0008_normalised\ndesk/train/desk_0144_normalised\ndesk/train/desk_0192_normalised\ndesk/train/desk_0074_normalised\ndesk/train/desk_0104_normalised\ndesk/train/desk_0010_normalised\ndesk/train/desk_0016_normalised\ndesk/train/desk_0071_normalised\ndesk/train/desk_0053_normalised\ndesk/train/desk_0145_normalised\ndesk/train/desk_0003_normalised\ndesk/train/desk_0028_normalised\ndesk/train/desk_0139_normalised\ndesk/train/desk_0137_normalised\ndesk/train/desk_0117_normalised\ndesk/train/desk_0099_normalised\ndesk/train/desk_0189_normalised\ndesk/train/desk_0034_normalised\ndesk/train/desk_0157_normalised\ndesk/train/desk_0051_normalised\ndesk/train/desk_0166_normalised\ndesk/train/desk_0042_normalised\ndesk/train/desk_0065_normalised\ndesk/train/desk_0116_normalised\ndesk/train/desk_0178_normalised\ndesk/train/desk_0057_normalised\ndesk/train/desk_0123_normalised\ndesk/train/desk_0056_normalised\ndesk/train/desk_0004_normalised\ndesk/train/desk_0161_normalised\ndesk/train/desk_0185_normalised\ndesk/train/desk_0168_normalised\ndesk/train/desk_0066_normalised\ndesk/train/desk_0174_normalised\ndesk/train/desk_0172_normalised\ndesk/train/desk_0027_normalised\ndesk/train/desk_0111_normalised\ndesk/train/desk_0095_normalised\ndesk/train/desk_0110_normalised\ndesk/train/desk_0024_normalised\ndesk/train/desk_0086_
normalised\ndesk/train/desk_0133_normalised\ndesk/train/desk_0188_normalised\ndesk/train/desk_0052_normalised\ndesk/train/desk_0154_normalised\ndesk/train/desk_0128_normalised\ndesk/train/desk_0098_normalised\ndesk/train/desk_0169_normalised\ndesk/train/desk_0196_normalised\ndesk/train/desk_0002_normalised\ndesk/train/desk_0106_normalised\ndesk/train/desk_0198_normalised\ndesk/train/desk_0023_normalised\ndesk/train/desk_0020_normalised\ndesk/train/desk_0005_normalised\ndesk/train/desk_0026_normalised\ndesk/train/desk_0114_normalised\ndesk/train/desk_0190_normalised\ndesk/train/desk_0082_normalised\ndesk/train/desk_0163_normalised\ndesk/train/desk_0200_normalised\ndesk/train/desk_0126_normalised\ndesk/train/desk_0177_normalised\ndesk/train/desk_0009_normalised\ndesk/train/desk_0045_normalised\ndesk/train/desk_0038_normalised\ndesk/train/desk_0092_normalised\ndesk/train/desk_0131_normalised\ndesk/train/desk_0001_normalised\ndesk/train/desk_0083_normalised\ndesk/train/desk_0059_normalised\ndesk/train/desk_0102_normalised\ndesk/train/desk_0100_normalised\ndesk/train/desk_0018_normalised\ndesk/train/desk_0014_normalised\ndesk/train/desk_0032_normalised\ndesk/train/desk_0165_normalised\ndesk/train/desk_0048_normalised\ndesk/train/desk_0022_normalised\ndesk/train/desk_0077_normalised\ndesk/train/desk_0068_normalised\ndesk/train/desk_0122_normalised\ndesk/train/desk_0019_normalised\ndesk/train/desk_0160_normalised\ndesk/train/desk_0136_normalised\ndesk/train/desk_0050_normalised\ndesk/train/desk_0153_normalised\ndesk/train/desk_0191_normalised\ndesk/train/desk_0143_normalised\ndesk/train/desk_0094_normalised\ndesk/train/desk_0199_normalised\ndesk/train/desk_0012_normalised\ndesk/train/desk_0097_normalised\ndesk/train/desk_0037_normalised\ndesk/train/desk_0062_normalised\ndesk/train/desk_0039_normalised\ndesk/train/desk_0158_normalised\ndesk/train/desk_0049_normalised\ndesk/train/desk_0130_normalised\ndesk/train/desk_0182_normalised\ndesk/train/desk_0064_normalised\ndesk/tra
in/desk_0125_normalised\ndesk/train/desk_0129_normalised\ndesk/train/desk_0170_normalised\ndesk/train/desk_0084_normalised\ndesk/train/desk_0109_normalised\ndesk/train/desk_0148_normalised\ndesk/train/desk_0079_normalised\ndesk/train/desk_0124_normalised\ndesk/train/desk_0043_normalised\ndesk/train/desk_0197_normalised\ndesk/train/desk_0087_normalised\ndesk/train/desk_0088_normalised\ndesk/train/desk_0031_normalised\ndesk/train/desk_0081_normalised\ndesk/train/desk_0113_normalised\ndesk/train/desk_0103_normalised\ndesk/train/desk_0127_normalised\ndesk/train/desk_0036_normalised\ndesk/train/desk_0176_normalised\ndesk/train/desk_0175_normalised\ndesk/train/desk_0101_normalised\ndesk/train/desk_0171_normalised\ndesk/train/desk_0119_normalised\ndesk/train/desk_0105_normalised\ndesk/train/desk_0035_normalised\ndesk/train/desk_0179_normalised\ndesk/train/desk_0078_normalised\ndesk/train/desk_0184_normalised\ndesk/train/desk_0183_normalised\ndesk/test/desk_0284_normalised\ndesk/test/desk_0207_normalised\ndesk/test/desk_0206_normalised\ndesk/test/desk_0225_normalised\ndesk/test/desk_0232_normalised\ndesk/test/desk_0233_normalised\ndesk/test/desk_0230_normalised\ndesk/test/desk_0286_normalised\ndesk/test/desk_0215_normalised\ndesk/test/desk_0266_normalised\ndesk/test/desk_0265_normalised\ndesk/test/desk_0223_normalised\ndesk/test/desk_0231_normalised\ndesk/test/desk_0242_normalised\ndesk/test/desk_0262_normalised\ndesk/test/desk_0282_normalised\ndesk/test/desk_0210_normalised\ndesk/test/desk_0213_normalised\ndesk/test/desk_0261_normalised\ndesk/test/desk_0280_normalised\ndesk/test/desk_0276_normalised\ndesk/test/desk_0243_normalised\ndesk/test/desk_0221_normalised\ndesk/test/desk_0235_normalised\ndesk/test/desk_0249_normalised\ndesk/test/desk_0205_normalised\ndesk/test/desk_0267_normalised\ndesk/test/desk_0256_normalised\ndesk/test/desk_0255_normalised\ndesk/test/desk_0278_normalised\ndesk/test/desk_0229_normalised\ndesk/test/desk_0245_normalised\ndesk/test/desk_0250_normali
sed\ndesk/test/desk_0239_normalised\ndesk/test/desk_0263_normalised\ndesk/test/desk_0244_normalised\ndesk/test/desk_0271_normalised\ndesk/test/desk_0264_normalised\ndesk/test/desk_0257_normalised\ndesk/test/desk_0202_normalised\ndesk/test/desk_0279_normalised\ndesk/test/desk_0252_normalised\ndesk/test/desk_0238_normalised\ndesk/test/desk_0220_normalised\ndesk/test/desk_0237_normalised\ndesk/test/desk_0277_normalised\ndesk/test/desk_0224_normalised\ndesk/test/desk_0227_normalised\ndesk/test/desk_0272_normalised\ndesk/test/desk_0240_normalised\ndesk/test/desk_0234_normalised\ndesk/test/desk_0273_normalised\ndesk/test/desk_0269_normalised\ndesk/test/desk_0254_normalised\ndesk/test/desk_0283_normalised\ndesk/test/desk_0260_normalised\ndesk/test/desk_0217_normalised\ndesk/test/desk_0209_normalised\ndesk/test/desk_0241_normalised\ndesk/test/desk_0204_normalised\ndesk/test/desk_0247_normalised\ndesk/test/desk_0222_normalised\ndesk/test/desk_0253_normalised\ndesk/test/desk_0219_normalised\ndesk/test/desk_0251_normalised\ndesk/test/desk_0208_normalised\ndesk/test/desk_0248_normalised\ndesk/test/desk_0281_normalised\ndesk/test/desk_0285_normalised\ndesk/test/desk_0246_normalised\ndesk/test/desk_0228_normalised\ndesk/test/desk_0270_normalised\ndesk/test/desk_0216_normalised\ndesk/test/desk_0226_normalised\ndesk/test/desk_0211_normalised\ndesk/test/desk_0259_normalised\ndesk/test/desk_0236_normalised\ndesk/test/desk_0258_normalised\ndesk/test/desk_0275_normalised\ndesk/test/desk_0212_normalised\ndesk/test/desk_0201_normalised\ndesk/test/desk_0268_normalised\ndesk/test/desk_0218_normalised\ndesk/test/desk_0203_normalised\ndesk/test/desk_0274_normalised\ndesk/test/desk_0214_normalised\nbench/train/bench_0063_normalised\nbench/train/bench_0144_normalised\nbench/train/bench_0019_normalised\nbench/train/bench_0105_normalised\nbench/train/bench_0168_normalised\nbench/train/bench_0031_normalised\nbench/train/bench_0150_normalised\nbench/train/bench_0037_normalised\nbench/train/bench_0
104_normalised\nbench/train/bench_0092_normalised\nbench/train/bench_0064_normalised\nbench/train/bench_0161_normalised\nbench/train/bench_0079_normalised\nbench/train/bench_0044_normalised\nbench/train/bench_0159_normalised\nbench/train/bench_0007_normalised\nbench/train/bench_0084_normalised\nbench/train/bench_0162_normalised\nbench/train/bench_0123_normalised\nbench/train/bench_0001_normalised\nbench/train/bench_0032_normalised\nbench/train/bench_0076_normalised\nbench/train/bench_0061_normalised\nbench/train/bench_0110_normalised\nbench/train/bench_0028_normalised\nbench/train/bench_0027_normalised\nbench/train/bench_0067_normalised\nbench/train/bench_0117_normalised\nbench/train/bench_0033_normalised\nbench/train/bench_0109_normalised\nbench/train/bench_0060_normalised\nbench/train/bench_0021_normalised\nbench/train/bench_0081_normalised\nbench/train/bench_0003_normalised\nbench/train/bench_0170_normalised\nbench/train/bench_0121_normalised\nbench/train/bench_0077_normalised\nbench/train/bench_0053_normalised\nbench/train/bench_0035_normalised\nbench/train/bench_0038_normalised\nbench/train/bench_0062_normalised\nbench/train/bench_0074_normalised\nbench/train/bench_0142_normalised\nbench/train/bench_0139_normalised\nbench/train/bench_0154_normalised\nbench/train/bench_0112_normalised\nbench/train/bench_0138_normalised\nbench/train/bench_0041_normalised\nbench/train/bench_0029_normalised\nbench/train/bench_0127_normalised\nbench/train/bench_0120_normalised\nbench/train/bench_0030_normalised\nbench/train/bench_0080_normalised\nbench/train/bench_0111_normalised\nbench/train/bench_0141_normalised\nbench/train/bench_0039_normalised\nbench/train/bench_0075_normalised\nbench/train/bench_0164_normalised\nbench/train/bench_0069_normalised\nbench/train/bench_0088_normalised\nbench/train/bench_0136_normalised\nbench/train/bench_0086_normalised\nbench/train/bench_0051_normalised\nbench/train/bench_0114_normalised\nbench/train/bench_0129_normalised\nbench/train/bench_0113_n
ormalised\nbench/train/bench_0101_normalised\nbench/train/bench_0010_normalised\nbench/train/bench_0128_normalised\nbench/train/bench_0055_normalised\nbench/train/bench_0025_normalised\nbench/train/bench_0135_normalised\nbench/train/bench_0071_normalised\nbench/train/bench_0146_normalised\nbench/train/bench_0002_normalised\nbench/train/bench_0026_normalised\nbench/train/bench_0005_normalised\nbench/train/bench_0068_normalised\nbench/train/bench_0169_normalised\nbench/train/bench_0148_normalised\nbench/train/bench_0022_normalised\nbench/train/bench_0059_normalised\nbench/train/bench_0158_normalised\nbench/train/bench_0049_normalised\nbench/train/bench_0100_normalised\nbench/train/bench_0057_normalised\nbench/train/bench_0134_normalised\nbench/train/bench_0004_normalised\nbench/train/bench_0133_normalised\nbench/train/bench_0153_normalised\nbench/train/bench_0118_normalised\nbench/train/bench_0045_normalised\nbench/train/bench_0165_normalised\nbench/train/bench_0006_normalised\nbench/train/bench_0107_normalised\nbench/train/bench_0125_normalised\nbench/train/bench_0155_normalised\nbench/train/bench_0151_normalised\nbench/train/bench_0008_normalised\nbench/train/bench_0157_normalised\nbench/train/bench_0073_normalised\nbench/train/bench_0012_normalised\nbench/train/bench_0172_normalised\nbench/train/bench_0013_normalised\nbench/train/bench_0137_normalised\nbench/train/bench_0108_normalised\nbench/train/bench_0023_normalised\nbench/train/bench_0095_normalised\nbench/train/bench_0072_normalised\nbench/train/bench_0089_normalised\nbench/train/bench_0091_normalised\nbench/train/bench_0046_normalised\nbench/train/bench_0147_normalised\nbench/train/bench_0098_normalised\nbench/train/bench_0016_normalised\nbench/train/bench_0011_normalised\nbench/train/bench_0152_normalised\nbench/train/bench_0160_normalised\nbench/train/bench_0103_normalised\nbench/train/bench_0082_normalised\nbench/train/bench_0171_normalised\nbench/train/bench_0042_normalised\nbench/train/bench_0099_normal
ised\nbench/train/bench_0078_normalised\nbench/train/bench_0050_normalised\nbench/train/bench_0173_normalised\nbench/train/bench_0102_normalised\nbench/train/bench_0052_normalised\nbench/train/bench_0167_normalised\nbench/train/bench_0131_normalised\nbench/train/bench_0149_normalised\nbench/train/bench_0070_normalised\nbench/train/bench_0119_normalised\nbench/train/bench_0058_normalised\nbench/train/bench_0126_normalised\nbench/train/bench_0017_normalised\nbench/train/bench_0036_normalised\nbench/train/bench_0093_normalised\nbench/train/bench_0065_normalised\nbench/train/bench_0143_normalised\nbench/train/bench_0106_normalised\nbench/train/bench_0048_normalised\nbench/train/bench_0020_normalised\nbench/train/bench_0115_normalised\nbench/train/bench_0015_normalised\nbench/train/bench_0122_normalised\nbench/train/bench_0094_normalised\nbench/train/bench_0083_normalised\nbench/train/bench_0116_normalised\nbench/train/bench_0132_normalised\nbench/train/bench_0040_normalised\nbench/train/bench_0054_normalised\nbench/train/bench_0163_normalised\nbench/train/bench_0018_normalised\nbench/train/bench_0047_normalised\nbench/train/bench_0066_normalised\nbench/train/bench_0156_normalised\nbench/train/bench_0043_normalised\nbench/train/bench_0056_normalised\nbench/train/bench_0090_normalised\nbench/train/bench_0087_normalised\nbench/train/bench_0085_normalised\nbench/train/bench_0024_normalised\nbench/train/bench_0009_normalised\nbench/train/bench_0124_normalised\nbench/train/bench_0014_normalised\nbench/train/bench_0097_normalised\nbench/train/bench_0096_normalised\nbench/train/bench_0166_normalised\nbench/train/bench_0034_normalised\nbench/train/bench_0140_normalised\nbench/train/bench_0130_normalised\nbench/train/bench_0145_normalised\nbench/test/bench_0185_normalised\nbench/test/bench_0187_normalised\nbench/test/bench_0188_normalised\nbench/test/bench_0175_normalised\nbench/test/bench_0191_normalised\nbench/test/bench_0177_normalised\nbench/test/bench_0189_normalised\nbench/
test/bench_0193_normalised\nbench/test/bench_0184_normalised\nbench/test/bench_0181_normalised\nbench/test/bench_0178_normalised\nbench/test/bench_0183_normalised\nbench/test/bench_0192_normalised\nbench/test/bench_0182_normalised\nbench/test/bench_0186_normalised\nbench/test/bench_0174_normalised\nbench/test/bench_0176_normalised\nbench/test/bench_0180_normalised\nbench/test/bench_0190_normalised\nbench/test/bench_0179_normalised\nglass_box/train/glass_box_0102_normalised\nglass_box/train/glass_box_0067_normalised\nglass_box/train/glass_box_0097_normalised\nglass_box/train/glass_box_0164_normalised\nglass_box/train/glass_box_0129_normalised\nglass_box/train/glass_box_0028_normalised\nglass_box/train/glass_box_0146_normalised\nglass_box/train/glass_box_0014_normalised\nglass_box/train/glass_box_0095_normalised\nglass_box/train/glass_box_0110_normalised\nglass_box/train/glass_box_0012_normalised\nglass_box/train/glass_box_0112_normalised\nglass_box/train/glass_box_0064_normalised\nglass_box/train/glass_box_0143_normalised\nglass_box/train/glass_box_0073_normalised\nglass_box/train/glass_box_0091_normalised\nglass_box/train/glass_box_0034_normalised\nglass_box/train/glass_box_0043_normalised\nglass_box/train/glass_box_0153_normalised\nglass_box/train/glass_box_0145_normalised\nglass_box/train/glass_box_0054_normalised\nglass_box/train/glass_box_0128_normalised\nglass_box/train/glass_box_0055_normalised\nglass_box/train/glass_box_0010_normalised\nglass_box/train/glass_box_0045_normalised\nglass_box/train/glass_box_0025_normalised\nglass_box/train/glass_box_0044_normalised\nglass_box/train/glass_box_0060_normalised\nglass_box/train/glass_box_0160_normalised\nglass_box/train/glass_box_0011_normalised\nglass_box/train/glass_box_0133_normalised\nglass_box/train/glass_box_0068_normalised\nglass_box/train/glass_box_0047_normalised\nglass_box/train/glass_box_0116_normalised\nglass_box/train/glass_box_0070_normalised\nglass_box/train/glass_box_0057_normalised\nglass_box/train/
glass_box_0168_normalised\nglass_box/train/glass_box_0032_normalised\nglass_box/train/glass_box_0078_normalised\nglass_box/train/glass_box_0001_normalised\nglass_box/train/glass_box_0086_normalised\nglass_box/train/glass_box_0120_normalised\nglass_box/train/glass_box_0131_normalised\nglass_box/train/glass_box_0138_normalised\nglass_box/train/glass_box_0111_normalised\nglass_box/train/glass_box_0050_normalised\nglass_box/train/glass_box_0104_normalised\nglass_box/train/glass_box_0135_normalised\nglass_box/train/glass_box_0088_normalised\nglass_box/train/glass_box_0109_normalised\nglass_box/train/glass_box_0121_normalised\nglass_box/train/glass_box_0106_normalised\nglass_box/train/glass_box_0094_normalised\nglass_box/train/glass_box_0136_normalised\nglass_box/train/glass_box_0051_normalised\nglass_box/train/glass_box_0006_normalised\nglass_box/train/glass_box_0130_normalised\nglass_box/train/glass_box_0027_normalised\nglass_box/train/glass_box_0161_normalised\nglass_box/train/glass_box_0148_normalised\nglass_box/train/glass_box_0018_normalised\nglass_box/train/glass_box_0020_normalised\nglass_box/train/glass_box_0141_normalised\nglass_box/train/glass_box_0167_normalised\nglass_box/train/glass_box_0035_normalised\nglass_box/train/glass_box_0132_normalised\nglass_box/train/glass_box_0118_normalised\nglass_box/train/glass_box_0125_normalised\nglass_box/train/glass_box_0056_normalised\nglass_box/train/glass_box_0037_normalised\nglass_box/train/glass_box_0165_normalised\nglass_box/train/glass_box_0009_normalised\nglass_box/train/glass_box_0147_normalised\nglass_box/train/glass_box_0126_normalised\nglass_box/train/glass_box_0123_normalised\nglass_box/train/glass_box_0149_normalised\nglass_box/train/glass_box_0085_normalised\nglass_box/train/glass_box_0092_normalised\nglass_box/train/glass_box_0157_normalised\nglass_box/train/glass_box_0099_normalised\nglass_box/train/glass_box_0005_normalised\nglass_box/train/glass_box_0049_normalised\nglass_box/train/glass_box_0140_normali
sed\nglass_box/train/glass_box_0053_normalised\nglass_box/train/glass_box_0019_normalised\nglass_box/train/glass_box_0156_normalised\nglass_box/train/glass_box_0058_normalised\nglass_box/train/glass_box_0062_normalised\nglass_box/train/glass_box_0013_normalised\nglass_box/train/glass_box_0087_normalised\nglass_box/train/glass_box_0038_normalised\nglass_box/train/glass_box_0096_normalised\nglass_box/train/glass_box_0040_normalised\nglass_box/train/glass_box_0101_normalised\nglass_box/train/glass_box_0036_normalised\nglass_box/train/glass_box_0021_normalised\nglass_box/train/glass_box_0003_normalised\nglass_box/train/glass_box_0100_normalised\nglass_box/train/glass_box_0155_normalised\nglass_box/train/glass_box_0124_normalised\nglass_box/train/glass_box_0079_normalised\nglass_box/train/glass_box_0139_normalised\nglass_box/train/glass_box_0127_normalised\nglass_box/train/glass_box_0016_normalised\nglass_box/train/glass_box_0090_normalised\nglass_box/train/glass_box_0084_normalised\nglass_box/train/glass_box_0075_normalised\nglass_box/train/glass_box_0115_normalised\nglass_box/train/glass_box_0082_normalised\nglass_box/train/glass_box_0048_normalised\nglass_box/train/glass_box_0076_normalised\nglass_box/train/glass_box_0066_normalised\nglass_box/train/glass_box_0170_normalised\nglass_box/train/glass_box_0105_normalised\nglass_box/train/glass_box_0113_normalised\nglass_box/train/glass_box_0029_normalised\nglass_box/train/glass_box_0030_normalised\nglass_box/train/glass_box_0142_normalised\nglass_box/train/glass_box_0083_normalised\nglass_box/train/glass_box_0004_normalised\nglass_box/train/glass_box_0144_normalised\nglass_box/train/glass_box_0152_normalised\nglass_box/train/glass_box_0081_normalised\nglass_box/train/glass_box_0151_normalised\nglass_box/train/glass_box_0169_normalised\nglass_box/train/glass_box_0046_normalised\nglass_box/train/glass_box_0089_normalised\nglass_box/train/glass_box_0134_normalised\nglass_box/train/glass_box_0069_normalised\nglass_box/train/g
lass_box_0158_normalised\nglass_box/train/glass_box_0166_normalised\nglass_box/train/glass_box_0002_normalised\nglass_box/train/glass_box_0162_normalised\nglass_box/train/glass_box_0119_normalised\nglass_box/train/glass_box_0017_normalised\nglass_box/train/glass_box_0098_normalised\nglass_box/train/glass_box_0072_normalised\nglass_box/train/glass_box_0077_normalised\nglass_box/train/glass_box_0026_normalised\nglass_box/train/glass_box_0071_normalised\nglass_box/train/glass_box_0107_normalised\nglass_box/train/glass_box_0042_normalised\nglass_box/train/glass_box_0015_normalised\nglass_box/train/glass_box_0039_normalised\nglass_box/train/glass_box_0052_normalised\nglass_box/train/glass_box_0093_normalised\nglass_box/train/glass_box_0033_normalised\nglass_box/train/glass_box_0063_normalised\nglass_box/train/glass_box_0150_normalised\nglass_box/train/glass_box_0074_normalised\nglass_box/train/glass_box_0171_normalised\nglass_box/train/glass_box_0024_normalised\nglass_box/train/glass_box_0041_normalised\nglass_box/train/glass_box_0103_normalised\nglass_box/train/glass_box_0007_normalised\nglass_box/train/glass_box_0114_normalised\nglass_box/train/glass_box_0108_normalised\nglass_box/train/glass_box_0122_normalised\nglass_box/train/glass_box_0022_normalised\nglass_box/train/glass_box_0008_normalised\nglass_box/train/glass_box_0059_normalised\nglass_box/train/glass_box_0154_normalised\nglass_box/train/glass_box_0080_normalised\nglass_box/train/glass_box_0137_normalised\nglass_box/train/glass_box_0159_normalised\nglass_box/train/glass_box_0117_normalised\nglass_box/train/glass_box_0065_normalised\nglass_box/train/glass_box_0061_normalised\nglass_box/train/glass_box_0163_normalised\nglass_box/train/glass_box_0031_normalised\nglass_box/train/glass_box_0023_normalised\nglass_box/test/glass_box_0248_normalised\nglass_box/test/glass_box_0194_normalised\nglass_box/test/glass_box_0236_normalised\nglass_box/test/glass_box_0234_normalised\nglass_box/test/glass_box_0249_normalised\ng
lass_box/test/glass_box_0178_normalised\nglass_box/test/glass_box_0244_normalised\nglass_box/test/glass_box_0243_normalised\nglass_box/test/glass_box_0192_normalised\nglass_box/test/glass_box_0200_normalised\nglass_box/test/glass_box_0263_normalised\nglass_box/test/glass_box_0267_normalised\nglass_box/test/glass_box_0270_normalised\nglass_box/test/glass_box_0221_normalised\nglass_box/test/glass_box_0257_normalised\nglass_box/test/glass_box_0226_normalised\nglass_box/test/glass_box_0176_normalised\nglass_box/test/glass_box_0182_normalised\nglass_box/test/glass_box_0254_normalised\nglass_box/test/glass_box_0224_normalised\nglass_box/test/glass_box_0232_normalised\nglass_box/test/glass_box_0213_normalised\nglass_box/test/glass_box_0247_normalised\nglass_box/test/glass_box_0225_normalised\nglass_box/test/glass_box_0231_normalised\nglass_box/test/glass_box_0205_normalised\nglass_box/test/glass_box_0198_normalised\nglass_box/test/glass_box_0260_normalised\nglass_box/test/glass_box_0174_normalised\nglass_box/test/glass_box_0262_normalised\nglass_box/test/glass_box_0269_normalised\nglass_box/test/glass_box_0229_normalised\nglass_box/test/glass_box_0172_normalised\nglass_box/test/glass_box_0210_normalised\nglass_box/test/glass_box_0251_normalised\nglass_box/test/glass_box_0265_normalised\nglass_box/test/glass_box_0238_normalised\nglass_box/test/glass_box_0183_normalised\nglass_box/test/glass_box_0204_normalised\nglass_box/test/glass_box_0252_normalised\nglass_box/test/glass_box_0206_normalised\nglass_box/test/glass_box_0175_normalised\nglass_box/test/glass_box_0235_normalised\nglass_box/test/glass_box_0237_normalised\nglass_box/test/glass_box_0214_normalised\nglass_box/test/glass_box_0179_normalised\nglass_box/test/glass_box_0228_normalised\nglass_box/test/glass_box_0181_normalised\nglass_box/test/glass_box_0242_normalised\nglass_box/test/glass_box_0193_normalised\nglass_box/test/glass_box_0261_normalised\nglass_box/test/glass_box_0268_normalised\nglass_box/test/glass_box_01
80_normalised\nglass_box/test/glass_box_0255_normalised\nglass_box/test/glass_box_0245_normalised\nglass_box/test/glass_box_0203_normalised\nglass_box/test/glass_box_0202_normalised\nglass_box/test/glass_box_0266_normalised\nglass_box/test/glass_box_0208_normalised\nglass_box/test/glass_box_0230_normalised\nglass_box/test/glass_box_0216_normalised\nglass_box/test/glass_box_0184_normalised\nglass_box/test/glass_box_0190_normalised\nglass_box/test/glass_box_0222_normalised\nglass_box/test/glass_box_0240_normalised\nglass_box/test/glass_box_0271_normalised\nglass_box/test/glass_box_0253_normalised\nglass_box/test/glass_box_0212_normalised\nglass_box/test/glass_box_0197_normalised\nglass_box/test/glass_box_0199_normalised\nglass_box/test/glass_box_0188_normalised\nglass_box/test/glass_box_0241_normalised\nglass_box/test/glass_box_0227_normalised\nglass_box/test/glass_box_0196_normalised\nglass_box/test/glass_box_0219_normalised\nglass_box/test/glass_box_0223_normalised\nglass_box/test/glass_box_0187_normalised\nglass_box/test/glass_box_0211_normalised\nglass_box/test/glass_box_0217_normalised\nglass_box/test/glass_box_0220_normalised\nglass_box/test/glass_box_0177_normalised\nglass_box/test/glass_box_0258_normalised\nglass_box/test/glass_box_0246_normalised\nglass_box/test/glass_box_0195_normalised\nglass_box/test/glass_box_0250_normalised\nglass_box/test/glass_box_0256_normalised\nglass_box/test/glass_box_0173_normalised\nglass_box/test/glass_box_0215_normalised\nglass_box/test/glass_box_0239_normalised\nglass_box/test/glass_box_0185_normalised\nglass_box/test/glass_box_0209_normalised\nglass_box/test/glass_box_0207_normalised\nglass_box/test/glass_box_0264_normalised\nglass_box/test/glass_box_0259_normalised\nglass_box/test/glass_box_0191_normalised\nglass_box/test/glass_box_0233_normalised\nglass_box/test/glass_box_0189_normalised\nglass_box/test/glass_box_0218_normalised\nglass_box/test/glass_box_0201_normalised\nglass_box/test/glass_box_0186_normalised\nlaptop/trai
n/laptop_0057_normalised\nlaptop/train/laptop_0054_normalised\nlaptop/train/laptop_0037_normalised\nlaptop/train/laptop_0036_normalised\nlaptop/train/laptop_0141_normalised\nlaptop/train/laptop_0091_normalised\nlaptop/train/laptop_0117_normalised\nlaptop/train/laptop_0060_normalised\nlaptop/train/laptop_0132_normalised\nlaptop/train/laptop_0038_normalised\nlaptop/train/laptop_0148_normalised\nlaptop/train/laptop_0085_normalised\nlaptop/train/laptop_0084_normalised\nlaptop/train/laptop_0118_normalised\nlaptop/train/laptop_0107_normalised\nlaptop/train/laptop_0120_normalised\nlaptop/train/laptop_0015_normalised\nlaptop/train/laptop_0131_normalised\nlaptop/train/laptop_0064_normalised\nlaptop/train/laptop_0097_normalised\nlaptop/train/laptop_0014_normalised\nlaptop/train/laptop_0012_normalised\nlaptop/train/laptop_0069_normalised\nlaptop/train/laptop_0047_normalised\nlaptop/train/laptop_0101_normalised\nlaptop/train/laptop_0100_normalised\nlaptop/train/laptop_0125_normalised\nlaptop/train/laptop_0041_normalised\nlaptop/train/laptop_0026_normalised\nlaptop/train/laptop_0113_normalised\nlaptop/train/laptop_0035_normalised\nlaptop/train/laptop_0077_normalised\nlaptop/train/laptop_0090_normalised\nlaptop/train/laptop_0080_normalised\nlaptop/train/laptop_0010_normalised\nlaptop/train/laptop_0095_normalised\nlaptop/train/laptop_0006_normalised\nlaptop/train/laptop_0098_normalised\nlaptop/train/laptop_0031_normalised\nlaptop/train/laptop_0094_normalised\nlaptop/train/laptop_0059_normalised\nlaptop/train/laptop_0106_normalised\nlaptop/train/laptop_0110_normalised\nlaptop/train/laptop_0074_normalised\nlaptop/train/laptop_0023_normalised\nlaptop/train/laptop_0121_normalised\nlaptop/train/laptop_0045_normalised\nlaptop/train/laptop_0053_normalised\nlaptop/train/laptop_0096_normalised\nlaptop/train/laptop_0016_normalised\nlaptop/train/laptop_0022_normalised\nlaptop/train/laptop_0109_normalised\nlaptop/train/laptop_0018_normalised\nlaptop/train/laptop_0149_normalised\nlaptop/train/
laptop_0092_normalised\nlaptop/train/laptop_0004_normalised\nlaptop/train/laptop_0008_normalised\nlaptop/train/laptop_0116_normalised\nlaptop/train/laptop_0028_normalised\nlaptop/train/laptop_0020_normalised\nlaptop/train/laptop_0102_normalised\nlaptop/train/laptop_0083_normalised\nlaptop/train/laptop_0055_normalised\nlaptop/train/laptop_0007_normalised\nlaptop/train/laptop_0044_normalised\nlaptop/train/laptop_0071_normalised\nlaptop/train/laptop_0136_normalised\nlaptop/train/laptop_0039_normalised\nlaptop/train/laptop_0133_normalised\nlaptop/train/laptop_0009_normalised\nlaptop/train/laptop_0139_normalised\nlaptop/train/laptop_0013_normalised\nlaptop/train/laptop_0089_normalised\nlaptop/train/laptop_0087_normalised\nlaptop/train/laptop_0128_normalised\nlaptop/train/laptop_0067_normalised\nlaptop/train/laptop_0070_normalised\nlaptop/train/laptop_0073_normalised\nlaptop/train/laptop_0078_normalised\nlaptop/train/laptop_0003_normalised\nlaptop/train/laptop_0017_normalised\nlaptop/train/laptop_0052_normalised\nlaptop/train/laptop_0119_normalised\nlaptop/train/laptop_0140_normalised\nlaptop/train/laptop_0088_normalised\nlaptop/train/laptop_0126_normalised\nlaptop/train/laptop_0081_normalised\nlaptop/train/laptop_0021_normalised\nlaptop/train/laptop_0112_normalised\nlaptop/train/laptop_0033_normalised\nlaptop/train/laptop_0099_normalised\nlaptop/train/laptop_0049_normalised\nlaptop/train/laptop_0063_normalised\nlaptop/train/laptop_0103_normalised\nlaptop/train/laptop_0123_normalised\nlaptop/train/laptop_0142_normalised\nlaptop/train/laptop_0043_normalised\nlaptop/train/laptop_0061_normalised\nlaptop/train/laptop_0048_normalised\nlaptop/train/laptop_0076_normalised\nlaptop/train/laptop_0145_normalised\nlaptop/train/laptop_0122_normalised\nlaptop/train/laptop_0115_normalised\nlaptop/train/laptop_0104_normalised\nlaptop/train/laptop_0034_normalised\nlaptop/train/laptop_0019_normalised\nlaptop/train/laptop_0130_normalised\nlaptop/train/laptop_0050_normalised\nlaptop/train/la
ptop_0051_normalised\nlaptop/train/laptop_0056_normalised\nlaptop/train/laptop_0001_normalised\nlaptop/train/laptop_0135_normalised\nlaptop/train/laptop_0082_normalised\nlaptop/train/laptop_0093_normalised\nlaptop/train/laptop_0068_normalised\nlaptop/train/laptop_0137_normalised\nlaptop/train/laptop_0029_normalised\nlaptop/train/laptop_0042_normalised\nlaptop/train/laptop_0147_normalised\nlaptop/train/laptop_0075_normalised\nlaptop/train/laptop_0058_normalised\nlaptop/train/laptop_0108_normalised\nlaptop/train/laptop_0134_normalised\nlaptop/train/laptop_0030_normalised\nlaptop/train/laptop_0002_normalised\nlaptop/train/laptop_0143_normalised\nlaptop/train/laptop_0072_normalised\nlaptop/train/laptop_0129_normalised\nlaptop/train/laptop_0025_normalised\nlaptop/train/laptop_0024_normalised\nlaptop/train/laptop_0040_normalised\nlaptop/train/laptop_0114_normalised\nlaptop/train/laptop_0127_normalised\nlaptop/train/laptop_0027_normalised\nlaptop/train/laptop_0011_normalised\nlaptop/train/laptop_0032_normalised\nlaptop/train/laptop_0079_normalised\nlaptop/train/laptop_0066_normalised\nlaptop/train/laptop_0124_normalised\nlaptop/train/laptop_0065_normalised\nlaptop/train/laptop_0138_normalised\nlaptop/train/laptop_0144_normalised\nlaptop/train/laptop_0062_normalised\nlaptop/train/laptop_0146_normalised\nlaptop/train/laptop_0111_normalised\nlaptop/train/laptop_0046_normalised\nlaptop/train/laptop_0086_normalised\nlaptop/train/laptop_0105_normalised\nlaptop/train/laptop_0005_normalised\nlaptop/test/laptop_0153_normalised\nlaptop/test/laptop_0165_normalised\nlaptop/test/laptop_0158_normalised\nlaptop/test/laptop_0154_normalised\nlaptop/test/laptop_0150_normalised\nlaptop/test/laptop_0162_normalised\nlaptop/test/laptop_0169_normalised\nlaptop/test/laptop_0159_normalised\nlaptop/test/laptop_0168_normalised\nlaptop/test/laptop_0164_normalised\nlaptop/test/laptop_0156_normalised\nlaptop/test/laptop_0167_normalised\nlaptop/test/laptop_0155_normalised\nlaptop/test/laptop_0152_normal
ised\nlaptop/test/laptop_0160_normalised\nlaptop/test/laptop_0161_normalised\nlaptop/test/laptop_0163_normalised\nlaptop/test/laptop_0166_normalised\nlaptop/test/laptop_0151_normalised\nlaptop/test/laptop_0157_normalised\nstairs/train/stairs_0011_normalised\nstairs/train/stairs_0058_normalised\nstairs/train/stairs_0014_normalised\nstairs/train/stairs_0006_normalised\nstairs/train/stairs_0110_normalised\nstairs/train/stairs_0095_normalised\nstairs/train/stairs_0019_normalised\nstairs/train/stairs_0024_normalised\nstairs/train/stairs_0031_normalised\nstairs/train/stairs_0002_normalised\nstairs/train/stairs_0077_normalised\nstairs/train/stairs_0048_normalised\nstairs/train/stairs_0029_normalised\nstairs/train/stairs_0123_normalised\nstairs/train/stairs_0059_normalised\nstairs/train/stairs_0111_normalised\nstairs/train/stairs_0004_normalised\nstairs/train/stairs_0101_normalised\nstairs/train/stairs_0054_normalised\nstairs/train/stairs_0020_normalised\nstairs/train/stairs_0063_normalised\nstairs/train/stairs_0056_normalised\nstairs/train/stairs_0082_normalised\nstairs/train/stairs_0005_normalised\nstairs/train/stairs_0062_normalised\nstairs/train/stairs_0040_normalised\nstairs/train/stairs_0032_normalised\nstairs/train/stairs_0042_normalised\nstairs/train/stairs_0086_normalised\nstairs/train/stairs_0041_normalised\nstairs/train/stairs_0016_normalised\nstairs/train/stairs_0023_normalised\nstairs/train/stairs_0018_normalised\nstairs/train/stairs_0051_normalised\nstairs/train/stairs_0008_normalised\nstairs/train/stairs_0074_normalised\nstairs/train/stairs_0027_normalised\nstairs/train/stairs_0116_normalised\nstairs/train/stairs_0039_normalised\nstairs/train/stairs_0009_normalised\nstairs/train/stairs_0114_normalised\nstairs/train/stairs_0030_normalised\nstairs/train/stairs_0091_normalised\nstairs/train/stairs_0047_normalised\nstairs/train/stairs_0028_normalised\nstairs/train/stairs_0010_normalised\nstairs/train/stairs_0088_normalised\nstairs/train/stairs_0108_normalised\nst
airs/train/stairs_0003_normalised\nstairs/train/stairs_0055_normalised\nstairs/train/stairs_0034_normalised\nstairs/train/stairs_0076_normalised\nstairs/train/stairs_0122_normalised\nstairs/train/stairs_0107_normalised\nstairs/train/stairs_0084_normalised\nstairs/train/stairs_0036_normalised\nstairs/train/stairs_0089_normalised\nstairs/train/stairs_0001_normalised\nstairs/train/stairs_0121_normalised\nstairs/train/stairs_0085_normalised\nstairs/train/stairs_0072_normalised\nstairs/train/stairs_0113_normalised\nstairs/train/stairs_0120_normalised\nstairs/train/stairs_0070_normalised\nstairs/train/stairs_0021_normalised\nstairs/train/stairs_0100_normalised\nstairs/train/stairs_0050_normalised\nstairs/train/stairs_0022_normalised\nstairs/train/stairs_0079_normalised\nstairs/train/stairs_0083_normalised\nstairs/train/stairs_0049_normalised\nstairs/train/stairs_0068_normalised\nstairs/train/stairs_0106_normalised\nstairs/train/stairs_0078_normalised\nstairs/train/stairs_0119_normalised\nstairs/train/stairs_0067_normalised\nstairs/train/stairs_0065_normalised\nstairs/train/stairs_0098_normalised\nstairs/train/stairs_0038_normalised\nstairs/train/stairs_0017_normalised\nstairs/train/stairs_0080_normalised\nstairs/train/stairs_0118_normalised\nstairs/train/stairs_0060_normalised\nstairs/train/stairs_0052_normalised\nstairs/train/stairs_0124_normalised\nstairs/train/stairs_0015_normalised\nstairs/train/stairs_0069_normalised\nstairs/train/stairs_0115_normalised\nstairs/train/stairs_0046_normalised\nstairs/train/stairs_0081_normalised\nstairs/train/stairs_0044_normalised\nstairs/train/stairs_0043_normalised\nstairs/train/stairs_0117_normalised\nstairs/train/stairs_0102_normalised\nstairs/train/stairs_0066_normalised\nstairs/train/stairs_0075_normalised\nstairs/train/stairs_0007_normalised\nstairs/train/stairs_0035_normalised\nstairs/train/stairs_0061_normalised\nstairs/train/stairs_0053_normalised\nstairs/train/stairs_0026_normalised\nstairs/train/stairs_0112_normalised\nstai
rs/train/stairs_0087_normalised\nstairs/train/stairs_0037_normalised\nstairs/train/stairs_0109_normalised\nstairs/train/stairs_0012_normalised\nstairs/train/stairs_0105_normalised\nstairs/train/stairs_0013_normalised\nstairs/train/stairs_0025_normalised\nstairs/train/stairs_0097_normalised\nstairs/train/stairs_0073_normalised\nstairs/train/stairs_0093_normalised\nstairs/train/stairs_0090_normalised\nstairs/train/stairs_0045_normalised\nstairs/train/stairs_0104_normalised\nstairs/train/stairs_0092_normalised\nstairs/train/stairs_0099_normalised\nstairs/train/stairs_0096_normalised\nstairs/train/stairs_0071_normalised\nstairs/train/stairs_0103_normalised\nstairs/train/stairs_0057_normalised\nstairs/train/stairs_0064_normalised\nstairs/train/stairs_0094_normalised\nstairs/train/stairs_0033_normalised\nstairs/test/stairs_0131_normalised\nstairs/test/stairs_0125_normalised\nstairs/test/stairs_0136_normalised\nstairs/test/stairs_0133_normalised\nstairs/test/stairs_0141_normalised\nstairs/test/stairs_0137_normalised\nstairs/test/stairs_0142_normalised\nstairs/test/stairs_0128_normalised\nstairs/test/stairs_0129_normalised\nstairs/test/stairs_0135_normalised\nstairs/test/stairs_0138_normalised\nstairs/test/stairs_0139_normalised\nstairs/test/stairs_0132_normalised\nstairs/test/stairs_0130_normalised\nstairs/test/stairs_0134_normalised\nstairs/test/stairs_0143_normalised\nstairs/test/stairs_0126_normalised\nstairs/test/stairs_0127_normalised\nstairs/test/stairs_0140_normalised\nstairs/test/stairs_0144_normalised\nplant/train/plant_0079_normalised\nplant/train/plant_0010_normalised\nplant/train/plant_0001_normalised\nplant/train/plant_0212_normalised\nplant/train/plant_0194_normalised\nplant/train/plant_0069_normalised\nplant/train/plant_0023_normalised\nplant/train/plant_0116_normalised\nplant/train/plant_0064_normalised\nplant/train/plant_0090_normalised\nplant/train/plant_0018_normalised\nplant/train/plant_0026_normalised\nplant/train/plant_0230_normalised\nplant/train/pla
nt_0072_normalised\nplant/train/plant_0135_normalised\nplant/train/plant_0204_normalised\nplant/train/plant_0005_normalised\nplant/train/plant_0138_normalised\nplant/train/plant_0120_normalised\nplant/train/plant_0180_normalised\nplant/train/plant_0046_normalised\nplant/train/plant_0187_normalised\nplant/train/plant_0147_normalised\nplant/train/plant_0060_normalised\nplant/train/plant_0094_normalised\nplant/train/plant_0231_normalised\nplant/train/plant_0134_normalised\nplant/train/plant_0139_normalised\nplant/train/plant_0163_normalised\nplant/train/plant_0174_normalised\nplant/train/plant_0184_normalised\nplant/train/plant_0197_normalised\nplant/train/plant_0083_normalised\nplant/train/plant_0053_normalised\nplant/train/plant_0015_normalised\nplant/train/plant_0201_normalised\nplant/train/plant_0038_normalised\nplant/train/plant_0087_normalised\nplant/train/plant_0088_normalised\nplant/train/plant_0198_normalised\nplant/train/plant_0117_normalised\nplant/train/plant_0227_normalised\nplant/train/plant_0137_normalised\nplant/train/plant_0009_normalised\nplant/train/plant_0218_normalised\nplant/train/plant_0044_normalised\nplant/train/plant_0093_normalised\nplant/train/plant_0167_normalised\nplant/train/plant_0161_normalised\nplant/train/plant_0043_normalised\nplant/train/plant_0111_normalised\nplant/train/plant_0035_normalised\nplant/train/plant_0006_normalised\nplant/train/plant_0178_normalised\nplant/train/plant_0215_normalised\nplant/train/plant_0080_normalised\nplant/train/plant_0179_normalised\nplant/train/plant_0108_normalised\nplant/train/plant_0177_normalised\nplant/train/plant_0054_normalised\nplant/train/plant_0068_normalised\nplant/train/plant_0050_normalised\nplant/train/plant_0021_normalised\nplant/train/plant_0031_normalised\nplant/train/plant_0162_normalised\nplant/train/plant_0168_normalised\nplant/train/plant_0152_normalised\nplant/train/plant_0149_normalised\nplant/train/plant_0029_normalised\nplant/train/plant_0214_normalised\nplant/train/plant_02
09_normalised\nplant/train/plant_0127_normalised\nplant/train/plant_0144_normalised\nplant/train/plant_0007_normalised\nplant/train/plant_0133_normalised\nplant/train/plant_0077_normalised\nplant/train/plant_0129_normalised\nplant/train/plant_0131_normalised\nplant/train/plant_0171_normalised\nplant/train/plant_0157_normalised\nplant/train/plant_0208_normalised\nplant/train/plant_0223_normalised\nplant/train/plant_0205_normalised\nplant/train/plant_0104_normalised\nplant/train/plant_0075_normalised\nplant/train/plant_0159_normalised\nplant/train/plant_0188_normalised\nplant/train/plant_0063_normalised\nplant/train/plant_0202_normalised\nplant/train/plant_0081_normalised\nplant/train/plant_0222_normalised\nplant/train/plant_0020_normalised\nplant/train/plant_0132_normalised\nplant/train/plant_0082_normalised\nplant/train/plant_0004_normalised\nplant/train/plant_0191_normalised\nplant/train/plant_0136_normalised\nplant/train/plant_0226_normalised\nplant/train/plant_0155_normalised\nplant/train/plant_0200_normalised\nplant/train/plant_0140_normalised\nplant/train/plant_0166_normalised\nplant/train/plant_0041_normalised\nplant/train/plant_0206_normalised\nplant/train/plant_0040_normalised\nplant/train/plant_0189_normalised\nplant/train/plant_0002_normalised\nplant/train/plant_0036_normalised\nplant/train/plant_0237_normalised\nplant/train/plant_0098_normalised\nplant/train/plant_0175_normalised\nplant/train/plant_0233_normalised\nplant/train/plant_0067_normalised\nplant/train/plant_0225_normalised\nplant/train/plant_0057_normalised\nplant/train/plant_0228_normalised\nplant/train/plant_0028_normalised\nplant/train/plant_0118_normalised\nplant/train/plant_0025_normalised\nplant/train/plant_0121_normalised\nplant/train/plant_0238_normalised\nplant/train/plant_0160_normalised\nplant/train/plant_0112_normalised\nplant/train/plant_0146_normalised\nplant/train/plant_0213_normalised\nplant/train/plant_0217_normalised\nplant/train/plant_0085_normalised\nplant/train/plant_0097_no
rmalised\nplant/train/plant_0061_normalised\nplant/train/plant_0019_normalised\nplant/train/plant_0109_normalised\nplant/train/plant_0193_normalised\nplant/train/plant_0102_normalised\nplant/train/plant_0216_normalised\nplant/train/plant_0172_normalised\nplant/train/plant_0203_normalised\nplant/train/plant_0030_normalised\nplant/train/plant_0099_normalised\nplant/train/plant_0076_normalised\nplant/train/plant_0143_normalised\nplant/train/plant_0153_normalised\nplant/train/plant_0156_normalised\nplant/train/plant_0066_normalised\nplant/train/plant_0182_normalised\nplant/train/plant_0065_normalised\nplant/train/plant_0022_normalised\nplant/train/plant_0033_normalised\nplant/train/plant_0095_normalised\nplant/train/plant_0183_normalised\nplant/train/plant_0016_normalised\nplant/train/plant_0126_normalised\nplant/train/plant_0119_normalised\nplant/train/plant_0176_normalised\nplant/train/plant_0042_normalised\nplant/train/plant_0086_normalised\nplant/train/plant_0045_normalised\nplant/train/plant_0164_normalised\nplant/train/plant_0196_normalised\nplant/train/plant_0123_normalised\nplant/train/plant_0032_normalised\nplant/train/plant_0199_normalised\nplant/train/plant_0012_normalised\nplant/train/plant_0014_normalised\nplant/train/plant_0091_normalised\nplant/train/plant_0141_normalised\nplant/train/plant_0034_normalised\nplant/train/plant_0103_normalised\nplant/train/plant_0145_normalised\nplant/train/plant_0240_normalised\nplant/train/plant_0158_normalised\nplant/train/plant_0169_normalised\nplant/train/plant_0239_normalised\nplant/train/plant_0074_normalised\nplant/train/plant_0124_normalised\nplant/train/plant_0055_normalised\nplant/train/plant_0073_normalised\nplant/train/plant_0070_normalised\nplant/train/plant_0114_normalised\nplant/train/plant_0221_normalised\nplant/train/plant_0236_normalised\nplant/train/plant_0084_normalised\nplant/train/plant_0008_normalised\nplant/train/plant_0219_normalised\nplant/train/plant_0017_normalised\nplant/train/plant_0192_normali
sed\nplant/train/plant_0207_normalised\nplant/train/plant_0100_normalised\nplant/train/plant_0051_normalised\nplant/train/plant_0165_normalised\nplant/train/plant_0190_normalised\nplant/train/plant_0154_normalised\nplant/train/plant_0027_normalised\nplant/train/plant_0148_normalised\nplant/train/plant_0047_normalised\nplant/train/plant_0173_normalised\nplant/train/plant_0013_normalised\nplant/train/plant_0039_normalised\nplant/train/plant_0089_normalised\nplant/train/plant_0058_normalised\nplant/train/plant_0185_normalised\nplant/train/plant_0151_normalised\nplant/train/plant_0037_normalised\nplant/train/plant_0186_normalised\nplant/train/plant_0056_normalised\nplant/train/plant_0092_normalised\nplant/train/plant_0210_normalised\nplant/train/plant_0150_normalised\nplant/train/plant_0181_normalised\nplant/train/plant_0232_normalised\nplant/train/plant_0234_normalised\nplant/train/plant_0024_normalised\nplant/train/plant_0122_normalised\nplant/train/plant_0170_normalised\nplant/train/plant_0142_normalised\nplant/train/plant_0105_normalised\nplant/train/plant_0011_normalised\nplant/train/plant_0128_normalised\nplant/train/plant_0130_normalised\nplant/train/plant_0106_normalised\nplant/train/plant_0096_normalised\nplant/train/plant_0062_normalised\nplant/train/plant_0101_normalised\nplant/train/plant_0113_normalised\nplant/train/plant_0224_normalised\nplant/train/plant_0115_normalised\nplant/train/plant_0078_normalised\nplant/train/plant_0125_normalised\nplant/train/plant_0211_normalised\nplant/train/plant_0071_normalised\nplant/train/plant_0003_normalised\nplant/train/plant_0110_normalised\nplant/train/plant_0059_normalised\nplant/train/plant_0220_normalised\nplant/train/plant_0052_normalised\nplant/train/plant_0229_normalised\nplant/train/plant_0195_normalised\nplant/train/plant_0235_normalised\nplant/train/plant_0049_normalised\nplant/train/plant_0107_normalised\nplant/train/plant_0048_normalised\nplant/test/plant_0328_normalised\nplant/test/plant_0248_normalised\npl
ant/test/plant_0321_normalised\nplant/test/plant_0285_normalised\nplant/test/plant_0244_normalised\nplant/test/plant_0275_normalised\nplant/test/plant_0269_normalised\nplant/test/plant_0273_normalised\nplant/test/plant_0259_normalised\nplant/test/plant_0317_normalised\nplant/test/plant_0291_normalised\nplant/test/plant_0320_normalised\nplant/test/plant_0280_normalised\nplant/test/plant_0286_normalised\nplant/test/plant_0296_normalised\nplant/test/plant_0309_normalised\nplant/test/plant_0301_normalised\nplant/test/plant_0289_normalised\nplant/test/plant_0334_normalised\nplant/test/plant_0265_normalised\nplant/test/plant_0279_normalised\nplant/test/plant_0241_normalised\nplant/test/plant_0337_normalised\nplant/test/plant_0300_normalised\nplant/test/plant_0311_normalised\nplant/test/plant_0283_normalised\nplant/test/plant_0308_normalised\nplant/test/plant_0261_normalised\nplant/test/plant_0329_normalised\nplant/test/plant_0268_normalised\nplant/test/plant_0245_normalised\nplant/test/plant_0252_normalised\nplant/test/plant_0288_normalised\nplant/test/plant_0307_normalised\nplant/test/plant_0258_normalised\nplant/test/plant_0249_normalised\nplant/test/plant_0340_normalised\nplant/test/plant_0324_normalised\nplant/test/plant_0257_normalised\nplant/test/plant_0305_normalised\nplant/test/plant_0295_normalised\nplant/test/plant_0247_normalised\nplant/test/plant_0336_normalised\nplant/test/plant_0284_normalised\nplant/test/plant_0276_normalised\nplant/test/plant_0326_normalised\nplant/test/plant_0322_normalised\nplant/test/plant_0254_normalised\nplant/test/plant_0293_normalised\nplant/test/plant_0260_normalised\nplant/test/plant_0298_normalised\nplant/test/plant_0318_normalised\nplant/test/plant_0272_normalised\nplant/test/plant_0332_normalised\nplant/test/plant_0262_normalised\nplant/test/plant_0312_normalised\nplant/test/plant_0250_normalised\nplant/test/plant_0242_normalised\nplant/test/plant_0316_normalised\nplant/test/plant_0255_normalised\nplant/test/plant_0270_normalis
ed\nplant/test/plant_0243_normalised\nplant/test/plant_0333_normalised\nplant/test/plant_0251_normalised\nplant/test/plant_0256_normalised\nplant/test/plant_0335_normalised\nplant/test/plant_0339_normalised\nplant/test/plant_0282_normalised\nplant/test/plant_0313_normalised\nplant/test/plant_0267_normalised\nplant/test/plant_0287_normalised\nplant/test/plant_0294_normalised\nplant/test/plant_0290_normalised\nplant/test/plant_0264_normalised\nplant/test/plant_0292_normalised\nplant/test/plant_0274_normalised\nplant/test/plant_0266_normalised\nplant/test/plant_0327_normalised\nplant/test/plant_0263_normalised\nplant/test/plant_0278_normalised\nplant/test/plant_0338_normalised\nplant/test/plant_0306_normalised\nplant/test/plant_0299_normalised\nplant/test/plant_0331_normalised\nplant/test/plant_0304_normalised\nplant/test/plant_0253_normalised\nplant/test/plant_0315_normalised\nplant/test/plant_0325_normalised\nplant/test/plant_0323_normalised\nplant/test/plant_0303_normalised\nplant/test/plant_0302_normalised\nplant/test/plant_0277_normalised\nplant/test/plant_0319_normalised\nplant/test/plant_0310_normalised\nplant/test/plant_0297_normalised\nplant/test/plant_0330_normalised\nplant/test/plant_0314_normalised\nplant/test/plant_0246_normalised\nplant/test/plant_0281_normalised\nplant/test/plant_0271_normalised\nbathtub/train/bathtub_0105_normalised\nbathtub/train/bathtub_0098_normalised\nbathtub/train/bathtub_0088_normalised\nbathtub/train/bathtub_0008_normalised\nbathtub/train/bathtub_0043_normalised\nbathtub/train/bathtub_0081_normalised\nbathtub/train/bathtub_0009_normalised\nbathtub/train/bathtub_0079_normalised\nbathtub/train/bathtub_0067_normalised\nbathtub/train/bathtub_0032_normalised\nbathtub/train/bathtub_0012_normalised\nbathtub/train/bathtub_0072_normalised\nbathtub/train/bathtub_0003_normalised\nbathtub/train/bathtub_0059_normalised\nbathtub/train/bathtub_0061_normalised\nbathtub/train/bathtub_0089_normalised\nbathtub/train/bathtub_0006_normalised\nbathtub
/train/bathtub_0097_normalised\nbathtub/train/bathtub_0010_normalised\nbathtub/train/bathtub_0101_normalised\nbathtub/train/bathtub_0047_normalised\nbathtub/train/bathtub_0027_normalised\nbathtub/train/bathtub_0033_normalised\nbathtub/train/bathtub_0058_normalised\nbathtub/train/bathtub_0035_normalised\nbathtub/train/bathtub_0014_normalised\nbathtub/train/bathtub_0069_normalised\nbathtub/train/bathtub_0048_normalised\nbathtub/train/bathtub_0031_normalised\nbathtub/train/bathtub_0011_normalised\nbathtub/train/bathtub_0056_normalised\nbathtub/train/bathtub_0037_normalised\nbathtub/train/bathtub_0076_normalised\nbathtub/train/bathtub_0007_normalised\nbathtub/train/bathtub_0062_normalised\nbathtub/train/bathtub_0093_normalised\nbathtub/train/bathtub_0034_normalised\nbathtub/train/bathtub_0064_normalised\nbathtub/train/bathtub_0104_normalised\nbathtub/train/bathtub_0071_normalised\nbathtub/train/bathtub_0016_normalised\nbathtub/train/bathtub_0054_normalised\nbathtub/train/bathtub_0085_normalised\nbathtub/train/bathtub_0092_normalised\nbathtub/train/bathtub_0040_normalised\nbathtub/train/bathtub_0046_normalised\nbathtub/train/bathtub_0024_normalised\nbathtub/train/bathtub_0042_normalised\nbathtub/train/bathtub_0086_normalised\nbathtub/train/bathtub_0022_normalised\nbathtub/train/bathtub_0082_normalised\nbathtub/train/bathtub_0063_normalised\nbathtub/train/bathtub_0053_normalised\nbathtub/train/bathtub_0004_normalised\nbathtub/train/bathtub_0096_normalised\nbathtub/train/bathtub_0021_normalised\nbathtub/train/bathtub_0036_normalised\nbathtub/train/bathtub_0030_normalised\nbathtub/train/bathtub_0051_normalised\nbathtub/train/bathtub_0041_normalised\nbathtub/train/bathtub_0068_normalised\nbathtub/train/bathtub_0044_normalised\nbathtub/train/bathtub_0002_normalised\nbathtub/train/bathtub_0087_normalised\nbathtub/train/bathtub_0091_normalised\nbathtub/train/bathtub_0100_normalised\nbathtub/train/bathtub_0084_normalised\nbathtub/train/bathtub_0094_normalised\nbathtub/train/bath
tub_0102_normalised\nbathtub/train/bathtub_0052_normalised\nbathtub/train/bathtub_0080_normalised\nbathtub/train/bathtub_0077_normalised\nbathtub/train/bathtub_0049_normalised\nbathtub/train/bathtub_0029_normalised\nbathtub/train/bathtub_0013_normalised\nbathtub/train/bathtub_0060_normalised\nbathtub/train/bathtub_0020_normalised\nbathtub/train/bathtub_0103_normalised\nbathtub/train/bathtub_0095_normalised\nbathtub/train/bathtub_0017_normalised\nbathtub/train/bathtub_0018_normalised\nbathtub/train/bathtub_0090_normalised\nbathtub/train/bathtub_0019_normalised\nbathtub/train/bathtub_0001_normalised\nbathtub/train/bathtub_0005_normalised\nbathtub/train/bathtub_0099_normalised\nbathtub/train/bathtub_0070_normalised\nbathtub/train/bathtub_0045_normalised\nbathtub/train/bathtub_0073_normalised\nbathtub/train/bathtub_0025_normalised\nbathtub/train/bathtub_0066_normalised\nbathtub/train/bathtub_0078_normalised\nbathtub/train/bathtub_0057_normalised\nbathtub/train/bathtub_0039_normalised\nbathtub/train/bathtub_0075_normalised\nbathtub/train/bathtub_0038_normalised\nbathtub/train/bathtub_0055_normalised\nbathtub/train/bathtub_0074_normalised\nbathtub/train/bathtub_0026_normalised\nbathtub/train/bathtub_0050_normalised\nbathtub/train/bathtub_0028_normalised\nbathtub/train/bathtub_0083_normalised\nbathtub/train/bathtub_0023_normalised\nbathtub/train/bathtub_0065_normalised\nbathtub/train/bathtub_0015_normalised\nbathtub/train/bathtub_0106_normalised\nbathtub/test/bathtub_0120_normalised\nbathtub/test/bathtub_0133_normalised\nbathtub/test/bathtub_0130_normalised\nbathtub/test/bathtub_0152_normalised\nbathtub/test/bathtub_0113_normalised\nbathtub/test/bathtub_0131_normalised\nbathtub/test/bathtub_0156_normalised\nbathtub/test/bathtub_0138_normalised\nbathtub/test/bathtub_0127_normalised\nbathtub/test/bathtub_0150_normalised\nbathtub/test/bathtub_0107_normalised\nbathtub/test/bathtub_0109_normalised\nbathtub/test/bathtub_0146_normalised\nbathtub/test/bathtub_0125_normalised\nbath
tub/test/bathtub_0153_normalised\nbathtub/test/bathtub_0122_normalised\nbathtub/test/bathtub_0121_normalised\nbathtub/test/bathtub_0155_normalised\nbathtub/test/bathtub_0134_normalised\nbathtub/test/bathtub_0114_normalised\nbathtub/test/bathtub_0126_normalised\nbathtub/test/bathtub_0118_normalised\nbathtub/test/bathtub_0116_normalised\nbathtub/test/bathtub_0135_normalised\nbathtub/test/bathtub_0123_normalised\nbathtub/test/bathtub_0137_normalised\nbathtub/test/bathtub_0142_normalised\nbathtub/test/bathtub_0115_normalised\nbathtub/test/bathtub_0143_normalised\nbathtub/test/bathtub_0151_normalised\nbathtub/test/bathtub_0132_normalised\nbathtub/test/bathtub_0149_normalised\nbathtub/test/bathtub_0112_normalised\nbathtub/test/bathtub_0124_normalised\nbathtub/test/bathtub_0128_normalised\nbathtub/test/bathtub_0148_normalised\nbathtub/test/bathtub_0110_normalised\nbathtub/test/bathtub_0111_normalised\nbathtub/test/bathtub_0147_normalised\nbathtub/test/bathtub_0117_normalised\nbathtub/test/bathtub_0145_normalised\nbathtub/test/bathtub_0141_normalised\nbathtub/test/bathtub_0108_normalised\nbathtub/test/bathtub_0140_normalised\nbathtub/test/bathtub_0139_normalised\nbathtub/test/bathtub_0154_normalised\nbathtub/test/bathtub_0136_normalised\nbathtub/test/bathtub_0119_normalised\nbathtub/test/bathtub_0144_normalised\nbathtub/test/bathtub_0129_normalised\ndresser/train/dresser_0039_normalised\ndresser/train/dresser_0081_normalised\ndresser/train/dresser_0124_normalised\ndresser/train/dresser_0024_normalised\ndresser/train/dresser_0113_normalised\ndresser/train/dresser_0060_normalised\ndresser/train/dresser_0107_normalised\ndresser/train/dresser_0027_normalised\ndresser/train/dresser_0121_normalised\ndresser/train/dresser_0133_normalised\ndresser/train/dresser_0102_normalised\ndresser/train/dresser_0026_normalised\ndresser/train/dresser_0154_normalised\ndresser/train/dresser_0109_normalised\ndresser/train/dresser_0157_normalised\ndresser/train/dresser_0116_normalised\ndresser/trai
n/dresser_0177_normalised\ndresser/train/dresser_0173_normalised\ndresser/train/dresser_0180_normalised\ndresser/train/dresser_0040_normalised\ndresser/train/dresser_0139_normalised\ndresser/train/dresser_0188_normalised\ndresser/train/dresser_0047_normalised\ndresser/train/dresser_0170_normalised\ndresser/train/dresser_0162_normalised\ndresser/train/dresser_0126_normalised\ndresser/train/dresser_0160_normalised\ndresser/train/dresser_0161_normalised\ndresser/train/dresser_0115_normalised\ndresser/train/dresser_0076_normalised\ndresser/train/dresser_0073_normalised\ndresser/train/dresser_0174_normalised\ndresser/train/dresser_0159_normalised\ndresser/train/dresser_0034_normalised\ndresser/train/dresser_0014_normalised\ndresser/train/dresser_0098_normalised\ndresser/train/dresser_0128_normalised\ndresser/train/dresser_0071_normalised\ndresser/train/dresser_0066_normalised\ndresser/train/dresser_0199_normalised\ndresser/train/dresser_0095_normalised\ndresser/train/dresser_0072_normalised\ndresser/train/dresser_0110_normalised\ndresser/train/dresser_0163_normalised\ndresser/train/dresser_0187_normalised\ndresser/train/dresser_0017_normalised\ndresser/train/dresser_0101_normalised\ndresser/train/dresser_0032_normalised\ndresser/train/dresser_0096_normalised\ndresser/train/dresser_0008_normalised\ndresser/train/dresser_0013_normalised\ndresser/train/dresser_0061_normalised\ndresser/train/dresser_0156_normalised\ndresser/train/dresser_0019_normalised\ndresser/train/dresser_0087_normalised\ndresser/train/dresser_0182_normalised\ndresser/train/dresser_0197_normalised\ndresser/train/dresser_0054_normalised\ndresser/train/dresser_0059_normalised\ndresser/train/dresser_0168_normalised\ndresser/train/dresser_0097_normalised\ndresser/train/dresser_0056_normalised\ndresser/train/dresser_0004_normalised\ndresser/train/dresser_0030_normalised\ndresser/train/dresser_0002_normalised\ndresser/train/dresser_0080_normalised\ndresser/train/dresser_0029_normalised\ndresser/train/dresser_0
015_normalised\ndresser/train/dresser_0007_normalised\ndresser/train/dresser_0141_normalised\ndresser/train/dresser_0028_normalised\ndresser/train/dresser_0151_normalised\ndresser/train/dresser_0155_normalised\ndresser/train/dresser_0045_normalised\ndresser/train/dresser_0036_normalised\ndresser/train/dresser_0114_normalised\ndresser/train/dresser_0083_normalised\ndresser/train/dresser_0191_normalised\ndresser/train/dresser_0079_normalised\ndresser/train/dresser_0009_normalised\ndresser/train/dresser_0147_normalised\ndresser/train/dresser_0144_normalised\ndresser/train/dresser_0198_normalised\ndresser/train/dresser_0104_normalised\ndresser/train/dresser_0011_normalised\ndresser/train/dresser_0041_normalised\ndresser/train/dresser_0038_normalised\ndresser/train/dresser_0143_normalised\ndresser/train/dresser_0123_normalised\ndresser/train/dresser_0184_normalised\ndresser/train/dresser_0176_normalised\ndresser/train/dresser_0091_normalised\ndresser/train/dresser_0179_normalised\ndresser/train/dresser_0075_normalised\ndresser/train/dresser_0053_normalised\ndresser/train/dresser_0033_normalised\ndresser/train/dresser_0099_normalised\ndresser/train/dresser_0086_normalised\ndresser/train/dresser_0152_normalised\ndresser/train/dresser_0020_normalised\ndresser/train/dresser_0134_normalised\ndresser/train/dresser_0051_normalised\ndresser/train/dresser_0149_normalised\ndresser/train/dresser_0145_normalised\ndresser/train/dresser_0050_normalised\ndresser/train/dresser_0055_normalised\ndresser/train/dresser_0078_normalised\ndresser/train/dresser_0077_normalised\ndresser/train/dresser_0031_normalised\ndresser/train/dresser_0003_normalised\ndresser/train/dresser_0094_normalised\ndresser/train/dresser_0138_normalised\ndresser/train/dresser_0172_normalised\ndresser/train/dresser_0185_normalised\ndresser/train/dresser_0064_normalised\ndresser/train/dresser_0164_normalised\ndresser/train/dresser_0023_normalised\ndresser/train/dresser_0131_normalised\ndresser/train/dresser_0194_normali
sed\ndresser/train/dresser_0165_normalised\ndresser/train/dresser_0189_normalised\ndresser/train/dresser_0146_normalised\ndresser/train/dresser_0130_normalised\ndresser/train/dresser_0065_normalised\ndresser/train/dresser_0106_normalised\ndresser/train/dresser_0043_normalised\ndresser/train/dresser_0190_normalised\ndresser/train/dresser_0166_normalised\ndresser/train/dresser_0048_normalised\ndresser/train/dresser_0117_normalised\ndresser/train/dresser_0153_normalised\ndresser/train/dresser_0122_normalised\ndresser/train/dresser_0192_normalised\ndresser/train/dresser_0132_normalised\ndresser/train/dresser_0069_normalised\ndresser/train/dresser_0067_normalised\ndresser/train/dresser_0196_normalised\ndresser/train/dresser_0042_normalised\ndresser/train/dresser_0112_normalised\ndresser/train/dresser_0129_normalised\ndresser/train/dresser_0068_normalised\ndresser/train/dresser_0169_normalised\ndresser/train/dresser_0108_normalised\ndresser/train/dresser_0195_normalised\ndresser/train/dresser_0118_normalised\ndresser/train/dresser_0085_normalised\ndresser/train/dresser_0022_normalised\ndresser/train/dresser_0005_normalised\ndresser/train/dresser_0120_normalised\ndresser/train/dresser_0100_normalised\ndresser/train/dresser_0010_normalised\ndresser/train/dresser_0136_normalised\ndresser/train/dresser_0025_normalised\ndresser/train/dresser_0049_normalised\ndresser/train/dresser_0074_normalised\ndresser/train/dresser_0084_normalised\ndresser/train/dresser_0057_normalised\ndresser/train/dresser_0127_normalised\ndresser/train/dresser_0001_normalised\ndresser/train/dresser_0037_normalised\ndresser/train/dresser_0125_normalised\ndresser/train/dresser_0111_normalised\ndresser/train/dresser_0140_normalised\ndresser/train/dresser_0105_normalised\ndresser/train/dresser_0158_normalised\ndresser/train/dresser_0150_normalised\ndresser/train/dresser_0167_normalised\ndresser/train/dresser_0171_normalised\ndresser/train/dresser_0178_normalised\ndresser/train/dresser_0012_normalised\ndresse
r/train/dresser_0137_normalised\ndresser/train/dresser_0044_normalised\ndresser/train/dresser_0070_normalised\ndresser/train/dresser_0142_normalised\ndresser/train/dresser_0082_normalised\ndresser/train/dresser_0186_normalised\ndresser/train/dresser_0062_normalised\ndresser/train/dresser_0175_normalised\ndresser/train/dresser_0018_normalised\ndresser/train/dresser_0046_normalised\ndresser/train/dresser_0006_normalised\ndresser/train/dresser_0181_normalised\ndresser/train/dresser_0035_normalised\ndresser/train/dresser_0193_normalised\ndresser/train/dresser_0092_normalised\ndresser/train/dresser_0135_normalised\ndresser/train/dresser_0063_normalised\ndresser/train/dresser_0089_normalised\ndresser/train/dresser_0183_normalised\ndresser/train/dresser_0088_normalised\ndresser/train/dresser_0016_normalised\ndresser/train/dresser_0090_normalised\ndresser/train/dresser_0103_normalised\ndresser/train/dresser_0021_normalised\ndresser/train/dresser_0058_normalised\ndresser/train/dresser_0200_normalised\ndresser/train/dresser_0119_normalised\ndresser/train/dresser_0093_normalised\ndresser/train/dresser_0052_normalised\ndresser/train/dresser_0148_normalised\ndresser/test/dresser_0270_normalised\ndresser/test/dresser_0253_normalised\ndresser/test/dresser_0274_normalised\ndresser/test/dresser_0272_normalised\ndresser/test/dresser_0266_normalised\ndresser/test/dresser_0255_normalised\ndresser/test/dresser_0281_normalised\ndresser/test/dresser_0271_normalised\ndresser/test/dresser_0240_normalised\ndresser/test/dresser_0243_normalised\ndresser/test/dresser_0246_normalised\ndresser/test/dresser_0259_normalised\ndresser/test/dresser_0265_normalised\ndresser/test/dresser_0229_normalised\ndresser/test/dresser_0273_normalised\ndresser/test/dresser_0215_normalised\ndresser/test/dresser_0256_normalised\ndresser/test/dresser_0201_normalised\ndresser/test/dresser_0260_normalised\ndresser/test/dresser_0242_normalised\ndresser/test/dresser_0285_normalised\ndresser/test/dresser_0250_normalised\n
dresser/test/dresser_0263_normalised\ndresser/test/dresser_0220_normalised\ndresser/test/dresser_0261_normalised\ndresser/test/dresser_0213_normalised\ndresser/test/dresser_0232_normalised\ndresser/test/dresser_0226_normalised\ndresser/test/dresser_0214_normalised\ndresser/test/dresser_0249_normalised\ndresser/test/dresser_0269_normalised\ndresser/test/dresser_0221_normalised\ndresser/test/dresser_0233_normalised\ndresser/test/dresser_0227_normalised\ndresser/test/dresser_0280_normalised\ndresser/test/dresser_0223_normalised\ndresser/test/dresser_0238_normalised\ndresser/test/dresser_0222_normalised\ndresser/test/dresser_0202_normalised\ndresser/test/dresser_0277_normalised\ndresser/test/dresser_0262_normalised\ndresser/test/dresser_0230_normalised\ndresser/test/dresser_0279_normalised\ndresser/test/dresser_0275_normalised\ndresser/test/dresser_0218_normalised\ndresser/test/dresser_0225_normalised\ndresser/test/dresser_0219_normalised\ndresser/test/dresser_0224_normalised\ndresser/test/dresser_0268_normalised\ndresser/test/dresser_0207_normalised\ndresser/test/dresser_0210_normalised\ndresser/test/dresser_0286_normalised\ndresser/test/dresser_0257_normalised\ndresser/test/dresser_0208_normalised\ndresser/test/dresser_0267_normalised\ndresser/test/dresser_0247_normalised\ndresser/test/dresser_0237_normalised\ndresser/test/dresser_0217_normalised\ndresser/test/dresser_0239_normalised\ndresser/test/dresser_0241_normalised\ndresser/test/dresser_0206_normalised\ndresser/test/dresser_0284_normalised\ndresser/test/dresser_0258_normalised\ndresser/test/dresser_0283_normalised\ndresser/test/dresser_0211_normalised\ndresser/test/dresser_0228_normalised\ndresser/test/dresser_0244_normalised\ndresser/test/dresser_0254_normalised\ndresser/test/dresser_0216_normalised\ndresser/test/dresser_0251_normalised\ndresser/test/dresser_0252_normalised\ndresser/test/dresser_0209_normalised\ndresser/test/dresser_0203_normalised\ndresser/test/dresser_0264_normalised\ndresser/test/dresser_023
1_normalised\ndresser/test/dresser_0276_normalised\ndresser/test/dresser_0278_normalised\ndresser/test/dresser_0235_normalised\ndresser/test/dresser_0205_normalised\ndresser/test/dresser_0245_normalised\ndresser/test/dresser_0234_normalised\ndresser/test/dresser_0248_normalised\ndresser/test/dresser_0212_normalised\ndresser/test/dresser_0236_normalised\ndresser/test/dresser_0204_normalised\ndresser/test/dresser_0282_normalised\nbottle/train/bottle_0087_normalised\nbottle/train/bottle_0033_normalised\nbottle/train/bottle_0028_normalised\nbottle/train/bottle_0111_normalised\nbottle/train/bottle_0173_normalised\nbottle/train/bottle_0012_normalised\nbottle/train/bottle_0051_normalised\nbottle/train/bottle_0309_normalised\nbottle/train/bottle_0026_normalised\nbottle/train/bottle_0302_normalised\nbottle/train/bottle_0113_normalised\nbottle/train/bottle_0213_normalised\nbottle/train/bottle_0005_normalised\nbottle/train/bottle_0042_normalised\nbottle/train/bottle_0194_normalised\nbottle/train/bottle_0011_normalised\nbottle/train/bottle_0314_normalised\nbottle/train/bottle_0178_normalised\nbottle/train/bottle_0245_normalised\nbottle/train/bottle_0299_normalised\nbottle/train/bottle_0333_normalised\nbottle/train/bottle_0235_normalised\nbottle/train/bottle_0332_normalised\nbottle/train/bottle_0120_normalised\nbottle/train/bottle_0256_normalised\nbottle/train/bottle_0331_normalised\nbottle/train/bottle_0166_normalised\nbottle/train/bottle_0134_normalised\nbottle/train/bottle_0253_normalised\nbottle/train/bottle_0203_normalised\nbottle/train/bottle_0096_normalised\nbottle/train/bottle_0043_normalised\nbottle/train/bottle_0079_normalised\nbottle/train/bottle_0013_normalised\nbottle/train/bottle_0295_normalised\nbottle/train/bottle_0287_normalised\nbottle/train/bottle_0177_normalised\nbottle/train/bottle_0219_normalised\nbottle/train/bottle_0264_normalised\nbottle/train/bottle_0266_normalised\nbottle/train/bottle_0310_normalised\nbottle/train/bottle_0183_normalised\nbottle/train/b
ottle_0214_normalised\nbottle/train/bottle_0229_normalised\nbottle/train/bottle_0007_normalised\nbottle/train/bottle_0273_normalised\nbottle/train/bottle_0180_normalised\nbottle/train/bottle_0189_normalised\nbottle/train/bottle_0095_normalised\nbottle/train/bottle_0207_normalised\nbottle/train/bottle_0278_normalised\nbottle/train/bottle_0123_normalised\nbottle/train/bottle_0085_normalised\nbottle/train/bottle_0170_normalised\nbottle/train/bottle_0242_normalised\nbottle/train/bottle_0237_normalised\nbottle/train/bottle_0092_normalised\nbottle/train/bottle_0251_normalised\nbottle/train/bottle_0246_normalised\nbottle/train/bottle_0330_normalised\nbottle/train/bottle_0027_normalised\nbottle/train/bottle_0152_normalised\nbottle/train/bottle_0212_normalised\nbottle/train/bottle_0014_normalised\nbottle/train/bottle_0115_normalised\nbottle/train/bottle_0088_normalised\nbottle/train/bottle_0058_normalised\nbottle/train/bottle_0291_normalised\nbottle/train/bottle_0265_normalised\nbottle/train/bottle_0296_normalised\nbottle/train/bottle_0281_normalised\nbottle/train/bottle_0097_normalised\nbottle/train/bottle_0103_normalised\nbottle/train/bottle_0046_normalised\nbottle/train/bottle_0305_normalised\nbottle/train/bottle_0271_normalised\nbottle/train/bottle_0009_normalised\nbottle/train/bottle_0304_normalised\nbottle/train/bottle_0121_normalised\nbottle/train/bottle_0303_normalised\nbottle/train/bottle_0044_normalised\nbottle/train/bottle_0108_normalised\nbottle/train/bottle_0163_normalised\nbottle/train/bottle_0241_normalised\nbottle/train/bottle_0148_normalised\nbottle/train/bottle_0149_normalised\nbottle/train/bottle_0032_normalised\nbottle/train/bottle_0293_normalised\nbottle/train/bottle_0069_normalised\nbottle/train/bottle_0105_normalised\nbottle/train/bottle_0258_normalised\nbottle/train/bottle_0100_normalised\nbottle/train/bottle_0322_normalised\nbottle/train/bottle_0062_normalised\nbottle/train/bottle_0277_normalised\nbottle/train/bottle_0209_normalised\nbottle/train/bot
tle_0231_normalised\nbottle/train/bottle_0182_normalised\nbottle/train/bottle_0201_normalised\nbottle/train/bottle_0018_normalised\nbottle/train/bottle_0107_normalised\nbottle/train/bottle_0323_normalised\nbottle/train/bottle_0071_normalised\nbottle/train/bottle_0004_normalised\nbottle/train/bottle_0167_normalised\nbottle/train/bottle_0228_normalised\nbottle/train/bottle_0057_normalised\nbottle/train/bottle_0116_normalised\nbottle/train/bottle_0035_normalised\nbottle/train/bottle_0118_normalised\nbottle/train/bottle_0131_normalised\nbottle/train/bottle_0024_normalised\nbottle/train/bottle_0283_normalised\nbottle/train/bottle_0133_normalised\nbottle/train/bottle_0335_normalised\nbottle/train/bottle_0084_normalised\nbottle/train/bottle_0260_normalised\nbottle/train/bottle_0060_normalised\nbottle/train/bottle_0065_normalised\nbottle/train/bottle_0284_normalised\nbottle/train/bottle_0155_normalised\nbottle/train/bottle_0110_normalised\nbottle/train/bottle_0248_normalised\nbottle/train/bottle_0244_normalised\nbottle/train/bottle_0226_normalised\nbottle/train/bottle_0143_normalised\nbottle/train/bottle_0222_normalised\nbottle/train/bottle_0139_normalised\nbottle/train/bottle_0176_normalised\nbottle/train/bottle_0301_normalised\nbottle/train/bottle_0070_normalised\nbottle/train/bottle_0206_normalised\nbottle/train/bottle_0068_normalised\nbottle/train/bottle_0257_normalised\nbottle/train/bottle_0015_normalised\nbottle/train/bottle_0250_normalised\nbottle/train/bottle_0261_normalised\nbottle/train/bottle_0225_normalised\nbottle/train/bottle_0112_normalised\nbottle/train/bottle_0267_normalised\nbottle/train/bottle_0199_normalised\nbottle/train/bottle_0077_normalised\nbottle/train/bottle_0288_normalised\nbottle/train/bottle_0262_normalised\nbottle/train/bottle_0168_normalised\nbottle/train/bottle_0270_normalised\nbottle/train/bottle_0200_normalised\nbottle/train/bottle_0252_normalised\nbottle/train/bottle_0001_normalised\nbottle/train/bottle_0243_normalised\nbottle/train/bottl
e_0127_normalised\nbottle/train/bottle_0236_normalised\nbottle/train/bottle_0210_normalised\nbottle/train/bottle_0169_normalised\nbottle/train/bottle_0268_normalised\nbottle/train/bottle_0072_normalised\nbottle/train/bottle_0274_normalised\nbottle/train/bottle_0151_normalised\nbottle/train/bottle_0320_normalised\nbottle/train/bottle_0285_normalised\nbottle/train/bottle_0145_normalised\nbottle/train/bottle_0093_normalised\nbottle/train/bottle_0003_normalised\nbottle/train/bottle_0146_normalised\nbottle/train/bottle_0117_normalised\nbottle/train/bottle_0179_normalised\nbottle/train/bottle_0317_normalised\nbottle/train/bottle_0061_normalised\nbottle/train/bottle_0185_normalised\nbottle/train/bottle_0075_normalised\nbottle/train/bottle_0308_normalised\nbottle/train/bottle_0083_normalised\nbottle/train/bottle_0175_normalised\nbottle/train/bottle_0129_normalised\nbottle/train/bottle_0205_normalised\nbottle/train/bottle_0220_normalised\nbottle/train/bottle_0196_normalised\nbottle/train/bottle_0276_normalised\nbottle/train/bottle_0188_normalised\nbottle/train/bottle_0049_normalised\nbottle/train/bottle_0021_normalised\nbottle/train/bottle_0130_normalised\nbottle/train/bottle_0202_normalised\nbottle/train/bottle_0315_normalised\nbottle/train/bottle_0132_normalised\nbottle/train/bottle_0050_normalised\nbottle/train/bottle_0198_normalised\nbottle/train/bottle_0081_normalised\nbottle/train/bottle_0156_normalised\nbottle/train/bottle_0221_normalised\nbottle/train/bottle_0190_normalised\nbottle/train/bottle_0089_normalised\nbottle/train/bottle_0269_normalised\nbottle/train/bottle_0334_normalised\nbottle/train/bottle_0114_normalised\nbottle/train/bottle_0106_normalised\nbottle/train/bottle_0158_normalised\nbottle/train/bottle_0254_normalised\nbottle/train/bottle_0307_normalised\nbottle/train/bottle_0160_normalised\nbottle/train/bottle_0161_normalised\nbottle/train/bottle_0030_normalised\nbottle/train/bottle_0138_normalised\nbottle/train/bottle_0064_normalised\nbottle/train/bottle_
0224_normalised\nbottle/train/bottle_0038_normalised\nbottle/train/bottle_0211_normalised\nbottle/train/bottle_0172_normalised\nbottle/train/bottle_0047_normalised\nbottle/train/bottle_0321_normalised\nbottle/train/bottle_0263_normalised\nbottle/train/bottle_0017_normalised\nbottle/train/bottle_0311_normalised\nbottle/train/bottle_0197_normalised\nbottle/train/bottle_0094_normalised\nbottle/train/bottle_0039_normalised\nbottle/train/bottle_0022_normalised\nbottle/train/bottle_0019_normalised\nbottle/train/bottle_0150_normalised\nbottle/train/bottle_0109_normalised\nbottle/train/bottle_0157_normalised\nbottle/train/bottle_0099_normalised\nbottle/train/bottle_0141_normalised\nbottle/train/bottle_0204_normalised\nbottle/train/bottle_0192_normalised\nbottle/train/bottle_0029_normalised\nbottle/train/bottle_0128_normalised\nbottle/train/bottle_0074_normalised\nbottle/train/bottle_0230_normalised\nbottle/train/bottle_0045_normalised\nbottle/train/bottle_0327_normalised\nbottle/train/bottle_0329_normalised\nbottle/train/bottle_0319_normalised\nbottle/train/bottle_0286_normalised\nbottle/train/bottle_0147_normalised\nbottle/train/bottle_0054_normalised\nbottle/train/bottle_0318_normalised\nbottle/train/bottle_0249_normalised\nbottle/train/bottle_0008_normalised\nbottle/train/bottle_0137_normalised\nbottle/train/bottle_0037_normalised\nbottle/train/bottle_0233_normalised\nbottle/train/bottle_0259_normalised\nbottle/train/bottle_0324_normalised\nbottle/train/bottle_0053_normalised\nbottle/train/bottle_0016_normalised\nbottle/train/bottle_0078_normalised\nbottle/train/bottle_0215_normalised\nbottle/train/bottle_0010_normalised\nbottle/train/bottle_0119_normalised\nbottle/train/bottle_0297_normalised\nbottle/train/bottle_0162_normalised\nbottle/train/bottle_0184_normalised\nbottle/train/bottle_0208_normalised\nbottle/train/bottle_0006_normalised\nbottle/train/bottle_0063_normalised\nbottle/train/bottle_0195_normalised\nbottle/train/bottle_0290_normalised\nbottle/train/bottle_00
82_normalised\nbottle/train/bottle_0181_normalised\nbottle/train/bottle_0056_normalised\nbottle/train/bottle_0048_normalised\nbottle/train/bottle_0275_normalised\nbottle/train/bottle_0313_normalised\nbottle/train/bottle_0055_normalised\nbottle/train/bottle_0040_normalised\nbottle/train/bottle_0240_normalised\nbottle/train/bottle_0191_normalised\nbottle/train/bottle_0300_normalised\nbottle/train/bottle_0316_normalised\nbottle/train/bottle_0217_normalised\nbottle/train/bottle_0328_normalised\nbottle/train/bottle_0090_normalised\nbottle/train/bottle_0073_normalised\nbottle/train/bottle_0218_normalised\nbottle/train/bottle_0159_normalised\nbottle/train/bottle_0154_normalised\nbottle/train/bottle_0247_normalised\nbottle/train/bottle_0140_normalised\nbottle/train/bottle_0174_normalised\nbottle/train/bottle_0238_normalised\nbottle/train/bottle_0216_normalised\nbottle/train/bottle_0234_normalised\nbottle/train/bottle_0144_normalised\nbottle/train/bottle_0282_normalised\nbottle/train/bottle_0002_normalised\nbottle/train/bottle_0086_normalised\nbottle/train/bottle_0066_normalised\nbottle/train/bottle_0326_normalised\nbottle/train/bottle_0023_normalised\nbottle/train/bottle_0153_normalised\nbottle/train/bottle_0041_normalised\nbottle/train/bottle_0165_normalised\nbottle/train/bottle_0135_normalised\nbottle/train/bottle_0052_normalised\nbottle/train/bottle_0076_normalised\nbottle/train/bottle_0239_normalised\nbottle/train/bottle_0312_normalised\nbottle/train/bottle_0122_normalised\nbottle/train/bottle_0292_normalised\nbottle/train/bottle_0227_normalised\nbottle/train/bottle_0091_normalised\nbottle/train/bottle_0325_normalised\nbottle/train/bottle_0124_normalised\nbottle/train/bottle_0101_normalised\nbottle/train/bottle_0125_normalised\nbottle/train/bottle_0034_normalised\nbottle/train/bottle_0136_normalised\nbottle/train/bottle_0164_normalised\nbottle/train/bottle_0171_normalised\nbottle/train/bottle_0031_normalised\nbottle/train/bottle_0298_normalised\nbottle/train/bottle_0067
_normalised\nbottle/train/bottle_0025_normalised\nbottle/train/bottle_0098_normalised\nbottle/train/bottle_0193_normalised\nbottle/train/bottle_0059_normalised\nbottle/train/bottle_0272_normalised\nbottle/train/bottle_0232_normalised\nbottle/train/bottle_0279_normalised\nbottle/train/bottle_0126_normalised\nbottle/train/bottle_0102_normalised\nbottle/train/bottle_0294_normalised\nbottle/train/bottle_0142_normalised\nbottle/train/bottle_0223_normalised\nbottle/train/bottle_0289_normalised\nbottle/train/bottle_0280_normalised\nbottle/train/bottle_0104_normalised\nbottle/train/bottle_0186_normalised\nbottle/train/bottle_0255_normalised\nbottle/train/bottle_0306_normalised\nbottle/train/bottle_0020_normalised\nbottle/train/bottle_0036_normalised\nbottle/train/bottle_0187_normalised\nbottle/train/bottle_0080_normalised\nbottle/test/bottle_0409_normalised\nbottle/test/bottle_0370_normalised\nbottle/test/bottle_0417_normalised\nbottle/test/bottle_0419_normalised\nbottle/test/bottle_0429_normalised\nbottle/test/bottle_0416_normalised\nbottle/test/bottle_0403_normalised\nbottle/test/bottle_0354_normalised\nbottle/test/bottle_0352_normalised\nbottle/test/bottle_0382_normalised\nbottle/test/bottle_0345_normalised\nbottle/test/bottle_0394_normalised\nbottle/test/bottle_0427_normalised\nbottle/test/bottle_0435_normalised\nbottle/test/bottle_0405_normalised\nbottle/test/bottle_0393_normalised\nbottle/test/bottle_0366_normalised\nbottle/test/bottle_0359_normalised\nbottle/test/bottle_0428_normalised\nbottle/test/bottle_0399_normalised\nbottle/test/bottle_0385_normalised\nbottle/test/bottle_0411_normalised\nbottle/test/bottle_0367_normalised\nbottle/test/bottle_0364_normalised\nbottle/test/bottle_0406_normalised\nbottle/test/bottle_0357_normalised\nbottle/test/bottle_0356_normalised\nbottle/test/bottle_0338_normalised\nbottle/test/bottle_0358_normalised\nbottle/test/bottle_0362_normalised\nbottle/test/bottle_0424_normalised\nbottle/test/bottle_0368_normalised\nbottle/test/bottle_04
22_normalised\nbottle/test/bottle_0426_normalised\nbottle/test/bottle_0342_normalised\nbottle/test/bottle_0408_normalised\nbottle/test/bottle_0337_normalised\nbottle/test/bottle_0412_normalised\nbottle/test/bottle_0355_normalised\nbottle/test/bottle_0353_normalised\nbottle/test/bottle_0363_normalised\nbottle/test/bottle_0400_normalised\nbottle/test/bottle_0420_normalised\nbottle/test/bottle_0395_normalised\nbottle/test/bottle_0388_normalised\nbottle/test/bottle_0351_normalised\nbottle/test/bottle_0340_normalised\nbottle/test/bottle_0365_normalised\nbottle/test/bottle_0361_normalised\nbottle/test/bottle_0407_normalised\nbottle/test/bottle_0423_normalised\nbottle/test/bottle_0344_normalised\nbottle/test/bottle_0346_normalised\nbottle/test/bottle_0383_normalised\nbottle/test/bottle_0425_normalised\nbottle/test/bottle_0339_normalised\nbottle/test/bottle_0386_normalised\nbottle/test/bottle_0415_normalised\nbottle/test/bottle_0433_normalised\nbottle/test/bottle_0392_normalised\nbottle/test/bottle_0432_normalised\nbottle/test/bottle_0414_normalised\nbottle/test/bottle_0397_normalised\nbottle/test/bottle_0396_normalised\nbottle/test/bottle_0373_normalised\nbottle/test/bottle_0343_normalised\nbottle/test/bottle_0379_normalised\nbottle/test/bottle_0350_normalised\nbottle/test/bottle_0401_normalised\nbottle/test/bottle_0336_normalised\nbottle/test/bottle_0390_normalised\nbottle/test/bottle_0347_normalised\nbottle/test/bottle_0374_normalised\nbottle/test/bottle_0349_normalised\nbottle/test/bottle_0387_normalised\nbottle/test/bottle_0421_normalised\nbottle/test/bottle_0380_normalised\nbottle/test/bottle_0398_normalised\nbottle/test/bottle_0375_normalised\nbottle/test/bottle_0434_normalised\nbottle/test/bottle_0391_normalised\nbottle/test/bottle_0372_normalised\nbottle/test/bottle_0384_normalised\nbottle/test/bottle_0341_normalised\nbottle/test/bottle_0404_normalised\nbottle/test/bottle_0389_normalised\nbottle/test/bottle_0378_normalised\nbottle/test/bottle_0376_normalised\nbottl
e/test/bottle_0369_normalised\nbottle/test/bottle_0371_normalised\nbottle/test/bottle_0431_normalised\nbottle/test/bottle_0410_normalised\nbottle/test/bottle_0360_normalised\nbottle/test/bottle_0418_normalised\nbottle/test/bottle_0377_normalised\nbottle/test/bottle_0381_normalised\nbottle/test/bottle_0413_normalised\nbottle/test/bottle_0402_normalised\nbottle/test/bottle_0348_normalised\nbottle/test/bottle_0430_normalised\ntv_stand/train/tv_stand_0004_normalised\ntv_stand/train/tv_stand_0041_normalised\ntv_stand/train/tv_stand_0038_normalised\ntv_stand/train/tv_stand_0243_normalised\ntv_stand/train/tv_stand_0034_normalised\ntv_stand/train/tv_stand_0024_normalised\ntv_stand/train/tv_stand_0126_normalised\ntv_stand/train/tv_stand_0199_normalised\ntv_stand/train/tv_stand_0085_normalised\ntv_stand/train/tv_stand_0147_normalised\ntv_stand/train/tv_stand_0229_normalised\ntv_stand/train/tv_stand_0037_normalised\ntv_stand/train/tv_stand_0011_normalised\ntv_stand/train/tv_stand_0091_normalised\ntv_stand/train/tv_stand_0042_normalised\ntv_stand/train/tv_stand_0093_normalised\ntv_stand/train/tv_stand_0063_normalised\ntv_stand/train/tv_stand_0036_normalised\ntv_stand/train/tv_stand_0226_normalised\ntv_stand/train/tv_stand_0010_normalised\ntv_stand/train/tv_stand_0066_normalised\ntv_stand/train/tv_stand_0206_normalised\ntv_stand/train/tv_stand_0221_normalised\ntv_stand/train/tv_stand_0022_normalised\ntv_stand/train/tv_stand_0201_normalised\ntv_stand/train/tv_stand_0258_normalised\ntv_stand/train/tv_stand_0084_normalised\ntv_stand/train/tv_stand_0152_normalised\ntv_stand/train/tv_stand_0111_normalised\ntv_stand/train/tv_stand_0160_normalised\ntv_stand/train/tv_stand_0252_normalised\ntv_stand/train/tv_stand_0067_normalised\ntv_stand/train/tv_stand_0033_normalised\ntv_stand/train/tv_stand_0029_normalised\ntv_stand/train/tv_stand_0154_normalised\ntv_stand/train/tv_stand_0060_normalised\ntv_stand/train/tv_stand_0241_normalised\ntv_stand/train/tv_stand_0122_normalised\ntv_stand/train/
tv_stand_0035_normalised\ntv_stand/train/tv_stand_0108_normalised\ntv_stand/train/tv_stand_0074_normalised\ntv_stand/train/tv_stand_0103_normalised\ntv_stand/train/tv_stand_0005_normalised\ntv_stand/train/tv_stand_0148_normalised\ntv_stand/train/tv_stand_0064_normalised\ntv_stand/train/tv_stand_0247_normalised\ntv_stand/train/tv_stand_0145_normalised\ntv_stand/train/tv_stand_0259_normalised\ntv_stand/train/tv_stand_0039_normalised\ntv_stand/train/tv_stand_0129_normalised\ntv_stand/train/tv_stand_0032_normalised\ntv_stand/train/tv_stand_0150_normalised\ntv_stand/train/tv_stand_0204_normalised\ntv_stand/train/tv_stand_0052_normalised\ntv_stand/train/tv_stand_0089_normalised\ntv_stand/train/tv_stand_0244_normalised\ntv_stand/train/tv_stand_0055_normalised\ntv_stand/train/tv_stand_0139_normalised\ntv_stand/train/tv_stand_0138_normalised\ntv_stand/train/tv_stand_0128_normalised\ntv_stand/train/tv_stand_0133_normalised\ntv_stand/train/tv_stand_0257_normalised\ntv_stand/train/tv_stand_0070_normalised\ntv_stand/train/tv_stand_0162_normalised\ntv_stand/train/tv_stand_0188_normalised\ntv_stand/train/tv_stand_0230_normalised\ntv_stand/train/tv_stand_0105_normalised\ntv_stand/train/tv_stand_0179_normalised\ntv_stand/train/tv_stand_0249_normalised\ntv_stand/train/tv_stand_0140_normalised\ntv_stand/train/tv_stand_0009_normalised\ntv_stand/train/tv_stand_0015_normalised\ntv_stand/train/tv_stand_0116_normalised\ntv_stand/train/tv_stand_0196_normalised\ntv_stand/train/tv_stand_0159_normalised\ntv_stand/train/tv_stand_0131_normalised\ntv_stand/train/tv_stand_0118_normalised\ntv_stand/train/tv_stand_0180_normalised\ntv_stand/train/tv_stand_0231_normalised\ntv_stand/train/tv_stand_0027_normalised\ntv_stand/train/tv_stand_0068_normalised\ntv_stand/train/tv_stand_0113_normalised\ntv_stand/train/tv_stand_0242_normalised\ntv_stand/train/tv_stand_0237_normalised\ntv_stand/train/tv_stand_0053_normalised\ntv_stand/train/tv_stand_0031_normalised\ntv_stand/train/tv_stand_0001_normalised\ntv_sta
nd/train/tv_stand_0267_normalised\ntv_stand/train/tv_stand_0182_normalised\ntv_stand/train/tv_stand_0219_normalised\ntv_stand/train/tv_stand_0026_normalised\ntv_stand/train/tv_stand_0143_normalised\ntv_stand/train/tv_stand_0209_normalised\ntv_stand/train/tv_stand_0216_normalised\ntv_stand/train/tv_stand_0239_normalised\ntv_stand/train/tv_stand_0245_normalised\ntv_stand/train/tv_stand_0260_normalised\ntv_stand/train/tv_stand_0210_normalised\ntv_stand/train/tv_stand_0048_normalised\ntv_stand/train/tv_stand_0059_normalised\ntv_stand/train/tv_stand_0264_normalised\ntv_stand/train/tv_stand_0213_normalised\ntv_stand/train/tv_stand_0170_normalised\ntv_stand/train/tv_stand_0106_normalised\ntv_stand/train/tv_stand_0175_normalised\ntv_stand/train/tv_stand_0082_normalised\ntv_stand/train/tv_stand_0049_normalised\ntv_stand/train/tv_stand_0194_normalised\ntv_stand/train/tv_stand_0200_normalised\ntv_stand/train/tv_stand_0023_normalised\ntv_stand/train/tv_stand_0110_normalised\ntv_stand/train/tv_stand_0078_normalised\ntv_stand/train/tv_stand_0090_normalised\ntv_stand/train/tv_stand_0232_normalised\ntv_stand/train/tv_stand_0030_normalised\ntv_stand/train/tv_stand_0142_normalised\ntv_stand/train/tv_stand_0255_normalised\ntv_stand/train/tv_stand_0212_normalised\ntv_stand/train/tv_stand_0061_normalised\ntv_stand/train/tv_stand_0007_normalised\ntv_stand/train/tv_stand_0050_normalised\ntv_stand/train/tv_stand_0130_normalised\ntv_stand/train/tv_stand_0065_normalised\ntv_stand/train/tv_stand_0207_normalised\ntv_stand/train/tv_stand_0202_normalised\ntv_stand/train/tv_stand_0087_normalised\ntv_stand/train/tv_stand_0197_normalised\ntv_stand/train/tv_stand_0043_normalised\ntv_stand/train/tv_stand_0236_normalised\ntv_stand/train/tv_stand_0171_normalised\ntv_stand/train/tv_stand_0102_normalised\ntv_stand/train/tv_stand_0114_normalised\ntv_stand/train/tv_stand_0190_normalised\ntv_stand/train/tv_stand_0261_normalised\ntv_stand/train/tv_stand_0168_normalised\ntv_stand/train/tv_stand_0228_normalise
d\ntv_stand/train/tv_stand_0079_normalised\ntv_stand/train/tv_stand_0136_normalised\ntv_stand/train/tv_stand_0018_normalised\ntv_stand/train/tv_stand_0176_normalised\ntv_stand/train/tv_stand_0156_normalised\ntv_stand/train/tv_stand_0020_normalised\ntv_stand/train/tv_stand_0092_normalised\ntv_stand/train/tv_stand_0189_normalised\ntv_stand/train/tv_stand_0246_normalised\ntv_stand/train/tv_stand_0017_normalised\ntv_stand/train/tv_stand_0262_normalised\ntv_stand/train/tv_stand_0137_normalised\ntv_stand/train/tv_stand_0238_normalised\ntv_stand/train/tv_stand_0161_normalised\ntv_stand/train/tv_stand_0123_normalised\ntv_stand/train/tv_stand_0251_normalised\ntv_stand/train/tv_stand_0191_normalised\ntv_stand/train/tv_stand_0071_normalised\ntv_stand/train/tv_stand_0253_normalised\ntv_stand/train/tv_stand_0040_normalised\ntv_stand/train/tv_stand_0134_normalised\ntv_stand/train/tv_stand_0235_normalised\ntv_stand/train/tv_stand_0220_normalised\ntv_stand/train/tv_stand_0028_normalised\ntv_stand/train/tv_stand_0127_normalised\ntv_stand/train/tv_stand_0164_normalised\ntv_stand/train/tv_stand_0240_normalised\ntv_stand/train/tv_stand_0178_normalised\ntv_stand/train/tv_stand_0121_normalised\ntv_stand/train/tv_stand_0076_normalised\ntv_stand/train/tv_stand_0119_normalised\ntv_stand/train/tv_stand_0124_normalised\ntv_stand/train/tv_stand_0144_normalised\ntv_stand/train/tv_stand_0073_normalised\ntv_stand/train/tv_stand_0167_normalised\ntv_stand/train/tv_stand_0157_normalised\ntv_stand/train/tv_stand_0205_normalised\ntv_stand/train/tv_stand_0222_normalised\ntv_stand/train/tv_stand_0198_normalised\ntv_stand/train/tv_stand_0115_normalised\ntv_stand/train/tv_stand_0155_normalised\ntv_stand/train/tv_stand_0225_normalised\ntv_stand/train/tv_stand_0094_normalised\ntv_stand/train/tv_stand_0072_normalised\ntv_stand/train/tv_stand_0254_normalised\ntv_stand/train/tv_stand_0265_normalised\ntv_stand/train/tv_stand_0256_normalised\ntv_stand/train/tv_stand_0016_normalised\ntv_stand/train/tv_stand_0069_
normalised\ntv_stand/train/tv_stand_0051_normalised\ntv_stand/train/tv_stand_0013_normalised\ntv_stand/train/tv_stand_0096_normalised\ntv_stand/train/tv_stand_0135_normalised\ntv_stand/train/tv_stand_0203_normalised\ntv_stand/train/tv_stand_0169_normalised\ntv_stand/train/tv_stand_0233_normalised\ntv_stand/train/tv_stand_0104_normalised\ntv_stand/train/tv_stand_0006_normalised\ntv_stand/train/tv_stand_0248_normalised\ntv_stand/train/tv_stand_0056_normalised\ntv_stand/train/tv_stand_0218_normalised\ntv_stand/train/tv_stand_0003_normalised\ntv_stand/train/tv_stand_0014_normalised\ntv_stand/train/tv_stand_0158_normalised\ntv_stand/train/tv_stand_0083_normalised\ntv_stand/train/tv_stand_0058_normalised\ntv_stand/train/tv_stand_0095_normalised\ntv_stand/train/tv_stand_0062_normalised\ntv_stand/train/tv_stand_0099_normalised\ntv_stand/train/tv_stand_0012_normalised\ntv_stand/train/tv_stand_0263_normalised\ntv_stand/train/tv_stand_0174_normalised\ntv_stand/train/tv_stand_0166_normalised\ntv_stand/train/tv_stand_0223_normalised\ntv_stand/train/tv_stand_0224_normalised\ntv_stand/train/tv_stand_0192_normalised\ntv_stand/train/tv_stand_0109_normalised\ntv_stand/train/tv_stand_0193_normalised\ntv_stand/train/tv_stand_0184_normalised\ntv_stand/train/tv_stand_0044_normalised\ntv_stand/train/tv_stand_0021_normalised\ntv_stand/train/tv_stand_0151_normalised\ntv_stand/train/tv_stand_0195_normalised\ntv_stand/train/tv_stand_0165_normalised\ntv_stand/train/tv_stand_0107_normalised\ntv_stand/train/tv_stand_0057_normalised\ntv_stand/train/tv_stand_0177_normalised\ntv_stand/train/tv_stand_0217_normalised\ntv_stand/train/tv_stand_0208_normalised\ntv_stand/train/tv_stand_0046_normalised\ntv_stand/train/tv_stand_0101_normalised\ntv_stand/train/tv_stand_0153_normalised\ntv_stand/train/tv_stand_0081_normalised\ntv_stand/train/tv_stand_0146_normalised\ntv_stand/train/tv_stand_0149_normalised\ntv_stand/train/tv_stand_0132_normalised\ntv_stand/train/tv_stand_0214_normalised\ntv_stand/train/tv_st
and_0266_normalised\ntv_stand/train/tv_stand_0097_normalised\ntv_stand/train/tv_stand_0112_normalised\ntv_stand/train/tv_stand_0002_normalised\ntv_stand/train/tv_stand_0120_normalised\ntv_stand/train/tv_stand_0054_normalised\ntv_stand/train/tv_stand_0047_normalised\ntv_stand/train/tv_stand_0125_normalised\ntv_stand/train/tv_stand_0187_normalised\ntv_stand/train/tv_stand_0185_normalised\ntv_stand/train/tv_stand_0025_normalised\ntv_stand/train/tv_stand_0186_normalised\ntv_stand/train/tv_stand_0098_normalised\ntv_stand/train/tv_stand_0172_normalised\ntv_stand/train/tv_stand_0234_normalised\ntv_stand/train/tv_stand_0019_normalised\ntv_stand/train/tv_stand_0075_normalised\ntv_stand/train/tv_stand_0045_normalised\ntv_stand/train/tv_stand_0141_normalised\ntv_stand/train/tv_stand_0183_normalised\ntv_stand/train/tv_stand_0080_normalised\ntv_stand/train/tv_stand_0117_normalised\ntv_stand/train/tv_stand_0211_normalised\ntv_stand/train/tv_stand_0215_normalised\ntv_stand/train/tv_stand_0008_normalised\ntv_stand/train/tv_stand_0100_normalised\ntv_stand/train/tv_stand_0250_normalised\ntv_stand/train/tv_stand_0181_normalised\ntv_stand/train/tv_stand_0086_normalised\ntv_stand/train/tv_stand_0173_normalised\ntv_stand/train/tv_stand_0077_normalised\ntv_stand/train/tv_stand_0163_normalised\ntv_stand/train/tv_stand_0227_normalised\ntv_stand/train/tv_stand_0088_normalised\ntv_stand/test/tv_stand_0319_normalised\ntv_stand/test/tv_stand_0356_normalised\ntv_stand/test/tv_stand_0367_normalised\ntv_stand/test/tv_stand_0332_normalised\ntv_stand/test/tv_stand_0365_normalised\ntv_stand/test/tv_stand_0311_normalised\ntv_stand/test/tv_stand_0285_normalised\ntv_stand/test/tv_stand_0361_normalised\ntv_stand/test/tv_stand_0289_normalised\ntv_stand/test/tv_stand_0271_normalised\ntv_stand/test/tv_stand_0312_normalised\ntv_stand/test/tv_stand_0278_normalised\ntv_stand/test/tv_stand_0355_normalised\ntv_stand/test/tv_stand_0317_normalised\ntv_stand/test/tv_stand_0338_normalised\ntv_stand/test/tv_stand_028
7_normalised\ntv_stand/test/tv_stand_0321_normalised\ntv_stand/test/tv_stand_0346_normalised\ntv_stand/test/tv_stand_0349_normalised\ntv_stand/test/tv_stand_0337_normalised\ntv_stand/test/tv_stand_0300_normalised\ntv_stand/test/tv_stand_0353_normalised\ntv_stand/test/tv_stand_0327_normalised\ntv_stand/test/tv_stand_0292_normalised\ntv_stand/test/tv_stand_0291_normalised\ntv_stand/test/tv_stand_0324_normalised\ntv_stand/test/tv_stand_0308_normalised\ntv_stand/test/tv_stand_0340_normalised\ntv_stand/test/tv_stand_0273_normalised\ntv_stand/test/tv_stand_0315_normalised\ntv_stand/test/tv_stand_0279_normalised\ntv_stand/test/tv_stand_0360_normalised\ntv_stand/test/tv_stand_0296_normalised\ntv_stand/test/tv_stand_0283_normalised\ntv_stand/test/tv_stand_0364_normalised\ntv_stand/test/tv_stand_0299_normalised\ntv_stand/test/tv_stand_0334_normalised\ntv_stand/test/tv_stand_0347_normalised\ntv_stand/test/tv_stand_0363_normalised\ntv_stand/test/tv_stand_0366_normalised\ntv_stand/test/tv_stand_0352_normalised\ntv_stand/test/tv_stand_0343_normalised\ntv_stand/test/tv_stand_0294_normalised\ntv_stand/test/tv_stand_0303_normalised\ntv_stand/test/tv_stand_0330_normalised\ntv_stand/test/tv_stand_0286_normalised\ntv_stand/test/tv_stand_0357_normalised\ntv_stand/test/tv_stand_0301_normalised\ntv_stand/test/tv_stand_0351_normalised\ntv_stand/test/tv_stand_0276_normalised\ntv_stand/test/tv_stand_0280_normalised\ntv_stand/test/tv_stand_0302_normalised\ntv_stand/test/tv_stand_0322_normalised\ntv_stand/test/tv_stand_0341_normalised\ntv_stand/test/tv_stand_0306_normalised\ntv_stand/test/tv_stand_0270_normalised\ntv_stand/test/tv_stand_0359_normalised\ntv_stand/test/tv_stand_0333_normalised\ntv_stand/test/tv_stand_0342_normalised\ntv_stand/test/tv_stand_0336_normalised\ntv_stand/test/tv_stand_0358_normalised\ntv_stand/test/tv_stand_0295_normalised\ntv_stand/test/tv_stand_0326_normalised\ntv_stand/test/tv_stand_0268_normalised\ntv_stand/test/tv_stand_0329_normalised\ntv_stand/test/tv_stand_028
4_normalised\ntv_stand/test/tv_stand_0335_normalised\ntv_stand/test/tv_stand_0328_normalised\ntv_stand/test/tv_stand_0277_normalised\ntv_stand/test/tv_stand_0309_normalised\ntv_stand/test/tv_stand_0293_normalised\ntv_stand/test/tv_stand_0275_normalised\ntv_stand/test/tv_stand_0290_normalised\ntv_stand/test/tv_stand_0344_normalised\ntv_stand/test/tv_stand_0331_normalised\ntv_stand/test/tv_stand_0350_normalised\ntv_stand/test/tv_stand_0320_normalised\ntv_stand/test/tv_stand_0310_normalised\ntv_stand/test/tv_stand_0282_normalised\ntv_stand/test/tv_stand_0304_normalised\ntv_stand/test/tv_stand_0325_normalised\ntv_stand/test/tv_stand_0348_normalised\ntv_stand/test/tv_stand_0269_normalised\ntv_stand/test/tv_stand_0314_normalised\ntv_stand/test/tv_stand_0272_normalised\ntv_stand/test/tv_stand_0339_normalised\ntv_stand/test/tv_stand_0345_normalised\ntv_stand/test/tv_stand_0281_normalised\ntv_stand/test/tv_stand_0316_normalised\ntv_stand/test/tv_stand_0298_normalised\ntv_stand/test/tv_stand_0297_normalised\ntv_stand/test/tv_stand_0362_normalised\ntv_stand/test/tv_stand_0318_normalised\ntv_stand/test/tv_stand_0313_normalised\ntv_stand/test/tv_stand_0354_normalised\ntv_stand/test/tv_stand_0288_normalised\ntv_stand/test/tv_stand_0274_normalised\ntv_stand/test/tv_stand_0305_normalised\ntv_stand/test/tv_stand_0323_normalised\ntv_stand/test/tv_stand_0307_normalised\ntable/train/table_0066_normalised\ntable/train/table_0153_normalised\ntable/train/table_0025_normalised\ntable/train/table_0036_normalised\ntable/train/table_0352_normalised\ntable/train/table_0108_normalised\ntable/train/table_0079_normalised\ntable/train/table_0237_normalised\ntable/train/table_0317_normalised\ntable/train/table_0004_normalised\ntable/train/table_0194_normalised\ntable/train/table_0034_normalised\ntable/train/table_0249_normalised\ntable/train/table_0076_normalised\ntable/train/table_0099_normalised\ntable/train/table_0125_normalised\ntable/train/table_0152_normalised\ntable/train/table_0176_normalis
ed\ntable/train/table_0113_normalised\ntable/train/table_0013_normalised\ntable/train/table_0014_normalised\ntable/train/table_0118_normalised\ntable/train/table_0286_normalised\ntable/train/table_0244_normalised\ntable/train/table_0021_normalised\ntable/train/table_0010_normalised\ntable/train/table_0180_normalised\ntable/train/table_0229_normalised\ntable/train/table_0327_normalised\ntable/train/table_0151_normalised\ntable/train/table_0082_normalised\ntable/train/table_0379_normalised\ntable/train/table_0220_normalised\ntable/train/table_0306_normalised\ntable/train/table_0044_normalised\ntable/train/table_0215_normalised\ntable/train/table_0030_normalised\ntable/train/table_0336_normalised\ntable/train/table_0052_normalised\ntable/train/table_0050_normalised\ntable/train/table_0310_normalised\ntable/train/table_0123_normalised\ntable/train/table_0390_normalised\ntable/train/table_0294_normalised\ntable/train/table_0247_normalised\ntable/train/table_0209_normalised\ntable/train/table_0345_normalised\ntable/train/table_0100_normalised\ntable/train/table_0109_normalised\ntable/train/table_0027_normalised\ntable/train/table_0155_normalised\ntable/train/table_0264_normalised\ntable/train/table_0245_normalised\ntable/train/table_0190_normalised\ntable/train/table_0283_normalised\ntable/train/table_0383_normalised\ntable/train/table_0232_normalised\ntable/train/table_0046_normalised\ntable/train/table_0159_normalised\ntable/train/table_0362_normalised\ntable/train/table_0234_normalised\ntable/train/table_0095_normalised\ntable/train/table_0150_normalised\ntable/train/table_0199_normalised\ntable/train/table_0041_normalised\ntable/train/table_0083_normalised\ntable/train/table_0131_normalised\ntable/train/table_0260_normalised\ntable/train/table_0226_normalised\ntable/train/table_0331_normalised\ntable/train/table_0035_normalised\ntable/train/table_0056_normalised\ntable/train/table_0334_normalised\ntable/train/table_0037_normalised\ntable/train/table_0333_normalised\nt
able/train/table_0356_normalised\ntable/train/table_0387_normalised\ntable/train/table_0110_normalised\ntable/train/table_0015_normalised\ntable/train/table_0078_normalised\ntable/train/table_0179_normalised\ntable/train/table_0139_normalised\ntable/train/table_0224_normalised\ntable/train/table_0240_normalised\ntable/train/table_0307_normalised\ntable/train/table_0341_normalised\ntable/train/table_0028_normalised\ntable/train/table_0295_normalised\ntable/train/table_0376_normalised\ntable/train/table_0068_normalised\ntable/train/table_0329_normalised\ntable/train/table_0289_normalised\ntable/train/table_0111_normalised\ntable/train/table_0385_normalised\ntable/train/table_0342_normalised\ntable/train/table_0162_normalised\ntable/train/table_0276_normalised\ntable/train/table_0177_normalised\ntable/train/table_0026_normalised\ntable/train/table_0322_normalised\ntable/train/table_0257_normalised\ntable/train/table_0060_normalised\ntable/train/table_0184_normalised\ntable/train/table_0114_normalised\ntable/train/table_0018_normalised\ntable/train/table_0236_normalised\ntable/train/table_0091_normalised\ntable/train/table_0282_normalised\ntable/train/table_0221_normalised\ntable/train/table_0338_normalised\ntable/train/table_0122_normalised\ntable/train/table_0092_normalised\ntable/train/table_0389_normalised\ntable/train/table_0364_normalised\ntable/train/table_0381_normalised\ntable/train/table_0116_normalised\ntable/train/table_0169_normalised\ntable/train/table_0273_normalised\ntable/train/table_0168_normalised\ntable/train/table_0378_normalised\ntable/train/table_0085_normalised\ntable/train/table_0185_normalised\ntable/train/table_0272_normalised\ntable/train/table_0073_normalised\ntable/train/table_0243_normalised\ntable/train/table_0228_normalised\ntable/train/table_0373_normalised\ntable/train/table_0261_normalised\ntable/train/table_0370_normalised\ntable/train/table_0170_normalised\ntable/train/table_0024_normalised\ntable/train/table_0368_normalised\ntable/
train/table_0391_normalised\ntable/train/table_0204_normalised\ntable/train/table_0182_normalised\ntable/train/table_0011_normalised\ntable/train/table_0192_normalised\ntable/train/table_0112_normalised\ntable/train/table_0313_normalised\ntable/train/table_0163_normalised\ntable/train/table_0344_normalised\ntable/train/table_0297_normalised\ntable/train/table_0369_normalised\ntable/train/table_0157_normalised\ntable/train/table_0323_normalised\ntable/train/table_0262_normalised\ntable/train/table_0256_normalised\ntable/train/table_0315_normalised\ntable/train/table_0360_normalised\ntable/train/table_0254_normalised\ntable/train/table_0102_normalised\ntable/train/table_0316_normalised\ntable/train/table_0203_normalised\ntable/train/table_0219_normalised\ntable/train/table_0175_normalised\ntable/train/table_0324_normalised\ntable/train/table_0202_normalised\ntable/train/table_0055_normalised\ntable/train/table_0218_normalised\ntable/train/table_0259_normalised\ntable/train/table_0075_normalised\ntable/train/table_0339_normalised\ntable/train/table_0002_normalised\ntable/train/table_0127_normalised\ntable/train/table_0107_normalised\ntable/train/table_0140_normalised\ntable/train/table_0012_normalised\ntable/train/table_0216_normalised\ntable/train/table_0119_normalised\ntable/train/table_0263_normalised\ntable/train/table_0097_normalised\ntable/train/table_0222_normalised\ntable/train/table_0070_normalised\ntable/train/table_0019_normalised\ntable/train/table_0089_normalised\ntable/train/table_0359_normalised\ntable/train/table_0049_normalised\ntable/train/table_0255_normalised\ntable/train/table_0128_normalised\ntable/train/table_0217_normalised\ntable/train/table_0388_normalised\ntable/train/table_0296_normalised\ntable/train/table_0250_normalised\ntable/train/table_0009_normalised\ntable/train/table_0207_normalised\ntable/train/table_0214_normalised\ntable/train/table_0136_normalised\ntable/train/table_0308_normalised\ntable/train/table_0382_normalised\ntable/train
/table_0268_normalised\ntable/train/table_0074_normalised\ntable/train/table_0016_normalised\ntable/train/table_0129_normalised\ntable/train/table_0158_normalised\ntable/train/table_0267_normalised\ntable/train/table_0300_normalised\ntable/train/table_0156_normalised\ntable/train/table_0281_normalised\ntable/train/table_0301_normalised\ntable/train/table_0183_normalised\ntable/train/table_0366_normalised\ntable/train/table_0134_normalised\ntable/train/table_0374_normalised\ntable/train/table_0290_normalised\ntable/train/table_0274_normalised\ntable/train/table_0246_normalised\ntable/train/table_0059_normalised\ntable/train/table_0380_normalised\ntable/train/table_0251_normalised\ntable/train/table_0332_normalised\ntable/train/table_0293_normalised\ntable/train/table_0130_normalised\ntable/train/table_0042_normalised\ntable/train/table_0285_normalised\ntable/train/table_0354_normalised\ntable/train/table_0053_normalised\ntable/train/table_0233_normalised\ntable/train/table_0124_normalised\ntable/train/table_0343_normalised\ntable/train/table_0069_normalised\ntable/train/table_0080_normalised\ntable/train/table_0271_normalised\ntable/train/table_0086_normalised\ntable/train/table_0349_normalised\ntable/train/table_0277_normalised\ntable/train/table_0003_normalised\ntable/train/table_0126_normalised\ntable/train/table_0094_normalised\ntable/train/table_0238_normalised\ntable/train/table_0326_normalised\ntable/train/table_0072_normalised\ntable/train/table_0230_normalised\ntable/train/table_0005_normalised\ntable/train/table_0357_normalised\ntable/train/table_0121_normalised\ntable/train/table_0064_normalised\ntable/train/table_0143_normalised\ntable/train/table_0033_normalised\ntable/train/table_0031_normalised\ntable/train/table_0231_normalised\ntable/train/table_0208_normalised\ntable/train/table_0265_normalised\ntable/train/table_0105_normalised\ntable/train/table_0258_normalised\ntable/train/table_0142_normalised\ntable/train/table_0051_normalised\ntable/train/tabl
e_0133_normalised\ntable/train/table_0137_normalised\ntable/train/table_0103_normalised\ntable/train/table_0386_normalised\ntable/train/table_0269_normalised\ntable/train/table_0171_normalised\ntable/train/table_0384_normalised\ntable/train/table_0166_normalised\ntable/train/table_0302_normalised\ntable/train/table_0298_normalised\ntable/train/table_0022_normalised\ntable/train/table_0191_normalised\ntable/train/table_0205_normalised\ntable/train/table_0047_normalised\ntable/train/table_0029_normalised\ntable/train/table_0291_normalised\ntable/train/table_0299_normalised\ntable/train/table_0305_normalised\ntable/train/table_0145_normalised\ntable/train/table_0188_normalised\ntable/train/table_0213_normalised\ntable/train/table_0189_normalised\ntable/train/table_0101_normalised\ntable/train/table_0304_normalised\ntable/train/table_0165_normalised\ntable/train/table_0098_normalised\ntable/train/table_0061_normalised\ntable/train/table_0227_normalised\ntable/train/table_0330_normalised\ntable/train/table_0032_normalised\ntable/train/table_0063_normalised\ntable/train/table_0148_normalised\ntable/train/table_0358_normalised\ntable/train/table_0211_normalised\ntable/train/table_0174_normalised\ntable/train/table_0007_normalised\ntable/train/table_0303_normalised\ntable/train/table_0200_normalised\ntable/train/table_0346_normalised\ntable/train/table_0351_normalised\ntable/train/table_0377_normalised\ntable/train/table_0320_normalised\ntable/train/table_0340_normalised\ntable/train/table_0161_normalised\ntable/train/table_0178_normalised\ntable/train/table_0275_normalised\ntable/train/table_0337_normalised\ntable/train/table_0008_normalised\ntable/train/table_0045_normalised\ntable/train/table_0325_normalised\ntable/train/table_0196_normalised\ntable/train/table_0160_normalised\ntable/train/table_0173_normalised\ntable/train/table_0135_normalised\ntable/train/table_0017_normalised\ntable/train/table_0292_normalised\ntable/train/table_0039_normalised\ntable/train/table_031
8_normalised\ntable/train/table_0077_normalised\ntable/train/table_0248_normalised\ntable/train/table_0225_normalised\ntable/train/table_0319_normalised\ntable/train/table_0372_normalised\ntable/train/table_0154_normalised\ntable/train/table_0106_normalised\ntable/train/table_0048_normalised\ntable/train/table_0288_normalised\ntable/train/table_0186_normalised\ntable/train/table_0057_normalised\ntable/train/table_0355_normalised\ntable/train/table_0363_normalised\ntable/train/table_0164_normalised\ntable/train/table_0104_normalised\ntable/train/table_0353_normalised\ntable/train/table_0347_normalised\ntable/train/table_0198_normalised\ntable/train/table_0193_normalised\ntable/train/table_0348_normalised\ntable/train/table_0210_normalised\ntable/train/table_0023_normalised\ntable/train/table_0006_normalised\ntable/train/table_0043_normalised\ntable/train/table_0081_normalised\ntable/train/table_0038_normalised\ntable/train/table_0090_normalised\ntable/train/table_0242_normalised\ntable/train/table_0172_normalised\ntable/train/table_0146_normalised\ntable/train/table_0084_normalised\ntable/train/table_0167_normalised\ntable/train/table_0040_normalised\ntable/train/table_0309_normalised\ntable/train/table_0253_normalised\ntable/train/table_0149_normalised\ntable/train/table_0392_normalised\ntable/train/table_0350_normalised\ntable/train/table_0311_normalised\ntable/train/table_0279_normalised\ntable/train/table_0001_normalised\ntable/train/table_0138_normalised\ntable/train/table_0120_normalised\ntable/train/table_0314_normalised\ntable/train/table_0020_normalised\ntable/train/table_0067_normalised\ntable/train/table_0088_normalised\ntable/train/table_0241_normalised\ntable/train/table_0141_normalised\ntable/train/table_0266_normalised\ntable/train/table_0147_normalised\ntable/train/table_0197_normalised\ntable/train/table_0117_normalised\ntable/train/table_0371_normalised\ntable/train/table_0321_normalised\ntable/train/table_0287_normalised\ntable/train/table_0132_nor
malised\ntable/train/table_0181_normalised\ntable/train/table_0096_normalised\ntable/train/table_0239_normalised\ntable/train/table_0071_normalised\ntable/train/table_0335_normalised\ntable/train/table_0058_normalised\ntable/train/table_0278_normalised\ntable/train/table_0223_normalised\ntable/train/table_0361_normalised\ntable/train/table_0270_normalised\ntable/train/table_0065_normalised\ntable/train/table_0367_normalised\ntable/train/table_0062_normalised\ntable/train/table_0144_normalised\ntable/train/table_0365_normalised\ntable/train/table_0212_normalised\ntable/train/table_0252_normalised\ntable/train/table_0280_normalised\ntable/train/table_0054_normalised\ntable/train/table_0201_normalised\ntable/train/table_0115_normalised\ntable/train/table_0284_normalised\ntable/train/table_0087_normalised\ntable/train/table_0328_normalised\ntable/train/table_0195_normalised\ntable/train/table_0206_normalised\ntable/train/table_0312_normalised\ntable/train/table_0235_normalised\ntable/train/table_0375_normalised\ntable/train/table_0093_normalised\ntable/train/table_0187_normalised\ntable/test/table_0487_normalised\ntable/test/table_0476_normalised\ntable/test/table_0471_normalised\ntable/test/table_0434_normalised\ntable/test/table_0459_normalised\ntable/test/table_0449_normalised\ntable/test/table_0477_normalised\ntable/test/table_0443_normalised\ntable/test/table_0492_normalised\ntable/test/table_0437_normalised\ntable/test/table_0422_normalised\ntable/test/table_0468_normalised\ntable/test/table_0483_normalised\ntable/test/table_0441_normalised\ntable/test/table_0438_normalised\ntable/test/table_0419_normalised\ntable/test/table_0467_normalised\ntable/test/table_0474_normalised\ntable/test/table_0399_normalised\ntable/test/table_0420_normalised\ntable/test/table_0489_normalised\ntable/test/table_0396_normalised\ntable/test/table_0430_normalised\ntable/test/table_0446_normalised\ntable/test/table_0415_normalised\ntable/test/table_0488_normalised\ntable/test/table_0393_
normalised\ntable/test/table_0421_normalised\ntable/test/table_0416_normalised\ntable/test/table_0448_normalised\ntable/test/table_0482_normalised\ntable/test/table_0394_normalised\ntable/test/table_0465_normalised\ntable/test/table_0417_normalised\ntable/test/table_0432_normalised\ntable/test/table_0484_normalised\ntable/test/table_0479_normalised\ntable/test/table_0455_normalised\ntable/test/table_0480_normalised\ntable/test/table_0463_normalised\ntable/test/table_0408_normalised\ntable/test/table_0426_normalised\ntable/test/table_0444_normalised\ntable/test/table_0466_normalised\ntable/test/table_0411_normalised\ntable/test/table_0460_normalised\ntable/test/table_0407_normalised\ntable/test/table_0404_normalised\ntable/test/table_0486_normalised\ntable/test/table_0427_normalised\ntable/test/table_0406_normalised\ntable/test/table_0464_normalised\ntable/test/table_0447_normalised\ntable/test/table_0429_normalised\ntable/test/table_0414_normalised\ntable/test/table_0451_normalised\ntable/test/table_0461_normalised\ntable/test/table_0481_normalised\ntable/test/table_0398_normalised\ntable/test/table_0439_normalised\ntable/test/table_0412_normalised\ntable/test/table_0431_normalised\ntable/test/table_0395_normalised\ntable/test/table_0445_normalised\ntable/test/table_0440_normalised\ntable/test/table_0454_normalised\ntable/test/table_0433_normalised\ntable/test/table_0453_normalised\ntable/test/table_0462_normalised\ntable/test/table_0470_normalised\ntable/test/table_0402_normalised\ntable/test/table_0452_normalised\ntable/test/table_0473_normalised\ntable/test/table_0428_normalised\ntable/test/table_0490_normalised\ntable/test/table_0436_normalised\ntable/test/table_0405_normalised\ntable/test/table_0423_normalised\ntable/test/table_0400_normalised\ntable/test/table_0424_normalised\ntable/test/table_0435_normalised\ntable/test/table_0485_normalised\ntable/test/table_0472_normalised\ntable/test/table_0491_normalised\ntable/test/table_0413_normalised\ntable/test/table
_0457_normalised\ntable/test/table_0475_normalised\ntable/test/table_0458_normalised\ntable/test/table_0397_normalised\ntable/test/table_0401_normalised\ntable/test/table_0478_normalised\ntable/test/table_0410_normalised\ntable/test/table_0469_normalised\ntable/test/table_0442_normalised\ntable/test/table_0450_normalised\ntable/test/table_0409_normalised\ntable/test/table_0403_normalised\ntable/test/table_0425_normalised\ntable/test/table_0418_normalised\ntable/test/table_0456_normalised\ndoor/train/door_0091_normalised\ndoor/train/door_0074_normalised\ndoor/train/door_0056_normalised\ndoor/train/door_0102_normalised\ndoor/train/door_0012_normalised\ndoor/train/door_0079_normalised\ndoor/train/door_0023_normalised\ndoor/train/door_0032_normalised\ndoor/train/door_0059_normalised\ndoor/train/door_0064_normalised\ndoor/train/door_0003_normalised\ndoor/train/door_0109_normalised\ndoor/train/door_0036_normalised\ndoor/train/door_0068_normalised\ndoor/train/door_0019_normalised\ndoor/train/door_0037_normalised\ndoor/train/door_0047_normalised\ndoor/train/door_0100_normalised\ndoor/train/door_0062_normalised\ndoor/train/door_0098_normalised\ndoor/train/door_0106_normalised\ndoor/train/door_0055_normalised\ndoor/train/door_0014_normalised\ndoor/train/door_0051_normalised\ndoor/train/door_0021_normalised\ndoor/train/door_0029_normalised\ndoor/train/door_0025_normalised\ndoor/train/door_0066_normalised\ndoor/train/door_0085_normalised\ndoor/train/door_0052_normalised\ndoor/train/door_0015_normalised\ndoor/train/door_0050_normalised\ndoor/train/door_0080_normalised\ndoor/train/door_0099_normalised\ndoor/train/door_0078_normalised\ndoor/train/door_0013_normalised\ndoor/train/door_0087_normalised\ndoor/train/door_0028_normalised\ndoor/train/door_0081_normalised\ndoor/train/door_0006_normalised\ndoor/train/door_0016_normalised\ndoor/train/door_0076_normalised\ndoor/train/door_0017_normalised\ndoor/train/door_0065_normalised\ndoor/train/door_0020_normalised\ndoor/train/door_0070_
normalised\ndoor/train/door_0095_normalised\ndoor/train/door_0018_normalised\ndoor/train/door_0105_normalised\ndoor/train/door_0008_normalised\ndoor/train/door_0043_normalised\ndoor/train/door_0088_normalised\ndoor/train/door_0007_normalised\ndoor/train/door_0046_normalised\ndoor/train/door_0061_normalised\ndoor/train/door_0101_normalised\ndoor/train/door_0041_normalised\ndoor/train/door_0075_normalised\ndoor/train/door_0083_normalised\ndoor/train/door_0104_normalised\ndoor/train/door_0094_normalised\ndoor/train/door_0108_normalised\ndoor/train/door_0084_normalised\ndoor/train/door_0001_normalised\ndoor/train/door_0071_normalised\ndoor/train/door_0053_normalised\ndoor/train/door_0033_normalised\ndoor/train/door_0034_normalised\ndoor/train/door_0027_normalised\ndoor/train/door_0093_normalised\ndoor/train/door_0063_normalised\ndoor/train/door_0058_normalised\ndoor/train/door_0038_normalised\ndoor/train/door_0039_normalised\ndoor/train/door_0103_normalised\ndoor/train/door_0089_normalised\ndoor/train/door_0026_normalised\ndoor/train/door_0067_normalised\ndoor/train/door_0096_normalised\ndoor/train/door_0004_normalised\ndoor/train/door_0022_normalised\ndoor/train/door_0044_normalised\ndoor/train/door_0040_normalised\ndoor/train/door_0024_normalised\ndoor/train/door_0090_normalised\ndoor/train/door_0011_normalised\ndoor/train/door_0077_normalised\ndoor/train/door_0086_normalised\ndoor/train/door_0045_normalised\ndoor/train/door_0097_normalised\ndoor/train/door_0107_normalised\ndoor/train/door_0082_normalised\ndoor/train/door_0048_normalised\ndoor/train/door_0005_normalised\ndoor/train/door_0009_normalised\ndoor/train/door_0002_normalised\ndoor/train/door_0057_normalised\ndoor/train/door_0054_normalised\ndoor/train/door_0049_normalised\ndoor/train/door_0031_normalised\ndoor/train/door_0092_normalised\ndoor/train/door_0042_normalised\ndoor/train/door_0035_normalised\ndoor/train/door_0069_normalised\ndoor/train/door_0060_normalised\ndoor/train/door_0010_normalised\ndoor/tra
in/door_0072_normalised\ndoor/train/door_0073_normalised\ndoor/train/door_0030_normalised\ndoor/test/door_0118_normalised\ndoor/test/door_0127_normalised\ndoor/test/door_0126_normalised\ndoor/test/door_0120_normalised\ndoor/test/door_0113_normalised\ndoor/test/door_0123_normalised\ndoor/test/door_0121_normalised\ndoor/test/door_0128_normalised\ndoor/test/door_0115_normalised\ndoor/test/door_0114_normalised\ndoor/test/door_0122_normalised\ndoor/test/door_0111_normalised\ndoor/test/door_0112_normalised\ndoor/test/door_0117_normalised\ndoor/test/door_0129_normalised\ndoor/test/door_0110_normalised\ndoor/test/door_0124_normalised\ndoor/test/door_0116_normalised\ndoor/test/door_0119_normalised\ndoor/test/door_0125_normalised\nsink/train/sink_0123_normalised\nsink/train/sink_0069_normalised\nsink/train/sink_0067_normalised\nsink/train/sink_0066_normalised\nsink/train/sink_0120_normalised\nsink/train/sink_0092_normalised\nsink/train/sink_0048_normalised\nsink/train/sink_0023_normalised\nsink/train/sink_0094_normalised\nsink/train/sink_0002_normalised\nsink/train/sink_0103_normalised\nsink/train/sink_0118_normalised\nsink/train/sink_0059_normalised\nsink/train/sink_0122_normalised\nsink/train/sink_0029_normalised\nsink/train/sink_0003_normalised\nsink/train/sink_0009_normalised\nsink/train/sink_0040_normalised\nsink/train/sink_0056_normalised\nsink/train/sink_0017_normalised\nsink/train/sink_0076_normalised\nsink/train/sink_0098_normalised\nsink/train/sink_0038_normalised\nsink/train/sink_0093_normalised\nsink/train/sink_0063_normalised\nsink/train/sink_0062_normalised\nsink/train/sink_0045_normalised\nsink/train/sink_0099_normalised\nsink/train/sink_0078_normalised\nsink/train/sink_0102_normalised\nsink/train/sink_0020_normalised\nsink/train/sink_0112_normalised\nsink/train/sink_0026_normalised\nsink/train/sink_0064_normalised\nsink/train/sink_0001_normalised\nsink/train/sink_0024_normalised\nsink/train/sink_0071_normalised\nsink/train/sink_0007_normalised\nsink/train/sink
_0049_normalised\nsink/train/sink_0060_normalised\nsink/train/sink_0041_normalised\nsink/train/sink_0008_normalised\nsink/train/sink_0019_normalised\nsink/train/sink_0035_normalised\nsink/train/sink_0033_normalised\nsink/train/sink_0014_normalised\nsink/train/sink_0039_normalised\nsink/train/sink_0013_normalised\nsink/train/sink_0113_normalised\nsink/train/sink_0051_normalised\nsink/train/sink_0104_normalised\nsink/train/sink_0089_normalised\nsink/train/sink_0101_normalised\nsink/train/sink_0090_normalised\nsink/train/sink_0125_normalised\nsink/train/sink_0107_normalised\nsink/train/sink_0083_normalised\nsink/train/sink_0119_normalised\nsink/train/sink_0096_normalised\nsink/train/sink_0055_normalised\nsink/train/sink_0121_normalised\nsink/train/sink_0097_normalised\nsink/train/sink_0085_normalised\nsink/train/sink_0005_normalised\nsink/train/sink_0022_normalised\nsink/train/sink_0079_normalised\nsink/train/sink_0070_normalised\nsink/train/sink_0047_normalised\nsink/train/sink_0031_normalised\nsink/train/sink_0010_normalised\nsink/train/sink_0015_normalised\nsink/train/sink_0106_normalised\nsink/train/sink_0117_normalised\nsink/train/sink_0028_normalised\nsink/train/sink_0065_normalised\nsink/train/sink_0128_normalised\nsink/train/sink_0077_normalised\nsink/train/sink_0036_normalised\nsink/train/sink_0086_normalised\nsink/train/sink_0072_normalised\nsink/train/sink_0124_normalised\nsink/train/sink_0084_normalised\nsink/train/sink_0030_normalised\nsink/train/sink_0091_normalised\nsink/train/sink_0114_normalised\nsink/train/sink_0074_normalised\nsink/train/sink_0046_normalised\nsink/train/sink_0087_normalised\nsink/train/sink_0012_normalised\nsink/train/sink_0068_normalised\nsink/train/sink_0016_normalised\nsink/train/sink_0050_normalised\nsink/train/sink_0006_normalised\nsink/train/sink_0054_normalised\nsink/train/sink_0105_normalised\nsink/train/sink_0111_normalised\nsink/train/sink_0053_normalised\nsink/train/sink_0115_normalised\nsink/train/sink_0075_normalised\nsi
nk/train/sink_0032_normalised\nsink/train/sink_0021_normalised\nsink/train/sink_0058_normalised\nsink/train/sink_0127_normalised\nsink/train/sink_0037_normalised\nsink/train/sink_0057_normalised\nsink/train/sink_0043_normalised\nsink/train/sink_0126_normalised\nsink/train/sink_0088_normalised\nsink/train/sink_0110_normalised\nsink/train/sink_0095_normalised\nsink/train/sink_0080_normalised\nsink/train/sink_0061_normalised\nsink/train/sink_0100_normalised\nsink/train/sink_0109_normalised\nsink/train/sink_0108_normalised\nsink/train/sink_0025_normalised\nsink/train/sink_0004_normalised\nsink/train/sink_0011_normalised\nsink/train/sink_0044_normalised\nsink/train/sink_0042_normalised\nsink/train/sink_0027_normalised\nsink/train/sink_0116_normalised\nsink/train/sink_0052_normalised\nsink/train/sink_0034_normalised\nsink/train/sink_0073_normalised\nsink/train/sink_0081_normalised\nsink/train/sink_0018_normalised\nsink/train/sink_0082_normalised\nsink/test/sink_0147_normalised\nsink/test/sink_0145_normalised\nsink/test/sink_0138_normalised\nsink/test/sink_0141_normalised\nsink/test/sink_0143_normalised\nsink/test/sink_0142_normalised\nsink/test/sink_0135_normalised\nsink/test/sink_0148_normalised\nsink/test/sink_0132_normalised\nsink/test/sink_0137_normalised\nsink/test/sink_0139_normalised\nsink/test/sink_0131_normalised\nsink/test/sink_0129_normalised\nsink/test/sink_0144_normalised\nsink/test/sink_0134_normalised\nsink/test/sink_0136_normalised\nsink/test/sink_0146_normalised\nsink/test/sink_0130_normalised\nsink/test/sink_0133_normalised\nsink/test/sink_0140_normalised\ncar/train/car_0076_normalised\ncar/train/car_0069_normalised\ncar/train/car_0140_normalised\ncar/train/car_0132_normalised\ncar/train/car_0033_normalised\ncar/train/car_0094_normalised\ncar/train/car_0099_normalised\ncar/train/car_0191_normalised\ncar/train/car_0117_normalised\ncar/train/car_0034_normalised\ncar/train/car_0122_normalised\ncar/train/car_0045_normalised\ncar/train/car_0022_normalised\nca
r/train/car_0058_normalised\ncar/train/car_0181_normalised\ncar/train/car_0164_normalised\ncar/train/car_0037_normalised\ncar/train/car_0060_normalised\ncar/train/car_0116_normalised\ncar/train/car_0068_normalised\ncar/train/car_0012_normalised\ncar/train/car_0088_normalised\ncar/train/car_0137_normalised\ncar/train/car_0196_normalised\ncar/train/car_0013_normalised\ncar/train/car_0010_normalised\ncar/train/car_0129_normalised\ncar/train/car_0080_normalised\ncar/train/car_0001_normalised\ncar/train/car_0153_normalised\ncar/train/car_0113_normalised\ncar/train/car_0028_normalised\ncar/train/car_0084_normalised\ncar/train/car_0158_normalised\ncar/train/car_0110_normalised\ncar/train/car_0051_normalised\ncar/train/car_0149_normalised\ncar/train/car_0077_normalised\ncar/train/car_0097_normalised\ncar/train/car_0050_normalised\ncar/train/car_0102_normalised\ncar/train/car_0124_normalised\ncar/train/car_0105_normalised\ncar/train/car_0166_normalised\ncar/train/car_0019_normalised\ncar/train/car_0123_normalised\ncar/train/car_0165_normalised\ncar/train/car_0091_normalised\ncar/train/car_0154_normalised\ncar/train/car_0145_normalised\ncar/train/car_0152_normalised\ncar/train/car_0187_normalised\ncar/train/car_0133_normalised\ncar/train/car_0176_normalised\ncar/train/car_0173_normalised\ncar/train/car_0115_normalised\ncar/train/car_0017_normalised\ncar/train/car_0189_normalised\ncar/train/car_0042_normalised\ncar/train/car_0139_normalised\ncar/train/car_0066_normalised\ncar/train/car_0182_normalised\ncar/train/car_0190_normalised\ncar/train/car_0086_normalised\ncar/train/car_0043_normalised\ncar/train/car_0141_normalised\ncar/train/car_0138_normalised\ncar/train/car_0036_normalised\ncar/train/car_0135_normalised\ncar/train/car_0089_normalised\ncar/train/car_0160_normalised\ncar/train/car_0159_normalised\ncar/train/car_0194_normalised\ncar/train/car_0188_normalised\ncar/train/car_0039_normalised\ncar/train/car_0067_normalised\ncar/train/car_0179_normalised\ncar/train/car_0057
_normalised\ncar/train/car_0040_normalised\ncar/train/car_0192_normalised\ncar/train/car_0061_normalised\ncar/train/car_0035_normalised\ncar/train/car_0169_normalised\ncar/train/car_0063_normalised\ncar/train/car_0163_normalised\ncar/train/car_0156_normalised\ncar/train/car_0171_normalised\ncar/train/car_0082_normalised\ncar/train/car_0093_normalised\ncar/train/car_0120_normalised\ncar/train/car_0087_normalised\ncar/train/car_0044_normalised\ncar/train/car_0026_normalised\ncar/train/car_0119_normalised\ncar/train/car_0178_normalised\ncar/train/car_0128_normalised\ncar/train/car_0193_normalised\ncar/train/car_0056_normalised\ncar/train/car_0162_normalised\ncar/train/car_0146_normalised\ncar/train/car_0168_normalised\ncar/train/car_0020_normalised\ncar/train/car_0070_normalised\ncar/train/car_0073_normalised\ncar/train/car_0024_normalised\ncar/train/car_0114_normalised\ncar/train/car_0142_normalised\ncar/train/car_0007_normalised\ncar/train/car_0046_normalised\ncar/train/car_0111_normalised\ncar/train/car_0130_normalised\ncar/train/car_0100_normalised\ncar/train/car_0150_normalised\ncar/train/car_0072_normalised\ncar/train/car_0112_normalised\ncar/train/car_0003_normalised\ncar/train/car_0006_normalised\ncar/train/car_0195_normalised\ncar/train/car_0055_normalised\ncar/train/car_0186_normalised\ncar/train/car_0108_normalised\ncar/train/car_0155_normalised\ncar/train/car_0126_normalised\ncar/train/car_0021_normalised\ncar/train/car_0032_normalised\ncar/train/car_0157_normalised\ncar/train/car_0098_normalised\ncar/train/car_0104_normalised\ncar/train/car_0075_normalised\ncar/train/car_0004_normalised\ncar/train/car_0136_normalised\ncar/train/car_0177_normalised\ncar/train/car_0151_normalised\ncar/train/car_0078_normalised\ncar/train/car_0049_normalised\ncar/train/car_0125_normalised\ncar/train/car_0197_normalised\ncar/train/car_0071_normalised\ncar/train/car_0059_normalised\ncar/train/car_0062_normalised\ncar/train/car_0118_normalised\ncar/train/car_0095_normalised\ncar
/train/car_0183_normalised\ncar/train/car_0134_normalised\ncar/train/car_0018_normalised\ncar/train/car_0016_normalised\ncar/train/car_0053_normalised\ncar/train/car_0096_normalised\ncar/train/car_0131_normalised\ncar/train/car_0170_normalised\ncar/train/car_0106_normalised\ncar/train/car_0064_normalised\ncar/train/car_0011_normalised\ncar/train/car_0009_normalised\ncar/train/car_0002_normalised\ncar/train/car_0143_normalised\ncar/train/car_0175_normalised\ncar/train/car_0174_normalised\ncar/train/car_0005_normalised\ncar/train/car_0109_normalised\ncar/train/car_0008_normalised\ncar/train/car_0144_normalised\ncar/train/car_0083_normalised\ncar/train/car_0014_normalised\ncar/train/car_0180_normalised\ncar/train/car_0081_normalised\ncar/train/car_0127_normalised\ncar/train/car_0092_normalised\ncar/train/car_0147_normalised\ncar/train/car_0172_normalised\ncar/train/car_0029_normalised\ncar/train/car_0185_normalised\ncar/train/car_0052_normalised\ncar/train/car_0025_normalised\ncar/train/car_0079_normalised\ncar/train/car_0107_normalised\ncar/train/car_0090_normalised\ncar/train/car_0038_normalised\ncar/train/car_0031_normalised\ncar/train/car_0065_normalised\ncar/train/car_0023_normalised\ncar/train/car_0167_normalised\ncar/train/car_0030_normalised\ncar/train/car_0103_normalised\ncar/train/car_0041_normalised\ncar/train/car_0101_normalised\ncar/train/car_0085_normalised\ncar/train/car_0148_normalised\ncar/train/car_0047_normalised\ncar/train/car_0054_normalised\ncar/train/car_0184_normalised\ncar/train/car_0015_normalised\ncar/train/car_0027_normalised\ncar/train/car_0161_normalised\ncar/train/car_0048_normalised\ncar/train/car_0121_normalised\ncar/train/car_0074_normalised\ncar/test/car_0269_normalised\ncar/test/car_0207_normalised\ncar/test/car_0243_normalised\ncar/test/car_0232_normalised\ncar/test/car_0231_normalised\ncar/test/car_0266_normalised\ncar/test/car_0270_normalised\ncar/test/car_0295_normalised\ncar/test/car_0200_normalised\ncar/test/car_0285_normalised
\ncar/test/car_0248_normalised\ncar/test/car_0249_normalised\ncar/test/car_0225_normalised\ncar/test/car_0224_normalised\ncar/test/car_0283_normalised\ncar/test/car_0241_normalised\ncar/test/car_0260_normalised\ncar/test/car_0234_normalised\ncar/test/car_0219_normalised\ncar/test/car_0272_normalised\ncar/test/car_0263_normalised\ncar/test/car_0282_normalised\ncar/test/car_0216_normalised\ncar/test/car_0256_normalised\ncar/test/car_0281_normalised\ncar/test/car_0247_normalised\ncar/test/car_0250_normalised\ncar/test/car_0228_normalised\ncar/test/car_0218_normalised\ncar/test/car_0252_normalised\ncar/test/car_0259_normalised\ncar/test/car_0267_normalised\ncar/test/car_0235_normalised\ncar/test/car_0291_normalised\ncar/test/car_0239_normalised\ncar/test/car_0233_normalised\ncar/test/car_0222_normalised\ncar/test/car_0206_normalised\ncar/test/car_0230_normalised\ncar/test/car_0253_normalised\ncar/test/car_0268_normalised\ncar/test/car_0278_normalised\ncar/test/car_0258_normalised\ncar/test/car_0226_normalised\ncar/test/car_0290_normalised\ncar/test/car_0276_normalised\ncar/test/car_0203_normalised\ncar/test/car_0229_normalised\ncar/test/car_0257_normalised\ncar/test/car_0213_normalised\ncar/test/car_0205_normalised\ncar/test/car_0210_normalised\ncar/test/car_0292_normalised\ncar/test/car_0215_normalised\ncar/test/car_0261_normalised\ncar/test/car_0221_normalised\ncar/test/car_0262_normalised\ncar/test/car_0297_normalised\ncar/test/car_0240_normalised\ncar/test/car_0204_normalised\ncar/test/car_0286_normalised\ncar/test/car_0296_normalised\ncar/test/car_0264_normalised\ncar/test/car_0244_normalised\ncar/test/car_0198_normalised\ncar/test/car_0199_normalised\ncar/test/car_0254_normalised\ncar/test/car_0273_normalised\ncar/test/car_0271_normalised\ncar/test/car_0211_normalised\ncar/test/car_0201_normalised\ncar/test/car_0242_normalised\ncar/test/car_0246_normalised\ncar/test/car_0227_normalised\ncar/test/car_0208_normalised\ncar/test/car_0220_normalised\ncar/test/car_0202_
normalised\ncar/test/car_0289_normalised\ncar/test/car_0214_normalised\ncar/test/car_0209_normalised\ncar/test/car_0288_normalised\ncar/test/car_0251_normalised\ncar/test/car_0217_normalised\ncar/test/car_0294_normalised\ncar/test/car_0255_normalised\ncar/test/car_0245_normalised\ncar/test/car_0223_normalised\ncar/test/car_0238_normalised\ncar/test/car_0279_normalised\ncar/test/car_0237_normalised\ncar/test/car_0236_normalised\ncar/test/car_0265_normalised\ncar/test/car_0284_normalised\ncar/test/car_0274_normalised\ncar/test/car_0277_normalised\ncar/test/car_0280_normalised\ncar/test/car_0287_normalised\ncar/test/car_0275_normalised\ncar/test/car_0212_normalised\ncar/test/car_0293_normalised\ncup/train/cup_0075_normalised\ncup/train/cup_0033_normalised\ncup/train/cup_0044_normalised\ncup/train/cup_0060_normalised\ncup/train/cup_0009_normalised\ncup/train/cup_0008_normalised\ncup/train/cup_0079_normalised\ncup/train/cup_0051_normalised\ncup/train/cup_0029_normalised\ncup/train/cup_0045_normalised\ncup/train/cup_0052_normalised\ncup/train/cup_0066_normalised\ncup/train/cup_0006_normalised\ncup/train/cup_0028_normalised\ncup/train/cup_0022_normalised\ncup/train/cup_0021_normalised\ncup/train/cup_0034_normalised\ncup/train/cup_0037_normalised\ncup/train/cup_0077_normalised\ncup/train/cup_0043_normalised\ncup/train/cup_0030_normalised\ncup/train/cup_0035_normalised\ncup/train/cup_0036_normalised\ncup/train/cup_0076_normalised\ncup/train/cup_0001_normalised\ncup/train/cup_0003_normalised\ncup/train/cup_0049_normalised\ncup/train/cup_0073_normalised\ncup/train/cup_0017_normalised\ncup/train/cup_0070_normalised\ncup/train/cup_0072_normalised\ncup/train/cup_0032_normalised\ncup/train/cup_0007_normalised\ncup/train/cup_0038_normalised\ncup/train/cup_0015_normalised\ncup/train/cup_0064_normalised\ncup/train/cup_0039_normalised\ncup/train/cup_0071_normalised\ncup/train/cup_0025_normalised\ncup/train/cup_0014_normalised\ncup/train/cup_0012_normalised\ncup/train/cup_0042_normalis
ed\ncup/train/cup_0040_normalised\ncup/train/cup_0062_normalised\ncup/train/cup_0004_normalised\ncup/train/cup_0048_normalised\ncup/train/cup_0050_normalised\ncup/train/cup_0074_normalised\ncup/train/cup_0019_normalised\ncup/train/cup_0023_normalised\ncup/train/cup_0061_normalised\ncup/train/cup_0068_normalised\ncup/train/cup_0069_normalised\ncup/train/cup_0056_normalised\ncup/train/cup_0002_normalised\ncup/train/cup_0005_normalised\ncup/train/cup_0031_normalised\ncup/train/cup_0020_normalised\ncup/train/cup_0013_normalised\ncup/train/cup_0041_normalised\ncup/train/cup_0018_normalised\ncup/train/cup_0046_normalised\ncup/train/cup_0063_normalised\ncup/train/cup_0016_normalised\ncup/train/cup_0010_normalised\ncup/train/cup_0011_normalised\ncup/train/cup_0055_normalised\ncup/train/cup_0065_normalised\ncup/train/cup_0024_normalised\ncup/train/cup_0057_normalised\ncup/train/cup_0027_normalised\ncup/train/cup_0058_normalised\ncup/train/cup_0026_normalised\ncup/train/cup_0078_normalised\ncup/train/cup_0054_normalised\ncup/train/cup_0053_normalised\ncup/train/cup_0067_normalised\ncup/train/cup_0059_normalised\ncup/train/cup_0047_normalised\ncup/test/cup_0083_normalised\ncup/test/cup_0097_normalised\ncup/test/cup_0098_normalised\ncup/test/cup_0091_normalised\ncup/test/cup_0080_normalised\ncup/test/cup_0090_normalised\ncup/test/cup_0095_normalised\ncup/test/cup_0088_normalised\ncup/test/cup_0094_normalised\ncup/test/cup_0084_normalised\ncup/test/cup_0087_normalised\ncup/test/cup_0092_normalised\ncup/test/cup_0099_normalised\ncup/test/cup_0082_normalised\ncup/test/cup_0086_normalised\ncup/test/cup_0089_normalised\ncup/test/cup_0096_normalised\ncup/test/cup_0085_normalised\ncup/test/cup_0093_normalised\ncup/test/cup_0081_normalised\nairplane/train/airplane_0486_normalised\nairplane/train/airplane_0374_normalised\nairplane/train/airplane_0316_normalised\nairplane/train/airplane_0537_normalised\nairplane/train/airplane_0284_normalised\nairplane/train/airplane_0609_normalised\nair
plane/train/airplane_0086_normalised\nairplane/train/airplane_0554_normalised\nairplane/train/airplane_0307_normalised\nairplane/train/airplane_0015_normalised\nairplane/train/airplane_0567_normalised\nairplane/train/airplane_0082_normalised\nairplane/train/airplane_0150_normalised\nairplane/train/airplane_0415_normalised\nairplane/train/airplane_0289_normalised\nairplane/train/airplane_0594_normalised\nairplane/train/airplane_0579_normalised\nairplane/train/airplane_0279_normalised\nairplane/train/airplane_0060_normalised\nairplane/train/airplane_0499_normalised\nairplane/train/airplane_0165_normalised\nairplane/train/airplane_0555_normalised\nairplane/train/airplane_0389_normalised\nairplane/train/airplane_0049_normalised\nairplane/train/airplane_0067_normalised\nairplane/train/airplane_0286_normalised\nairplane/train/airplane_0238_normalised\nairplane/train/airplane_0035_normalised\nairplane/train/airplane_0129_normalised\nairplane/train/airplane_0128_normalised\nairplane/train/airplane_0033_normalised\nairplane/train/airplane_0283_normalised\nairplane/train/airplane_0355_normalised\nairplane/train/airplane_0502_normalised\nairplane/train/airplane_0148_normalised\nairplane/train/airplane_0158_normalised\nairplane/train/airplane_0477_normalised\nairplane/train/airplane_0130_normalised\nairplane/train/airplane_0169_normalised\nairplane/train/airplane_0300_normalised\nairplane/train/airplane_0242_normalised\nairplane/train/airplane_0348_normalised\nairplane/train/airplane_0222_normalised\nairplane/train/airplane_0253_normalised\nairplane/train/airplane_0402_normalised\nairplane/train/airplane_0589_normalised\nairplane/train/airplane_0187_normalised\nairplane/train/airplane_0014_normalised\nairplane/train/airplane_0503_normalised\nairplane/train/airplane_0351_normalised\nairplane/train/airplane_0443_normalised\nairplane/train/airplane_0505_normalised\nairplane/train/airplane_0020_normalised\nairplane/train/airplane_0543_normalised\nairplane/train/airplane_0101_normal
ised\nairplane/train/airplane_0298_normalised\nairplane/train/airplane_0041_normalised\nairplane/train/airplane_0133_normalised\nairplane/train/airplane_0516_normalised\nairplane/train/airplane_0079_normalised\nairplane/train/airplane_0484_normalised\nairplane/train/airplane_0444_normalised\nairplane/train/airplane_0264_normalised\nairplane/train/airplane_0353_normalised\nairplane/train/airplane_0310_normalised\nairplane/train/airplane_0291_normalised\nairplane/train/airplane_0449_normalised\nairplane/train/airplane_0439_normalised\nairplane/train/airplane_0448_normalised\nairplane/train/airplane_0593_normalised\nairplane/train/airplane_0229_normalised\nairplane/train/airplane_0483_normalised\nairplane/train/airplane_0110_normalised\nairplane/train/airplane_0456_normalised\nairplane/train/airplane_0492_normalised\nairplane/train/airplane_0285_normalised\nairplane/train/airplane_0622_normalised\nairplane/train/airplane_0474_normalised\nairplane/train/airplane_0387_normalised\nairplane/train/airplane_0200_normalised\nairplane/train/airplane_0277_normalised\nairplane/train/airplane_0297_normalised\nairplane/train/airplane_0190_normalised\nairplane/train/airplane_0199_normalised\nairplane/train/airplane_0454_normalised\nairplane/train/airplane_0495_normalised\nairplane/train/airplane_0215_normalised\nairplane/train/airplane_0604_normalised\nairplane/train/airplane_0100_normalised\nairplane/train/airplane_0162_normalised\nairplane/train/airplane_0152_normalised\nairplane/train/airplane_0026_normalised\nairplane/train/airplane_0626_normalised\nairplane/train/airplane_0466_normalised\nairplane/train/airplane_0207_normalised\nairplane/train/airplane_0252_normalised\nairplane/train/airplane_0008_normalised\nairplane/train/airplane_0075_normalised\nairplane/train/airplane_0544_normalised\nairplane/train/airplane_0420_normalised\nairplane/train/airplane_0102_normalised\nairplane/train/airplane_0388_normalised\nairplane/train/airplane_0142_normalised\nairplane/train/airplane_04
08_normalised\nairplane/train/airplane_0401_normalised\nairplane/train/airplane_0417_normalised\nairplane/train/airplane_0216_normalised\nairplane/train/airplane_0381_normalised\nairplane/train/airplane_0550_normalised\nairplane/train/airplane_0112_normalised\nairplane/train/airplane_0360_normalised\nairplane/train/airplane_0053_normalised\nairplane/train/airplane_0571_normalised\nairplane/train/airplane_0313_normalised\nairplane/train/airplane_0205_normalised\nairplane/train/airplane_0214_normalised\nairplane/train/airplane_0052_normalised\nairplane/train/airplane_0168_normalised\nairplane/train/airplane_0188_normalised\nairplane/train/airplane_0421_normalised\nairplane/train/airplane_0383_normalised\nairplane/train/airplane_0469_normalised\nairplane/train/airplane_0156_normalised\nairplane/train/airplane_0009_normalised\nairplane/train/airplane_0467_normalised\nairplane/train/airplane_0329_normalised\nairplane/train/airplane_0559_normalised\nairplane/train/airplane_0221_normalised\nairplane/train/airplane_0029_normalised\nairplane/train/airplane_0451_normalised\nairplane/train/airplane_0465_normalised\nairplane/train/airplane_0294_normalised\nairplane/train/airplane_0006_normalised\nairplane/train/airplane_0603_normalised\nairplane/train/airplane_0511_normalised\nairplane/train/airplane_0426_normalised\nairplane/train/airplane_0149_normalised\nairplane/train/airplane_0226_normalised\nairplane/train/airplane_0445_normalised\nairplane/train/airplane_0440_normalised\nairplane/train/airplane_0532_normalised\nairplane/train/airplane_0109_normalised\nairplane/train/airplane_0614_normalised\nairplane/train/airplane_0262_normalised\nairplane/train/airplane_0295_normalised\nairplane/train/airplane_0083_normalised\nairplane/train/airplane_0527_normalised\nairplane/train/airplane_0090_normalised\nairplane/train/airplane_0338_normalised\nairplane/train/airplane_0007_normalised\nairplane/train/airplane_0078_normalised\nairplane/train/airplane_0280_normalised\nairplane/train/ai
rplane_0612_normalised\nairplane/train/airplane_0021_normalised\nairplane/train/airplane_0494_normalised\nairplane/train/airplane_0120_normalised\nairplane/train/airplane_0065_normalised\nairplane/train/airplane_0303_normalised\nairplane/train/airplane_0380_normalised\nairplane/train/airplane_0151_normalised\nairplane/train/airplane_0260_normalised\nairplane/train/airplane_0163_normalised\nairplane/train/airplane_0347_normalised\nairplane/train/airplane_0175_normalised\nairplane/train/airplane_0245_normalised\nairplane/train/airplane_0411_normalised\nairplane/train/airplane_0025_normalised\nairplane/train/airplane_0159_normalised\nairplane/train/airplane_0208_normalised\nairplane/train/airplane_0452_normalised\nairplane/train/airplane_0403_normalised\nairplane/train/airplane_0121_normalised\nairplane/train/airplane_0111_normalised\nairplane/train/airplane_0089_normalised\nairplane/train/airplane_0613_normalised\nairplane/train/airplane_0258_normalised\nairplane/train/airplane_0181_normalised\nairplane/train/airplane_0034_normalised\nairplane/train/airplane_0396_normalised\nairplane/train/airplane_0219_normalised\nairplane/train/airplane_0261_normalised\nairplane/train/airplane_0455_normalised\nairplane/train/airplane_0522_normalised\nairplane/train/airplane_0024_normalised\nairplane/train/airplane_0328_normalised\nairplane/train/airplane_0178_normalised\nairplane/train/airplane_0526_normalised\nairplane/train/airplane_0048_normalised\nairplane/train/airplane_0545_normalised\nairplane/train/airplane_0070_normalised\nairplane/train/airplane_0281_normalised\nairplane/train/airplane_0249_normalised\nairplane/train/airplane_0539_normalised\nairplane/train/airplane_0095_normalised\nairplane/train/airplane_0384_normalised\nairplane/train/airplane_0377_normalised\nairplane/train/airplane_0489_normalised\nairplane/train/airplane_0257_normalised\nairplane/train/airplane_0105_normalised\nairplane/train/airplane_0259_normalised\nairplane/train/airplane_0345_normalised\nairplane
/train/airplane_0224_normalised\nairplane/train/airplane_0496_normalised\nairplane/train/airplane_0096_normalised\nairplane/train/airplane_0002_normalised\nairplane/train/airplane_0318_normalised\nairplane/train/airplane_0534_normalised\nairplane/train/airplane_0441_normalised\nairplane/train/airplane_0363_normalised\nairplane/train/airplane_0457_normalised\nairplane/train/airplane_0620_normalised\nairplane/train/airplane_0473_normalised\nairplane/train/airplane_0097_normalised\nairplane/train/airplane_0045_normalised\nairplane/train/airplane_0055_normalised\nairplane/train/airplane_0362_normalised\nairplane/train/airplane_0016_normalised\nairplane/train/airplane_0576_normalised\nairplane/train/airplane_0227_normalised\nairplane/train/airplane_0343_normalised\nairplane/train/airplane_0047_normalised\nairplane/train/airplane_0618_normalised\nairplane/train/airplane_0427_normalised\nairplane/train/airplane_0485_normalised\nairplane/train/airplane_0273_normalised\nairplane/train/airplane_0164_normalised\nairplane/train/airplane_0367_normalised\nairplane/train/airplane_0606_normalised\nairplane/train/airplane_0562_normalised\nairplane/train/airplane_0414_normalised\nairplane/train/airplane_0438_normalised\nairplane/train/airplane_0074_normalised\nairplane/train/airplane_0573_normalised\nairplane/train/airplane_0423_normalised\nairplane/train/airplane_0087_normalised\nairplane/train/airplane_0607_normalised\nairplane/train/airplane_0081_normalised\nairplane/train/airplane_0192_normalised\nairplane/train/airplane_0320_normalised\nairplane/train/airplane_0069_normalised\nairplane/train/airplane_0617_normalised\nairplane/train/airplane_0217_normalised\nairplane/train/airplane_0019_normalised\nairplane/train/airplane_0137_normalised\nairplane/train/airplane_0203_normalised\nairplane/train/airplane_0621_normalised\nairplane/train/airplane_0251_normalised\nairplane/train/airplane_0405_normalised\nairplane/train/airplane_0372_normalised\nairplane/train/airplane_0275_normalised\
nairplane/train/airplane_0154_normalised\nairplane/train/airplane_0565_normalised\nairplane/train/airplane_0376_normalised\nairplane/train/airplane_0356_normalised\nairplane/train/airplane_0039_normalised\nairplane/train/airplane_0068_normalised\nairplane/train/airplane_0066_normalised\nairplane/train/airplane_0470_normalised\nairplane/train/airplane_0412_normalised\nairplane/train/airplane_0135_normalised\nairplane/train/airplane_0157_normalised\nairplane/train/airplane_0243_normalised\nairplane/train/airplane_0223_normalised\nairplane/train/airplane_0580_normalised\nairplane/train/airplane_0332_normalised\nairplane/train/airplane_0071_normalised\nairplane/train/airplane_0064_normalised\nairplane/train/airplane_0538_normalised\nairplane/train/airplane_0179_normalised\nairplane/train/airplane_0480_normalised\nairplane/train/airplane_0098_normalised\nairplane/train/airplane_0574_normalised\nairplane/train/airplane_0293_normalised\nairplane/train/airplane_0225_normalised\nairplane/train/airplane_0488_normalised\nairplane/train/airplane_0266_normalised\nairplane/train/airplane_0305_normalised\nairplane/train/airplane_0568_normalised\nairplane/train/airplane_0575_normalised\nairplane/train/airplane_0072_normalised\nairplane/train/airplane_0309_normalised\nairplane/train/airplane_0529_normalised\nairplane/train/airplane_0147_normalised\nairplane/train/airplane_0198_normalised\nairplane/train/airplane_0051_normalised\nairplane/train/airplane_0062_normalised\nairplane/train/airplane_0352_normalised\nairplane/train/airplane_0043_normalised\nairplane/train/airplane_0600_normalised\nairplane/train/airplane_0171_normalised\nairplane/train/airplane_0616_normalised\nairplane/train/airplane_0610_normalised\nairplane/train/airplane_0602_normalised\nairplane/train/airplane_0334_normalised\nairplane/train/airplane_0202_normalised\nairplane/train/airplane_0131_normalised\nairplane/train/airplane_0431_normalised\nairplane/train/airplane_0533_normalised\nairplane/train/airplane_0450_no
rmalised\nairplane/train/airplane_0570_normalised\nairplane/train/airplane_0321_normalised\nairplane/train/airplane_0001_normalised\nairplane/train/airplane_0231_normalised\nairplane/train/airplane_0138_normalised\nairplane/train/airplane_0369_normalised\nairplane/train/airplane_0551_normalised\nairplane/train/airplane_0141_normalised\nairplane/train/airplane_0270_normalised\nairplane/train/airplane_0524_normalised\nairplane/train/airplane_0185_normalised\nairplane/train/airplane_0212_normalised\nairplane/train/airplane_0349_normalised\nairplane/train/airplane_0422_normalised\nairplane/train/airplane_0512_normalised\nairplane/train/airplane_0459_normalised\nairplane/train/airplane_0122_normalised\nairplane/train/airplane_0429_normalised\nairplane/train/airplane_0256_normalised\nairplane/train/airplane_0136_normalised\nairplane/train/airplane_0337_normalised\nairplane/train/airplane_0010_normalised\nairplane/train/airplane_0176_normalised\nairplane/train/airplane_0556_normalised\nairplane/train/airplane_0508_normalised\nairplane/train/airplane_0561_normalised\nairplane/train/airplane_0146_normalised\nairplane/train/airplane_0288_normalised\nairplane/train/airplane_0425_normalised\nairplane/train/airplane_0235_normalised\nairplane/train/airplane_0501_normalised\nairplane/train/airplane_0447_normalised\nairplane/train/airplane_0437_normalised\nairplane/train/airplane_0308_normalised\nairplane/train/airplane_0615_normalised\nairplane/train/airplane_0042_normalised\nairplane/train/airplane_0336_normalised\nairplane/train/airplane_0124_normalised\nairplane/train/airplane_0481_normalised\nairplane/train/airplane_0322_normalised\nairplane/train/airplane_0399_normalised\nairplane/train/airplane_0201_normalised\nairplane/train/airplane_0601_normalised\nairplane/train/airplane_0233_normalised\nairplane/train/airplane_0560_normalised\nairplane/train/airplane_0274_normalised\nairplane/train/airplane_0031_normalised\nairplane/train/airplane_0244_normalised\nairplane/train/airplan
e_0193_normalised\nairplane/train/airplane_0563_normalised\nairplane/train/airplane_0404_normalised\nairplane/train/airplane_0530_normalised\nairplane/train/airplane_0599_normalised\nairplane/train/airplane_0419_normalised\nairplane/train/airplane_0350_normalised\nairplane/train/airplane_0378_normalised\nairplane/train/airplane_0506_normalised\nairplane/train/airplane_0536_normalised\nairplane/train/airplane_0598_normalised\nairplane/train/airplane_0472_normalised\nairplane/train/airplane_0194_normalised\nairplane/train/airplane_0390_normalised\nairplane/train/airplane_0061_normalised\nairplane/train/airplane_0166_normalised\nairplane/train/airplane_0237_normalised\nairplane/train/airplane_0395_normalised\nairplane/train/airplane_0453_normalised\nairplane/train/airplane_0424_normalised\nairplane/train/airplane_0155_normalised\nairplane/train/airplane_0359_normalised\nairplane/train/airplane_0398_normalised\nairplane/train/airplane_0497_normalised\nairplane/train/airplane_0493_normalised\nairplane/train/airplane_0108_normalised\nairplane/train/airplane_0513_normalised\nairplane/train/airplane_0365_normalised\nairplane/train/airplane_0301_normalised\nairplane/train/airplane_0432_normalised\nairplane/train/airplane_0306_normalised\nairplane/train/airplane_0468_normalised\nairplane/train/airplane_0044_normalised\nairplane/train/airplane_0531_normalised\nairplane/train/airplane_0167_normalised\nairplane/train/airplane_0552_normalised\nairplane/train/airplane_0514_normalised\nairplane/train/airplane_0004_normalised\nairplane/train/airplane_0542_normalised\nairplane/train/airplane_0106_normalised\nairplane/train/airplane_0269_normalised\nairplane/train/airplane_0255_normalised\nairplane/train/airplane_0479_normalised\nairplane/train/airplane_0267_normalised\nairplane/train/airplane_0027_normalised\nairplane/train/airplane_0442_normalised\nairplane/train/airplane_0490_normalised\nairplane/train/airplane_0099_normalised\nairplane/train/airplane_0458_normalised\nairplane/trai
n/airplane_0504_normalised\nairplane/train/airplane_0413_normalised\nairplane/train/airplane_0435_normalised\nairplane/train/airplane_0271_normalised\nairplane/train/airplane_0236_normalised\nairplane/train/airplane_0410_normalised\nairplane/train/airplane_0030_normalised\nairplane/train/airplane_0370_normalised\nairplane/train/airplane_0379_normalised\nairplane/train/airplane_0371_normalised\nairplane/train/airplane_0191_normalised\nairplane/train/airplane_0213_normalised\nairplane/train/airplane_0385_normalised\nairplane/train/airplane_0091_normalised\nairplane/train/airplane_0040_normalised\nairplane/train/airplane_0177_normalised\nairplane/train/airplane_0140_normalised\nairplane/train/airplane_0241_normalised\nairplane/train/airplane_0290_normalised\nairplane/train/airplane_0548_normalised\nairplane/train/airplane_0471_normalised\nairplane/train/airplane_0546_normalised\nairplane/train/airplane_0218_normalised\nairplane/train/airplane_0333_normalised\nairplane/train/airplane_0547_normalised\nairplane/train/airplane_0523_normalised\nairplane/train/airplane_0272_normalised\nairplane/train/airplane_0611_normalised\nairplane/train/airplane_0373_normalised\nairplane/train/airplane_0234_normalised\nairplane/train/airplane_0302_normalised\nairplane/train/airplane_0263_normalised\nairplane/train/airplane_0344_normalised\nairplane/train/airplane_0566_normalised\nairplane/train/airplane_0314_normalised\nairplane/train/airplane_0022_normalised\nairplane/train/airplane_0409_normalised\nairplane/train/airplane_0500_normalised\nairplane/train/airplane_0446_normalised\nairplane/train/airplane_0339_normalised\nairplane/train/airplane_0582_normalised\nairplane/train/airplane_0572_normalised\nairplane/train/airplane_0265_normalised\nairplane/train/airplane_0248_normalised\nairplane/train/airplane_0361_normalised\nairplane/train/airplane_0145_normalised\nairplane/train/airplane_0211_normalised\nairplane/train/airplane_0509_normalised\nairplane/train/airplane_0118_normalised\nairp
lane/train/airplane_0358_normalised\nairplane/train/airplane_0324_normalised\nairplane/train/airplane_0107_normalised\nairplane/train/airplane_0114_normalised\nairplane/train/airplane_0393_normalised\nairplane/train/airplane_0518_normalised\nairplane/train/airplane_0278_normalised\nairplane/train/airplane_0063_normalised\nairplane/train/airplane_0183_normalised\nairplane/train/airplane_0364_normalised\nairplane/train/airplane_0595_normalised\nairplane/train/airplane_0368_normalised\nairplane/train/airplane_0553_normalised\nairplane/train/airplane_0342_normalised\nairplane/train/airplane_0113_normalised\nairplane/train/airplane_0046_normalised\nairplane/train/airplane_0386_normalised\nairplane/train/airplane_0125_normalised\nairplane/train/airplane_0005_normalised\nairplane/train/airplane_0104_normalised\nairplane/train/airplane_0340_normalised\nairplane/train/airplane_0357_normalised\nairplane/train/airplane_0624_normalised\nairplane/train/airplane_0323_normalised\nairplane/train/airplane_0038_normalised\nairplane/train/airplane_0239_normalised\nairplane/train/airplane_0232_normalised\nairplane/train/airplane_0160_normalised\nairplane/train/airplane_0569_normalised\nairplane/train/airplane_0119_normalised\nairplane/train/airplane_0311_normalised\nairplane/train/airplane_0525_normalised\nairplane/train/airplane_0476_normalised\nairplane/train/airplane_0549_normalised\nairplane/train/airplane_0541_normalised\nairplane/train/airplane_0299_normalised\nairplane/train/airplane_0478_normalised\nairplane/train/airplane_0254_normalised\nairplane/train/airplane_0461_normalised\nairplane/train/airplane_0584_normalised\nairplane/train/airplane_0123_normalised\nairplane/train/airplane_0073_normalised\nairplane/train/airplane_0434_normalised\nairplane/train/airplane_0116_normalised\nairplane/train/airplane_0057_normalised\nairplane/train/airplane_0331_normalised\nairplane/train/airplane_0304_normalised\nairplane/train/airplane_0058_normalised\nairplane/train/airplane_0354_normali
sed\nairplane/train/airplane_0491_normalised\nairplane/train/airplane_0268_normalised\nairplane/train/airplane_0619_normalised\nairplane/train/airplane_0056_normalised\nairplane/train/airplane_0346_normalised\nairplane/train/airplane_0436_normalised\nairplane/train/airplane_0134_normalised\nairplane/train/airplane_0464_normalised\nairplane/train/airplane_0319_normalised\nairplane/train/airplane_0228_normalised\nairplane/train/airplane_0287_normalised\nairplane/train/airplane_0335_normalised\nairplane/train/airplane_0625_normalised\nairplane/train/airplane_0250_normalised\nairplane/train/airplane_0032_normalised\nairplane/train/airplane_0596_normalised\nairplane/train/airplane_0080_normalised\nairplane/train/airplane_0220_normalised\nairplane/train/airplane_0189_normalised\nairplane/train/airplane_0180_normalised\nairplane/train/airplane_0587_normalised\nairplane/train/airplane_0296_normalised\nairplane/train/airplane_0317_normalised\nairplane/train/airplane_0059_normalised\nairplane/train/airplane_0623_normalised\nairplane/train/airplane_0153_normalised\nairplane/train/airplane_0173_normalised\nairplane/train/airplane_0475_normalised\nairplane/train/airplane_0103_normalised\nairplane/train/airplane_0460_normalised\nairplane/train/airplane_0382_normalised\nairplane/train/airplane_0406_normalised\nairplane/train/airplane_0084_normalised\nairplane/train/airplane_0366_normalised\nairplane/train/airplane_0416_normalised\nairplane/train/airplane_0117_normalised\nairplane/train/airplane_0330_normalised\nairplane/train/airplane_0246_normalised\nairplane/train/airplane_0161_normalised\nairplane/train/airplane_0012_normalised\nairplane/train/airplane_0327_normalised\nairplane/train/airplane_0487_normalised\nairplane/train/airplane_0407_normalised\nairplane/train/airplane_0037_normalised\nairplane/train/airplane_0463_normalised\nairplane/train/airplane_0588_normalised\nairplane/train/airplane_0170_normalised\nairplane/train/airplane_0174_normalised\nairplane/train/airplane_060
5_normalised\nairplane/train/airplane_0528_normalised\nairplane/train/airplane_0028_normalised\nairplane/train/airplane_0172_normalised\nairplane/train/airplane_0510_normalised\nairplane/train/airplane_0482_normalised\nairplane/train/airplane_0326_normalised\nairplane/train/airplane_0132_normalised\nairplane/train/airplane_0182_normalised\nairplane/train/airplane_0209_normalised\nairplane/train/airplane_0076_normalised\nairplane/train/airplane_0517_normalised\nairplane/train/airplane_0126_normalised\nairplane/train/airplane_0430_normalised\nairplane/train/airplane_0054_normalised\nairplane/train/airplane_0018_normalised\nairplane/train/airplane_0590_normalised\nairplane/train/airplane_0036_normalised\nairplane/train/airplane_0519_normalised\nairplane/train/airplane_0577_normalised\nairplane/train/airplane_0292_normalised\nairplane/train/airplane_0282_normalised\nairplane/train/airplane_0397_normalised\nairplane/train/airplane_0507_normalised\nairplane/train/airplane_0315_normalised\nairplane/train/airplane_0592_normalised\nairplane/train/airplane_0092_normalised\nairplane/train/airplane_0186_normalised\nairplane/train/airplane_0375_normalised\nairplane/train/airplane_0085_normalised\nairplane/train/airplane_0418_normalised\nairplane/train/airplane_0094_normalised\nairplane/train/airplane_0557_normalised\nairplane/train/airplane_0564_normalised\nairplane/train/airplane_0013_normalised\nairplane/train/airplane_0093_normalised\nairplane/train/airplane_0184_normalised\nairplane/train/airplane_0535_normalised\nairplane/train/airplane_0597_normalised\nairplane/train/airplane_0204_normalised\nairplane/train/airplane_0581_normalised\nairplane/train/airplane_0608_normalised\nairplane/train/airplane_0127_normalised\nairplane/train/airplane_0088_normalised\nairplane/train/airplane_0017_normalised\nairplane/train/airplane_0394_normalised\nairplane/train/airplane_0276_normalised\nairplane/train/airplane_0540_normalised\nairplane/train/airplane_0011_normalised\nairplane/train/air
plane_0050_normalised\nairplane/train/airplane_0341_normalised\nairplane/train/airplane_0325_normalised\nairplane/train/airplane_0003_normalised\nairplane/train/airplane_0515_normalised\nairplane/train/airplane_0583_normalised\nairplane/train/airplane_0230_normalised\nairplane/train/airplane_0197_normalised\nairplane/train/airplane_0586_normalised\nairplane/train/airplane_0023_normalised\nairplane/train/airplane_0210_normalised\nairplane/train/airplane_0462_normalised\nairplane/train/airplane_0240_normalised\nairplane/train/airplane_0077_normalised\nairplane/train/airplane_0428_normalised\nairplane/train/airplane_0558_normalised\nairplane/train/airplane_0144_normalised\nairplane/train/airplane_0206_normalised\nairplane/train/airplane_0585_normalised\nairplane/train/airplane_0115_normalised\nairplane/train/airplane_0521_normalised\nairplane/train/airplane_0400_normalised\nairplane/train/airplane_0520_normalised\nairplane/train/airplane_0195_normalised\nairplane/train/airplane_0433_normalised\nairplane/train/airplane_0391_normalised\nairplane/train/airplane_0196_normalised\nairplane/train/airplane_0312_normalised\nairplane/train/airplane_0591_normalised\nairplane/train/airplane_0578_normalised\nairplane/train/airplane_0139_normalised\nairplane/train/airplane_0392_normalised\nairplane/train/airplane_0143_normalised\nairplane/train/airplane_0247_normalised\nairplane/train/airplane_0498_normalised\nairplane/test/airplane_0663_normalised\nairplane/test/airplane_0679_normalised\nairplane/test/airplane_0715_normalised\nairplane/test/airplane_0714_normalised\nairplane/test/airplane_0685_normalised\nairplane/test/airplane_0712_normalised\nairplane/test/airplane_0655_normalised\nairplane/test/airplane_0632_normalised\nairplane/test/airplane_0656_normalised\nairplane/test/airplane_0661_normalised\nairplane/test/airplane_0660_normalised\nairplane/test/airplane_0726_normalised\nairplane/test/airplane_0689_normalised\nairplane/test/airplane_0676_normalised\nairplane/test/airplane_
0673_normalised\nairplane/test/airplane_0635_normalised\nairplane/test/airplane_0659_normalised\nairplane/test/airplane_0641_normalised\nairplane/test/airplane_0684_normalised\nairplane/test/airplane_0664_normalised\nairplane/test/airplane_0696_normalised\nairplane/test/airplane_0719_normalised\nairplane/test/airplane_0693_normalised\nairplane/test/airplane_0674_normalised\nairplane/test/airplane_0683_normalised\nairplane/test/airplane_0705_normalised\nairplane/test/airplane_0720_normalised\nairplane/test/airplane_0721_normalised\nairplane/test/airplane_0682_normalised\nairplane/test/airplane_0718_normalised\nairplane/test/airplane_0638_normalised\nairplane/test/airplane_0692_normalised\nairplane/test/airplane_0636_normalised\nairplane/test/airplane_0643_normalised\nairplane/test/airplane_0688_normalised\nairplane/test/airplane_0686_normalised\nairplane/test/airplane_0699_normalised\nairplane/test/airplane_0653_normalised\nairplane/test/airplane_0627_normalised\nairplane/test/airplane_0701_normalised\nairplane/test/airplane_0666_normalised\nairplane/test/airplane_0667_normalised\nairplane/test/airplane_0698_normalised\nairplane/test/airplane_0680_normalised\nairplane/test/airplane_0713_normalised\nairplane/test/airplane_0703_normalised\nairplane/test/airplane_0690_normalised\nairplane/test/airplane_0651_normalised\nairplane/test/airplane_0675_normalised\nairplane/test/airplane_0725_normalised\nairplane/test/airplane_0630_normalised\nairplane/test/airplane_0707_normalised\nairplane/test/airplane_0700_normalised\nairplane/test/airplane_0649_normalised\nairplane/test/airplane_0710_normalised\nairplane/test/airplane_0704_normalised\nairplane/test/airplane_0662_normalised\nairplane/test/airplane_0717_normalised\nairplane/test/airplane_0631_normalised\nairplane/test/airplane_0670_normalised\nairplane/test/airplane_0629_normalised\nairplane/test/airplane_0716_normalised\nairplane/test/airplane_0711_normalised\nairplane/test/airplane_0654_normalised\nairplane/test/airplane_
0648_normalised\nairplane/test/airplane_0702_normalised\nairplane/test/airplane_0646_normalised\nairplane/test/airplane_0665_normalised\nairplane/test/airplane_0723_normalised\nairplane/test/airplane_0671_normalised\nairplane/test/airplane_0658_normalised\nairplane/test/airplane_0669_normalised\nairplane/test/airplane_0722_normalised\nairplane/test/airplane_0647_normalised\nairplane/test/airplane_0634_normalised\nairplane/test/airplane_0695_normalised\nairplane/test/airplane_0709_normalised\nairplane/test/airplane_0681_normalised\nairplane/test/airplane_0642_normalised\nairplane/test/airplane_0637_normalised\nairplane/test/airplane_0628_normalised\nairplane/test/airplane_0645_normalised\nairplane/test/airplane_0691_normalised\nairplane/test/airplane_0639_normalised\nairplane/test/airplane_0644_normalised\nairplane/test/airplane_0640_normalised\nairplane/test/airplane_0694_normalised\nairplane/test/airplane_0677_normalised\nairplane/test/airplane_0633_normalised\nairplane/test/airplane_0724_normalised\nairplane/test/airplane_0708_normalised\nairplane/test/airplane_0706_normalised\nairplane/test/airplane_0652_normalised\nairplane/test/airplane_0697_normalised\nairplane/test/airplane_0657_normalised\nairplane/test/airplane_0650_normalised\nairplane/test/airplane_0672_normalised\nairplane/test/airplane_0668_normalised\nairplane/test/airplane_0687_normalised\nairplane/test/airplane_0678_normalised\nbed/train/bed_0256_normalised\nbed/train/bed_0024_normalised\nbed/train/bed_0231_normalised\nbed/train/bed_0274_normalised\nbed/train/bed_0019_normalised\nbed/train/bed_0473_normalised\nbed/train/bed_0479_normalised\nbed/train/bed_0459_normalised\nbed/train/bed_0226_normalised\nbed/train/bed_0510_normalised\nbed/train/bed_0380_normalised\nbed/train/bed_0210_normalised\nbed/train/bed_0330_normalised\nbed/train/bed_0406_normalised\nbed/train/bed_0152_normalised\nbed/train/bed_0247_normalised\nbed/train/bed_0417_normalised\nbed/train/bed_0269_normalised\nbed/train/bed_0183_normal
ised\nbed/train/bed_0034_normalised\nbed/train/bed_0466_normalised\nbed/train/bed_0098_normalised\nbed/train/bed_0214_normalised\nbed/train/bed_0112_normalised\nbed/train/bed_0312_normalised\nbed/train/bed_0414_normalised\nbed/train/bed_0444_normalised\nbed/train/bed_0289_normalised\nbed/train/bed_0482_normalised\nbed/train/bed_0388_normalised\nbed/train/bed_0039_normalised\nbed/train/bed_0403_normalised\nbed/train/bed_0091_normalised\nbed/train/bed_0254_normalised\nbed/train/bed_0217_normalised\nbed/train/bed_0199_normalised\nbed/train/bed_0151_normalised\nbed/train/bed_0179_normalised\nbed/train/bed_0263_normalised\nbed/train/bed_0347_normalised\nbed/train/bed_0423_normalised\nbed/train/bed_0047_normalised\nbed/train/bed_0198_normalised\nbed/train/bed_0412_normalised\nbed/train/bed_0360_normalised\nbed/train/bed_0215_normalised\nbed/train/bed_0232_normalised\nbed/train/bed_0086_normalised\nbed/train/bed_0244_normalised\nbed/train/bed_0176_normalised\nbed/train/bed_0202_normalised\nbed/train/bed_0381_normalised\nbed/train/bed_0040_normalised\nbed/train/bed_0208_normalised\nbed/train/bed_0372_normalised\nbed/train/bed_0105_normalised\nbed/train/bed_0213_normalised\nbed/train/bed_0509_normalised\nbed/train/bed_0087_normalised\nbed/train/bed_0261_normalised\nbed/train/bed_0113_normalised\nbed/train/bed_0322_normalised\nbed/train/bed_0084_normalised\nbed/train/bed_0300_normalised\nbed/train/bed_0346_normalised\nbed/train/bed_0278_normalised\nbed/train/bed_0445_normalised\nbed/train/bed_0480_normalised\nbed/train/bed_0442_normalised\nbed/train/bed_0374_normalised\nbed/train/bed_0186_normalised\nbed/train/bed_0069_normalised\nbed/train/bed_0295_normalised\nbed/train/bed_0108_normalised\nbed/train/bed_0398_normalised\nbed/train/bed_0235_normalised\nbed/train/bed_0262_normalised\nbed/train/bed_0121_normalised\nbed/train/bed_0138_normalised\nbed/train/bed_0123_normalised\nbed/train/bed_0264_normalised\nbed/train/bed_0006_normalised\nbed/train/bed_0207_normalised\nbed/train/
bed_0157_normalised\nbed/train/bed_0177_normalised\nbed/train/bed_0290_normalised\nbed/train/bed_0227_normalised\nbed/train/bed_0204_normalised\nbed/train/bed_0369_normalised\nbed/train/bed_0255_normalised\nbed/train/bed_0106_normalised\nbed/train/bed_0464_normalised\nbed/train/bed_0441_normalised\nbed/train/bed_0476_normalised\nbed/train/bed_0494_normalised\nbed/train/bed_0275_normalised\nbed/train/bed_0209_normalised\nbed/train/bed_0085_normalised\nbed/train/bed_0250_normalised\nbed/train/bed_0059_normalised\nbed/train/bed_0092_normalised\nbed/train/bed_0484_normalised\nbed/train/bed_0161_normalised\nbed/train/bed_0148_normalised\nbed/train/bed_0338_normalised\nbed/train/bed_0373_normalised\nbed/train/bed_0429_normalised\nbed/train/bed_0143_normalised\nbed/train/bed_0259_normalised\nbed/train/bed_0072_normalised\nbed/train/bed_0238_normalised\nbed/train/bed_0499_normalised\nbed/train/bed_0125_normalised\nbed/train/bed_0243_normalised\nbed/train/bed_0014_normalised\nbed/train/bed_0437_normalised\nbed/train/bed_0501_normalised\nbed/train/bed_0149_normalised\nbed/train/bed_0028_normalised\nbed/train/bed_0508_normalised\nbed/train/bed_0058_normalised\nbed/train/bed_0063_normalised\nbed/train/bed_0361_normalised\nbed/train/bed_0162_normalised\nbed/train/bed_0382_normalised\nbed/train/bed_0164_normalised\nbed/train/bed_0422_normalised\nbed/train/bed_0399_normalised\nbed/train/bed_0007_normalised\nbed/train/bed_0200_normalised\nbed/train/bed_0296_normalised\nbed/train/bed_0206_normalised\nbed/train/bed_0513_normalised\nbed/train/bed_0142_normalised\nbed/train/bed_0397_normalised\nbed/train/bed_0158_normalised\nbed/train/bed_0356_normalised\nbed/train/bed_0135_normalised\nbed/train/bed_0428_normalised\nbed/train/bed_0321_normalised\nbed/train/bed_0310_normalised\nbed/train/bed_0337_normalised\nbed/train/bed_0229_normalised\nbed/train/bed_0225_normalised\nbed/train/bed_0396_normalised\nbed/train/bed_0435_normalised\nbed/train/bed_0065_normalised\nbed/train/bed_0379_normali
sed\nbed/train/bed_0454_normalised\nbed/train/bed_0401_normalised\nbed/train/bed_0491_normalised\nbed/train/bed_0391_normalised\nbed/train/bed_0050_normalised\nbed/train/bed_0432_normalised\nbed/train/bed_0070_normalised\nbed/train/bed_0088_normalised\nbed/train/bed_0390_normalised\nbed/train/bed_0316_normalised\nbed/train/bed_0309_normalised\nbed/train/bed_0068_normalised\nbed/train/bed_0468_normalised\nbed/train/bed_0487_normalised\nbed/train/bed_0377_normalised\nbed/train/bed_0251_normalised\nbed/train/bed_0234_normalised\nbed/train/bed_0122_normalised\nbed/train/bed_0236_normalised\nbed/train/bed_0449_normalised\nbed/train/bed_0450_normalised\nbed/train/bed_0237_normalised\nbed/train/bed_0409_normalised\nbed/train/bed_0258_normalised\nbed/train/bed_0026_normalised\nbed/train/bed_0418_normalised\nbed/train/bed_0440_normalised\nbed/train/bed_0083_normalised\nbed/train/bed_0497_normalised\nbed/train/bed_0283_normalised\nbed/train/bed_0228_normalised\nbed/train/bed_0097_normalised\nbed/train/bed_0172_normalised\nbed/train/bed_0293_normalised\nbed/train/bed_0090_normalised\nbed/train/bed_0498_normalised\nbed/train/bed_0191_normalised\nbed/train/bed_0302_normalised\nbed/train/bed_0016_normalised\nbed/train/bed_0153_normalised\nbed/train/bed_0425_normalised\nbed/train/bed_0294_normalised\nbed/train/bed_0452_normalised\nbed/train/bed_0292_normalised\nbed/train/bed_0064_normalised\nbed/train/bed_0168_normalised\nbed/train/bed_0018_normalised\nbed/train/bed_0169_normalised\nbed/train/bed_0431_normalised\nbed/train/bed_0095_normalised\nbed/train/bed_0467_normalised\nbed/train/bed_0471_normalised\nbed/train/bed_0324_normalised\nbed/train/bed_0277_normalised\nbed/train/bed_0465_normalised\nbed/train/bed_0368_normalised\nbed/train/bed_0266_normalised\nbed/train/bed_0001_normalised\nbed/train/bed_0375_normalised\nbed/train/bed_0419_normalised\nbed/train/bed_0311_normalised\nbed/train/bed_0053_normalised\nbed/train/bed_0504_normalised\nbed/train/bed_0370_normalised\nbed/train/b
ed_0359_normalised\nbed/train/bed_0140_normalised\nbed/train/bed_0196_normalised\nbed/train/bed_0076_normalised\nbed/train/bed_0486_normalised\nbed/train/bed_0233_normalised\nbed/train/bed_0462_normalised\nbed/train/bed_0389_normalised\nbed/train/bed_0130_normalised\nbed/train/bed_0015_normalised\nbed/train/bed_0071_normalised\nbed/train/bed_0514_normalised\nbed/train/bed_0230_normalised\nbed/train/bed_0308_normalised\nbed/train/bed_0416_normalised\nbed/train/bed_0474_normalised\nbed/train/bed_0045_normalised\nbed/train/bed_0009_normalised\nbed/train/bed_0220_normalised\nbed/train/bed_0386_normalised\nbed/train/bed_0333_normalised\nbed/train/bed_0271_normalised\nbed/train/bed_0502_normalised\nbed/train/bed_0279_normalised\nbed/train/bed_0049_normalised\nbed/train/bed_0469_normalised\nbed/train/bed_0156_normalised\nbed/train/bed_0107_normalised\nbed/train/bed_0131_normalised\nbed/train/bed_0089_normalised\nbed/train/bed_0348_normalised\nbed/train/bed_0031_normalised\nbed/train/bed_0297_normalised\nbed/train/bed_0257_normalised\nbed/train/bed_0137_normalised\nbed/train/bed_0248_normalised\nbed/train/bed_0002_normalised\nbed/train/bed_0500_normalised\nbed/train/bed_0030_normalised\nbed/train/bed_0342_normalised\nbed/train/bed_0493_normalised\nbed/train/bed_0180_normalised\nbed/train/bed_0411_normalised\nbed/train/bed_0400_normalised\nbed/train/bed_0323_normalised\nbed/train/bed_0212_normalised\nbed/train/bed_0287_normalised\nbed/train/bed_0003_normalised\nbed/train/bed_0241_normalised\nbed/train/bed_0453_normalised\nbed/train/bed_0426_normalised\nbed/train/bed_0366_normalised\nbed/train/bed_0410_normalised\nbed/train/bed_0495_normalised\nbed/train/bed_0221_normalised\nbed/train/bed_0011_normalised\nbed/train/bed_0004_normalised\nbed/train/bed_0515_normalised\nbed/train/bed_0281_normalised\nbed/train/bed_0189_normalised\nbed/train/bed_0080_normalised\nbed/train/bed_0242_normalised\nbed/train/bed_0349_normalised\nbed/train/bed_0298_normalised\nbed/train/bed_0027_normalis
ed\nbed/train/bed_0332_normalised\nbed/train/bed_0331_normalised\nbed/train/bed_0132_normalised\nbed/train/bed_0363_normalised\nbed/train/bed_0352_normalised\nbed/train/bed_0096_normalised\nbed/train/bed_0329_normalised\nbed/train/bed_0334_normalised\nbed/train/bed_0194_normalised\nbed/train/bed_0421_normalised\nbed/train/bed_0415_normalised\nbed/train/bed_0430_normalised\nbed/train/bed_0507_normalised\nbed/train/bed_0218_normalised\nbed/train/bed_0012_normalised\nbed/train/bed_0351_normalised\nbed/train/bed_0029_normalised\nbed/train/bed_0365_normalised\nbed/train/bed_0187_normalised\nbed/train/bed_0489_normalised\nbed/train/bed_0060_normalised\nbed/train/bed_0032_normalised\nbed/train/bed_0117_normalised\nbed/train/bed_0075_normalised\nbed/train/bed_0420_normalised\nbed/train/bed_0376_normalised\nbed/train/bed_0239_normalised\nbed/train/bed_0394_normalised\nbed/train/bed_0163_normalised\nbed/train/bed_0013_normalised\nbed/train/bed_0224_normalised\nbed/train/bed_0339_normalised\nbed/train/bed_0078_normalised\nbed/train/bed_0171_normalised\nbed/train/bed_0166_normalised\nbed/train/bed_0114_normalised\nbed/train/bed_0195_normalised\nbed/train/bed_0506_normalised\nbed/train/bed_0355_normalised\nbed/train/bed_0273_normalised\nbed/train/bed_0318_normalised\nbed/train/bed_0126_normalised\nbed/train/bed_0350_normalised\nbed/train/bed_0124_normalised\nbed/train/bed_0427_normalised\nbed/train/bed_0035_normalised\nbed/train/bed_0245_normalised\nbed/train/bed_0005_normalised\nbed/train/bed_0101_normalised\nbed/train/bed_0038_normalised\nbed/train/bed_0182_normalised\nbed/train/bed_0285_normalised\nbed/train/bed_0477_normalised\nbed/train/bed_0336_normalised\nbed/train/bed_0488_normalised\nbed/train/bed_0110_normalised\nbed/train/bed_0252_normalised\nbed/train/bed_0472_normalised\nbed/train/bed_0343_normalised\nbed/train/bed_0364_normalised\nbed/train/bed_0223_normalised\nbed/train/bed_0133_normalised\nbed/train/bed_0461_normalised\nbed/train/bed_0240_normalised\nbed/train/be
d_0136_normalised\nbed/train/bed_0127_normalised\nbed/train/bed_0433_normalised\nbed/train/bed_0128_normalised\nbed/train/bed_0211_normalised\nbed/train/bed_0503_normalised\nbed/train/bed_0178_normalised\nbed/train/bed_0276_normalised\nbed/train/bed_0115_normalised\nbed/train/bed_0044_normalised\nbed/train/bed_0328_normalised\nbed/train/bed_0492_normalised\nbed/train/bed_0505_normalised\nbed/train/bed_0301_normalised\nbed/train/bed_0150_normalised\nbed/train/bed_0061_normalised\nbed/train/bed_0042_normalised\nbed/train/bed_0190_normalised\nbed/train/bed_0094_normalised\nbed/train/bed_0129_normalised\nbed/train/bed_0057_normalised\nbed/train/bed_0303_normalised\nbed/train/bed_0458_normalised\nbed/train/bed_0272_normalised\nbed/train/bed_0483_normalised\nbed/train/bed_0307_normalised\nbed/train/bed_0134_normalised\nbed/train/bed_0043_normalised\nbed/train/bed_0017_normalised\nbed/train/bed_0268_normalised\nbed/train/bed_0260_normalised\nbed/train/bed_0008_normalised\nbed/train/bed_0313_normalised\nbed/train/bed_0052_normalised\nbed/train/bed_0056_normalised\nbed/train/bed_0041_normalised\nbed/train/bed_0284_normalised\nbed/train/bed_0304_normalised\nbed/train/bed_0066_normalised\nbed/train/bed_0253_normalised\nbed/train/bed_0446_normalised\nbed/train/bed_0104_normalised\nbed/train/bed_0345_normalised\nbed/train/bed_0246_normalised\nbed/train/bed_0413_normalised\nbed/train/bed_0340_normalised\nbed/train/bed_0305_normalised\nbed/train/bed_0358_normalised\nbed/train/bed_0378_normalised\nbed/train/bed_0512_normalised\nbed/train/bed_0270_normalised\nbed/train/bed_0451_normalised\nbed/train/bed_0455_normalised\nbed/train/bed_0288_normalised\nbed/train/bed_0371_normalised\nbed/train/bed_0282_normalised\nbed/train/bed_0341_normalised\nbed/train/bed_0315_normalised\nbed/train/bed_0188_normalised\nbed/train/bed_0170_normalised\nbed/train/bed_0073_normalised\nbed/train/bed_0319_normalised\nbed/train/bed_0408_normalised\nbed/train/bed_0120_normalised\nbed/train/bed_0033_normalise
d\nbed/train/bed_0299_normalised\nbed/train/bed_0103_normalised\nbed/train/bed_0010_normalised\nbed/train/bed_0280_normalised\nbed/train/bed_0286_normalised\nbed/train/bed_0141_normalised\nbed/train/bed_0249_normalised\nbed/train/bed_0353_normalised\nbed/train/bed_0357_normalised\nbed/train/bed_0203_normalised\nbed/train/bed_0438_normalised\nbed/train/bed_0478_normalised\nbed/train/bed_0174_normalised\nbed/train/bed_0184_normalised\nbed/train/bed_0100_normalised\nbed/train/bed_0448_normalised\nbed/train/bed_0165_normalised\nbed/train/bed_0384_normalised\nbed/train/bed_0062_normalised\nbed/train/bed_0205_normalised\nbed/train/bed_0109_normalised\nbed/train/bed_0079_normalised\nbed/train/bed_0470_normalised\nbed/train/bed_0155_normalised\nbed/train/bed_0402_normalised\nbed/train/bed_0393_normalised\nbed/train/bed_0222_normalised\nbed/train/bed_0081_normalised\nbed/train/bed_0023_normalised\nbed/train/bed_0463_normalised\nbed/train/bed_0119_normalised\nbed/train/bed_0102_normalised\nbed/train/bed_0496_normalised\nbed/train/bed_0192_normalised\nbed/train/bed_0082_normalised\nbed/train/bed_0116_normalised\nbed/train/bed_0362_normalised\nbed/train/bed_0077_normalised\nbed/train/bed_0439_normalised\nbed/train/bed_0424_normalised\nbed/train/bed_0118_normalised\nbed/train/bed_0485_normalised\nbed/train/bed_0048_normalised\nbed/train/bed_0074_normalised\nbed/train/bed_0146_normalised\nbed/train/bed_0219_normalised\nbed/train/bed_0326_normalised\nbed/train/bed_0093_normalised\nbed/train/bed_0265_normalised\nbed/train/bed_0436_normalised\nbed/train/bed_0020_normalised\nbed/train/bed_0022_normalised\nbed/train/bed_0325_normalised\nbed/train/bed_0291_normalised\nbed/train/bed_0159_normalised\nbed/train/bed_0111_normalised\nbed/train/bed_0144_normalised\nbed/train/bed_0335_normalised\nbed/train/bed_0267_normalised\nbed/train/bed_0317_normalised\nbed/train/bed_0395_normalised\nbed/train/bed_0054_normalised\nbed/train/bed_0456_normalised\nbed/train/bed_0185_normalised\nbed/train/bed
_0197_normalised\nbed/train/bed_0099_normalised\nbed/train/bed_0036_normalised\nbed/train/bed_0037_normalised\nbed/train/bed_0481_normalised\nbed/train/bed_0404_normalised\nbed/train/bed_0306_normalised\nbed/train/bed_0443_normalised\nbed/train/bed_0167_normalised\nbed/train/bed_0147_normalised\nbed/train/bed_0490_normalised\nbed/train/bed_0139_normalised\nbed/train/bed_0216_normalised\nbed/train/bed_0320_normalised\nbed/train/bed_0055_normalised\nbed/train/bed_0344_normalised\nbed/train/bed_0145_normalised\nbed/train/bed_0511_normalised\nbed/train/bed_0327_normalised\nbed/train/bed_0407_normalised\nbed/train/bed_0025_normalised\nbed/train/bed_0021_normalised\nbed/train/bed_0367_normalised\nbed/train/bed_0460_normalised\nbed/train/bed_0434_normalised\nbed/train/bed_0173_normalised\nbed/train/bed_0046_normalised\nbed/train/bed_0392_normalised\nbed/train/bed_0154_normalised\nbed/train/bed_0201_normalised\nbed/train/bed_0193_normalised\nbed/train/bed_0447_normalised\nbed/train/bed_0457_normalised\nbed/train/bed_0354_normalised\nbed/train/bed_0314_normalised\nbed/train/bed_0387_normalised\nbed/train/bed_0405_normalised\nbed/train/bed_0067_normalised\nbed/train/bed_0475_normalised\nbed/train/bed_0383_normalised\nbed/train/bed_0175_normalised\nbed/train/bed_0385_normalised\nbed/train/bed_0160_normalised\nbed/train/bed_0181_normalised\nbed/train/bed_0051_normalised\nbed/test/bed_0542_normalised\nbed/test/bed_0517_normalised\nbed/test/bed_0599_normalised\nbed/test/bed_0596_normalised\nbed/test/bed_0606_normalised\nbed/test/bed_0546_normalised\nbed/test/bed_0593_normalised\nbed/test/bed_0550_normalised\nbed/test/bed_0547_normalised\nbed/test/bed_0579_normalised\nbed/test/bed_0565_normalised\nbed/test/bed_0545_normalised\nbed/test/bed_0595_normalised\nbed/test/bed_0532_normalised\nbed/test/bed_0609_normalised\nbed/test/bed_0584_normalised\nbed/test/bed_0533_normalised\nbed/test/bed_0571_normalised\nbed/test/bed_0585_normalised\nbed/test/bed_0568_normalised\nbed/test/bed_0572_
normalised\nbed/test/bed_0578_normalised\nbed/test/bed_0588_normalised\nbed/test/bed_0520_normalised\nbed/test/bed_0583_normalised\nbed/test/bed_0541_normalised\nbed/test/bed_0592_normalised\nbed/test/bed_0523_normalised\nbed/test/bed_0516_normalised\nbed/test/bed_0582_normalised\nbed/test/bed_0563_normalised\nbed/test/bed_0519_normalised\nbed/test/bed_0554_normalised\nbed/test/bed_0581_normalised\nbed/test/bed_0525_normalised\nbed/test/bed_0536_normalised\nbed/test/bed_0586_normalised\nbed/test/bed_0527_normalised\nbed/test/bed_0573_normalised\nbed/test/bed_0567_normalised\nbed/test/bed_0543_normalised\nbed/test/bed_0587_normalised\nbed/test/bed_0603_normalised\nbed/test/bed_0549_normalised\nbed/test/bed_0524_normalised\nbed/test/bed_0570_normalised\nbed/test/bed_0521_normalised\nbed/test/bed_0594_normalised\nbed/test/bed_0522_normalised\nbed/test/bed_0589_normalised\nbed/test/bed_0598_normalised\nbed/test/bed_0531_normalised\nbed/test/bed_0566_normalised\nbed/test/bed_0601_normalised\nbed/test/bed_0551_normalised\nbed/test/bed_0530_normalised\nbed/test/bed_0559_normalised\nbed/test/bed_0560_normalised\nbed/test/bed_0615_normalised\nbed/test/bed_0544_normalised\nbed/test/bed_0610_normalised\nbed/test/bed_0518_normalised\nbed/test/bed_0539_normalised\nbed/test/bed_0607_normalised\nbed/test/bed_0535_normalised\nbed/test/bed_0611_normalised\nbed/test/bed_0577_normalised\nbed/test/bed_0602_normalised\nbed/test/bed_0576_normalised\nbed/test/bed_0534_normalised\nbed/test/bed_0538_normalised\nbed/test/bed_0528_normalised\nbed/test/bed_0604_normalised\nbed/test/bed_0562_normalised\nbed/test/bed_0612_normalised\nbed/test/bed_0591_normalised\nbed/test/bed_0555_normalised\nbed/test/bed_0597_normalised\nbed/test/bed_0553_normalised\nbed/test/bed_0558_normalised\nbed/test/bed_0574_normalised\nbed/test/bed_0526_normalised\nbed/test/bed_0556_normalised\nbed/test/bed_0540_normalised\nbed/test/bed_0575_normalised\nbed/test/bed_0580_normalised\nbed/test/bed_0608_normalised\nbed/test
/bed_0552_normalised\nbed/test/bed_0600_normalised\nbed/test/bed_0564_normalised\nbed/test/bed_0605_normalised\nbed/test/bed_0613_normalised\nbed/test/bed_0590_normalised\nbed/test/bed_0614_normalised\nbed/test/bed_0561_normalised\nbed/test/bed_0569_normalised\nbed/test/bed_0548_normalised\nbed/test/bed_0557_normalised\nbed/test/bed_0537_normalised\nbed/test/bed_0529_normalised\nstool/train/stool_0068_normalised\nstool/train/stool_0085_normalised\nstool/train/stool_0086_normalised\nstool/train/stool_0012_normalised\nstool/train/stool_0021_normalised\nstool/train/stool_0080_normalised\nstool/train/stool_0053_normalised\nstool/train/stool_0054_normalised\nstool/train/stool_0088_normalised\nstool/train/stool_0056_normalised\nstool/train/stool_0070_normalised\nstool/train/stool_0028_normalised\nstool/train/stool_0030_normalised\nstool/train/stool_0079_normalised\nstool/train/stool_0007_normalised\nstool/train/stool_0033_normalised\nstool/train/stool_0062_normalised\nstool/train/stool_0066_normalised\nstool/train/stool_0071_normalised\nstool/train/stool_0002_normalised\nstool/train/stool_0069_normalised\nstool/train/stool_0084_normalised\nstool/train/stool_0075_normalised\nstool/train/stool_0036_normalised\nstool/train/stool_0045_normalised\nstool/train/stool_0087_normalised\nstool/train/stool_0063_normalised\nstool/train/stool_0017_normalised\nstool/train/stool_0003_normalised\nstool/train/stool_0044_normalised\nstool/train/stool_0061_normalised\nstool/train/stool_0041_normalised\nstool/train/stool_0077_normalised\nstool/train/stool_0010_normalised\nstool/train/stool_0076_normalised\nstool/train/stool_0046_normalised\nstool/train/stool_0055_normalised\nstool/train/stool_0051_normalised\nstool/train/stool_0005_normalised\nstool/train/stool_0043_normalised\nstool/train/stool_0027_normalised\nstool/train/stool_0038_normalised\nstool/train/stool_0073_normalised\nstool/train/stool_0034_normalised\nstool/train/stool_0047_normalised\nstool/train/stool_0048_normalised\nstool/tr
ain/stool_0018_normalised\nstool/train/stool_0065_normalised\nstool/train/stool_0014_normalised\nstool/train/stool_0031_normalised\nstool/train/stool_0004_normalised\nstool/train/stool_0026_normalised\nstool/train/stool_0081_normalised\nstool/train/stool_0032_normalised\nstool/train/stool_0008_normalised\nstool/train/stool_0059_normalised\nstool/train/stool_0052_normalised\nstool/train/stool_0082_normalised\nstool/train/stool_0029_normalised\nstool/train/stool_0013_normalised\nstool/train/stool_0067_normalised\nstool/train/stool_0090_normalised\nstool/train/stool_0024_normalised\nstool/train/stool_0042_normalised\nstool/train/stool_0006_normalised\nstool/train/stool_0022_normalised\nstool/train/stool_0037_normalised\nstool/train/stool_0035_normalised\nstool/train/stool_0083_normalised\nstool/train/stool_0058_normalised\nstool/train/stool_0039_normalised\nstool/train/stool_0060_normalised\nstool/train/stool_0016_normalised\nstool/train/stool_0049_normalised\nstool/train/stool_0025_normalised\nstool/train/stool_0089_normalised\nstool/train/stool_0023_normalised\nstool/train/stool_0009_normalised\nstool/train/stool_0057_normalised\nstool/train/stool_0019_normalised\nstool/train/stool_0001_normalised\nstool/train/stool_0074_normalised\nstool/train/stool_0078_normalised\nstool/train/stool_0020_normalised\nstool/train/stool_0050_normalised\nstool/train/stool_0011_normalised\nstool/train/stool_0040_normalised\nstool/train/stool_0064_normalised\nstool/train/stool_0015_normalised\nstool/train/stool_0072_normalised\nstool/test/stool_0092_normalised\nstool/test/stool_0108_normalised\nstool/test/stool_0100_normalised\nstool/test/stool_0110_normalised\nstool/test/stool_0091_normalised\nstool/test/stool_0097_normalised\nstool/test/stool_0103_normalised\nstool/test/stool_0105_normalised\nstool/test/stool_0109_normalised\nstool/test/stool_0095_normalised\nstool/test/stool_0106_normalised\nstool/test/stool_0101_normalised\nstool/test/stool_0098_normalised\nstool/test/stool_0094_norm
alised\nstool/test/stool_0093_normalised\nstool/test/stool_0099_normalised\nstool/test/stool_0107_normalised\nstool/test/stool_0096_normalised\nstool/test/stool_0102_normalised\nstool/test/stool_0104_normalised\nperson/train/person_0042_normalised\nperson/train/person_0064_normalised\nperson/train/person_0059_normalised\nperson/train/person_0045_normalised\nperson/train/person_0014_normalised\nperson/train/person_0080_normalised\nperson/train/person_0025_normalised\nperson/train/person_0049_normalised\nperson/train/person_0085_normalised\nperson/train/person_0007_normalised\nperson/train/person_0058_normalised\nperson/train/person_0067_normalised\nperson/train/person_0047_normalised\nperson/train/person_0086_normalised\nperson/train/person_0038_normalised\nperson/train/person_0066_normalised\nperson/train/person_0037_normalised\nperson/train/person_0016_normalised\nperson/train/person_0074_normalised\nperson/train/person_0062_normalised\nperson/train/person_0072_normalised\nperson/train/person_0001_normalised\nperson/train/person_0075_normalised\nperson/train/person_0034_normalised\nperson/train/person_0020_normalised\nperson/train/person_0004_normalised\nperson/train/person_0063_normalised\nperson/train/person_0010_normalised\nperson/train/person_0008_normalised\nperson/train/person_0028_normalised\nperson/train/person_0065_normalised\nperson/train/person_0012_normalised\nperson/train/person_0030_normalised\nperson/train/person_0050_normalised\nperson/train/person_0056_normalised\nperson/train/person_0044_normalised\nperson/train/person_0051_normalised\nperson/train/person_0031_normalised\nperson/train/person_0083_normalised\nperson/train/person_0035_normalised\nperson/train/person_0057_normalised\nperson/train/person_0023_normalised\nperson/train/person_0039_normalised\nperson/train/person_0077_normalised\nperson/train/person_0053_normalised\nperson/train/person_0017_normalised\nperson/train/person_0032_normalised\nperson/train/person_0036_normalised\nperson/train
/person_0055_normalised\nperson/train/person_0076_normalised\nperson/train/person_0052_normalised\nperson/train/person_0003_normalised\nperson/train/person_0005_normalised\nperson/train/person_0011_normalised\nperson/train/person_0033_normalised\nperson/train/person_0068_normalised\nperson/train/person_0006_normalised\nperson/train/person_0013_normalised\nperson/train/person_0040_normalised\nperson/train/person_0027_normalised\nperson/train/person_0070_normalised\nperson/train/person_0078_normalised\nperson/train/person_0084_normalised\nperson/train/person_0054_normalised\nperson/train/person_0079_normalised\nperson/train/person_0019_normalised\nperson/train/person_0043_normalised\nperson/train/person_0026_normalised\nperson/train/person_0081_normalised\nperson/train/person_0024_normalised\nperson/train/person_0069_normalised\nperson/train/person_0046_normalised\nperson/train/person_0022_normalised\nperson/train/person_0018_normalised\nperson/train/person_0073_normalised\nperson/train/person_0088_normalised\nperson/train/person_0087_normalised\nperson/train/person_0060_normalised\nperson/train/person_0015_normalised\nperson/train/person_0082_normalised\nperson/train/person_0071_normalised\nperson/train/person_0061_normalised\nperson/train/person_0002_normalised\nperson/train/person_0029_normalised\nperson/train/person_0041_normalised\nperson/train/person_0048_normalised\nperson/train/person_0009_normalised\nperson/train/person_0021_normalised\nperson/test/person_0103_normalised\nperson/test/person_0099_normalised\nperson/test/person_0094_normalised\nperson/test/person_0089_normalised\nperson/test/person_0096_normalised\nperson/test/person_0092_normalised\nperson/test/person_0105_normalised\nperson/test/person_0108_normalised\nperson/test/person_0097_normalised\nperson/test/person_0098_normalised\nperson/test/person_0095_normalised\nperson/test/person_0101_normalised\nperson/test/person_0093_normalised\nperson/test/person_0091_normalised\nperson/test/person_0100_norm
alised\nperson/test/person_0104_normalised\nperson/test/person_0090_normalised\nperson/test/person_0102_normalised\nperson/test/person_0107_normalised\nperson/test/person_0106_normalised\nxbox/train/xbox_0076_normalised\nxbox/train/xbox_0056_normalised\nxbox/train/xbox_0084_normalised\nxbox/train/xbox_0038_normalised\nxbox/train/xbox_0087_normalised\nxbox/train/xbox_0008_normalised\nxbox/train/xbox_0030_normalised\nxbox/train/xbox_0017_normalised\nxbox/train/xbox_0092_normalised\nxbox/train/xbox_0020_normalised\nxbox/train/xbox_0096_normalised\nxbox/train/xbox_0025_normalised\nxbox/train/xbox_0034_normalised\nxbox/train/xbox_0055_normalised\nxbox/train/xbox_0098_normalised\nxbox/train/xbox_0004_normalised\nxbox/train/xbox_0062_normalised\nxbox/train/xbox_0095_normalised\nxbox/train/xbox_0050_normalised\nxbox/train/xbox_0019_normalised\nxbox/train/xbox_0058_normalised\nxbox/train/xbox_0026_normalised\nxbox/train/xbox_0013_normalised\nxbox/train/xbox_0072_normalised\nxbox/train/xbox_0044_normalised\nxbox/train/xbox_0073_normalised\nxbox/train/xbox_0065_normalised\nxbox/train/xbox_0033_normalised\nxbox/train/xbox_0043_normalised\nxbox/train/xbox_0060_normalised\nxbox/train/xbox_0007_normalised\nxbox/train/xbox_0089_normalised\nxbox/train/xbox_0088_normalised\nxbox/train/xbox_0036_normalised\nxbox/train/xbox_0049_normalised\nxbox/train/xbox_0077_normalised\nxbox/train/xbox_0071_normalised\nxbox/train/xbox_0091_normalised\nxbox/train/xbox_0037_normalised\nxbox/train/xbox_0075_normalised\nxbox/train/xbox_0048_normalised\nxbox/train/xbox_0045_normalised\nxbox/train/xbox_0046_normalised\nxbox/train/xbox_0021_normalised\nxbox/train/xbox_0015_normalised\nxbox/train/xbox_0028_normalised\nxbox/train/xbox_0029_normalised\nxbox/train/xbox_0064_normalised\nxbox/train/xbox_0001_normalised\nxbox/train/xbox_0014_normalised\nxbox/train/xbox_0090_normalised\nxbox/train/xbox_0052_normalised\nxbox/train/xbox_0022_normalised\nxbox/train/xbox_0018_normalised\nxbox/train/xbox_0039_normalise
d\nxbox/train/xbox_0085_normalised\nxbox/train/xbox_0070_normalised\nxbox/train/xbox_0003_normalised\nxbox/train/xbox_0009_normalised\nxbox/train/xbox_0061_normalised\nxbox/train/xbox_0006_normalised\nxbox/train/xbox_0097_normalised\nxbox/train/xbox_0066_normalised\nxbox/train/xbox_0051_normalised\nxbox/train/xbox_0032_normalised\nxbox/train/xbox_0059_normalised\nxbox/train/xbox_0023_normalised\nxbox/train/xbox_0005_normalised\nxbox/train/xbox_0035_normalised\nxbox/train/xbox_0074_normalised\nxbox/train/xbox_0103_normalised\nxbox/train/xbox_0042_normalised\nxbox/train/xbox_0079_normalised\nxbox/train/xbox_0047_normalised\nxbox/train/xbox_0102_normalised\nxbox/train/xbox_0031_normalised\nxbox/train/xbox_0053_normalised\nxbox/train/xbox_0057_normalised\nxbox/train/xbox_0002_normalised\nxbox/train/xbox_0081_normalised\nxbox/train/xbox_0094_normalised\nxbox/train/xbox_0100_normalised\nxbox/train/xbox_0099_normalised\nxbox/train/xbox_0068_normalised\nxbox/train/xbox_0086_normalised\nxbox/train/xbox_0069_normalised\nxbox/train/xbox_0067_normalised\nxbox/train/xbox_0080_normalised\nxbox/train/xbox_0024_normalised\nxbox/train/xbox_0016_normalised\nxbox/train/xbox_0010_normalised\nxbox/train/xbox_0101_normalised\nxbox/train/xbox_0012_normalised\nxbox/train/xbox_0027_normalised\nxbox/train/xbox_0041_normalised\nxbox/train/xbox_0063_normalised\nxbox/train/xbox_0040_normalised\nxbox/train/xbox_0093_normalised\nxbox/train/xbox_0082_normalised\nxbox/train/xbox_0011_normalised\nxbox/train/xbox_0083_normalised\nxbox/train/xbox_0078_normalised\nxbox/train/xbox_0054_normalised\nxbox/test/xbox_0120_normalised\nxbox/test/xbox_0109_normalised\nxbox/test/xbox_0106_normalised\nxbox/test/xbox_0112_normalised\nxbox/test/xbox_0121_normalised\nxbox/test/xbox_0113_normalised\nxbox/test/xbox_0107_normalised\nxbox/test/xbox_0108_normalised\nxbox/test/xbox_0123_normalised\nxbox/test/xbox_0116_normalised\nxbox/test/xbox_0114_normalised\nxbox/test/xbox_0118_normalised\nxbox/test/xbox_0115_normalise
d\nxbox/test/xbox_0117_normalised\nxbox/test/xbox_0105_normalised\nxbox/test/xbox_0104_normalised\nxbox/test/xbox_0119_normalised\nxbox/test/xbox_0110_normalised\nxbox/test/xbox_0111_normalised\nxbox/test/xbox_0122_normalised\nchair/train/chair_0629_normalised\nchair/train/chair_0099_normalised\nchair/train/chair_0859_normalised\nchair/train/chair_0602_normalised\nchair/train/chair_0362_normalised\nchair/train/chair_0261_normalised\nchair/train/chair_0150_normalised\nchair/train/chair_0651_normalised\nchair/train/chair_0850_normalised\nchair/train/chair_0393_normalised\nchair/train/chair_0579_normalised\nchair/train/chair_0594_normalised\nchair/train/chair_0185_normalised\nchair/train/chair_0761_normalised\nchair/train/chair_0209_normalised\nchair/train/chair_0299_normalised\nchair/train/chair_0210_normalised\nchair/train/chair_0851_normalised\nchair/train/chair_0005_normalised\nchair/train/chair_0522_normalised\nchair/train/chair_0140_normalised\nchair/train/chair_0606_normalised\nchair/train/chair_0499_normalised\nchair/train/chair_0253_normalised\nchair/train/chair_0783_normalised\nchair/train/chair_0401_normalised\nchair/train/chair_0858_normalised\nchair/train/chair_0533_normalised\nchair/train/chair_0790_normalised\nchair/train/chair_0410_normalised\nchair/train/chair_0421_normalised\nchair/train/chair_0620_normalised\nchair/train/chair_0376_normalised\nchair/train/chair_0829_normalised\nchair/train/chair_0574_normalised\nchair/train/chair_0687_normalised\nchair/train/chair_0702_normalised\nchair/train/chair_0876_normalised\nchair/train/chair_0437_normalised\nchair/train/chair_0348_normalised\nchair/train/chair_0338_normalised\nchair/train/chair_0875_normalised\nchair/train/chair_0297_normalised\nchair/train/chair_0818_normalised\nchair/train/chair_0781_normalised\nchair/train/chair_0223_normalised\nchair/train/chair_0354_normalised\nchair/train/chair_0590_normalised\nchair/train/chair_0081_normalised\nchair/train/chair_0663_normalised\nchair/train/chair_0291_
normalised\nchair/train/chair_0519_normalised\nchair/train/chair_0105_normalised\nchair/train/chair_0615_normalised\nchair/train/chair_0306_normalised\nchair/train/chair_0383_normalised\nchair/train/chair_0251_normalised\nchair/train/chair_0678_normalised\nchair/train/chair_0597_normalised\nchair/train/chair_0246_normalised\nchair/train/chair_0072_normalised\nchair/train/chair_0144_normalised\nchair/train/chair_0368_normalised\nchair/train/chair_0184_normalised\nchair/train/chair_0637_normalised\nchair/train/chair_0676_normalised\nchair/train/chair_0885_normalised\nchair/train/chair_0459_normalised\nchair/train/chair_0116_normalised\nchair/train/chair_0387_normalised\nchair/train/chair_0043_normalised\nchair/train/chair_0259_normalised\nchair/train/chair_0392_normalised\nchair/train/chair_0016_normalised\nchair/train/chair_0337_normalised\nchair/train/chair_0037_normalised\nchair/train/chair_0252_normalised\nchair/train/chair_0349_normalised\nchair/train/chair_0400_normalised\nchair/train/chair_0248_normalised\nchair/train/chair_0377_normalised\nchair/train/chair_0815_normalised\nchair/train/chair_0616_normalised\nchair/train/chair_0877_normalised\nchair/train/chair_0611_normalised\nchair/train/chair_0386_normalised\nchair/train/chair_0504_normalised\nchair/train/chair_0752_normalised\nchair/train/chair_0588_normalised\nchair/train/chair_0744_normalised\nchair/train/chair_0290_normalised\nchair/train/chair_0225_normalised\nchair/train/chair_0514_normalised\nchair/train/chair_0190_normalised\nchair/train/chair_0142_normalised\nchair/train/chair_0431_normalised\nchair/train/chair_0403_normalised\nchair/train/chair_0260_normalised\nchair/train/chair_0078_normalised\nchair/train/chair_0517_normalised\nchair/train/chair_0090_normalised\nchair/train/chair_0433_normalised\nchair/train/chair_0340_normalised\nchair/train/chair_0494_normalised\nchair/train/chair_0351_normalised\nchair/train/chair_0233_normalised\nchair/train/chair_0496_normalised\nchair/train/chair_0557_norma
lised\nchair/train/chair_0263_normalised\nchair/train/chair_0776_normalised\nchair/train/chair_0329_normalised\nchair/train/chair_0028_normalised\nchair/train/chair_0469_normalised\nchair/train/chair_0366_normalised\nchair/train/chair_0671_normalised\nchair/train/chair_0326_normalised\nchair/train/chair_0438_normalised\nchair/train/chair_0464_normalised\nchair/train/chair_0228_normalised\nchair/train/chair_0139_normalised\nchair/train/chair_0889_normalised\nchair/train/chair_0278_normalised\nchair/train/chair_0288_normalised\nchair/train/chair_0038_normalised\nchair/train/chair_0415_normalised\nchair/train/chair_0577_normalised\nchair/train/chair_0698_normalised\nchair/train/chair_0235_normalised\nchair/train/chair_0451_normalised\nchair/train/chair_0009_normalised\nchair/train/chair_0457_normalised\nchair/train/chair_0662_normalised\nchair/train/chair_0612_normalised\nchair/train/chair_0175_normalised\nchair/train/chair_0372_normalised\nchair/train/chair_0473_normalised\nchair/train/chair_0644_normalised\nchair/train/chair_0232_normalised\nchair/train/chair_0746_normalised\nchair/train/chair_0632_normalised\nchair/train/chair_0399_normalised\nchair/train/chair_0093_normalised\nchair/train/chair_0520_normalised\nchair/train/chair_0374_normalised\nchair/train/chair_0394_normalised\nchair/train/chair_0855_normalised\nchair/train/chair_0646_normalised\nchair/train/chair_0336_normalised\nchair/train/chair_0388_normalised\nchair/train/chair_0723_normalised\nchair/train/chair_0466_normalised\nchair/train/chair_0019_normalised\nchair/train/chair_0262_normalised\nchair/train/chair_0391_normalised\nchair/train/chair_0532_normalised\nchair/train/chair_0327_normalised\nchair/train/chair_0852_normalised\nchair/train/chair_0529_normalised\nchair/train/chair_0312_normalised\nchair/train/chair_0255_normalised\nchair/train/chair_0243_normalised\nchair/train/chair_0539_normalised\nchair/train/chair_0240_normalised\nchair/train/chair_0808_normalised\nchair/train/chair_0649_normalised
\nchair/train/chair_0848_normalised\nchair/train/chair_0020_normalised\nchair/train/chair_0390_normalised\nchair/train/chair_0279_normalised\nchair/train/chair_0046_normalised\nchair/train/chair_0426_normalised\nchair/train/chair_0523_normalised\nchair/train/chair_0180_normalised\nchair/train/chair_0668_normalised\nchair/train/chair_0607_normalised\nchair/train/chair_0430_normalised\nchair/train/chair_0754_normalised\nchair/train/chair_0217_normalised\nchair/train/chair_0788_normalised\nchair/train/chair_0721_normalised\nchair/train/chair_0015_normalised\nchair/train/chair_0817_normalised\nchair/train/chair_0580_normalised\nchair/train/chair_0083_normalised\nchair/train/chair_0186_normalised\nchair/train/chair_0824_normalised\nchair/train/chair_0560_normalised\nchair/train/chair_0044_normalised\nchair/train/chair_0063_normalised\nchair/train/chair_0353_normalised\nchair/train/chair_0481_normalised\nchair/train/chair_0109_normalised\nchair/train/chair_0724_normalised\nchair/train/chair_0095_normalised\nchair/train/chair_0535_normalised\nchair/train/chair_0578_normalised\nchair/train/chair_0027_normalised\nchair/train/chair_0280_normalised\nchair/train/chair_0879_normalised\nchair/train/chair_0358_normalised\nchair/train/chair_0755_normalised\nchair/train/chair_0172_normalised\nchair/train/chair_0042_normalised\nchair/train/chair_0029_normalised\nchair/train/chair_0866_normalised\nchair/train/chair_0684_normalised\nchair/train/chair_0341_normalised\nchair/train/chair_0218_normalised\nchair/train/chair_0103_normalised\nchair/train/chair_0170_normalised\nchair/train/chair_0575_normalised\nchair/train/chair_0156_normalised\nchair/train/chair_0443_normalised\nchair/train/chair_0558_normalised\nchair/train/chair_0622_normalised\nchair/train/chair_0836_normalised\nchair/train/chair_0694_normalised\nchair/train/chair_0826_normalised\nchair/train/chair_0030_normalised\nchair/train/chair_0655_normalised\nchair/train/chair_0604_normalised\nchair/train/chair_0308_normalised\ncha
ir/train/chair_0073_normalised\nchair/train/chair_0205_normalised\nchair/train/chair_0264_normalised\nchair/train/chair_0617_normalised\nchair/train/chair_0465_normalised\nchair/train/chair_0062_normalised\nchair/train/chair_0396_normalised\nchair/train/chair_0061_normalised\nchair/train/chair_0714_normalised\nchair/train/chair_0820_normalised\nchair/train/chair_0477_normalised\nchair/train/chair_0869_normalised\nchair/train/chair_0123_normalised\nchair/train/chair_0508_normalised\nchair/train/chair_0133_normalised\nchair/train/chair_0516_normalised\nchair/train/chair_0641_normalised\nchair/train/chair_0314_normalised\nchair/train/chair_0750_normalised\nchair/train/chair_0511_normalised\nchair/train/chair_0127_normalised\nchair/train/chair_0472_normalised\nchair/train/chair_0730_normalised\nchair/train/chair_0159_normalised\nchair/train/chair_0101_normalised\nchair/train/chair_0441_normalised\nchair/train/chair_0013_normalised\nchair/train/chair_0589_normalised\nchair/train/chair_0685_normalised\nchair/train/chair_0241_normalised\nchair/train/chair_0222_normalised\nchair/train/chair_0849_normalised\nchair/train/chair_0732_normalised\nchair/train/chair_0796_normalised\nchair/train/chair_0108_normalised\nchair/train/chair_0151_normalised\nchair/train/chair_0507_normalised\nchair/train/chair_0488_normalised\nchair/train/chair_0238_normalised\nchair/train/chair_0347_normalised\nchair/train/chair_0741_normalised\nchair/train/chair_0525_normalised\nchair/train/chair_0201_normalised\nchair/train/chair_0160_normalised\nchair/train/chair_0198_normalised\nchair/train/chair_0206_normalised\nchair/train/chair_0084_normalised\nchair/train/chair_0435_normalised\nchair/train/chair_0429_normalised\nchair/train/chair_0302_normalised\nchair/train/chair_0352_normalised\nchair/train/chair_0463_normalised\nchair/train/chair_0113_normalised\nchair/train/chair_0798_normalised\nchair/train/chair_0501_normalised\nchair/train/chair_0273_normalised\nchair/train/chair_0024_normalised\nchair/tr
ain/chair_0881_normalised\nchair/train/chair_0018_normalised\nchair/train/chair_0628_normalised\nchair/train/chair_0659_normalised\nchair/train/chair_0689_normalised\nchair/train/chair_0282_normalised\nchair/train/chair_0770_normalised\nchair/train/chair_0461_normalised\nchair/train/chair_0242_normalised\nchair/train/chair_0303_normalised\nchair/train/chair_0624_normalised\nchair/train/chair_0564_normalised\nchair/train/chair_0556_normalised\nchair/train/chair_0106_normalised\nchair/train/chair_0026_normalised\nchair/train/chair_0735_normalised\nchair/train/chair_0695_normalised\nchair/train/chair_0470_normalised\nchair/train/chair_0174_normalised\nchair/train/chair_0865_normalised\nchair/train/chair_0060_normalised\nchair/train/chair_0035_normalised\nchair/train/chair_0234_normalised\nchair/train/chair_0489_normalised\nchair/train/chair_0672_normalised\nchair/train/chair_0313_normalised\nchair/train/chair_0171_normalised\nchair/train/chair_0008_normalised\nchair/train/chair_0661_normalised\nchair/train/chair_0583_normalised\nchair/train/chair_0086_normalised\nchair/train/chair_0295_normalised\nchair/train/chair_0162_normalised\nchair/train/chair_0152_normalised\nchair/train/chair_0271_normalised\nchair/train/chair_0791_normalised\nchair/train/chair_0373_normalised\nchair/train/chair_0397_normalised\nchair/train/chair_0424_normalised\nchair/train/chair_0417_normalised\nchair/train/chair_0854_normalised\nchair/train/chair_0636_normalised\nchair/train/chair_0513_normalised\nchair/train/chair_0074_normalised\nchair/train/chair_0047_normalised\nchair/train/chair_0667_normalised\nchair/train/chair_0014_normalised\nchair/train/chair_0051_normalised\nchair/train/chair_0587_normalised\nchair/train/chair_0811_normalised\nchair/train/chair_0479_normalised\nchair/train/chair_0153_normalised\nchair/train/chair_0549_normalised\nchair/train/chair_0467_normalised\nchair/train/chair_0229_normalised\nchair/train/chair_0143_normalised\nchair/train/chair_0448_normalised\nchair/train/c
hair_0001_normalised\nchair/train/chair_0169_normalised\nchair/train/chair_0647_normalised\nchair/train/chair_0087_normalised\nchair/train/chair_0454_normalised\nchair/train/chair_0316_normalised\nchair/train/chair_0309_normalised\nchair/train/chair_0654_normalised\nchair/train/chair_0226_normalised\nchair/train/chair_0070_normalised\nchair/train/chair_0056_normalised\nchair/train/chair_0258_normalised\nchair/train/chair_0195_normalised\nchair/train/chair_0591_normalised\nchair/train/chair_0146_normalised\nchair/train/chair_0183_normalised\nchair/train/chair_0318_normalised\nchair/train/chair_0807_normalised\nchair/train/chair_0453_normalised\nchair/train/chair_0091_normalised\nchair/train/chair_0256_normalised\nchair/train/chair_0692_normalised\nchair/train/chair_0526_normalised\nchair/train/chair_0360_normalised\nchair/train/chair_0840_normalised\nchair/train/chair_0269_normalised\nchair/train/chair_0017_normalised\nchair/train/chair_0844_normalised\nchair/train/chair_0482_normalised\nchair/train/chair_0214_normalised\nchair/train/chair_0773_normalised\nchair/train/chair_0179_normalised\nchair/train/chair_0704_normalised\nchair/train/chair_0220_normalised\nchair/train/chair_0131_normalised\nchair/train/chair_0567_normalised\nchair/train/chair_0419_normalised\nchair/train/chair_0398_normalised\nchair/train/chair_0625_normalised\nchair/train/chair_0298_normalised\nchair/train/chair_0384_normalised\nchair/train/chair_0203_normalised\nchair/train/chair_0458_normalised\nchair/train/chair_0089_normalised\nchair/train/chair_0806_normalised\nchair/train/chair_0503_normalised\nchair/train/chair_0274_normalised\nchair/train/chair_0719_normalised\nchair/train/chair_0793_normalised\nchair/train/chair_0289_normalised\nchair/train/chair_0728_normalised\nchair/train/chair_0276_normalised\nchair/train/chair_0837_normalised\nchair/train/chair_0069_normalised\nchair/train/chair_0609_normalised\nchair/train/chair_0452_normalised\nchair/train/chair_0076_normalised\nchair/train/chair_
0800_normalised\nchair/train/chair_0192_normalised\nchair/train/chair_0187_normalised\nchair/train/chair_0096_normalised\nchair/train/chair_0592_normalised\nchair/train/chair_0593_normalised\nchair/train/chair_0082_normalised\nchair/train/chair_0231_normalised\nchair/train/chair_0199_normalised\nchair/train/chair_0371_normalised\nchair/train/chair_0792_normalised\nchair/train/chair_0335_normalised\nchair/train/chair_0771_normalised\nchair/train/chair_0285_normalised\nchair/train/chair_0827_normalised\nchair/train/chair_0713_normalised\nchair/train/chair_0102_normalised\nchair/train/chair_0521_normalised\nchair/train/chair_0639_normalised\nchair/train/chair_0870_normalised\nchair/train/chair_0500_normalised\nchair/train/chair_0007_normalised\nchair/train/chair_0816_normalised\nchair/train/chair_0884_normalised\nchair/train/chair_0710_normalised\nchair/train/chair_0370_normalised\nchair/train/chair_0460_normalised\nchair/train/chair_0766_normalised\nchair/train/chair_0789_normalised\nchair/train/chair_0653_normalised\nchair/train/chair_0058_normalised\nchair/train/chair_0708_normalised\nchair/train/chair_0471_normalised\nchair/train/chair_0440_normalised\nchair/train/chair_0334_normalised\nchair/train/chair_0355_normalised\nchair/train/chair_0425_normalised\nchair/train/chair_0427_normalised\nchair/train/chair_0666_normalised\nchair/train/chair_0545_normalised\nchair/train/chair_0733_normalised\nchair/train/chair_0322_normalised\nchair/train/chair_0332_normalised\nchair/train/chair_0534_normalised\nchair/train/chair_0088_normalised\nchair/train/chair_0600_normalised\nchair/train/chair_0033_normalised\nchair/train/chair_0822_normalised\nchair/train/chair_0640_normalised\nchair/train/chair_0565_normalised\nchair/train/chair_0795_normalised\nchair/train/chair_0204_normalised\nchair/train/chair_0227_normalised\nchair/train/chair_0328_normalised\nchair/train/chair_0527_normalised\nchair/train/chair_0167_normalised\nchair/train/chair_0841_normalised\nchair/train/chair_0857_
normalised\nchair/train/chair_0320_normalised\nchair/train/chair_0157_normalised\nchair/train/chair_0847_normalised\nchair/train/chair_0310_normalised\nchair/train/chair_0307_normalised\nchair/train/chair_0495_normalised\nchair/train/chair_0883_normalised\nchair/train/chair_0237_normalised\nchair/train/chair_0068_normalised\nchair/train/chair_0748_normalised\nchair/train/chair_0097_normalised\nchair/train/chair_0012_normalised\nchair/train/chair_0550_normalised\nchair/train/chair_0882_normalised\nchair/train/chair_0509_normalised\nchair/train/chair_0054_normalised\nchair/train/chair_0601_normalised\nchair/train/chair_0546_normalised\nchair/train/chair_0486_normalised\nchair/train/chair_0753_normalised\nchair/train/chair_0100_normalised\nchair/train/chair_0701_normalised\nchair/train/chair_0420_normalised\nchair/train/chair_0305_normalised\nchair/train/chair_0809_normalised\nchair/train/chair_0128_normalised\nchair/train/chair_0277_normalised\nchair/train/chair_0480_normalised\nchair/train/chair_0779_normalised\nchair/train/chair_0468_normalised\nchair/train/chair_0518_normalised\nchair/train/chair_0369_normalised\nchair/train/chair_0768_normalised\nchair/train/chair_0738_normalised\nchair/train/chair_0098_normalised\nchair/train/chair_0135_normalised\nchair/train/chair_0691_normalised\nchair/train/chair_0445_normalised\nchair/train/chair_0212_normalised\nchair/train/chair_0561_normalised\nchair/train/chair_0734_normalised\nchair/train/chair_0104_normalised\nchair/train/chair_0404_normalised\nchair/train/chair_0803_normalised\nchair/train/chair_0439_normalised\nchair/train/chair_0812_normalised\nchair/train/chair_0365_normalised\nchair/train/chair_0860_normalised\nchair/train/chair_0774_normalised\nchair/train/chair_0436_normalised\nchair/train/chair_0036_normalised\nchair/train/chair_0618_normalised\nchair/train/chair_0745_normalised\nchair/train/chair_0207_normalised\nchair/train/chair_0638_normalised\nchair/train/chair_0342_normalised\nchair/train/chair_0787_norma
lised\nchair/train/chair_0887_normalised\nchair/train/chair_0506_normalised\nchair/train/chair_0188_normalised\nchair/train/chair_0447_normalised\nchair/train/chair_0270_normalised\nchair/train/chair_0756_normalised\nchair/train/chair_0677_normalised\nchair/train/chair_0408_normalised\nchair/train/chair_0208_normalised\nchair/train/chair_0442_normalised\nchair/train/chair_0835_normalised\nchair/train/chair_0598_normalised\nchair/train/chair_0130_normalised\nchair/train/chair_0196_normalised\nchair/train/chair_0221_normalised\nchair/train/chair_0155_normalised\nchair/train/chair_0287_normalised\nchair/train/chair_0797_normalised\nchair/train/chair_0572_normalised\nchair/train/chair_0003_normalised\nchair/train/chair_0658_normalised\nchair/train/chair_0711_normalised\nchair/train/chair_0548_normalised\nchair/train/chair_0634_normalised\nchair/train/chair_0483_normalised\nchair/train/chair_0823_normalised\nchair/train/chair_0414_normalised\nchair/train/chair_0552_normalised\nchair/train/chair_0568_normalised\nchair/train/chair_0530_normalised\nchair/train/chair_0541_normalised\nchair/train/chair_0751_normalised\nchair/train/chair_0474_normalised\nchair/train/chair_0832_normalised\nchair/train/chair_0434_normalised\nchair/train/chair_0147_normalised\nchair/train/chair_0720_normalised\nchair/train/chair_0149_normalised\nchair/train/chair_0510_normalised\nchair/train/chair_0177_normalised\nchair/train/chair_0537_normalised\nchair/train/chair_0428_normalised\nchair/train/chair_0121_normalised\nchair/train/chair_0163_normalised\nchair/train/chair_0543_normalised\nchair/train/chair_0886_normalised\nchair/train/chair_0512_normalised\nchair/train/chair_0838_normalised\nchair/train/chair_0758_normalised\nchair/train/chair_0762_normalised\nchair/train/chair_0742_normalised\nchair/train/chair_0048_normalised\nchair/train/chair_0266_normalised\nchair/train/chair_0554_normalised\nchair/train/chair_0178_normalised\nchair/train/chair_0344_normalised\nchair/train/chair_0379_normalised
\nchair/train/chair_0164_normalised\nchair/train/chair_0551_normalised\nchair/train/chair_0039_normalised\nchair/train/chair_0491_normalised\nchair/train/chair_0717_normalised\nchair/train/chair_0418_normalised\nchair/train/chair_0834_normalised\nchair/train/chair_0880_normalised\nchair/train/chair_0819_normalised\nchair/train/chair_0378_normalised\nchair/train/chair_0562_normalised\nchair/train/chair_0114_normalised\nchair/train/chair_0784_normalised\nchair/train/chair_0528_normalised\nchair/train/chair_0120_normalised\nchair/train/chair_0767_normalised\nchair/train/chair_0077_normalised\nchair/train/chair_0216_normalised\nchair/train/chair_0006_normalised\nchair/train/chair_0333_normalised\nchair/train/chair_0536_normalised\nchair/train/chair_0524_normalised\nchair/train/chair_0323_normalised\nchair/train/chair_0361_normalised\nchair/train/chair_0193_normalised\nchair/train/chair_0122_normalised\nchair/train/chair_0079_normalised\nchair/train/chair_0065_normalised\nchair/train/chair_0700_normalised\nchair/train/chair_0669_normalised\nchair/train/chair_0760_normalised\nchair/train/chair_0802_normalised\nchair/train/chair_0141_normalised\nchair/train/chair_0389_normalised\nchair/train/chair_0595_normalised\nchair/train/chair_0265_normalised\nchair/train/chair_0555_normalised\nchair/train/chair_0747_normalised\nchair/train/chair_0825_normalised\nchair/train/chair_0423_normalised\nchair/train/chair_0244_normalised\nchair/train/chair_0581_normalised\nchair/train/chair_0416_normalised\nchair/train/chair_0782_normalised\nchair/train/chair_0656_normalised\nchair/train/chair_0004_normalised\nchair/train/chair_0821_normalised\nchair/train/chair_0861_normalised\nchair/train/chair_0189_normalised\nchair/train/chair_0540_normalised\nchair/train/chair_0346_normalised\nchair/train/chair_0045_normalised\nchair/train/chair_0868_normalised\nchair/train/chair_0475_normalised\nchair/train/chair_0213_normalised\nchair/train/chair_0124_normalised\nchair/train/chair_0531_normalised\ncha
ir/train/chair_0080_normalised\nchair/train/chair_0422_normalised\nchair/train/chair_0066_normalised\nchair/train/chair_0191_normalised\nchair/train/chair_0853_normalised\nchair/train/chair_0703_normalised\nchair/train/chair_0722_normalised\nchair/train/chair_0092_normalised\nchair/train/chair_0161_normalised\nchair/train/chair_0117_normalised\nchair/train/chair_0757_normalised\nchair/train/chair_0134_normalised\nchair/train/chair_0635_normalised\nchair/train/chair_0502_normalised\nchair/train/chair_0450_normalised\nchair/train/chair_0680_normalised\nchair/train/chair_0411_normalised\nchair/train/chair_0067_normalised\nchair/train/chair_0584_normalised\nchair/train/chair_0432_normalised\nchair/train/chair_0573_normalised\nchair/train/chair_0094_normalised\nchair/train/chair_0842_normalised\nchair/train/chair_0737_normalised\nchair/train/chair_0367_normalised\nchair/train/chair_0158_normalised\nchair/train/chair_0296_normalised\nchair/train/chair_0786_normalised\nchair/train/chair_0538_normalised\nchair/train/chair_0449_normalised\nchair/train/chair_0219_normalised\nchair/train/chair_0645_normalised\nchair/train/chair_0707_normalised\nchair/train/chair_0743_normalised\nchair/train/chair_0022_normalised\nchair/train/chair_0268_normalised\nchair/train/chair_0686_normalised\nchair/train/chair_0176_normalised\nchair/train/chair_0239_normalised\nchair/train/chair_0110_normalised\nchair/train/chair_0633_normalised\nchair/train/chair_0490_normalised\nchair/train/chair_0126_normalised\nchair/train/chair_0874_normalised\nchair/train/chair_0780_normalised\nchair/train/chair_0785_normalised\nchair/train/chair_0563_normalised\nchair/train/chair_0833_normalised\nchair/train/chair_0614_normalised\nchair/train/chair_0505_normalised\nchair/train/chair_0843_normalised\nchair/train/chair_0648_normalised\nchair/train/chair_0331_normalised\nchair/train/chair_0215_normalised\nchair/train/chair_0544_normalised\nchair/train/chair_0025_normalised\nchair/train/chair_0856_normalised\nchair/tr
ain/chair_0049_normalised\nchair/train/chair_0631_normalised\nchair/train/chair_0119_normalised\nchair/train/chair_0888_normalised\nchair/train/chair_0382_normalised\nchair/train/chair_0395_normalised\nchair/train/chair_0317_normalised\nchair/train/chair_0846_normalised\nchair/train/chair_0569_normalised\nchair/train/chair_0726_normalised\nchair/train/chair_0444_normalised\nchair/train/chair_0696_normalised\nchair/train/chair_0727_normalised\nchair/train/chair_0810_normalised\nchair/train/chair_0660_normalised\nchair/train/chair_0381_normalised\nchair/train/chair_0492_normalised\nchair/train/chair_0002_normalised\nchair/train/chair_0799_normalised\nchair/train/chair_0173_normalised\nchair/train/chair_0319_normalised\nchair/train/chair_0409_normalised\nchair/train/chair_0357_normalised\nchair/train/chair_0621_normalised\nchair/train/chair_0830_normalised\nchair/train/chair_0679_normalised\nchair/train/chair_0740_normalised\nchair/train/chair_0202_normalised\nchair/train/chair_0111_normalised\nchair/train/chair_0284_normalised\nchair/train/chair_0129_normalised\nchair/train/chair_0032_normalised\nchair/train/chair_0627_normalised\nchair/train/chair_0498_normalised\nchair/train/chair_0281_normalised\nchair/train/chair_0497_normalised\nchair/train/chair_0630_normalised\nchair/train/chair_0311_normalised\nchair/train/chair_0197_normalised\nchair/train/chair_0878_normalised\nchair/train/chair_0665_normalised\nchair/train/chair_0115_normalised\nchair/train/chair_0688_normalised\nchair/train/chair_0375_normalised\nchair/train/chair_0053_normalised\nchair/train/chair_0055_normalised\nchair/train/chair_0286_normalised\nchair/train/chair_0585_normalised\nchair/train/chair_0455_normalised\nchair/train/chair_0828_normalised\nchair/train/chair_0112_normalised\nchair/train/chair_0716_normalised\nchair/train/chair_0670_normalised\nchair/train/chair_0247_normalised\nchair/train/chair_0254_normalised\nchair/train/chair_0769_normalised\nchair/train/chair_0324_normalised\nchair/train/c
hair_0873_normalised\nchair/train/chair_0775_normalised\nchair/train/chair_0586_normalised\nchair/train/chair_0619_normalised\nchair/train/chair_0559_normalised\nchair/train/chair_0863_normalised\nchair/train/chair_0293_normalised\nchair/train/chair_0613_normalised\nchair/train/chair_0831_normalised\nchair/train/chair_0057_normalised\nchair/train/chair_0736_normalised\nchair/train/chair_0011_normalised\nchair/train/chair_0759_normalised\nchair/train/chair_0674_normalised\nchair/train/chair_0642_normalised\nchair/train/chair_0813_normalised\nchair/train/chair_0034_normalised\nchair/train/chair_0211_normalised\nchair/train/chair_0570_normalised\nchair/train/chair_0118_normalised\nchair/train/chair_0675_normalised\nchair/train/chair_0683_normalised\nchair/train/chair_0553_normalised\nchair/train/chair_0085_normalised\nchair/train/chair_0462_normalised\nchair/train/chair_0699_normalised\nchair/train/chair_0138_normalised\nchair/train/chair_0245_normalised\nchair/train/chair_0052_normalised\nchair/train/chair_0712_normalised\nchair/train/chair_0697_normalised\nchair/train/chair_0867_normalised\nchair/train/chair_0596_normalised\nchair/train/chair_0485_normalised\nchair/train/chair_0257_normalised\nchair/train/chair_0801_normalised\nchair/train/chair_0693_normalised\nchair/train/chair_0023_normalised\nchair/train/chair_0731_normalised\nchair/train/chair_0566_normalised\nchair/train/chair_0194_normalised\nchair/train/chair_0643_normalised\nchair/train/chair_0325_normalised\nchair/train/chair_0582_normalised\nchair/train/chair_0814_normalised\nchair/train/chair_0657_normalised\nchair/train/chair_0075_normalised\nchair/train/chair_0839_normalised\nchair/train/chair_0652_normalised\nchair/train/chair_0872_normalised\nchair/train/chair_0605_normalised\nchair/train/chair_0706_normalised\nchair/train/chair_0739_normalised\nchair/train/chair_0343_normalised\nchair/train/chair_0542_normalised\nchair/train/chair_0402_normalised\nchair/train/chair_0764_normalised\nchair/train/chair_
0339_normalised\nchair/train/chair_0267_normalised\nchair/train/chair_0603_normalised\nchair/train/chair_0547_normalised\nchair/train/chair_0608_normalised\nchair/train/chair_0040_normalised\nchair/train/chair_0690_normalised\nchair/train/chair_0132_normalised\nchair/train/chair_0345_normalised\nchair/train/chair_0107_normalised\nchair/train/chair_0405_normalised\nchair/train/chair_0064_normalised\nchair/train/chair_0673_normalised\nchair/train/chair_0749_normalised\nchair/train/chair_0794_normalised\nchair/train/chair_0871_normalised\nchair/train/chair_0484_normalised\nchair/train/chair_0413_normalised\nchair/train/chair_0778_normalised\nchair/train/chair_0385_normalised\nchair/train/chair_0493_normalised\nchair/train/chair_0571_normalised\nchair/train/chair_0315_normalised\nchair/train/chair_0041_normalised\nchair/train/chair_0446_normalised\nchair/train/chair_0350_normalised\nchair/train/chair_0145_normalised\nchair/train/chair_0705_normalised\nchair/train/chair_0154_normalised\nchair/train/chair_0363_normalised\nchair/train/chair_0845_normalised\nchair/train/chair_0230_normalised\nchair/train/chair_0137_normalised\nchair/train/chair_0200_normalised\nchair/train/chair_0359_normalised\nchair/train/chair_0478_normalised\nchair/train/chair_0456_normalised\nchair/train/chair_0182_normalised\nchair/train/chair_0626_normalised\nchair/train/chair_0576_normalised\nchair/train/chair_0380_normalised\nchair/train/chair_0772_normalised\nchair/train/chair_0250_normalised\nchair/train/chair_0487_normalised\nchair/train/chair_0715_normalised\nchair/train/chair_0224_normalised\nchair/train/chair_0862_normalised\nchair/train/chair_0300_normalised\nchair/train/chair_0071_normalised\nchair/train/chair_0864_normalised\nchair/train/chair_0031_normalised\nchair/train/chair_0599_normalised\nchair/train/chair_0292_normalised\nchair/train/chair_0805_normalised\nchair/train/chair_0729_normalised\nchair/train/chair_0283_normalised\nchair/train/chair_0623_normalised\nchair/train/chair_0166_
normalised\nchair/train/chair_0412_normalised\nchair/train/chair_0763_normalised\nchair/train/chair_0515_normalised\nchair/train/chair_0125_normalised\nchair/train/chair_0249_normalised\nchair/train/chair_0709_normalised\nchair/train/chair_0050_normalised\nchair/train/chair_0664_normalised\nchair/train/chair_0168_normalised\nchair/train/chair_0725_normalised\nchair/train/chair_0804_normalised\nchair/train/chair_0364_normalised\nchair/train/chair_0681_normalised\nchair/train/chair_0021_normalised\nchair/train/chair_0010_normalised\nchair/train/chair_0765_normalised\nchair/train/chair_0610_normalised\nchair/train/chair_0165_normalised\nchair/train/chair_0148_normalised\nchair/train/chair_0356_normalised\nchair/train/chair_0181_normalised\nchair/train/chair_0682_normalised\nchair/train/chair_0294_normalised\nchair/train/chair_0059_normalised\nchair/train/chair_0236_normalised\nchair/train/chair_0275_normalised\nchair/train/chair_0272_normalised\nchair/train/chair_0330_normalised\nchair/train/chair_0304_normalised\nchair/train/chair_0718_normalised\nchair/train/chair_0136_normalised\nchair/train/chair_0301_normalised\nchair/train/chair_0407_normalised\nchair/train/chair_0406_normalised\nchair/train/chair_0476_normalised\nchair/train/chair_0650_normalised\nchair/train/chair_0321_normalised\nchair/train/chair_0777_normalised\nchair/test/chair_0916_normalised\nchair/test/chair_0900_normalised\nchair/test/chair_0984_normalised\nchair/test/chair_0983_normalised\nchair/test/chair_0937_normalised\nchair/test/chair_0964_normalised\nchair/test/chair_0914_normalised\nchair/test/chair_0891_normalised\nchair/test/chair_0976_normalised\nchair/test/chair_0910_normalised\nchair/test/chair_0924_normalised\nchair/test/chair_0929_normalised\nchair/test/chair_0930_normalised\nchair/test/chair_0949_normalised\nchair/test/chair_0927_normalised\nchair/test/chair_0911_normalised\nchair/test/chair_0973_normalised\nchair/test/chair_0987_normalised\nchair/test/chair_0915_normalised\nchair/test/c
hair_0902_normalised\nchair/test/chair_0942_normalised\nchair/test/chair_0953_normalised\nchair/test/chair_0892_normalised\nchair/test/chair_0969_normalised\nchair/test/chair_0962_normalised\nchair/test/chair_0977_normalised\nchair/test/chair_0982_normalised\nchair/test/chair_0901_normalised\nchair/test/chair_0960_normalised\nchair/test/chair_0957_normalised\nchair/test/chair_0978_normalised\nchair/test/chair_0904_normalised\nchair/test/chair_0968_normalised\nchair/test/chair_0913_normalised\nchair/test/chair_0938_normalised\nchair/test/chair_0945_normalised\nchair/test/chair_0925_normalised\nchair/test/chair_0931_normalised\nchair/test/chair_0988_normalised\nchair/test/chair_0932_normalised\nchair/test/chair_0897_normalised\nchair/test/chair_0941_normalised\nchair/test/chair_0975_normalised\nchair/test/chair_0952_normalised\nchair/test/chair_0919_normalised\nchair/test/chair_0922_normalised\nchair/test/chair_0917_normalised\nchair/test/chair_0940_normalised\nchair/test/chair_0943_normalised\nchair/test/chair_0906_normalised\nchair/test/chair_0989_normalised\nchair/test/chair_0944_normalised\nchair/test/chair_0903_normalised\nchair/test/chair_0979_normalised\nchair/test/chair_0939_normalised\nchair/test/chair_0933_normalised\nchair/test/chair_0980_normalised\nchair/test/chair_0967_normalised\nchair/test/chair_0965_normalised\nchair/test/chair_0986_normalised\nchair/test/chair_0934_normalised\nchair/test/chair_0890_normalised\nchair/test/chair_0955_normalised\nchair/test/chair_0966_normalised\nchair/test/chair_0895_normalised\nchair/test/chair_0899_normalised\nchair/test/chair_0896_normalised\nchair/test/chair_0981_normalised\nchair/test/chair_0971_normalised\nchair/test/chair_0926_normalised\nchair/test/chair_0909_normalised\nchair/test/chair_0970_normalised\nchair/test/chair_0920_normalised\nchair/test/chair_0961_normalised\nchair/test/chair_0921_normalised\nchair/test/chair_0948_normalised\nchair/test/chair_0908_normalised\nchair/test/chair_0898_normalised\nchair/
test/chair_0905_normalised\nchair/test/chair_0918_normalised\nchair/test/chair_0985_normalised\nchair/test/chair_0972_normalised\nchair/test/chair_0935_normalised\nchair/test/chair_0958_normalised\nchair/test/chair_0946_normalised\nchair/test/chair_0907_normalised\nchair/test/chair_0936_normalised\nchair/test/chair_0923_normalised\nchair/test/chair_0959_normalised\nchair/test/chair_0963_normalised\nchair/test/chair_0893_normalised\nchair/test/chair_0947_normalised\nchair/test/chair_0954_normalised\nchair/test/chair_0956_normalised\nchair/test/chair_0951_normalised\nchair/test/chair_0912_normalised\nchair/test/chair_0950_normalised\nchair/test/chair_0974_normalised\nchair/test/chair_0928_normalised\nchair/test/chair_0894_normalised\nflower_pot/train/flower_pot_0089_normalised\nflower_pot/train/flower_pot_0101_normalised\nflower_pot/train/flower_pot_0030_normalised\nflower_pot/train/flower_pot_0145_normalised\nflower_pot/train/flower_pot_0116_normalised\nflower_pot/train/flower_pot_0091_normalised\nflower_pot/train/flower_pot_0029_normalised\nflower_pot/train/flower_pot_0141_normalised\nflower_pot/train/flower_pot_0121_normalised\nflower_pot/train/flower_pot_0069_normalised\nflower_pot/train/flower_pot_0045_normalised\nflower_pot/train/flower_pot_0126_normalised\nflower_pot/train/flower_pot_0084_normalised\nflower_pot/train/flower_pot_0088_normalised\nflower_pot/train/flower_pot_0100_normalised\nflower_pot/train/flower_pot_0093_normalised\nflower_pot/train/flower_pot_0010_normalised\nflower_pot/train/flower_pot_0044_normalised\nflower_pot/train/flower_pot_0039_normalised\nflower_pot/train/flower_pot_0122_normalised\nflower_pot/train/flower_pot_0078_normalised\nflower_pot/train/flower_pot_0142_normalised\nflower_pot/train/flower_pot_0017_normalised\nflower_pot/train/flower_pot_0066_normalised\nflower_pot/train/flower_pot_0071_normalised\nflower_pot/train/flower_pot_0132_normalised\nflower_pot/train/flower_pot_0027_normalised\nflower_pot/train/flower_pot_0092_normalised
\nflower_pot/train/flower_pot_0035_normalised\nflower_pot/train/flower_pot_0009_normalised\nflower_pot/train/flower_pot_0137_normalised\nflower_pot/train/flower_pot_0083_normalised\nflower_pot/train/flower_pot_0001_normalised\nflower_pot/train/flower_pot_0149_normalised\nflower_pot/train/flower_pot_0085_normalised\nflower_pot/train/flower_pot_0086_normalised\nflower_pot/train/flower_pot_0074_normalised\nflower_pot/train/flower_pot_0038_normalised\nflower_pot/train/flower_pot_0081_normalised\nflower_pot/train/flower_pot_0131_normalised\nflower_pot/train/flower_pot_0063_normalised\nflower_pot/train/flower_pot_0095_normalised\nflower_pot/train/flower_pot_0065_normalised\nflower_pot/train/flower_pot_0060_normalised\nflower_pot/train/flower_pot_0013_normalised\nflower_pot/train/flower_pot_0053_normalised\nflower_pot/train/flower_pot_0068_normalised\nflower_pot/train/flower_pot_0124_normalised\nflower_pot/train/flower_pot_0052_normalised\nflower_pot/train/flower_pot_0070_normalised\nflower_pot/train/flower_pot_0006_normalised\nflower_pot/train/flower_pot_0075_normalised\nflower_pot/train/flower_pot_0087_normalised\nflower_pot/train/flower_pot_0096_normalised\nflower_pot/train/flower_pot_0080_normalised\nflower_pot/train/flower_pot_0057_normalised\nflower_pot/train/flower_pot_0012_normalised\nflower_pot/train/flower_pot_0133_normalised\nflower_pot/train/flower_pot_0072_normalised\nflower_pot/train/flower_pot_0011_normalised\nflower_pot/train/flower_pot_0105_normalised\nflower_pot/train/flower_pot_0028_normalised\nflower_pot/train/flower_pot_0008_normalised\nflower_pot/train/flower_pot_0062_normalised\nflower_pot/train/flower_pot_0049_normalised\nflower_pot/train/flower_pot_0021_normalised\nflower_pot/train/flower_pot_0031_normalised\nflower_pot/train/flower_pot_0090_normalised\nflower_pot/train/flower_pot_0067_normalised\nflower_pot/train/flower_pot_0102_normalised\nflower_pot/train/flower_pot_0033_normalised\nflower_pot/train/flower_pot_0016_normalised\nflower_pot/train/f
lower_pot_0111_normalised\nflower_pot/train/flower_pot_0043_normalised\nflower_pot/train/flower_pot_0004_normalised\nflower_pot/train/flower_pot_0002_normalised\nflower_pot/train/flower_pot_0104_normalised\nflower_pot/train/flower_pot_0019_normalised\nflower_pot/train/flower_pot_0036_normalised\nflower_pot/train/flower_pot_0128_normalised\nflower_pot/train/flower_pot_0056_normalised\nflower_pot/train/flower_pot_0115_normalised\nflower_pot/train/flower_pot_0106_normalised\nflower_pot/train/flower_pot_0134_normalised\nflower_pot/train/flower_pot_0146_normalised\nflower_pot/train/flower_pot_0144_normalised\nflower_pot/train/flower_pot_0003_normalised\nflower_pot/train/flower_pot_0079_normalised\nflower_pot/train/flower_pot_0048_normalised\nflower_pot/train/flower_pot_0034_normalised\nflower_pot/train/flower_pot_0107_normalised\nflower_pot/train/flower_pot_0059_normalised\nflower_pot/train/flower_pot_0007_normalised\nflower_pot/train/flower_pot_0076_normalised\nflower_pot/train/flower_pot_0136_normalised\nflower_pot/train/flower_pot_0051_normalised\nflower_pot/train/flower_pot_0098_normalised\nflower_pot/train/flower_pot_0118_normalised\nflower_pot/train/flower_pot_0073_normalised\nflower_pot/train/flower_pot_0108_normalised\nflower_pot/train/flower_pot_0109_normalised\nflower_pot/train/flower_pot_0129_normalised\nflower_pot/train/flower_pot_0050_normalised\nflower_pot/train/flower_pot_0026_normalised\nflower_pot/train/flower_pot_0112_normalised\nflower_pot/train/flower_pot_0018_normalised\nflower_pot/train/flower_pot_0041_normalised\nflower_pot/train/flower_pot_0140_normalised\nflower_pot/train/flower_pot_0054_normalised\nflower_pot/train/flower_pot_0032_normalised\nflower_pot/train/flower_pot_0061_normalised\nflower_pot/train/flower_pot_0135_normalised\nflower_pot/train/flower_pot_0046_normalised\nflower_pot/train/flower_pot_0103_normalised\nflower_pot/train/flower_pot_0082_normalised\nflower_pot/train/flower_pot_0024_normalised\nflower_pot/train/flower_pot_0025_norma
lised\nflower_pot/train/flower_pot_0120_normalised\nflower_pot/train/flower_pot_0097_normalised\nflower_pot/train/flower_pot_0014_normalised\nflower_pot/train/flower_pot_0119_normalised\nflower_pot/train/flower_pot_0015_normalised\nflower_pot/train/flower_pot_0147_normalised\nflower_pot/train/flower_pot_0148_normalised\nflower_pot/train/flower_pot_0064_normalised\nflower_pot/train/flower_pot_0055_normalised\nflower_pot/train/flower_pot_0099_normalised\nflower_pot/train/flower_pot_0094_normalised\nflower_pot/train/flower_pot_0127_normalised\nflower_pot/train/flower_pot_0139_normalised\nflower_pot/train/flower_pot_0040_normalised\nflower_pot/train/flower_pot_0138_normalised\nflower_pot/train/flower_pot_0113_normalised\nflower_pot/train/flower_pot_0077_normalised\nflower_pot/train/flower_pot_0058_normalised\nflower_pot/train/flower_pot_0117_normalised\nflower_pot/train/flower_pot_0005_normalised\nflower_pot/train/flower_pot_0037_normalised\nflower_pot/train/flower_pot_0110_normalised\nflower_pot/train/flower_pot_0125_normalised\nflower_pot/train/flower_pot_0020_normalised\nflower_pot/train/flower_pot_0123_normalised\nflower_pot/train/flower_pot_0047_normalised\nflower_pot/train/flower_pot_0143_normalised\nflower_pot/train/flower_pot_0042_normalised\nflower_pot/train/flower_pot_0114_normalised\nflower_pot/train/flower_pot_0023_normalised\nflower_pot/train/flower_pot_0022_normalised\nflower_pot/train/flower_pot_0130_normalised\nflower_pot/test/flower_pot_0156_normalised\nflower_pot/test/flower_pot_0157_normalised\nflower_pot/test/flower_pot_0166_normalised\nflower_pot/test/flower_pot_0160_normalised\nflower_pot/test/flower_pot_0161_normalised\nflower_pot/test/flower_pot_0154_normalised\nflower_pot/test/flower_pot_0151_normalised\nflower_pot/test/flower_pot_0163_normalised\nflower_pot/test/flower_pot_0165_normalised\nflower_pot/test/flower_pot_0164_normalised\nflower_pot/test/flower_pot_0150_normalised\nflower_pot/test/flower_pot_0155_normalised\nflower_pot/test/flower_po
t_0168_normalised\nflower_pot/test/flower_pot_0167_normalised\nflower_pot/test/flower_pot_0169_normalised\nflower_pot/test/flower_pot_0153_normalised\nflower_pot/test/flower_pot_0158_normalised\nflower_pot/test/flower_pot_0159_normalised\nflower_pot/test/flower_pot_0162_normalised\nflower_pot/test/flower_pot_0152_normalised\ntoilet/train/toilet_0209_normalised\ntoilet/train/toilet_0081_normalised\ntoilet/train/toilet_0181_normalised\ntoilet/train/toilet_0095_normalised\ntoilet/train/toilet_0032_normalised\ntoilet/train/toilet_0062_normalised\ntoilet/train/toilet_0106_normalised\ntoilet/train/toilet_0094_normalised\ntoilet/train/toilet_0053_normalised\ntoilet/train/toilet_0282_normalised\ntoilet/train/toilet_0025_normalised\ntoilet/train/toilet_0242_normalised\ntoilet/train/toilet_0196_normalised\ntoilet/train/toilet_0015_normalised\ntoilet/train/toilet_0008_normalised\ntoilet/train/toilet_0140_normalised\ntoilet/train/toilet_0195_normalised\ntoilet/train/toilet_0299_normalised\ntoilet/train/toilet_0250_normalised\ntoilet/train/toilet_0215_normalised\ntoilet/train/toilet_0076_normalised\ntoilet/train/toilet_0338_normalised\ntoilet/train/toilet_0017_normalised\ntoilet/train/toilet_0026_normalised\ntoilet/train/toilet_0084_normalised\ntoilet/train/toilet_0126_normalised\ntoilet/train/toilet_0247_normalised\ntoilet/train/toilet_0079_normalised\ntoilet/train/toilet_0306_normalised\ntoilet/train/toilet_0231_normalised\ntoilet/train/toilet_0204_normalised\ntoilet/train/toilet_0260_normalised\ntoilet/train/toilet_0336_normalised\ntoilet/train/toilet_0002_normalised\ntoilet/train/toilet_0030_normalised\ntoilet/train/toilet_0009_normalised\ntoilet/train/toilet_0125_normalised\ntoilet/train/toilet_0280_normalised\ntoilet/train/toilet_0266_normalised\ntoilet/train/toilet_0274_normalised\ntoilet/train/toilet_0043_normalised\ntoilet/train/toilet_0185_normalised\ntoilet/train/toilet_0326_normalised\ntoilet/train/toilet_0277_normalised\ntoilet/train/toilet_0292_normalised\ntoilet/t
rain/toilet_0310_normalised\ntoilet/train/toilet_0198_normalised\ntoilet/train/toilet_0205_normalised\ntoilet/train/toilet_0093_normalised\ntoilet/train/toilet_0138_normalised\ntoilet/train/toilet_0044_normalised\ntoilet/train/toilet_0199_normalised\ntoilet/train/toilet_0163_normalised\ntoilet/train/toilet_0201_normalised\ntoilet/train/toilet_0295_normalised\ntoilet/train/toilet_0089_normalised\ntoilet/train/toilet_0134_normalised\ntoilet/train/toilet_0021_normalised\ntoilet/train/toilet_0234_normalised\ntoilet/train/toilet_0080_normalised\ntoilet/train/toilet_0165_normalised\ntoilet/train/toilet_0133_normalised\ntoilet/train/toilet_0272_normalised\ntoilet/train/toilet_0171_normalised\ntoilet/train/toilet_0259_normalised\ntoilet/train/toilet_0136_normalised\ntoilet/train/toilet_0064_normalised\ntoilet/train/toilet_0186_normalised\ntoilet/train/toilet_0283_normalised\ntoilet/train/toilet_0323_normalised\ntoilet/train/toilet_0219_normalised\ntoilet/train/toilet_0342_normalised\ntoilet/train/toilet_0311_normalised\ntoilet/train/toilet_0039_normalised\ntoilet/train/toilet_0168_normalised\ntoilet/train/toilet_0031_normalised\ntoilet/train/toilet_0013_normalised\ntoilet/train/toilet_0285_normalised\ntoilet/train/toilet_0246_normalised\ntoilet/train/toilet_0343_normalised\ntoilet/train/toilet_0091_normalised\ntoilet/train/toilet_0287_normalised\ntoilet/train/toilet_0249_normalised\ntoilet/train/toilet_0301_normalised\ntoilet/train/toilet_0257_normalised\ntoilet/train/toilet_0232_normalised\ntoilet/train/toilet_0069_normalised\ntoilet/train/toilet_0220_normalised\ntoilet/train/toilet_0121_normalised\ntoilet/train/toilet_0010_normalised\ntoilet/train/toilet_0120_normalised\ntoilet/train/toilet_0300_normalised\ntoilet/train/toilet_0038_normalised\ntoilet/train/toilet_0238_normalised\ntoilet/train/toilet_0308_normalised\ntoilet/train/toilet_0154_normalised\ntoilet/train/toilet_0132_normalised\ntoilet/train/toilet_0035_normalised\ntoilet/train/toilet_0214_normalised\ntoilet/tra
in/toilet_0271_normalised\ntoilet/train/toilet_0221_normalised\ntoilet/train/toilet_0110_normalised\ntoilet/train/toilet_0122_normalised\ntoilet/train/toilet_0131_normalised\ntoilet/train/toilet_0243_normalised\ntoilet/train/toilet_0335_normalised\ntoilet/train/toilet_0296_normalised\ntoilet/train/toilet_0135_normalised\ntoilet/train/toilet_0114_normalised\ntoilet/train/toilet_0085_normalised\ntoilet/train/toilet_0078_normalised\ntoilet/train/toilet_0083_normalised\ntoilet/train/toilet_0222_normalised\ntoilet/train/toilet_0048_normalised\ntoilet/train/toilet_0228_normalised\ntoilet/train/toilet_0029_normalised\ntoilet/train/toilet_0184_normalised\ntoilet/train/toilet_0158_normalised\ntoilet/train/toilet_0146_normalised\ntoilet/train/toilet_0004_normalised\ntoilet/train/toilet_0202_normalised\ntoilet/train/toilet_0318_normalised\ntoilet/train/toilet_0177_normalised\ntoilet/train/toilet_0203_normalised\ntoilet/train/toilet_0067_normalised\ntoilet/train/toilet_0124_normalised\ntoilet/train/toilet_0273_normalised\ntoilet/train/toilet_0019_normalised\ntoilet/train/toilet_0276_normalised\ntoilet/train/toilet_0049_normalised\ntoilet/train/toilet_0041_normalised\ntoilet/train/toilet_0328_normalised\ntoilet/train/toilet_0190_normalised\ntoilet/train/toilet_0057_normalised\ntoilet/train/toilet_0099_normalised\ntoilet/train/toilet_0332_normalised\ntoilet/train/toilet_0111_normalised\ntoilet/train/toilet_0016_normalised\ntoilet/train/toilet_0291_normalised\ntoilet/train/toilet_0001_normalised\ntoilet/train/toilet_0262_normalised\ntoilet/train/toilet_0334_normalised\ntoilet/train/toilet_0224_normalised\ntoilet/train/toilet_0327_normalised\ntoilet/train/toilet_0223_normalised\ntoilet/train/toilet_0156_normalised\ntoilet/train/toilet_0073_normalised\ntoilet/train/toilet_0147_normalised\ntoilet/train/toilet_0155_normalised\ntoilet/train/toilet_0101_normalised\ntoilet/train/toilet_0269_normalised\ntoilet/train/toilet_0312_normalised\ntoilet/train/toilet_0261_normalised\ntoilet/train
/toilet_0022_normalised\ntoilet/train/toilet_0108_normalised\ntoilet/train/toilet_0118_normalised\ntoilet/train/toilet_0197_normalised\ntoilet/train/toilet_0317_normalised\ntoilet/train/toilet_0339_normalised\ntoilet/train/toilet_0173_normalised\ntoilet/train/toilet_0281_normalised\ntoilet/train/toilet_0096_normalised\ntoilet/train/toilet_0244_normalised\ntoilet/train/toilet_0104_normalised\ntoilet/train/toilet_0023_normalised\ntoilet/train/toilet_0191_normalised\ntoilet/train/toilet_0127_normalised\ntoilet/train/toilet_0005_normalised\ntoilet/train/toilet_0183_normalised\ntoilet/train/toilet_0063_normalised\ntoilet/train/toilet_0256_normalised\ntoilet/train/toilet_0105_normalised\ntoilet/train/toilet_0059_normalised\ntoilet/train/toilet_0254_normalised\ntoilet/train/toilet_0267_normalised\ntoilet/train/toilet_0047_normalised\ntoilet/train/toilet_0123_normalised\ntoilet/train/toilet_0268_normalised\ntoilet/train/toilet_0098_normalised\ntoilet/train/toilet_0248_normalised\ntoilet/train/toilet_0208_normalised\ntoilet/train/toilet_0143_normalised\ntoilet/train/toilet_0322_normalised\ntoilet/train/toilet_0279_normalised\ntoilet/train/toilet_0264_normalised\ntoilet/train/toilet_0068_normalised\ntoilet/train/toilet_0187_normalised\ntoilet/train/toilet_0040_normalised\ntoilet/train/toilet_0193_normalised\ntoilet/train/toilet_0192_normalised\ntoilet/train/toilet_0340_normalised\ntoilet/train/toilet_0011_normalised\ntoilet/train/toilet_0075_normalised\ntoilet/train/toilet_0227_normalised\ntoilet/train/toilet_0066_normalised\ntoilet/train/toilet_0152_normalised\ntoilet/train/toilet_0252_normalised\ntoilet/train/toilet_0284_normalised\ntoilet/train/toilet_0229_normalised\ntoilet/train/toilet_0046_normalised\ntoilet/train/toilet_0129_normalised\ntoilet/train/toilet_0236_normalised\ntoilet/train/toilet_0082_normalised\ntoilet/train/toilet_0178_normalised\ntoilet/train/toilet_0074_normalised\ntoilet/train/toilet_0302_normalised\ntoilet/train/toilet_0225_normalised\ntoilet/train/t
oilet_0012_normalised\ntoilet/train/toilet_0052_normalised\ntoilet/train/toilet_0130_normalised\ntoilet/train/toilet_0309_normalised\ntoilet/train/toilet_0325_normalised\ntoilet/train/toilet_0018_normalised\ntoilet/train/toilet_0321_normalised\ntoilet/train/toilet_0003_normalised\ntoilet/train/toilet_0241_normalised\ntoilet/train/toilet_0112_normalised\ntoilet/train/toilet_0344_normalised\ntoilet/train/toilet_0270_normalised\ntoilet/train/toilet_0115_normalised\ntoilet/train/toilet_0113_normalised\ntoilet/train/toilet_0139_normalised\ntoilet/train/toilet_0167_normalised\ntoilet/train/toilet_0037_normalised\ntoilet/train/toilet_0330_normalised\ntoilet/train/toilet_0055_normalised\ntoilet/train/toilet_0313_normalised\ntoilet/train/toilet_0045_normalised\ntoilet/train/toilet_0086_normalised\ntoilet/train/toilet_0278_normalised\ntoilet/train/toilet_0007_normalised\ntoilet/train/toilet_0027_normalised\ntoilet/train/toilet_0151_normalised\ntoilet/train/toilet_0307_normalised\ntoilet/train/toilet_0297_normalised\ntoilet/train/toilet_0251_normalised\ntoilet/train/toilet_0294_normalised\ntoilet/train/toilet_0150_normalised\ntoilet/train/toilet_0090_normalised\ntoilet/train/toilet_0207_normalised\ntoilet/train/toilet_0157_normalised\ntoilet/train/toilet_0071_normalised\ntoilet/train/toilet_0200_normalised\ntoilet/train/toilet_0148_normalised\ntoilet/train/toilet_0162_normalised\ntoilet/train/toilet_0117_normalised\ntoilet/train/toilet_0051_normalised\ntoilet/train/toilet_0142_normalised\ntoilet/train/toilet_0233_normalised\ntoilet/train/toilet_0235_normalised\ntoilet/train/toilet_0164_normalised\ntoilet/train/toilet_0304_normalised\ntoilet/train/toilet_0119_normalised\ntoilet/train/toilet_0329_normalised\ntoilet/train/toilet_0216_normalised\ntoilet/train/toilet_0175_normalised\ntoilet/train/toilet_0288_normalised\ntoilet/train/toilet_0237_normalised\ntoilet/train/toilet_0170_normalised\ntoilet/train/toilet_0060_normalised\ntoilet/train/toilet_0240_normalised\ntoilet/train/toi
let_0206_normalised\ntoilet/train/toilet_0218_normalised\ntoilet/train/toilet_0303_normalised\ntoilet/train/toilet_0182_normalised\ntoilet/train/toilet_0042_normalised\ntoilet/train/toilet_0161_normalised\ntoilet/train/toilet_0103_normalised\ntoilet/train/toilet_0239_normalised\ntoilet/train/toilet_0159_normalised\ntoilet/train/toilet_0166_normalised\ntoilet/train/toilet_0128_normalised\ntoilet/train/toilet_0070_normalised\ntoilet/train/toilet_0341_normalised\ntoilet/train/toilet_0314_normalised\ntoilet/train/toilet_0061_normalised\ntoilet/train/toilet_0109_normalised\ntoilet/train/toilet_0006_normalised\ntoilet/train/toilet_0265_normalised\ntoilet/train/toilet_0100_normalised\ntoilet/train/toilet_0324_normalised\ntoilet/train/toilet_0333_normalised\ntoilet/train/toilet_0107_normalised\ntoilet/train/toilet_0050_normalised\ntoilet/train/toilet_0315_normalised\ntoilet/train/toilet_0092_normalised\ntoilet/train/toilet_0054_normalised\ntoilet/train/toilet_0174_normalised\ntoilet/train/toilet_0213_normalised\ntoilet/train/toilet_0065_normalised\ntoilet/train/toilet_0145_normalised\ntoilet/train/toilet_0144_normalised\ntoilet/train/toilet_0097_normalised\ntoilet/train/toilet_0275_normalised\ntoilet/train/toilet_0217_normalised\ntoilet/train/toilet_0180_normalised\ntoilet/train/toilet_0149_normalised\ntoilet/train/toilet_0289_normalised\ntoilet/train/toilet_0088_normalised\ntoilet/train/toilet_0172_normalised\ntoilet/train/toilet_0160_normalised\ntoilet/train/toilet_0188_normalised\ntoilet/train/toilet_0316_normalised\ntoilet/train/toilet_0226_normalised\ntoilet/train/toilet_0058_normalised\ntoilet/train/toilet_0102_normalised\ntoilet/train/toilet_0293_normalised\ntoilet/train/toilet_0153_normalised\ntoilet/train/toilet_0255_normalised\ntoilet/train/toilet_0056_normalised\ntoilet/train/toilet_0212_normalised\ntoilet/train/toilet_0298_normalised\ntoilet/train/toilet_0141_normalised\ntoilet/train/toilet_0211_normalised\ntoilet/train/toilet_0286_normalised\ntoilet/train/toile
t_0014_normalised\ntoilet/train/toilet_0320_normalised\ntoilet/train/toilet_0169_normalised\ntoilet/train/toilet_0036_normalised\ntoilet/train/toilet_0258_normalised\ntoilet/train/toilet_0137_normalised\ntoilet/train/toilet_0072_normalised\ntoilet/train/toilet_0331_normalised\ntoilet/train/toilet_0263_normalised\ntoilet/train/toilet_0305_normalised\ntoilet/train/toilet_0245_normalised\ntoilet/train/toilet_0230_normalised\ntoilet/train/toilet_0028_normalised\ntoilet/train/toilet_0116_normalised\ntoilet/train/toilet_0087_normalised\ntoilet/train/toilet_0290_normalised\ntoilet/train/toilet_0337_normalised\ntoilet/train/toilet_0034_normalised\ntoilet/train/toilet_0077_normalised\ntoilet/train/toilet_0210_normalised\ntoilet/train/toilet_0179_normalised\ntoilet/train/toilet_0020_normalised\ntoilet/train/toilet_0194_normalised\ntoilet/train/toilet_0024_normalised\ntoilet/train/toilet_0176_normalised\ntoilet/train/toilet_0189_normalised\ntoilet/train/toilet_0253_normalised\ntoilet/train/toilet_0033_normalised\ntoilet/train/toilet_0319_normalised\ntoilet/test/toilet_0356_normalised\ntoilet/test/toilet_0413_normalised\ntoilet/test/toilet_0371_normalised\ntoilet/test/toilet_0443_normalised\ntoilet/test/toilet_0367_normalised\ntoilet/test/toilet_0349_normalised\ntoilet/test/toilet_0385_normalised\ntoilet/test/toilet_0392_normalised\ntoilet/test/toilet_0399_normalised\ntoilet/test/toilet_0429_normalised\ntoilet/test/toilet_0387_normalised\ntoilet/test/toilet_0420_normalised\ntoilet/test/toilet_0375_normalised\ntoilet/test/toilet_0434_normalised\ntoilet/test/toilet_0351_normalised\ntoilet/test/toilet_0421_normalised\ntoilet/test/toilet_0400_normalised\ntoilet/test/toilet_0440_normalised\ntoilet/test/toilet_0398_normalised\ntoilet/test/toilet_0396_normalised\ntoilet/test/toilet_0354_normalised\ntoilet/test/toilet_0384_normalised\ntoilet/test/toilet_0386_normalised\ntoilet/test/toilet_0353_normalised\ntoilet/test/toilet_0373_normalised\ntoilet/test/toilet_0405_normalised\ntoilet/te
st/toilet_0347_normalised\ntoilet/test/toilet_0428_normalised\ntoilet/test/toilet_0411_normalised\ntoilet/test/toilet_0412_normalised\ntoilet/test/toilet_0408_normalised\ntoilet/test/toilet_0391_normalised\ntoilet/test/toilet_0401_normalised\ntoilet/test/toilet_0381_normalised\ntoilet/test/toilet_0403_normalised\ntoilet/test/toilet_0383_normalised\ntoilet/test/toilet_0346_normalised\ntoilet/test/toilet_0423_normalised\ntoilet/test/toilet_0389_normalised\ntoilet/test/toilet_0404_normalised\ntoilet/test/toilet_0406_normalised\ntoilet/test/toilet_0431_normalised\ntoilet/test/toilet_0433_normalised\ntoilet/test/toilet_0418_normalised\ntoilet/test/toilet_0361_normalised\ntoilet/test/toilet_0363_normalised\ntoilet/test/toilet_0415_normalised\ntoilet/test/toilet_0382_normalised\ntoilet/test/toilet_0388_normalised\ntoilet/test/toilet_0365_normalised\ntoilet/test/toilet_0416_normalised\ntoilet/test/toilet_0379_normalised\ntoilet/test/toilet_0393_normalised\ntoilet/test/toilet_0424_normalised\ntoilet/test/toilet_0369_normalised\ntoilet/test/toilet_0394_normalised\ntoilet/test/toilet_0390_normalised\ntoilet/test/toilet_0422_normalised\ntoilet/test/toilet_0380_normalised\ntoilet/test/toilet_0439_normalised\ntoilet/test/toilet_0402_normalised\ntoilet/test/toilet_0368_normalised\ntoilet/test/toilet_0364_normalised\ntoilet/test/toilet_0426_normalised\ntoilet/test/toilet_0410_normalised\ntoilet/test/toilet_0430_normalised\ntoilet/test/toilet_0414_normalised\ntoilet/test/toilet_0427_normalised\ntoilet/test/toilet_0348_normalised\ntoilet/test/toilet_0359_normalised\ntoilet/test/toilet_0419_normalised\ntoilet/test/toilet_0438_normalised\ntoilet/test/toilet_0425_normalised\ntoilet/test/toilet_0358_normalised\ntoilet/test/toilet_0352_normalised\ntoilet/test/toilet_0374_normalised\ntoilet/test/toilet_0417_normalised\ntoilet/test/toilet_0357_normalised\ntoilet/test/toilet_0362_normalised\ntoilet/test/toilet_0436_normalised\ntoilet/test/toilet_0370_normalised\ntoilet/test/toilet_0407_norma
lised\ntoilet/test/toilet_0376_normalised\ntoilet/test/toilet_0366_normalised\ntoilet/test/toilet_0442_normalised\ntoilet/test/toilet_0437_normalised\ntoilet/test/toilet_0409_normalised\ntoilet/test/toilet_0372_normalised\ntoilet/test/toilet_0360_normalised\ntoilet/test/toilet_0432_normalised\ntoilet/test/toilet_0345_normalised\ntoilet/test/toilet_0350_normalised\ntoilet/test/toilet_0441_normalised\ntoilet/test/toilet_0444_normalised\ntoilet/test/toilet_0355_normalised\ntoilet/test/toilet_0397_normalised\ntoilet/test/toilet_0435_normalised\ntoilet/test/toilet_0378_normalised\ntoilet/test/toilet_0395_normalised\ntoilet/test/toilet_0377_normalised\n"
  },
  {
    "path": "render/PC_Normalisation.py",
    "content": "#  Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk\n\nimport os, open3d, numpy as np\n\nFile_ = open('ModelNet_flist_short.txt', 'w')\n\nif __name__ == \"__main__\":\n    root_dir = \"../data/ModelNet_subset/\"\n\n    for root, dirs, files in os.walk(root_dir, topdown=False):\n        for file in files:\n            if '.ply' in file:\n                amesh = open3d.io.read_triangle_mesh(os.path.join(root, file))\n                out_file_name = os.path.join(root, file).replace('.ply', '_normalised.obj')\n\n                center = amesh.get_center()\n                amesh.translate(-center)\n                maxR = (np.asarray(amesh.vertices)**2).sum(axis=1).max()**(1/2)\n                # we found divided by (2*maxR) has best rendered visualisation results\n                amesh.scale(1/(2*maxR))\n                open3d.io.write_triangle_mesh(out_file_name, amesh)\n                File_.writelines(out_file_name.replace('.obj', '').replace(root_dir, '') + '\\n')\n                print(out_file_name)\n"
  },
  {
    "path": "render/readme.md",
    "content": "This directory contains code that generates partial point clouds objects. \n\nTo start with:\n\n1. Download and Install [Blender](https://blender.org/download/)\n\n2. Create a list of normalized 3D objects to be rendered, which should be in `.obj` format, we provide `ModelNet_Flist.txt`. as a template. We also provide <a href=\"PC_Normalisation.py\">PC_Normalisation.py</a> for normalization.\n\n3. To generate the rendered depth image from 3d objects (you might need to install a few more supportive packages, i.e. `Imath, OpenEXR`, due to the differences in the development environments)\n\n\t```bash\n\t# blender -b -P Depth_Renderer.py [data directory] [file list] [output directory] [num scans per model]\n\t\n\tblender -b -P render_depth.py ../data/modelnet40 ModelNet_Flist.txt ./dump 10\n\t```\n\n\tThe generated intermediate files are in OpenEXR format (`*.exr`). You can also modify the intrinsics of the camera model in <a href=\"Depth_Renderer.py\">Depth_Renderer.py</a>, which will be automatically saved in the `intrinsics.txt`.\n\n4. To re-project the partial occluded point cloud from the depth image:\n\n\t``` bash\n\tpython EXR_Process.py \\\n\t\t--list_file ModelNet_Flist.txt \\\n\t    --intrinsics intrinsics.txt \\\n\t    --output_dir ./dump \\\n\t    --num_scans 10 ;\n\t```\n\n\tThis will convert the `*.exr` files into depth images (`*.png`) then point clouds (`*.pcd`)\n\n5. Now use <a href=\"../OcCo_Torch/utils/LMDB_Writer.py\">OcCo_Torch/utils/LMDB_Writer.py</a> to convert all the  `pcd` files into `lmdb` dataloader:\n\n\t```bash\n\tpython LMDB_Writer.py \\\n\t\t--list_path ../render/ModelNet_Flist.txt \\\n\t    --complete_dir ../data/modelnet40 \\\n\t    --partial_dir ../render/dump/pcd \\\n\t    --num_scans 10 \\\n\t    --output_file ../data/MyTrain.lmdb ;\n\t```\n\n6. Now you can pre-train the models via OcCo on your own constructed data, enjoy :)\n"
  },
  {
    "path": "sample/CMakeLists.txt",
    "content": "cmake_minimum_required(VERSION 3.0)\n\nproject(mesh_sampling)\n\nfind_package(PCL 1.7 REQUIRED)\ninclude_directories(${PCL_INCLUDE_DIRS})\nlink_directories(${PCL_LIBRARY_DIRS})\nadd_definitions(${PCL_DEFINITIONS})\n\nfind_package(VTK 7.0 REQUIRED)\ninclude(${VTK_USE_FILE})\n\nadd_executable (mesh_sampling mesh_sampling.cpp)\ntarget_link_libraries (mesh_sampling ${PCL_LIBRARIES} ${VTK_LIBRARIES})\n"
  },
  {
    "path": "sample/mesh_sampling.cpp",
    "content": "/*\n * Software License Agreement (BSD License)\n *\n *  Point Cloud Library (PCL) - www.pointclouds.org\n *  Copyright (c) 2010-2011, Willow Garage, Inc.\n *\n *  All rights reserved.\n *\n *  Redistribution and use in source and binary forms, with or without\n *  modification, are permitted provided that the following conditions\n *  are met:\n *\n *   * Redistributions of source code must retain the above copyright\n *     notice, this list of conditions and the following disclaimer.\n *   * Redistributions in binary form must reproduce the above\n *     copyright notice, this list of conditions and the following\n *     disclaimer in the documentation and/or other materials provided\n *     with the distribution.\n *   * Neither the name of the copyright holder(s) nor the names of its\n *     contributors may be used to endorse or promote products derived\n *     from this software without specific prior written permission.\n *\n *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n *  \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS\n *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE\n *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\n *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT\n *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN\n *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n *  POSSIBILITY OF SUCH DAMAGE.\n *\n * Modified by Wentao Yuan (wyuan1@cs.cmu.edu) 05/31/2018\n */\n\n#include <pcl/visualization/pcl_visualizer.h>\n#include <pcl/io/pcd_io.h>\n#include <pcl/io/vtk_lib_io.h>\n#include <pcl/common/transforms.h>\n#include <vtkVersion.h>\n#include <vtkPLYReader.h>\n#include <vtkOBJReader.h>\n#include <vtkTriangle.h>\n#include <vtkTriangleFilter.h>\n#include <vtkPolyDataMapper.h>\n#include <pcl/filters/voxel_grid.h>\n#include <pcl/console/print.h>\n#include <pcl/console/parse.h>\n\ninline double\nuniform_deviate (int seed)\n{\n  double ran = seed * (1.0 / (RAND_MAX + 1.0));\n  return ran;\n}\n\ninline void\nrandomPointTriangle (float a1, float a2, float a3, float b1, float b2, float b3, float c1, float c2, float c3,\n                     Eigen::Vector4f& p)\n{\n  float r1 = static_cast<float> (uniform_deviate (rand ()));\n  float r2 = static_cast<float> (uniform_deviate (rand ()));\n  float r1sqr = std::sqrt (r1);\n  float OneMinR1Sqr = (1 - r1sqr);\n  float OneMinR2 = (1 - r2);\n  a1 *= OneMinR1Sqr;\n  a2 *= OneMinR1Sqr;\n  a3 *= OneMinR1Sqr;\n  b1 *= OneMinR2;\n  b2 *= OneMinR2;\n  b3 *= OneMinR2;\n  c1 = r1sqr * (r2 * c1 + b1) + a1;\n  c2 = r1sqr * (r2 * c2 + b2) + a2;\n  c3 = r1sqr * (r2 * c3 + b3) + a3;\n  p[0] = c1;\n  p[1] = c2;\n  p[2] = c3;\n  p[3] = 0;\n}\n\ninline void\nrandPSurface (vtkPolyData * polydata, std::vector<double> * cumulativeAreas, double totalArea, Eigen::Vector4f& p, bool calcNormal, 
Eigen::Vector3f& n)\n{\n  float r = static_cast<float> (uniform_deviate (rand ()) * totalArea);\n\n  std::vector<double>::iterator low = std::lower_bound (cumulativeAreas->begin (), cumulativeAreas->end (), r);\n  vtkIdType el = vtkIdType (low - cumulativeAreas->begin ());\n\n  double A[3], B[3], C[3];\n  vtkIdType npts = 0;\n  vtkIdType *ptIds = NULL;\n  polydata->GetCellPoints (el, npts, ptIds);\n  polydata->GetPoint (ptIds[0], A);\n  polydata->GetPoint (ptIds[1], B);\n  polydata->GetPoint (ptIds[2], C);\n  if (calcNormal)\n  {\n    // OBJ: Vertices are stored in a counter-clockwise order by default\n    Eigen::Vector3f v1 = Eigen::Vector3f (A[0], A[1], A[2]) - Eigen::Vector3f (C[0], C[1], C[2]);\n    Eigen::Vector3f v2 = Eigen::Vector3f (B[0], B[1], B[2]) - Eigen::Vector3f (C[0], C[1], C[2]);\n    n = v1.cross (v2);\n    n.normalize ();\n  }\n  randomPointTriangle (float (A[0]), float (A[1]), float (A[2]),\n                       float (B[0]), float (B[1]), float (B[2]),\n                       float (C[0]), float (C[1]), float (C[2]), p);\n}\n\nvoid\nuniform_sampling (vtkSmartPointer<vtkPolyData> polydata, size_t n_samples, bool calc_normal, pcl::PointCloud<pcl::PointNormal> & cloud_out)\n{\n  polydata->BuildCells ();\n  vtkSmartPointer<vtkCellArray> cells = polydata->GetPolys ();\n\n  double p1[3], p2[3], p3[3], totalArea = 0;\n  std::vector<double> cumulativeAreas (cells->GetNumberOfCells (), 0);\n  size_t i = 0;\n  vtkIdType npts = 0, *ptIds = NULL;\n  for (cells->InitTraversal (); cells->GetNextCell (npts, ptIds); i++)\n  {\n    polydata->GetPoint (ptIds[0], p1);\n    polydata->GetPoint (ptIds[1], p2);\n    polydata->GetPoint (ptIds[2], p3);\n    totalArea += vtkTriangle::TriangleArea (p1, p2, p3);\n    cumulativeAreas[i] = totalArea;\n  }\n\n  cloud_out.points.resize (n_samples);\n  cloud_out.width = static_cast<pcl::uint32_t> (n_samples);\n  cloud_out.height = 1;\n\n  for (i = 0; i < n_samples; i++)\n  {\n    Eigen::Vector4f p;\n    Eigen::Vector3f n;\n   
 randPSurface (polydata, &cumulativeAreas, totalArea, p, calc_normal, n);\n    cloud_out.points[i].x = p[0];\n    cloud_out.points[i].y = p[1];\n    cloud_out.points[i].z = p[2];\n    if (calc_normal)\n    {\n      cloud_out.points[i].normal_x = n[0];\n      cloud_out.points[i].normal_y = n[1];\n      cloud_out.points[i].normal_z = n[2];\n    }\n  }\n}\n\nusing namespace pcl;\nusing namespace pcl::io;\nusing namespace pcl::console;\n\nconst int default_number_samples = 100000;\nconst float default_leaf_size = 0.01f;\n\nvoid\nprintHelp (int, char **argv)\n{\n  print_error(\"Syntax is: %s input.{ply,obj} output.pcd <options>\\n\", argv[0]);\n  print_info (\"  where options are:\\n\");\n  print_info (\"                -n_samples X   = number of samples (default: \");\n  print_value(\"%d\", default_number_samples);\n  print_info (\")\\n\");\n  print_info (\n              \"                -leaf_size X   = the XYZ leaf size for the VoxelGrid -- for data reduction (default: \");\n  print_value(\"%f\", default_leaf_size);\n  print_info (\" m)\\n\");\n  print_info (\"                -write_normals = flag to write normals to the output pcd\\n\");\n  print_info (\n              \"                -no_vis_result = flag to stop visualizing the generated pcd\\n\");\n  print_info (\n              \"                -no_vox_filter = flag to stop downsampling the generated pcd\\n\");\n}\n\n/* ---[ */\nint\nmain (int argc, char **argv)\n{\n  if (argc < 3)\n  {\n    printHelp (argc, argv);\n    return (-1);\n  }\n\n  // Parse command line arguments\n  int SAMPLE_POINTS_ = default_number_samples;\n  parse_argument (argc, argv, \"-n_samples\", SAMPLE_POINTS_);\n  float leaf_size = default_leaf_size;\n  parse_argument (argc, argv, \"-leaf_size\", leaf_size);\n  bool vis_result = ! find_switch (argc, argv, \"-no_vis_result\");\n  bool vox_filter = ! 
find_switch (argc, argv, \"-no_vox_filter\");\n  const bool write_normals = find_switch (argc, argv, \"-write_normals\");\n\n  std::vector<int> pcd_file_indices = parse_file_extension_argument (argc, argv, \".pcd\");\n  std::vector<int> ply_file_indices = parse_file_extension_argument (argc, argv, \".ply\");\n  std::vector<int> obj_file_indices = parse_file_extension_argument (argc, argv, \".obj\");\n  if (pcd_file_indices.size () != 1)\n  {\n    print_error (\"Need a single output PCD file to continue.\\n\");\n    return (-1);\n  }\n  if (ply_file_indices.size () != 1 && obj_file_indices.size () != 1)\n  {\n    print_error (\"Need a single input PLY/OBJ file to continue.\\n\");\n    return (-1);\n  }\n\n  vtkSmartPointer<vtkPolyData> polydata1 = vtkSmartPointer<vtkPolyData>::New ();\n  if (ply_file_indices.size () == 1)\n  {\n    pcl::PolygonMesh mesh;\n    pcl::io::loadPolygonFilePLY (argv[ply_file_indices[0]], mesh);\n    pcl::io::mesh2vtk (mesh, polydata1);\n  }\n  else if (obj_file_indices.size () == 1)\n  {\n    print_info (\"Convert %s to a point cloud using uniform sampling.\\n\", argv[obj_file_indices[0]]);\n    vtkSmartPointer<vtkOBJReader> readerQuery = vtkSmartPointer<vtkOBJReader>::New ();\n    readerQuery->SetFileName (argv[obj_file_indices[0]]);\n    readerQuery->Update ();\n    polydata1 = readerQuery->GetOutput ();\n  }\n\n  //make sure that the polygons are triangles!\n  vtkSmartPointer<vtkTriangleFilter> triangleFilter = vtkSmartPointer<vtkTriangleFilter>::New ();\n#if VTK_MAJOR_VERSION < 6\n  triangleFilter->SetInput (polydata1);\n#else\n  triangleFilter->SetInputData (polydata1);\n#endif\n  triangleFilter->Update ();\n\n  vtkSmartPointer<vtkPolyDataMapper> triangleMapper = vtkSmartPointer<vtkPolyDataMapper>::New ();\n  triangleMapper->SetInputConnection (triangleFilter->GetOutputPort ());\n  triangleMapper->Update ();\n  polydata1 = triangleMapper->GetInput ();\n\n  bool INTER_VIS = false;\n\n  if (INTER_VIS)\n  {\n    
visualization::PCLVisualizer vis;\n    vis.addModelFromPolyData (polydata1, \"mesh1\", 0);\n    vis.setRepresentationToSurfaceForAllActors ();\n    vis.spin ();\n  }\n\n  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_1 (new pcl::PointCloud<pcl::PointNormal>);\n  uniform_sampling (polydata1, SAMPLE_POINTS_, write_normals, *cloud_1);\n\n  if (INTER_VIS)\n  {\n    visualization::PCLVisualizer vis_sampled;\n    vis_sampled.addPointCloud<pcl::PointNormal> (cloud_1);\n    if (write_normals)\n      vis_sampled.addPointCloudNormals<pcl::PointNormal> (cloud_1, 1, 0.02f, \"cloud_normals\");\n    vis_sampled.spin ();\n  }\n\n  pcl::PointCloud<pcl::PointNormal>::Ptr cloud (new pcl::PointCloud<pcl::PointNormal>);\n\n  // Voxelgrid\n  if (vox_filter)\n  {\n    VoxelGrid<PointNormal> grid_;\n    grid_.setInputCloud (cloud_1);\n    grid_.setLeafSize (leaf_size, leaf_size, leaf_size);\n    grid_.filter (*cloud);\n  }\n  else\n  {\n    *cloud = *cloud_1;\n  }\n\n  if (vis_result)\n  {\n    visualization::PCLVisualizer vis3 (\"VOXELIZED SAMPLES CLOUD\");\n    vis3.addPointCloud<pcl::PointNormal> (cloud);\n    if (write_normals)\n      vis3.addPointCloudNormals<pcl::PointNormal> (cloud, 1, 0.02f, \"cloud_normals\");\n    vis3.spin ();\n  }\n\n  if (!write_normals)\n  {\n    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz (new pcl::PointCloud<pcl::PointXYZ>);\n    // Strip uninitialized normals from cloud:\n    pcl::copyPointCloud (*cloud, *cloud_xyz);\n    savePCDFileASCII (argv[pcd_file_indices[0]], *cloud_xyz);\n  }\n  else\n  {\n    savePCDFileASCII (argv[pcd_file_indices[0]], *cloud);\n  }\n}\n"
  },
  {
    "path": "sample/readme.md",
    "content": "[Optional] This directory contains code for a command line tool that uniformly samples a point cloud on a mesh. It is a modified version of `pcl_mesh_sampling`. To use it:\n1. Install [CMake](https://cmake.org/download/), [PCL](http://pointclouds.org/downloads/) and [VTK](https://vtk.org/download/).\n2. Make a build directory: `makedir build & cd build`.\n3. Build the code by running `cmake ..` and then `make`.\n4. Run `./mesh_sampling` to see the command line usage."
  }
]