[
  {
    "path": "README.md",
    "content": "# adversarial-patch\nPyTorch implementation of adversarial patch \n\nThis is an implementation of the <a href=\"https://arxiv.org/pdf/1712.09665.pdf\">Adversarial Patch paper</a>. Not official and likely to have bugs/errors.\n\n## How to run:\n\nData set-up:\n\n - Follow instructions https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data . The validation set should be in path `./imagenet/val/`. There should be 1000 directories, each with 50 images.\n \nRun attack:\n\n- `python make_patch.py --cuda --netClassifier inceptionv3 --max_count 500 --image_size 299 --patch_type circle --outf log`\n\n## Results:\n\nUsing patch shapes of both circles and squares gave good results (both achieved 100% success on the training set and eventually > 90% success on test set)\n\nI managed to recreate the toaster example in the original paper. It looks slightly different but it is evidently a toaster.\n\n![Alt text](1981_859_adversarial.png?raw=true \"\") This is a toaster\n\nSquare patches are a little more homogenous due to that I only rotate by multiples of 90 degrees.\n\n![Alt text](1978_859_adversarial.png?raw=true \"\") This is also a toaster\n\n## Issues:\n\n- Cannot make a perfect circle with numpy/pytorch. The hack I came up with makes the boundary slightly hexagonal.\n\n- Rather slow if max_count and conf_target are large.\n\n- Probably lots of redundant calls and variables.\n\n\n"
  },
  {
    "path": "make_patch.py",
    "content": "import argparse\nimport os\nimport random\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim as optim\nimport torch.utils.data\nimport torch.nn.functional as F\nimport torchvision.datasets as dset\nimport torchvision.transforms as transforms\nimport torchvision.utils as vutils\nfrom torch.autograd import Variable\nfrom torch.utils.data.sampler import SubsetRandomSampler\n\nfrom pretrained_models_pytorch import pretrainedmodels\n\nfrom utils import *\n\nparser = argparse.ArgumentParser()\nparser.add_argument('--workers', type=int, help='number of data loading workers', default=2)\nparser.add_argument('--epochs', type=int, default=20, help='number of epochs to train for')\nparser.add_argument('--cuda', action='store_true', help='enables cuda')\n\nparser.add_argument('--target', type=int, default=859, help='The target class: 859 == toaster')\nparser.add_argument('--conf_target', type=float, default=0.9, help='Stop attack on image when target classifier reaches this value for target class')\n\nparser.add_argument('--max_count', type=int, default=1000, help='max number of iterations to find adversarial example')\nparser.add_argument('--patch_type', type=str, default='circle', help='patch type: circle or square')\nparser.add_argument('--patch_size', type=float, default=0.05, help='patch size. E.g. 
0.05 ~= 5% of image ')\n\nparser.add_argument('--train_size', type=int, default=2000, help='Number of training images')\nparser.add_argument('--test_size', type=int, default=2000, help='Number of test images')\n\nparser.add_argument('--image_size', type=int, default=299, help='the height / width of the input image to network')\n\nparser.add_argument('--plot_all', type=int, default=1, help='1 == plot all successful adversarial images')\n\nparser.add_argument('--netClassifier', default='inceptionv3', help=\"The target classifier\")\n\nparser.add_argument('--outf', default='./logs', help='folder to output images and model checkpoints')\nparser.add_argument('--manualSeed', type=int, default=1338, help='manual seed')\n\nopt = parser.parse_args()\nprint(opt)\n\ntry:\n    os.makedirs(opt.outf)\nexcept OSError:\n    pass\n\nif opt.manualSeed is None:\n    opt.manualSeed = random.randint(1, 10000)\nprint(\"Random Seed: \", opt.manualSeed)\nrandom.seed(opt.manualSeed)\nnp.random.seed(opt.manualSeed)\ntorch.manual_seed(opt.manualSeed)\nif opt.cuda:\n    torch.cuda.manual_seed_all(opt.manualSeed)\n\ncudnn.benchmark = True\n\nif torch.cuda.is_available() and not opt.cuda:\n    print(\"WARNING: You have a CUDA device, so you should probably run with --cuda\")\n\ntarget = opt.target\nconf_target = opt.conf_target\nmax_count = opt.max_count\npatch_type = opt.patch_type\npatch_size = opt.patch_size\nimage_size = opt.image_size\ntrain_size = opt.train_size\ntest_size = opt.test_size\nplot_all = opt.plot_all \n\nassert train_size + test_size <= 50000, \"Traing set size + Test set size > Total dataset size\"\n\nprint(\"=> creating model \")\nnetClassifier = pretrainedmodels.__dict__[opt.netClassifier](num_classes=1000, pretrained='imagenet')\nif opt.cuda:\n    netClassifier.cuda()\n\n\nprint('==> Preparing data..')\nnormalize = transforms.Normalize(mean=netClassifier.mean,\n                                 std=netClassifier.std)\nidx = 
np.arange(50000)\nnp.random.shuffle(idx)\ntraining_idx = idx[:train_size]\ntest_idx = idx[train_size:train_size + test_size]\n\ntrain_loader = torch.utils.data.DataLoader(\n    dset.ImageFolder('./imagenetdata/val', transforms.Compose([\n        transforms.Scale(round(max(netClassifier.input_size)*1.050)),\n        transforms.CenterCrop(max(netClassifier.input_size)),\n        transforms.ToTensor(),\n        ToSpaceBGR(netClassifier.input_space=='BGR'),\n        ToRange255(max(netClassifier.input_range)==255),\n        normalize,\n    ])),\n    batch_size=1, shuffle=False, sampler=SubsetRandomSampler(training_idx),\n    num_workers=opt.workers, pin_memory=True)\n \ntest_loader = torch.utils.data.DataLoader(\n    dset.ImageFolder('./imagenetdata/val', transforms.Compose([\n        transforms.Scale(round(max(netClassifier.input_size)*1.050)),\n        transforms.CenterCrop(max(netClassifier.input_size)),\n        transforms.ToTensor(),\n        ToSpaceBGR(netClassifier.input_space=='BGR'),\n        ToRange255(max(netClassifier.input_range)==255),\n        normalize,\n    ])),\n    batch_size=1, shuffle=False, sampler=SubsetRandomSampler(test_idx),\n    num_workers=opt.workers, pin_memory=True)\n\nmin_in, max_in = netClassifier.input_range[0], netClassifier.input_range[1]\nmin_in, max_in = np.array([min_in, min_in, min_in]), np.array([max_in, max_in, max_in])\nmean, std = np.array(netClassifier.mean), np.array(netClassifier.std) \nmin_out, max_out = np.min((min_in-mean)/std), np.max((max_in-mean)/std)\n\n\ndef train(epoch, patch, patch_shape):\n    netClassifier.eval()\n    success = 0\n    total = 0\n    recover_time = 0\n    for batch_idx, (data, labels) in enumerate(train_loader):\n        if opt.cuda:\n            data = data.cuda()\n            labels = labels.cuda()\n        data, labels = Variable(data), Variable(labels)\n\n        prediction = netClassifier(data)\n \n        # only compute adversarial examples on examples that are originally classified correctly        \n 
       if prediction.data.max(1)[1][0] != labels.data[0]:\n            continue\n     \n        total += 1\n        \n        # transform path\n        data_shape = data.data.cpu().numpy().shape\n        if patch_type == 'circle':\n            patch, mask, patch_shape = circle_transform(patch, data_shape, patch_shape, image_size)\n        elif patch_type == 'square':\n            patch, mask  = square_transform(patch, data_shape, patch_shape, image_size)\n        patch, mask = torch.FloatTensor(patch), torch.FloatTensor(mask)\n        if opt.cuda:\n            patch, mask = patch.cuda(), mask.cuda()\n        patch, mask = Variable(patch), Variable(mask)\n \n        adv_x, mask, patch = attack(data, patch, mask)\n        \n        adv_label = netClassifier(adv_x).data.max(1)[1][0]\n        ori_label = labels.data[0]\n        \n        if adv_label == target:\n            success += 1\n      \n            if plot_all == 1: \n                # plot source image\n                vutils.save_image(data.data, \"./%s/%d_%d_original.png\" %(opt.outf, batch_idx, ori_label), normalize=True)\n                \n                # plot adversarial image\n                vutils.save_image(adv_x.data, \"./%s/%d_%d_adversarial.png\" %(opt.outf, batch_idx, adv_label), normalize=True)\n \n        masked_patch = torch.mul(mask, patch)\n        patch = masked_patch.data.cpu().numpy()\n        new_patch = np.zeros(patch_shape)\n        for i in range(new_patch.shape[0]): \n            for j in range(new_patch.shape[1]): \n                new_patch[i][j] = submatrix(patch[i][j])\n \n        patch = new_patch\n\n        # log to file  \n        progress_bar(batch_idx, len(train_loader), \"Train Patch Success: {:.3f}\".format(success/total))\n\n    return patch\n\ndef test(epoch, patch, patch_shape):\n    netClassifier.eval()\n    success = 0\n    total = 0\n    for batch_idx, (data, labels) in enumerate(test_loader):\n        if opt.cuda:\n            data = data.cuda()\n            
labels = labels.cuda()\n        data, labels = Variable(data), Variable(labels)\n\n        prediction = netClassifier(data)\n\n        # only computer adversarial examples on examples that are originally classified correctly        \n        if prediction.data.max(1)[1][0] != labels.data[0]:\n            continue\n      \n        total += 1 \n        \n        # transform path\n        data_shape = data.data.cpu().numpy().shape\n        if patch_type == 'circle':\n            patch, mask, patch_shape = circle_transform(patch, data_shape, patch_shape, image_size)\n        elif patch_type == 'square':\n            patch, mask = square_transform(patch, data_shape, patch_shape, image_size)\n        patch, mask = torch.FloatTensor(patch), torch.FloatTensor(mask)\n        if opt.cuda:\n            patch, mask = patch.cuda(), mask.cuda()\n        patch, mask = Variable(patch), Variable(mask)\n \n        adv_x = torch.mul((1-mask),data) + torch.mul(mask,patch)\n        adv_x = torch.clamp(adv_x, min_out, max_out)\n        \n        adv_label = netClassifier(adv_x).data.max(1)[1][0]\n        ori_label = labels.data[0]\n        \n        if adv_label == target:\n            success += 1\n       \n        masked_patch = torch.mul(mask, patch)\n        patch = masked_patch.data.cpu().numpy()\n        new_patch = np.zeros(patch_shape)\n        for i in range(new_patch.shape[0]): \n            for j in range(new_patch.shape[1]): \n                new_patch[i][j] = submatrix(patch[i][j])\n \n        patch = new_patch\n\n        # log to file  \n        progress_bar(batch_idx, len(test_loader), \"Test Success: {:.3f}\".format(success/total))\n\ndef attack(x, patch, mask):\n    netClassifier.eval()\n\n    x_out = F.softmax(netClassifier(x))\n    target_prob = x_out.data[0][target]\n\n    adv_x = torch.mul((1-mask),x) + torch.mul(mask,patch)\n    \n    count = 0 \n   \n    while conf_target > target_prob:\n        count += 1\n        adv_x = Variable(adv_x.data, 
requires_grad=True)\n        adv_out = F.log_softmax(netClassifier(adv_x))\n       \n        adv_out_probs, adv_out_labels = adv_out.max(1)\n        \n        Loss = -adv_out[0][target]\n        Loss.backward()\n     \n        adv_grad = adv_x.grad.clone()\n        \n        adv_x.grad.data.zero_()\n       \n        patch -= adv_grad \n        \n        adv_x = torch.mul((1-mask),x) + torch.mul(mask,patch)\n        adv_x = torch.clamp(adv_x, min_out, max_out)\n \n        out = F.softmax(netClassifier(adv_x))\n        target_prob = out.data[0][target]\n        #y_argmax_prob = out.data.max(1)[0][0]\n        \n        #print(count, conf_target, target_prob, y_argmax_prob)  \n\n        if count >= opt.max_count:\n            break\n\n\n    return adv_x, mask, patch \n\n\nif __name__ == '__main__':\n    if patch_type == 'circle':\n        patch, patch_shape = init_patch_circle(image_size, patch_size) \n    elif patch_type == 'square':\n        patch, patch_shape = init_patch_square(image_size, patch_size) \n    else:\n        raise SystemExit(\"Please choose a square or circle patch\")\n    \n    for epoch in range(1, opt.epochs + 1):\n        patch = train(epoch, patch, patch_shape)\n        test(epoch, patch, patch_shape)\n"
  },
  {
    "path": "pretrained_models_pytorch/LICENSE",
    "content": "BSD 3-Clause License\n\nCopyright (c) 2017, Remi Cadene\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above copyright notice, this\n  list of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above copyright notice,\n  this list of conditions and the following disclaimer in the documentation\n  and/or other materials provided with the distribution.\n\n* Neither the name of the copyright holder nor the names of its\n  contributors may be used to endorse or promote products derived from\n  this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
  },
  {
    "path": "pretrained_models_pytorch/README.md",
    "content": "# Pretrained models for Pytorch (Work in progress)\n\nThe goal of this repo is:\n\n- to help to reproduce research papers results (transfer learning setups for instance),\n- to access pretrained ConvNets with a unique interface/API inspired by torchvision.\n\nNews:\n\n- 16/11/2017: nasnet-a-large pretrained model ported by T. Durand and R. Cadene\n- 22/07/2017: torchvision pretrained models\n- 22/07/2017: momentum in inceptionv4 and inceptionresnetv2 to 0.1\n- 17/07/2017: model.input_range attribut\n- 17/07/2017: BNInception pretrained on Imagenet\n\n## Summary\n\n- [Installation](https://github.com/Cadene/pretrained-models.pytorch#installation)\n- [Toy example](https://github.com/Cadene/pretrained-models.pytorch#toy-example)\n- [Evaluation on ImageNet](https://github.com/Cadene/pretrained-models.pytorch#evaluation-on-imagenet)\n    - [Accuracy on valset](https://github.com/Cadene/pretrained-models.pytorch#accuracy-on-validation-set)\n    - [Reproducing results](https://github.com/Cadene/pretrained-models.pytorch#reproducing-results)\n- [Documentation](https://github.com/Cadene/pretrained-models.pytorch#documentation)\n    - [Available models](https://github.com/Cadene/pretrained-models.pytorch#available-models)\n        - [NasNetLarge](https://github.com/Cadene/pretrained-models.pytorch#nasnet)\n        - [BNInception](https://github.com/Cadene/pretrained-models.pytorch#bninception)\n        - [InceptionV3](https://github.com/Cadene/pretrained-models.pytorch#inception)\n        - [InceptionV4](https://github.com/Cadene/pretrained-models.pytorch#inception)\n        - [InceptionResNetV2](https://github.com/Cadene/pretrained-models.pytorch#inception)\n        - [ResNeXt101_64x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext)\n        - [ResNeXt101_32x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext)\n        - [ResNet18](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - 
[ResNet34](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [ResNet50](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [ResNet101](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [ResNet152](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [FBResNet152](https://github.com/Cadene/pretrained-models.pytorch#facebook-resnet)\n        - [DenseNet121](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [DenseNet161](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [DenseNet169](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [DenseNet201](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [SqueezeNet1_0](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [SqueezeNet1_1](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [AlexNet](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG11](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG13](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG16](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG19](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG11_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG13_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG16_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n        - [VGG19_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision)\n    - [Model API](https://github.com/Cadene/pretrained-models.pytorch#model-api)\n        - [model.input_size](https://github.com/Cadene/pretrained-models.pytorch#modelinput_size)\n        - 
[model.input_space](https://github.com/Cadene/pretrained-models.pytorch#modelinput_space)\n        - [model.input_range](https://github.com/Cadene/pretrained-models.pytorch#modelinput_range)\n        - [model.mean](https://github.com/Cadene/pretrained-models.pytorch#modelmean)\n        - [model.std](https://github.com/Cadene/pretrained-models.pytorch#modelstd)\n        - [model.features](https://github.com/Cadene/pretrained-models.pytorch#modelfeatures)\n        - [model.classif](https://github.com/Cadene/pretrained-models.pytorch#modelclassif)\n        - [model.forward](https://github.com/Cadene/pretrained-models.pytorch#modelforward)\n- [Reproducing porting](https://github.com/Cadene/pretrained-models.pytorch#reproducing)\n    - [ResNet*](https://github.com/Cadene/pretrained-models.pytorch#hand-porting-of-resnet152)\n    - [ResNeXt*](https://github.com/Cadene/pretrained-models.pytorch#automatic-porting-of-resnext)\n    - [Inception*](https://github.com/Cadene/pretrained-models.pytorch#hand-porting-of-inceptionv4-and-inceptionresnetv2)\n\n## Installation\n\n1. [python3 with anaconda](https://www.continuum.io/downloads)\n2. [pytorch with/out CUDA](http://pytorch.org)\n3. 
`git clone https://github.com/Cadene/pretrained-models.pytorch.git`\n\n\n## Toy example\n\n\n- See [test/toy-example.py](https://github.com/Cadene/pretrained-models.pytorch/blob/master/test/toy-example.py) to compute logits of classes appearance with pretrained models on imagenet.\n\n`python test/toy-example.py -a fbresnet152`\n\n```python\nfrom PIL import Image\nimport torch\nimport torchvision.transforms as transforms\n\nimport sys\nsys.path.append('yourdir/pretrained-models.pytorch') # if needed\nimport pretrainedmodels\n\n# Load Model\nmodel_name = 'inceptionresnetv4' #fbresnet152\nmodel = pretrainedmodels.__dict__[model_name](num_classes=1000, pretrained='imagenet')\nmodel.eval()\n\n# Load One Input Image\npath_img = 'data/cat.jpg'\nwith open(path_img, 'rb') as f:\n    with Image.open(f) as img:\n        input_data = img.convert(model.input_space)\n\ntf = transforms.Compose([\n    transforms.Scale(round(max(model.input_size)*1.143)),\n    transforms.CenterCrop(max(model.input_size)),\n    transforms.ToTensor(),\n    transforms.Normalize(mean=model.mean, std=model.std)\n])\n\ninput_data = tf(input_data)          # 3x400x225 -> 3x299x299\ninput_data = input_data.unsqueeze(0) # 3x299x299 -> 1x3x299x299\ninput = torch.autograd.Variable(input_data)\n\n# Load Imagenet Synsets\nwith open('data/imagenet_synsets.txt', 'r') as f:\n    synsets = f.readlines()\n\n# len(synsets)==1001\n# sysnets[0] == background\nsynsets = [x.strip() for x in synsets]\nsplits = [line.split(' ') for line in synsets]\nkey_to_classname = {spl[0]:' '.join(spl[1:]) for spl in splits}\n\nwith open('data/imagenet_classes.txt', 'r') as f:\n    class_id_to_key = f.readlines()\n\nclass_id_to_key = [x.strip() for x in class_id_to_key]\n\n# Make predictions\noutput = model(input) # size(1, 1000)\nmax, argmax = output.data.squeeze().max(0)\nclass_id = argmax[0]\nclass_key = class_id_to_key[class_id]\nclassname = key_to_classname[class_key]\n\nprint(path_img, 'is a', classname) \n```\n\n- See also 
[test/imagenet.py](https://github.com/Cadene/pretrained-models.pytorch/blob/master/test/imagenet.py) to evaluate pretrained models on imagenet.\n\n\n## Evaluation on imagenet\n\n### Accuracy on validation set (single model)\n\nModel | Version | Acc@1 | Acc@5\n--- | --- | --- | ---\nNASNet-A-Large | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 82.693 | 96.163\nNASNet-A-Large | Our porting | 82.566 | 96.086\nInceptionResNetV2 | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 80.4 | 95.3\nInceptionV4 | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 80.2 | 95.3\nInceptionResNetV2 | Our porting | 80.170 | 95.234\nInceptionV4 | Our porting | 80.062 | 94.926\nResNeXt101_64x4d | [Torch7](https://github.com/facebookresearch/ResNeXt) | 79.6 | 94.7\nResNeXt101_64x4d | Our porting | 78.956 | 94.252\nResNeXt101_32x4d | [Torch7](https://github.com/facebookresearch/ResNeXt) | 78.8 | 94.4\nResNet152 | [Pytorch](https://github.com/pytorch/vision#models) | 78.428 | 94.110\nResNeXt101_32x4d | Our porting | 78.188 | 93.886\nFBResNet152 | [Torch7](https://github.com/facebook/fb.resnet.torch) | 77.84 | 93.84\nDenseNet161 | [Pytorch](https://github.com/pytorch/vision#models) | 77.560 | 93.798\nFBResNet152 | Our porting | 77.386 | 93.594\nInceptionV3 | [Pytorch](https://github.com/pytorch/vision#models) | 77.294 | 93.454\nDenseNet201 | [Pytorch](https://github.com/pytorch/vision#models) | 77.152 | 93.548\nResNet101 | [Pytorch](https://github.com/pytorch/vision#models) | 77.438 | 93.672\nDenseNet169 | [Pytorch](https://github.com/pytorch/vision#models) | 76.026 | 92.992\nResNet50 | [Pytorch](https://github.com/pytorch/vision#models) | 76.002 | 92.980\nDenseNet121 | [Pytorch](https://github.com/pytorch/vision#models) | 74.646 | 92.136\nVGG19_BN | [Pytorch](https://github.com/pytorch/vision#models) | 74.266 | 92.066\nResNet34 | [Pytorch](https://github.com/pytorch/vision#models) | 73.554 | 91.456\nBNInception | 
[Caffe](https://github.com/Cadene/tensorflow-model-zoo.torch/pull/2) | 73.522 | 91.560\nVGG16_BN | [Pytorch](https://github.com/pytorch/vision#models) | 73.518 | 91.608\nVGG19 | [Pytorch](https://github.com/pytorch/vision#models) | 72.080 | 90.822\nVGG16 | [Pytorch](https://github.com/pytorch/vision#models) | 71.636 | 90.354\nVGG13_BN | [Pytorch](https://github.com/pytorch/vision#models) | 71.508 | 90.494\nVGG11_BN | [Pytorch](https://github.com/pytorch/vision#models) | 70.452 | 89.818\nResNet18 | [Pytorch](https://github.com/pytorch/vision#models) | 70.142 | 89.274\nVGG13 | [Pytorch](https://github.com/pytorch/vision#models) | 69.662 | 89.264\nVGG11 | [Pytorch](https://github.com/pytorch/vision#models) | 68.970 | 88.746\nSqueezeNet1_1 | [Pytorch](https://github.com/pytorch/vision#models) | 58.250 | 80.800\nSqueezeNet1_0 | [Pytorch](https://github.com/pytorch/vision#models) | 58.108 | 80.428\nAlexnet | [Pytorch](https://github.com/pytorch/vision#models) | 56.432 | 79.194\n\n\n\n\n\n\n\nNote: the Pytorch version of ResNet152 is not a porting of the Torch7 but has been retrained by facebook.\n\nBeware, the accuracy reported here is not always representative of the transferable capacity of the network on other tasks and datasets. You must try them all! :P\n    \n### Reproducing results\n\nDownload the ImageNet dataset and move validation images to labeled subfolders\n\n```\npython test/imagenet.py /local/data/imagenet_2012/images --arch resnext101_32x4d -e\n```\n\n\n## Documentation\n\n### Available models\n\n#### NASNet*\n\nSource: [TensorFlow Slim repo](https://github.com/tensorflow/models/tree/master/slim)\n\n- `nasnetlarge(num_classes=1000, pretrained='imagenet')`\n- `nasnetlarge(num_classes=1001, pretrained='imagenet+background')`\n\n#### FaceBook ResNet*\n\nSource: [Torch7 repo of FaceBook](https://github.com/facebook/fb.resnet.torch)\n\nThere are a bit different from the ResNet* of torchvision. 
ResNet152 is currently the only one available.\n\n- `fbresnet152(num_classes=1000, pretrained='imagenet')`\n\n#### Inception*\n\nSource: [TensorFlow Slim repo](https://github.com/tensorflow/models/tree/master/slim) and [Pytorch/Vision repo](https://github.com/pytorch/vision/tree/master/torchvision) for `inceptionv3`\n\n- `inceptionresnetv2(num_classes=1000, pretrained='imagenet')`\n- `inceptionresnetv2(num_classes=1001, pretrained='imagenet+background')`\n- `inceptionv4(num_classes=1000, pretrained='imagenet')`\n- `inceptionv4(num_classes=1001, pretrained='imagenet+background')`\n- `inceptionv3(num_classes=1000, pretrained='imagenet')`\n\n#### BNInception\n\nSource: [Trained with Caffe](https://github.com/Cadene/tensorflow-model-zoo.torch/pull/2) by [Xiong Yuanjun](http://yjxiong.me)\n\n- `bninception(num_classes=1000, pretrained='imagenet')`\n\n#### ResNeXt*\n\nSource: [ResNeXt repo of FaceBook](https://github.com/facebookresearch/ResNeXt)\n\n- `resnext101_32x4d(num_classes=1000, pretrained='imagenet')`\n- `resnext101_62x4d(num_classes=1000, pretrained='imagenet')`\n\n#### TorchVision\nSource: [Pytorch/Vision repo](https://github.com/pytorch/vision/tree/master/torchvision)\n\n(`inceptionv3` included in [Inception*](https://github.com/Cadene/pretrained-models.pytorch#inception))\n\n- `resnet18(num_classes=1000, pretrained='imagenet')`\n- `resnet34(num_classes=1000, pretrained='imagenet')`\n- `resnet50(num_classes=1000, pretrained='imagenet')`\n- `resnet101(num_classes=1000, pretrained='imagenet')`\n- `resnet152(num_classes=1000, pretrained='imagenet')`\n- `densenet121(num_classes=1000, pretrained='imagenet')`\n- `densenet161(num_classes=1000, pretrained='imagenet')`\n- `densenet169(num_classes=1000, pretrained='imagenet')`\n- `densenet201(num_classes=1000, pretrained='imagenet')`\n- `squeezenet1_0(num_classes=1000, pretrained='imagenet')`\n- `squeezenet1_1(num_classes=1000, pretrained='imagenet')`\n- `alexnet(num_classes=1000, pretrained='imagenet')`\n- 
`vgg11(num_classes=1000, pretrained='imagenet')`\n- `vgg13(num_classes=1000, pretrained='imagenet')`\n- `vgg16(num_classes=1000, pretrained='imagenet')`\n- `vgg19(num_classes=1000, pretrained='imagenet')`\n- `vgg11_bn(num_classes=1000, pretrained='imagenet')`\n- `vgg13_bn(num_classes=1000, pretrained='imagenet')`\n- `vgg16_bn(num_classes=1000, pretrained='imagenet')`\n- `vgg19_bn(num_classes=1000, pretrained='imagenet')`\n\n\n### Model API\n\nOnce a pretrained model has been loaded, you can use it that way.\n\n**Important note**: All image must be loaded using `PIL` which scales the pixel values between 0 and 1.\n\n#### `model.input_size`\n\nAttribut of type `list` composed of 3 numbers:\n\n- number of color channels,\n- height of the input image,\n- width of the input image.\n\nExample:\n\n- `[3, 299, 299]` for inception* networks,\n- `[3, 224, 224]` for resnet* networks.\n\n\n#### `model.input_space`\n\nAttribut of type `str` representating the color space of the image. Can be `RGB` or `BGR`.\n\n\n#### `model.input_range`\n\nAttribut of type `list` composed of 2 numbers:\n\n- min pixel value,\n- max pixel value.\n\nExample:\n\n- `[0, 1]` for resnet* and inception* networks,\n- `[0, 255]` for bninception network.\n\n\n#### `model.mean`\n\nAttribut of type `list` composed of 3 numbers which are used to normalize the input image (substract \"color-channel-wise\").\n\nExample:\n\n- `[0.5, 0.5, 0.5]` for inception* networks,\n- `[0.485, 0.456, 0.406]` for resnet* networks.\n\n\n#### `model.std`\n\nAttribut of type `list` composed of 3 numbers which are used to normalize the input image (divide \"color-channel-wise\").\n\nExample:\n\n- `[0.5, 0.5, 0.5]` for inception* networks,\n- `[0.229, 0.224, 0.225]` for resnet* networks.\n\n\n#### `model.features`\n\n/!\\ work in progress (may not be available)\n\nMethod which is used to extract the features from the image.\n\nExample when the model is loaded using `fbresnet152`:\n\n```python\nprint(input_224.size())            # 
(1,3,224,224)\noutput = model.features(input_224) \nprint(output.size())               # (1,2048,1,1)\n\n# print(input_448.size())          # (1,3,448,448)\noutput = model.features(input_448)\n# print(output.size())             # (1,2048,7,7)\n```\n\n\n#### `model.classif`\n\n/!\\ work in progress (may not be available)\n\nMethod which is used to classify the features from the image.\n\nExample when the model is loaded using `fbresnet152`:\n\n```python\noutput = model.features(input_224) \noutput = output.view(1,-1)\nprint(output.size())               # (1,2048)\noutput = model.classif(output)\nprint(output.size())               # (1,1000)\n```\n\n#### `model.forward`\n\nMethod used to call `model.features` and `model.classif`. It can be overwritten as desired.\n\n**Important note**: A good practice is to use `model.__call__` as your function of choice to forward an input to your model. See the example bellow.\n\n```python\n# Without model.__call__\noutput = model.forward(input_224)\nprint(output.size())      # (1,1000)\n\n# With model.__call__\noutput = model(input_224)\nprint(output.size())      # (1,1000)\n```\n\n\n## Reproducing\n\n### Hand porting of ResNet152\n\n```\nth pretrainedmodels/fbresnet/resnet152_dump.lua\npython pretrainedmodels/fbresnet/resnet152_load.py\n```\n\n### Automatic porting of ResNeXt\n\nhttps://github.com/clcarwin/convert_torch_to_pytorch\n\n### Hand porting of NASNet, InceptionV4 and InceptionResNetV2\n\nhttps://github.com/Cadene/tensorflow-model-zoo.torch\n\n\n## Acknowledgement\n\nThanks to the deep learning community and especially to the contributers of the pytorch ecosystem."
  },
  {
    "path": "pretrained_models_pytorch/data/imagenet_classes.txt",
    "content": "n01440764\nn01443537\nn01484850\nn01491361\nn01494475\nn01496331\nn01498041\nn01514668\nn01514859\nn01518878\nn01530575\nn01531178\nn01532829\nn01534433\nn01537544\nn01558993\nn01560419\nn01580077\nn01582220\nn01592084\nn01601694\nn01608432\nn01614925\nn01616318\nn01622779\nn01629819\nn01630670\nn01631663\nn01632458\nn01632777\nn01641577\nn01644373\nn01644900\nn01664065\nn01665541\nn01667114\nn01667778\nn01669191\nn01675722\nn01677366\nn01682714\nn01685808\nn01687978\nn01688243\nn01689811\nn01692333\nn01693334\nn01694178\nn01695060\nn01697457\nn01698640\nn01704323\nn01728572\nn01728920\nn01729322\nn01729977\nn01734418\nn01735189\nn01737021\nn01739381\nn01740131\nn01742172\nn01744401\nn01748264\nn01749939\nn01751748\nn01753488\nn01755581\nn01756291\nn01768244\nn01770081\nn01770393\nn01773157\nn01773549\nn01773797\nn01774384\nn01774750\nn01775062\nn01776313\nn01784675\nn01795545\nn01796340\nn01797886\nn01798484\nn01806143\nn01806567\nn01807496\nn01817953\nn01818515\nn01819313\nn01820546\nn01824575\nn01828970\nn01829413\nn01833805\nn01843065\nn01843383\nn01847000\nn01855032\nn01855672\nn01860187\nn01871265\nn01872401\nn01873310\nn01877812\nn01882714\nn01883070\nn01910747\nn01914609\nn01917289\nn01924916\nn01930112\nn01943899\nn01944390\nn01945685\nn01950731\nn01955084\nn01968897\nn01978287\nn01978455\nn01980166\nn01981276\nn01983481\nn01984695\nn01985128\nn01986214\nn01990800\nn02002556\nn02002724\nn02006656\nn02007558\nn02009229\nn02009912\nn02011460\nn02012849\nn02013706\nn02017213\nn02018207\nn02018795\nn02025239\nn02027492\nn02028035\nn02033041\nn02037110\nn02051845\nn02056570\nn02058221\nn02066245\nn02071294\nn02074367\nn02077923\nn02085620\nn02085782\nn02085936\nn02086079\nn02086240\nn02086646\nn02086910\nn02087046\nn02087394\nn02088094\nn02088238\nn02088364\nn02088466\nn02088632\nn02089078\nn02089867\nn02089973\nn02090379\nn02090622\nn02090721\nn02091032\nn02091134\nn02091244\nn02091467\nn02091635\nn02091831\nn02092002\nn02092339\nn02093256\nn020
93428\nn02093647\nn02093754\nn02093859\nn02093991\nn02094114\nn02094258\nn02094433\nn02095314\nn02095570\nn02095889\nn02096051\nn02096177\nn02096294\nn02096437\nn02096585\nn02097047\nn02097130\nn02097209\nn02097298\nn02097474\nn02097658\nn02098105\nn02098286\nn02098413\nn02099267\nn02099429\nn02099601\nn02099712\nn02099849\nn02100236\nn02100583\nn02100735\nn02100877\nn02101006\nn02101388\nn02101556\nn02102040\nn02102177\nn02102318\nn02102480\nn02102973\nn02104029\nn02104365\nn02105056\nn02105162\nn02105251\nn02105412\nn02105505\nn02105641\nn02105855\nn02106030\nn02106166\nn02106382\nn02106550\nn02106662\nn02107142\nn02107312\nn02107574\nn02107683\nn02107908\nn02108000\nn02108089\nn02108422\nn02108551\nn02108915\nn02109047\nn02109525\nn02109961\nn02110063\nn02110185\nn02110341\nn02110627\nn02110806\nn02110958\nn02111129\nn02111277\nn02111500\nn02111889\nn02112018\nn02112137\nn02112350\nn02112706\nn02113023\nn02113186\nn02113624\nn02113712\nn02113799\nn02113978\nn02114367\nn02114548\nn02114712\nn02114855\nn02115641\nn02115913\nn02116738\nn02117135\nn02119022\nn02119789\nn02120079\nn02120505\nn02123045\nn02123159\nn02123394\nn02123597\nn02124075\nn02125311\nn02127052\nn02128385\nn02128757\nn02128925\nn02129165\nn02129604\nn02130308\nn02132136\nn02133161\nn02134084\nn02134418\nn02137549\nn02138441\nn02165105\nn02165456\nn02167151\nn02168699\nn02169497\nn02172182\nn02174001\nn02177972\nn02190166\nn02206856\nn02219486\nn02226429\nn02229544\nn02231487\nn02233338\nn02236044\nn02256656\nn02259212\nn02264363\nn02268443\nn02268853\nn02276258\nn02277742\nn02279972\nn02280649\nn02281406\nn02281787\nn02317335\nn02319095\nn02321529\nn02325366\nn02326432\nn02328150\nn02342885\nn02346627\nn02356798\nn02361337\nn02363005\nn02364673\nn02389026\nn02391049\nn02395406\nn02396427\nn02397096\nn02398521\nn02403003\nn02408429\nn02410509\nn02412080\nn02415577\nn02417914\nn02422106\nn02422699\nn02423022\nn02437312\nn02437616\nn02441942\nn02442845\nn02443114\nn02443484\nn02444819\nn02445715\nn0
2447366\nn02454379\nn02457408\nn02480495\nn02480855\nn02481823\nn02483362\nn02483708\nn02484975\nn02486261\nn02486410\nn02487347\nn02488291\nn02488702\nn02489166\nn02490219\nn02492035\nn02492660\nn02493509\nn02493793\nn02494079\nn02497673\nn02500267\nn02504013\nn02504458\nn02509815\nn02510455\nn02514041\nn02526121\nn02536864\nn02606052\nn02607072\nn02640242\nn02641379\nn02643566\nn02655020\nn02666196\nn02667093\nn02669723\nn02672831\nn02676566\nn02687172\nn02690373\nn02692877\nn02699494\nn02701002\nn02704792\nn02708093\nn02727426\nn02730930\nn02747177\nn02749479\nn02769748\nn02776631\nn02777292\nn02782093\nn02783161\nn02786058\nn02787622\nn02788148\nn02790996\nn02791124\nn02791270\nn02793495\nn02794156\nn02795169\nn02797295\nn02799071\nn02802426\nn02804414\nn02804610\nn02807133\nn02808304\nn02808440\nn02814533\nn02814860\nn02815834\nn02817516\nn02823428\nn02823750\nn02825657\nn02834397\nn02835271\nn02837789\nn02840245\nn02841315\nn02843684\nn02859443\nn02860847\nn02865351\nn02869837\nn02870880\nn02871525\nn02877765\nn02879718\nn02883205\nn02892201\nn02892767\nn02894605\nn02895154\nn02906734\nn02909870\nn02910353\nn02916936\nn02917067\nn02927161\nn02930766\nn02939185\nn02948072\nn02950826\nn02951358\nn02951585\nn02963159\nn02965783\nn02966193\nn02966687\nn02971356\nn02974003\nn02977058\nn02978881\nn02979186\nn02980441\nn02981792\nn02988304\nn02992211\nn02992529\nn02999410\nn03000134\nn03000247\nn03000684\nn03014705\nn03016953\nn03017168\nn03018349\nn03026506\nn03028079\nn03032252\nn03041632\nn03042490\nn03045698\nn03047690\nn03062245\nn03063599\nn03063689\nn03065424\nn03075370\nn03085013\nn03089624\nn03095699\nn03100240\nn03109150\nn03110669\nn03124043\nn03124170\nn03125729\nn03126707\nn03127747\nn03127925\nn03131574\nn03133878\nn03134739\nn03141823\nn03146219\nn03160309\nn03179701\nn03180011\nn03187595\nn03188531\nn03196217\nn03197337\nn03201208\nn03207743\nn03207941\nn03208938\nn03216828\nn03218198\nn03220513\nn03223299\nn03240683\nn03249569\nn03250847\nn03255030\n
n03259280\nn03271574\nn03272010\nn03272562\nn03290653\nn03291819\nn03297495\nn03314780\nn03325584\nn03337140\nn03344393\nn03345487\nn03347037\nn03355925\nn03372029\nn03376595\nn03379051\nn03384352\nn03388043\nn03388183\nn03388549\nn03393912\nn03394916\nn03400231\nn03404251\nn03417042\nn03424325\nn03425413\nn03443371\nn03444034\nn03445777\nn03445924\nn03447447\nn03447721\nn03450230\nn03452741\nn03457902\nn03459775\nn03461385\nn03467068\nn03476684\nn03476991\nn03478589\nn03481172\nn03482405\nn03483316\nn03485407\nn03485794\nn03492542\nn03494278\nn03495258\nn03496892\nn03498962\nn03527444\nn03529860\nn03530642\nn03532672\nn03534580\nn03535780\nn03538406\nn03544143\nn03584254\nn03584829\nn03590841\nn03594734\nn03594945\nn03595614\nn03598930\nn03599486\nn03602883\nn03617480\nn03623198\nn03627232\nn03630383\nn03633091\nn03637318\nn03642806\nn03649909\nn03657121\nn03658185\nn03661043\nn03662601\nn03666591\nn03670208\nn03673027\nn03676483\nn03680355\nn03690938\nn03691459\nn03692522\nn03697007\nn03706229\nn03709823\nn03710193\nn03710637\nn03710721\nn03717622\nn03720891\nn03721384\nn03724870\nn03729826\nn03733131\nn03733281\nn03733805\nn03742115\nn03743016\nn03759954\nn03761084\nn03763968\nn03764736\nn03769881\nn03770439\nn03770679\nn03773504\nn03775071\nn03775546\nn03776460\nn03777568\nn03777754\nn03781244\nn03782006\nn03785016\nn03786901\nn03787032\nn03788195\nn03788365\nn03791053\nn03792782\nn03792972\nn03793489\nn03794056\nn03796401\nn03803284\nn03804744\nn03814639\nn03814906\nn03825788\nn03832673\nn03837869\nn03838899\nn03840681\nn03841143\nn03843555\nn03854065\nn03857828\nn03866082\nn03868242\nn03868863\nn03871628\nn03873416\nn03874293\nn03874599\nn03876231\nn03877472\nn03877845\nn03884397\nn03887697\nn03888257\nn03888605\nn03891251\nn03891332\nn03895866\nn03899768\nn03902125\nn03903868\nn03908618\nn03908714\nn03916031\nn03920288\nn03924679\nn03929660\nn03929855\nn03930313\nn03930630\nn03933933\nn03935335\nn03937543\nn03938244\nn03942813\nn03944341\nn03947888\nn03950228
\nn03954731\nn03956157\nn03958227\nn03961711\nn03967562\nn03970156\nn03976467\nn03976657\nn03977966\nn03980874\nn03982430\nn03983396\nn03991062\nn03992509\nn03995372\nn03998194\nn04004767\nn04005630\nn04008634\nn04009552\nn04019541\nn04023962\nn04026417\nn04033901\nn04033995\nn04037443\nn04039381\nn04040759\nn04041544\nn04044716\nn04049303\nn04065272\nn04067472\nn04069434\nn04070727\nn04074963\nn04081281\nn04086273\nn04090263\nn04099969\nn04111531\nn04116512\nn04118538\nn04118776\nn04120489\nn04125021\nn04127249\nn04131690\nn04133789\nn04136333\nn04141076\nn04141327\nn04141975\nn04146614\nn04147183\nn04149813\nn04152593\nn04153751\nn04154565\nn04162706\nn04179913\nn04192698\nn04200800\nn04201297\nn04204238\nn04204347\nn04208210\nn04209133\nn04209239\nn04228054\nn04229816\nn04235860\nn04238763\nn04239074\nn04243546\nn04251144\nn04252077\nn04252225\nn04254120\nn04254680\nn04254777\nn04258138\nn04259630\nn04263257\nn04264628\nn04265275\nn04266014\nn04270147\nn04273569\nn04275548\nn04277352\nn04285008\nn04286575\nn04296562\nn04310018\nn04311004\nn04311174\nn04317175\nn04325704\nn04326547\nn04328186\nn04330267\nn04332243\nn04335435\nn04336792\nn04344873\nn04346328\nn04347754\nn04350905\nn04355338\nn04355933\nn04356056\nn04357314\nn04366367\nn04367480\nn04370456\nn04371430\nn04371774\nn04372370\nn04376876\nn04380533\nn04389033\nn04392985\nn04398044\nn04399382\nn04404412\nn04409515\nn04417672\nn04418357\nn04423845\nn04428191\nn04429376\nn04435653\nn04442312\nn04443257\nn04447861\nn04456115\nn04458633\nn04461696\nn04462240\nn04465501\nn04467665\nn04476259\nn04479046\nn04482393\nn04483307\nn04485082\nn04486054\nn04487081\nn04487394\nn04493381\nn04501370\nn04505470\nn04507155\nn04509417\nn04515003\nn04517823\nn04522168\nn04523525\nn04525038\nn04525305\nn04532106\nn04532670\nn04536866\nn04540053\nn04542943\nn04548280\nn04548362\nn04550184\nn04552348\nn04553703\nn04554684\nn04557648\nn04560804\nn04562935\nn04579145\nn04579432\nn04584207\nn04589890\nn04590129\nn04591157\nn045917
13\nn04592741\nn04596742\nn04597913\nn04599235\nn04604644\nn04606251\nn04612504\nn04613696\nn06359193\nn06596364\nn06785654\nn06794110\nn06874185\nn07248320\nn07565083\nn07579787\nn07583066\nn07584110\nn07590611\nn07613480\nn07614500\nn07615774\nn07684084\nn07693725\nn07695742\nn07697313\nn07697537\nn07711569\nn07714571\nn07714990\nn07715103\nn07716358\nn07716906\nn07717410\nn07717556\nn07718472\nn07718747\nn07720875\nn07730033\nn07734744\nn07742313\nn07745940\nn07747607\nn07749582\nn07753113\nn07753275\nn07753592\nn07754684\nn07760859\nn07768694\nn07802026\nn07831146\nn07836838\nn07860988\nn07871810\nn07873807\nn07875152\nn07880968\nn07892512\nn07920052\nn07930864\nn07932039\nn09193705\nn09229709\nn09246464\nn09256479\nn09288635\nn09332890\nn09399592\nn09421951\nn09428293\nn09468604\nn09472597\nn09835506\nn10148035\nn10565667\nn11879895\nn11939491\nn12057211\nn12144580\nn12267677\nn12620546\nn12768682\nn12985857\nn12998815\nn13037406\nn13040303\nn13044778\nn13052670\nn13054560\nn13133613\nn15075141"
  },
  {
    "path": "pretrained_models_pytorch/data/imagenet_synsets.txt",
    "content": "????????? dummy class for index 0\nn02119789 kit fox, Vulpes macrotis\nn02100735 English setter\nn02110185 Siberian husky\nn02096294 Australian terrier\nn02102040 English springer, English springer spaniel\nn02066245 grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus\nn02509815 lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens\nn02124075 Egyptian cat\nn02417914 ibex, Capra ibex\nn02123394 Persian cat\nn02125311 cougar, puma, catamount, mountain lion, painter, panther, Felis concolor\nn02423022 gazelle\nn02346627 porcupine, hedgehog\nn02077923 sea lion\nn02110063 malamute, malemute, Alaskan malamute\nn02447366 badger\nn02109047 Great Dane\nn02089867 Walker hound, Walker foxhound\nn02102177 Welsh springer spaniel\nn02091134 whippet\nn02092002 Scottish deerhound, deerhound\nn02071294 killer whale, killer, orca, grampus, sea wolf, Orcinus orca\nn02442845 mink\nn02504458 African elephant, Loxodonta africana\nn02092339 Weimaraner\nn02098105 soft-coated wheaten terrier\nn02096437 Dandie Dinmont, Dandie Dinmont terrier\nn02114712 red wolf, maned wolf, Canis rufus, Canis niger\nn02105641 Old English sheepdog, bobtail\nn02128925 jaguar, panther, Panthera onca, Felis onca\nn02091635 otterhound, otter hound\nn02088466 bloodhound, sleuthhound\nn02096051 Airedale, Airedale terrier\nn02117135 hyena, hyaena\nn02138441 meerkat, mierkat\nn02097130 giant schnauzer\nn02493509 titi, titi monkey\nn02457408 three-toed sloth, ai, Bradypus tridactylus\nn02389026 sorrel\nn02443484 black-footed ferret, ferret, Mustela nigripes\nn02110341 dalmatian, coach dog, carriage dog\nn02089078 black-and-tan coonhound\nn02086910 papillon\nn02445715 skunk, polecat, wood pussy\nn02093256 Staffordshire bullterrier, Staffordshire bull terrier\nn02113978 Mexican hairless\nn02106382 Bouvier des Flandres, Bouviers des Flandres\nn02441942 weasel\nn02113712 miniature poodle\nn02113186 Cardigan, Cardigan Welsh corgi\nn02105162 malinois\nn02415577 
bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis\nn02356798 fox squirrel, eastern fox squirrel, Sciurus niger\nn02488702 colobus, colobus monkey\nn02123159 tiger cat\nn02098413 Lhasa, Lhasa apso\nn02422699 impala, Aepyceros melampus\nn02114855 coyote, prairie wolf, brush wolf, Canis latrans\nn02094433 Yorkshire terrier\nn02111277 Newfoundland, Newfoundland dog\nn02132136 brown bear, bruin, Ursus arctos\nn02119022 red fox, Vulpes vulpes\nn02091467 Norwegian elkhound, elkhound\nn02106550 Rottweiler\nn02422106 hartebeest\nn02091831 Saluki, gazelle hound\nn02120505 grey fox, gray fox, Urocyon cinereoargenteus\nn02104365 schipperke\nn02086079 Pekinese, Pekingese, Peke\nn02112706 Brabancon griffon\nn02098286 West Highland white terrier\nn02095889 Sealyham terrier, Sealyham\nn02484975 guenon, guenon monkey\nn02137549 mongoose\nn02500267 indri, indris, Indri indri, Indri brevicaudatus\nn02129604 tiger, Panthera tigris\nn02090721 Irish wolfhound\nn02396427 wild boar, boar, Sus scrofa\nn02108000 EntleBucher\nn02391049 zebra\nn02412080 ram, tup\nn02108915 French bulldog\nn02480495 orangutan, orang, orangutang, Pongo pygmaeus\nn02110806 basenji\nn02128385 leopard, Panthera pardus\nn02107683 Bernese mountain dog\nn02085936 Maltese dog, Maltese terrier, Maltese\nn02094114 Norfolk terrier\nn02087046 toy terrier\nn02100583 vizsla, Hungarian pointer\nn02096177 cairn, cairn terrier\nn02494079 squirrel monkey, Saimiri sciureus\nn02105056 groenendael\nn02101556 clumber, clumber spaniel\nn02123597 Siamese cat, Siamese\nn02481823 chimpanzee, chimp, Pan troglodytes\nn02105505 komondor\nn02088094 Afghan hound, Afghan\nn02085782 Japanese spaniel\nn02489166 proboscis monkey, Nasalis larvatus\nn02364673 guinea pig, Cavia cobaya\nn02114548 white wolf, Arctic wolf, Canis lupus tundrarum\nn02134084 ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus\nn02480855 gorilla, Gorilla gorilla\nn02090622 borzoi, Russian wolfhound\nn02113624 toy 
poodle\nn02093859 Kerry blue terrier\nn02403003 ox\nn02097298 Scotch terrier, Scottish terrier, Scottie\nn02108551 Tibetan mastiff\nn02493793 spider monkey, Ateles geoffroyi\nn02107142 Doberman, Doberman pinscher\nn02096585 Boston bull, Boston terrier\nn02107574 Greater Swiss Mountain dog\nn02107908 Appenzeller\nn02086240 Shih-Tzu\nn02102973 Irish water spaniel\nn02112018 Pomeranian\nn02093647 Bedlington terrier\nn02397096 warthog\nn02437312 Arabian camel, dromedary, Camelus dromedarius\nn02483708 siamang, Hylobates syndactylus, Symphalangus syndactylus\nn02097047 miniature schnauzer\nn02106030 collie\nn02099601 golden retriever\nn02093991 Irish terrier\nn02110627 affenpinscher, monkey pinscher, monkey dog\nn02106166 Border collie\nn02326432 hare\nn02108089 boxer\nn02097658 silky terrier, Sydney silky\nn02088364 beagle\nn02111129 Leonberg\nn02100236 German short-haired pointer\nn02486261 patas, hussar monkey, Erythrocebus patas\nn02115913 dhole, Cuon alpinus\nn02486410 baboon\nn02487347 macaque\nn02099849 Chesapeake Bay retriever\nn02108422 bull mastiff\nn02104029 kuvasz\nn02492035 capuchin, ringtail, Cebus capucinus\nn02110958 pug, pug-dog\nn02099429 curly-coated retriever\nn02094258 Norwich terrier\nn02099267 flat-coated retriever\nn02395406 hog, pig, grunter, squealer, Sus scrofa\nn02112350 keeshond\nn02109961 Eskimo dog, husky\nn02101388 Brittany spaniel\nn02113799 standard poodle\nn02095570 Lakeland terrier\nn02128757 snow leopard, ounce, Panthera uncia\nn02101006 Gordon setter\nn02115641 dingo, warrigal, warragal, Canis dingo\nn02097209 standard schnauzer\nn02342885 hamster\nn02097474 Tibetan terrier, chrysanthemum dog\nn02120079 Arctic fox, white fox, Alopex lagopus\nn02095314 wire-haired fox terrier\nn02088238 basset, basset hound\nn02408429 water buffalo, water ox, Asiatic buffalo, Bubalus bubalis\nn02133161 American black bear, black bear, Ursus americanus, Euarctos americanus\nn02328150 Angora, Angora rabbit\nn02410509 bison\nn02492660 howler monkey, 
howler\nn02398521 hippopotamus, hippo, river horse, Hippopotamus amphibius\nn02112137 chow, chow chow\nn02510455 giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca\nn02093428 American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier\nn02105855 Shetland sheepdog, Shetland sheep dog, Shetland\nn02111500 Great Pyrenees\nn02085620 Chihuahua\nn02123045 tabby, tabby cat\nn02490219 marmoset\nn02099712 Labrador retriever\nn02109525 Saint Bernard, St Bernard\nn02454379 armadillo\nn02111889 Samoyed, Samoyede\nn02088632 bluetick\nn02090379 redbone\nn02443114 polecat, fitch, foulmart, foumart, Mustela putorius\nn02361337 marmot\nn02105412 kelpie\nn02483362 gibbon, Hylobates lar\nn02437616 llama\nn02107312 miniature pinscher\nn02325366 wood rabbit, cottontail, cottontail rabbit\nn02091032 Italian greyhound\nn02129165 lion, king of beasts, Panthera leo\nn02102318 cocker spaniel, English cocker spaniel, cocker\nn02100877 Irish setter, red setter\nn02074367 dugong, Dugong dugon\nn02504013 Indian elephant, Elephas maximus\nn02363005 beaver\nn02102480 Sussex spaniel\nn02113023 Pembroke, Pembroke Welsh corgi\nn02086646 Blenheim spaniel\nn02497673 Madagascar cat, ring-tailed lemur, Lemur catta\nn02087394 Rhodesian ridgeback\nn02127052 lynx, catamount\nn02116738 African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus\nn02488291 langur\nn02091244 Ibizan hound, Ibizan Podenco\nn02114367 timber wolf, grey wolf, gray wolf, Canis lupus\nn02130308 cheetah, chetah, Acinonyx jubatus\nn02089973 English foxhound\nn02105251 briard\nn02134418 sloth bear, Melursus ursinus, Ursus ursinus\nn02093754 Border terrier\nn02106662 German shepherd, German shepherd dog, German police dog, alsatian\nn02444819 otter\nn01882714 koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus\nn01871265 tusker\nn01872401 echidna, spiny anteater, anteater\nn01877812 wallaby, brush kangaroo\nn01873310 platypus, duckbill, duckbilled platypus, 
duck-billed platypus, Ornithorhynchus anatinus\nn01883070 wombat\nn04086273 revolver, six-gun, six-shooter\nn04507155 umbrella\nn04147183 schooner\nn04254680 soccer ball\nn02672831 accordion, piano accordion, squeeze box\nn02219486 ant, emmet, pismire\nn02317335 starfish, sea star\nn01968897 chambered nautilus, pearly nautilus, nautilus\nn03452741 grand piano, grand\nn03642806 laptop, laptop computer\nn07745940 strawberry\nn02690373 airliner\nn04552348 warplane, military plane\nn02692877 airship, dirigible\nn02782093 balloon\nn04266014 space shuttle\nn03344393 fireboat\nn03447447 gondola\nn04273569 speedboat\nn03662601 lifeboat\nn02951358 canoe\nn04612504 yawl\nn02981792 catamaran\nn04483307 trimaran\nn03095699 container ship, containership, container vessel\nn03673027 liner, ocean liner\nn03947888 pirate, pirate ship\nn02687172 aircraft carrier, carrier, flattop, attack aircraft carrier\nn04347754 submarine, pigboat, sub, U-boat\nn04606251 wreck\nn03478589 half track\nn04389033 tank, army tank, armored combat vehicle, armoured combat vehicle\nn03773504 missile\nn02860847 bobsled, bobsleigh, bob\nn03218198 dogsled, dog sled, dog sleigh\nn02835271 bicycle-built-for-two, tandem bicycle, tandem\nn03792782 mountain bike, all-terrain bike, off-roader\nn03393912 freight car\nn03895866 passenger car, coach, carriage\nn02797295 barrow, garden cart, lawn cart, wheelbarrow\nn04204347 shopping cart\nn03791053 motor scooter, scooter\nn03384352 forklift\nn03272562 electric locomotive\nn04310018 steam locomotive\nn02704792 amphibian, amphibious vehicle\nn02701002 ambulance\nn02814533 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon\nn02930766 cab, hack, taxi, taxicab\nn03100240 convertible\nn03594945 jeep, landrover\nn03670208 limousine, limo\nn03770679 minivan\nn03777568 Model T\nn04037443 racer, race car, racing car\nn04285008 sports car, sport car\nn03444034 go-kart\nn03445924 golfcart, golf cart\nn03785016 moped\nn04252225 snowplow, 
snowplough\nn03345487 fire engine, fire truck\nn03417042 garbage truck, dustcart\nn03930630 pickup, pickup truck\nn04461696 tow truck, tow car, wrecker\nn04467665 trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi\nn03796401 moving van\nn03977966 police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria\nn04065272 recreational vehicle, RV, R.V.\nn04335435 streetcar, tram, tramcar, trolley, trolley car\nn04252077 snowmobile\nn04465501 tractor\nn03776460 mobile home, manufactured home\nn04482393 tricycle, trike, velocipede\nn04509417 unicycle, monocycle\nn03538406 horse cart, horse-cart\nn03599486 jinrikisha, ricksha, rickshaw\nn03868242 oxcart\nn02804414 bassinet\nn03125729 cradle\nn03131574 crib, cot\nn03388549 four-poster\nn02870880 bookcase\nn03018349 china cabinet, china closet\nn03742115 medicine chest, medicine cabinet\nn03016953 chiffonier, commode\nn04380533 table lamp\nn03337140 file, file cabinet, filing cabinet\nn03891251 park bench\nn02791124 barber chair\nn04429376 throne\nn03376595 folding chair\nn04099969 rocking chair, rocker\nn04344873 studio couch, day bed\nn04447861 toilet seat\nn03179701 desk\nn03982430 pool table, billiard table, snooker table\nn03201208 dining table, board\nn03290653 entertainment center\nn04550184 wardrobe, closet, press\nn07742313 Granny Smith\nn07747607 orange\nn07749582 lemon\nn07753113 fig\nn07753275 pineapple, ananas\nn07753592 banana\nn07754684 jackfruit, jak, jack\nn07760859 custard apple\nn07768694 pomegranate\nn12267677 acorn\nn12620546 hip, rose hip, rosehip\nn13133613 ear, spike, capitulum\nn11879895 rapeseed\nn12144580 corn\nn12768682 buckeye, horse chestnut, conker\nn03854065 organ, pipe organ\nn04515003 upright, upright piano\nn03017168 chime, bell, gong\nn03249569 drum, membranophone, tympan\nn03447721 gong, tam-tam\nn03720891 maraca\nn03721384 marimba, xylophone\nn04311174 steel drum\nn02787622 banjo\nn02992211 cello, violoncello\nn04536866 violin, fiddle\nn03495258 
harp\nn02676566 acoustic guitar\nn03272010 electric guitar\nn03110669 cornet, horn, trumpet, trump\nn03394916 French horn, horn\nn04487394 trombone\nn03494278 harmonica, mouth organ, harp, mouth harp\nn03840681 ocarina, sweet potato\nn03884397 panpipe, pandean pipe, syrinx\nn02804610 bassoon\nn03838899 oboe, hautboy, hautbois\nn04141076 sax, saxophone\nn03372029 flute, transverse flute\nn11939491 daisy\nn12057211 yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum\nn09246464 cliff, drop, drop-off\nn09468604 valley, vale\nn09193705 alp\nn09472597 volcano\nn09399592 promontory, headland, head, foreland\nn09421951 sandbar, sand bar\nn09256479 coral reef\nn09332890 lakeside, lakeshore\nn09428293 seashore, coast, seacoast, sea-coast\nn09288635 geyser\nn03498962 hatchet\nn03041632 cleaver, meat cleaver, chopper\nn03658185 letter opener, paper knife, paperknife\nn03954731 plane, carpenter's plane, woodworking plane\nn03995372 power drill\nn03649909 lawn mower, mower\nn03481172 hammer\nn03109150 corkscrew, bottle screw\nn02951585 can opener, tin opener\nn03970156 plunger, plumber's helper\nn04154565 screwdriver\nn04208210 shovel\nn03967562 plow, plough\nn03000684 chain saw, chainsaw\nn01514668 cock\nn01514859 hen\nn01518878 ostrich, Struthio camelus\nn01530575 brambling, Fringilla montifringilla\nn01531178 goldfinch, Carduelis carduelis\nn01532829 house finch, linnet, Carpodacus mexicanus\nn01534433 junco, snowbird\nn01537544 indigo bunting, indigo finch, indigo bird, Passerina cyanea\nn01558993 robin, American robin, Turdus migratorius\nn01560419 bulbul\nn01580077 jay\nn01582220 magpie\nn01592084 chickadee\nn01601694 water ouzel, dipper\nn01608432 kite\nn01614925 bald eagle, American eagle, Haliaeetus leucocephalus\nn01616318 vulture\nn01622779 great grey owl, great gray owl, Strix nebulosa\nn01795545 black grouse\nn01796340 ptarmigan\nn01797886 ruffed grouse, partridge, Bonasa umbellus\nn01798484 prairie chicken, prairie grouse, 
prairie fowl\nn01806143 peacock\nn01806567 quail\nn01807496 partridge\nn01817953 African grey, African gray, Psittacus erithacus\nn01818515 macaw\nn01819313 sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita\nn01820546 lorikeet\nn01824575 coucal\nn01828970 bee eater\nn01829413 hornbill\nn01833805 hummingbird\nn01843065 jacamar\nn01843383 toucan\nn01847000 drake\nn01855032 red-breasted merganser, Mergus serrator\nn01855672 goose\nn01860187 black swan, Cygnus atratus\nn02002556 white stork, Ciconia ciconia\nn02002724 black stork, Ciconia nigra\nn02006656 spoonbill\nn02007558 flamingo\nn02009912 American egret, great white heron, Egretta albus\nn02009229 little blue heron, Egretta caerulea\nn02011460 bittern\nn02012849 crane\nn02013706 limpkin, Aramus pictus\nn02018207 American coot, marsh hen, mud hen, water hen, Fulica americana\nn02018795 bustard\nn02025239 ruddy turnstone, Arenaria interpres\nn02027492 red-backed sandpiper, dunlin, Erolia alpina\nn02028035 redshank, Tringa totanus\nn02033041 dowitcher\nn02037110 oystercatcher, oyster catcher\nn02017213 European gallinule, Porphyrio porphyrio\nn02051845 pelican\nn02056570 king penguin, Aptenodytes patagonica\nn02058221 albatross, mollymawk\nn01484850 great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias\nn01491361 tiger shark, Galeocerdo cuvieri\nn01494475 hammerhead, hammerhead shark\nn01496331 electric ray, crampfish, numbfish, torpedo\nn01498041 stingray\nn02514041 barracouta, snoek\nn02536864 coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch\nn01440764 tench, Tinca tinca\nn01443537 goldfish, Carassius auratus\nn02526121 eel\nn02606052 rock beauty, Holocanthus tricolor\nn02607072 anemone fish\nn02643566 lionfish\nn02655020 puffer, pufferfish, blowfish, globefish\nn02640242 sturgeon\nn02641379 gar, garfish, garpike, billfish, Lepisosteus osseus\nn01664065 loggerhead, loggerhead turtle, Caretta caretta\nn01665541 leatherback turtle, leatherback, 
leathery turtle, Dermochelys coriacea\nn01667114 mud turtle\nn01667778 terrapin\nn01669191 box turtle, box tortoise\nn01675722 banded gecko\nn01677366 common iguana, iguana, Iguana iguana\nn01682714 American chameleon, anole, Anolis carolinensis\nn01685808 whiptail, whiptail lizard\nn01687978 agama\nn01688243 frilled lizard, Chlamydosaurus kingi\nn01689811 alligator lizard\nn01692333 Gila monster, Heloderma suspectum\nn01693334 green lizard, Lacerta viridis\nn01694178 African chameleon, Chamaeleo chamaeleon\nn01695060 Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis\nn01704323 triceratops\nn01697457 African crocodile, Nile crocodile, Crocodylus niloticus\nn01698640 American alligator, Alligator mississipiensis\nn01728572 thunder snake, worm snake, Carphophis amoenus\nn01728920 ringneck snake, ring-necked snake, ring snake\nn01729322 hognose snake, puff adder, sand viper\nn01729977 green snake, grass snake\nn01734418 king snake, kingsnake\nn01735189 garter snake, grass snake\nn01737021 water snake\nn01739381 vine snake\nn01740131 night snake, Hypsiglena torquata\nn01742172 boa constrictor, Constrictor constrictor\nn01744401 rock python, rock snake, Python sebae\nn01748264 Indian cobra, Naja naja\nn01749939 green mamba\nn01751748 sea snake\nn01753488 horned viper, cerastes, sand viper, horned asp, Cerastes cornutus\nn01755581 diamondback, diamondback rattlesnake, Crotalus adamanteus\nn01756291 sidewinder, horned rattlesnake, Crotalus cerastes\nn01629819 European fire salamander, Salamandra salamandra\nn01630670 common newt, Triturus vulgaris\nn01631663 eft\nn01632458 spotted salamander, Ambystoma maculatum\nn01632777 axolotl, mud puppy, Ambystoma mexicanum\nn01641577 bullfrog, Rana catesbeiana\nn01644373 tree frog, tree-frog\nn01644900 tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui\nn04579432 whistle\nn04592741 wing\nn03876231 paintbrush\nn03483316 hand blower, blow dryer, blow drier, hair dryer, hair drier\nn03868863 
oxygen mask\nn04251144 snorkel\nn03691459 loudspeaker, speaker, speaker unit, loudspeaker system, speaker system\nn03759954 microphone, mike\nn04152593 screen, CRT screen\nn03793489 mouse, computer mouse\nn03271574 electric fan, blower\nn03843555 oil filter\nn04332243 strainer\nn04265275 space heater\nn04330267 stove\nn03467068 guillotine\nn02794156 barometer\nn04118776 rule, ruler\nn03841143 odometer, hodometer, mileometer, milometer\nn04141975 scale, weighing machine\nn02708093 analog clock\nn03196217 digital clock\nn04548280 wall clock\nn03544143 hourglass\nn04355338 sundial\nn03891332 parking meter\nn04328186 stopwatch, stop watch\nn03197337 digital watch\nn04317175 stethoscope\nn04376876 syringe\nn03706229 magnetic compass\nn02841315 binoculars, field glasses, opera glasses\nn04009552 projector\nn04356056 sunglasses, dark glasses, shades\nn03692522 loupe, jeweler's loupe\nn04044716 radio telescope, radio reflector\nn02879718 bow\nn02950826 cannon\nn02749479 assault rifle, assault gun\nn04090263 rifle\nn04008634 projectile, missile\nn03085013 computer keyboard, keypad\nn04505470 typewriter keyboard\nn03126707 crane\nn03666591 lighter, light, igniter, ignitor\nn02666196 abacus\nn02977058 cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM\nn04238763 slide rule, slipstick\nn03180011 desktop computer\nn03485407 hand-held computer, hand-held microcomputer\nn03832673 notebook, notebook computer\nn06359193 web site, website, internet site, site\nn03496892 harvester, reaper\nn04428191 thresher, thrasher, threshing machine\nn04004767 printer\nn04243546 slot, one-armed bandit\nn04525305 vending machine\nn04179913 sewing machine\nn03602883 joystick\nn04372370 switch, electric switch, electrical switch\nn03532672 hook, claw\nn02974003 car wheel\nn03874293 paddlewheel, paddle wheel\nn03944341 pinwheel\nn03992509 potter's wheel\nn03425413 gas pump, gasoline pump, petrol pump, island dispenser\nn02966193 
carousel, carrousel, merry-go-round, roundabout, whirligig\nn04371774 swing\nn04067472 reel\nn04040759 radiator\nn04019541 puck, hockey puck\nn03492542 hard disc, hard disk, fixed disk\nn04355933 sunglass\nn03929660 pick, plectrum, plectron\nn02965783 car mirror\nn04258138 solar dish, solar collector, solar furnace\nn04074963 remote control, remote\nn03208938 disk brake, disc brake\nn02910353 buckle\nn03476684 hair slide\nn03627232 knot\nn03075370 combination lock\nn03874599 padlock\nn03804744 nail\nn04127249 safety pin\nn04153751 screw\nn03803284 muzzle\nn04162706 seat belt, seatbelt\nn04228054 ski\nn02948072 candle, taper, wax light\nn03590841 jack-o'-lantern\nn04286575 spotlight, spot\nn04456115 torch\nn03814639 neck brace\nn03933933 pier\nn04485082 tripod\nn03733131 maypole\nn03794056 mousetrap\nn04275548 spider web, spider's web\nn01768244 trilobite\nn01770081 harvestman, daddy longlegs, Phalangium opilio\nn01770393 scorpion\nn01773157 black and gold garden spider, Argiope aurantia\nn01773549 barn spider, Araneus cavaticus\nn01773797 garden spider, Aranea diademata\nn01774384 black widow, Latrodectus mactans\nn01774750 tarantula\nn01775062 wolf spider, hunting spider\nn01776313 tick\nn01784675 centipede\nn01990800 isopod\nn01978287 Dungeness crab, Cancer magister\nn01978455 rock crab, Cancer irroratus\nn01980166 fiddler crab\nn01981276 king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica\nn01983481 American lobster, Northern lobster, Maine lobster, Homarus americanus\nn01984695 spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish\nn01985128 crayfish, crawfish, crawdad, crawdaddy\nn01986214 hermit crab\nn02165105 tiger beetle\nn02165456 ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle\nn02167151 ground beetle, carabid beetle\nn02168699 long-horned beetle, longicorn, longicorn beetle\nn02169497 leaf beetle, chrysomelid\nn02172182 dung beetle\nn02174001 rhinoceros beetle\nn02177972 weevil\nn02190166 
fly\nn02206856 bee\nn02226429 grasshopper, hopper\nn02229544 cricket\nn02231487 walking stick, walkingstick, stick insect\nn02233338 cockroach, roach\nn02236044 mantis, mantid\nn02256656 cicada, cicala\nn02259212 leafhopper\nn02264363 lacewing, lacewing fly\nn02268443 dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk\nn02268853 damselfly\nn02276258 admiral\nn02277742 ringlet, ringlet butterfly\nn02279972 monarch, monarch butterfly, milkweed butterfly, Danaus plexippus\nn02280649 cabbage butterfly\nn02281406 sulphur butterfly, sulfur butterfly\nn02281787 lycaenid, lycaenid butterfly\nn01910747 jellyfish\nn01914609 sea anemone, anemone\nn01917289 brain coral\nn01924916 flatworm, platyhelminth\nn01930112 nematode, nematode worm, roundworm\nn01943899 conch\nn01944390 snail\nn01945685 slug\nn01950731 sea slug, nudibranch\nn01955084 chiton, coat-of-mail shell, sea cradle, polyplacophore\nn02319095 sea urchin\nn02321529 sea cucumber, holothurian\nn03584829 iron, smoothing iron\nn03297495 espresso maker\nn03761084 microwave, microwave oven\nn03259280 Dutch oven\nn04111531 rotisserie\nn04442312 toaster\nn04542943 waffle iron\nn04517823 vacuum, vacuum cleaner\nn03207941 dishwasher, dish washer, dishwashing machine\nn04070727 refrigerator, icebox\nn04554684 washer, automatic washer, washing machine\nn03133878 Crock Pot\nn03400231 frying pan, frypan, skillet\nn04596742 wok\nn02939185 caldron, cauldron\nn03063689 coffeepot\nn04398044 teapot\nn04270147 spatula\nn02699494 altar\nn04486054 triumphal arch\nn03899768 patio, terrace\nn04311004 steel arch bridge\nn04366367 suspension bridge\nn04532670 viaduct\nn02793495 barn\nn03457902 greenhouse, nursery, glasshouse\nn03877845 palace\nn03781244 monastery\nn03661043 library\nn02727426 apiary, bee house\nn02859443 boathouse\nn03028079 church, church building\nn03788195 mosque\nn04346328 stupa, tope\nn03956157 planetarium\nn04081281 restaurant, eating house, eating 
place, eatery\nn03032252 cinema, movie theater, movie theatre, movie house, picture palace\nn03529860 home theater, home theatre\nn03697007 lumbermill, sawmill\nn03065424 coil, spiral, volute, whorl, helix\nn03837869 obelisk\nn04458633 totem pole\nn02980441 castle\nn04005630 prison, prison house\nn03461385 grocery store, grocery, food market, market\nn02776631 bakery, bakeshop, bakehouse\nn02791270 barbershop\nn02871525 bookshop, bookstore, bookstall\nn02927161 butcher shop, meat market\nn03089624 confectionery, confectionary, candy store\nn04200800 shoe shop, shoe-shop, shoe store\nn04443257 tobacco shop, tobacconist shop, tobacconist\nn04462240 toyshop\nn03388043 fountain\nn03042490 cliff dwelling\nn04613696 yurt\nn03216828 dock, dockage, docking facility\nn02892201 brass, memorial tablet, plaque\nn03743016 megalith, megalithic structure\nn02788148 bannister, banister, balustrade, balusters, handrail\nn02894605 breakwater, groin, groyne, mole, bulwark, seawall, jetty\nn03160309 dam, dike, dyke\nn03000134 chainlink fence\nn03930313 picket fence, paling\nn04604644 worm fence, snake fence, snake-rail fence, Virginia fence\nn04326547 stone wall\nn03459775 grille, radiator grille\nn04239074 sliding door\nn04501370 turnstile\nn03792972 mountain tent\nn04149813 scoreboard\nn03530642 honeycomb\nn03961711 plate rack\nn03903868 pedestal, plinth, footstall\nn02814860 beacon, lighthouse, beacon light, pharos\nn07711569 mashed potato\nn07720875 bell pepper\nn07714571 head cabbage\nn07714990 broccoli\nn07715103 cauliflower\nn07716358 zucchini, courgette\nn07716906 spaghetti squash\nn07717410 acorn squash\nn07717556 butternut squash\nn07718472 cucumber, cuke\nn07718747 artichoke, globe artichoke\nn07730033 cardoon\nn07734744 mushroom\nn04209239 shower curtain\nn03594734 jean, blue jean, denim\nn02971356 carton\nn03485794 handkerchief, hankie, hanky, hankey\nn04133789 sandal\nn02747177 ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, 
trash bin\nn04125021 safe\nn07579787 plate\nn03814906 necklace\nn03134739 croquet ball\nn03404251 fur coat\nn04423845 thimble\nn03877472 pajama, pyjama, pj's, jammies\nn04120489 running shoe\nn03062245 cocktail shaker\nn03014705 chest\nn03717622 manhole cover\nn03777754 modem\nn04493381 tub, vat\nn04476259 tray\nn02777292 balance beam, beam\nn07693725 bagel, beigel\nn03998194 prayer rug, prayer mat\nn03617480 kimono\nn07590611 hot pot, hotpot\nn04579145 whiskey jug\nn03623198 knee pad\nn07248320 book jacket, dust cover, dust jacket, dust wrapper\nn04277352 spindle\nn04229816 ski mask\nn02823428 beer bottle\nn03127747 crash helmet\nn02877765 bottlecap\nn04435653 tile roof\nn03724870 mask\nn03710637 maillot\nn03920288 Petri dish\nn03379051 football helmet\nn02807133 bathing cap, swimming cap\nn04399382 teddy, teddy bear\nn03527444 holster\nn03983396 pop bottle, soda bottle\nn03924679 photocopier\nn04532106 vestment\nn06785654 crossword puzzle, crossword\nn03445777 golf ball\nn07613480 trifle\nn04350905 suit, suit of clothes\nn04562935 water tower\nn03325584 feather boa, boa\nn03045698 cloak\nn07892512 red wine\nn03250847 drumstick\nn04192698 shield, buckler\nn03026506 Christmas stocking\nn03534580 hoopskirt, crinoline\nn07565083 menu\nn04296562 stage\nn02869837 bonnet, poke bonnet\nn07871810 meat loaf, meatloaf\nn02799071 baseball\nn03314780 face powder\nn04141327 scabbard\nn04357314 sunscreen, sunblock, sun blocker\nn02823750 beer glass\nn13052670 hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa\nn07583066 guacamole\nn03637318 lampshade, lamp shade\nn04599235 wool, woolen, woollen\nn07802026 hay\nn02883205 bow tie, bow-tie, bowtie\nn03709823 mailbag, postbag\nn04560804 water jug\nn02909870 bucket, pail\nn03207743 dishrag, dishcloth\nn04263257 soup bowl\nn07932039 eggnog\nn03786901 mortar\nn04479046 trench coat\nn03873416 paddle, boat paddle\nn02999410 chain\nn04367480 swab, swob, mop\nn03775546 mixing bowl\nn07875152 potpie\nn04591713 wine 
bottle\nn04201297 shoji\nn02916936 bulletproof vest\nn03240683 drilling platform, offshore rig\nn02840245 binder, ring-binder\nn02963159 cardigan\nn04370456 sweatshirt\nn03991062 pot, flowerpot\nn02843684 birdhouse\nn03482405 hamper\nn03942813 ping-pong ball\nn03908618 pencil box, pencil case\nn03902125 pay-phone, pay-station\nn07584110 consomme\nn02730930 apron\nn04023962 punching bag, punch bag, punching ball, punchball\nn02769748 backpack, back pack, knapsack, packsack, rucksack, haversack\nn10148035 groom, bridegroom\nn02817516 bearskin, busby, shako\nn03908714 pencil sharpener\nn02906734 broom\nn03788365 mosquito net\nn02667093 abaya\nn03787032 mortarboard\nn03980874 poncho\nn03141823 crutch\nn03976467 Polaroid camera, Polaroid Land camera\nn04264628 space bar\nn07930864 cup\nn04039381 racket, racquet\nn06874185 traffic light, traffic signal, stoplight\nn04033901 quill, quill pen\nn04041544 radio, wireless\nn07860988 dough\nn03146219 cuirass\nn03763968 military uniform\nn03676483 lipstick, lip rouge\nn04209133 shower cap\nn03782006 monitor\nn03857828 oscilloscope, scope, cathode-ray oscilloscope, CRO\nn03775071 mitten\nn02892767 brassiere, bra, bandeau\nn07684084 French loaf\nn04522168 vase\nn03764736 milk can\nn04118538 rugby ball\nn03887697 paper towel\nn13044778 earthstar\nn03291819 envelope\nn03770439 miniskirt, mini\nn03124170 cowboy hat, ten-gallon hat\nn04487081 trolleybus, trolley coach, trackless trolley\nn03916031 perfume, essence\nn02808440 bathtub, bathing tub, bath, tub\nn07697537 hotdog, hot dog, red hot\nn12985857 coral fungus\nn02917067 bullet train, bullet\nn03938244 pillow\nn15075141 toilet tissue, toilet paper, bathroom tissue\nn02978881 cassette\nn02966687 carpenter's kit, tool kit\nn03633091 ladle\nn13040303 stinkhorn, carrion fungus\nn03690938 lotion\nn03476991 hair spray\nn02669723 academic gown, academic robe, judge's robe\nn03220513 dome\nn03127925 crate\nn04584207 wig\nn07880968 burrito\nn03937543 pill bottle\nn03000247 chain mail, 
ring mail, mail, chain armor, chain armour, ring armor, ring armour\nn04418357 theater curtain, theatre curtain\nn04590129 window shade\nn02795169 barrel, cask\nn04553703 washbasin, handbasin, washbowl, lavabo, wash-hand basin\nn02783161 ballpoint, ballpoint pen, ballpen, Biro\nn02802426 basketball\nn02808304 bath towel\nn03124043 cowboy boot\nn03450230 gown\nn04589890 window screen\nn12998815 agaric\nn02992529 cellular telephone, cellular phone, cellphone, cell, mobile phone\nn03825788 nipple\nn02790996 barbell\nn03710193 mailbox, letter box\nn03630383 lab coat, laboratory coat\nn03347037 fire screen, fireguard\nn03769881 minibus\nn03871628 packet\nn03733281 maze, labyrinth\nn03976657 pole\nn03535780 horizontal bar, high bar\nn04259630 sombrero\nn03929855 pickelhaube\nn04049303 rain barrel\nn04548362 wallet, billfold, notecase, pocketbook\nn02979186 cassette player\nn06596364 comic book\nn03935335 piggy bank, penny bank\nn06794110 street sign\nn02825657 bell cote, bell cot\nn03388183 fountain pen\nn04591157 Windsor tie\nn04540053 volleyball\nn03866082 overskirt\nn04136333 sarong\nn04026417 purse\nn02865351 bolo tie, bolo, bola tie, bola\nn02834397 bib\nn03888257 parachute, chute\nn04235860 sleeping bag\nn04404412 television, television system\nn04371430 swimming trunks, bathing trunks\nn03733805 measuring cup\nn07920052 espresso\nn07873807 pizza, pizza pie\nn02895154 breastplate, aegis, egis\nn04204238 shopping basket\nn04597913 wooden spoon\nn04131690 saltshaker, salt shaker\nn07836838 chocolate sauce, chocolate syrup\nn09835506 ballplayer, baseball player\nn03443371 goblet\nn13037406 gyromitra\nn04336792 stretcher\nn04557648 water bottle\nn03187595 dial telephone, dial phone\nn04254120 soap dispenser\nn03595614 jersey, T-shirt, tee shirt\nn04146614 school bus\nn03598930 jigsaw puzzle\nn03958227 plastic bag\nn04069434 reflex camera\nn03188531 diaper, nappy, napkin\nn02786058 Band Aid\nn07615774 ice lolly, lolly, lollipop, popsicle\nn04525038 velvet\nn04409515 
tennis ball\nn03424325 gasmask, respirator, gas helmet\nn03223299 doormat, welcome mat\nn03680355 Loafer\nn07614500 ice cream, icecream\nn07695742 pretzel\nn04033995 quilt, comforter, comfort, puff\nn03710721 maillot, tank suit\nn04392985 tape player\nn03047690 clog, geta, patten, sabot\nn03584254 iPod\nn13054560 bolete\nn10565667 scuba diver\nn03950228 pitcher, ewer\nn03729826 matchstick\nn02837789 bikini, two-piece\nn04254777 sock\nn02988304 CD player\nn03657121 lens cap, lens cover\nn04417672 thatch, thatched roof\nn04523525 vault\nn02815834 beaker\nn09229709 bubble\nn07697313 cheeseburger\nn03888605 parallel bars, bars\nn03355925 flagpole, flagstaff\nn03063599 coffee mug\nn04116512 rubber eraser, rubber, pencil eraser\nn04325704 stole\nn07831146 carbonara\nn03255030 dumbbell\nn00001740 entity\nn00001930 physical entity\nn00020827 matter\nn00020090 substance\nn00021265 food, nutrient\nn07566340 foodstuff, food product\nn07566863 starches\nn07710616 potato, white potato, Irish potato, murphy, spud, tater\nn07679356 bread, breadstuff, staff of life\nn07683786 loaf of bread, loaf\nn07681926 cracker\nn07680932 bun, roll\nn07809096 ingredient, fixings\nn07809368 flavorer, flavourer, flavoring, flavouring, seasoner, seasoning\nn07810907 condiment\nn07582609 dip\nn07829412 sauce\nn07838233 spaghetti sauce, pasta sauce\nn07882497 concoction, mixture, intermixture\nn07560652 fare\nn07570720 nutriment, nourishment, nutrition, sustenance, aliment, alimentation, victuals\nn07557434 dish\nn07588947 stew\nn07583197 soup\nn07712382 snack food\nn07695965 sandwich\nn07697100 hamburger, beefburger, burger\nn07556970 course\nn07579575 entree, main course\nn07609840 dessert, sweet, afters\nn07611358 frozen dessert\nn07612996 pudding, pud\nn07800091 feed, provender\nn07800740 fodder\nn07881800 beverage, drink, drinkable, potable\nn07929519 coffee, java\nn07884567 alcohol, alcoholic drink, alcoholic beverage, intoxicant, inebriant\nn07891726 wine, vino\nn07911371 mixed 
drink\nn07930554 punch\nn14778436 agent\nn03247620 drug\nn03248958 drug of abuse, street drug\nn03740161 medicine, medication, medicament, medicinal drug\nn03994008 powder\nn00019613 substance\nn14580897 material, stuff\nn14974264 paper\nn15074962 tissue, tissue paper\nn14939900 fluid\nn14940386 liquid\nn15046900 solid\nn07555863 food, solid food\nn07622061 baked goods\nn07705711 produce, green goods, green groceries, garden truck\nn07705931 edible fruit\nn07739125 apple\nn07739506 eating apple, dessert apple\nn07742704 berry\nn07747055 citrus, citrus fruit, citrous fruit\nn07707451 vegetable, veggie, veg\nn07710007 solanaceous vegetable\nn07720442 pepper\nn07720615 sweet pepper\nn07710283 root vegetable\nn07713395 cruciferous vegetable\nn07713895 cabbage, chou\nn07715561 squash\nn07715721 summer squash\nn07717070 winter squash\nn00007347 causal agent, cause, causal agency\nn00007846 person, individual, someone, somebody, mortal, soul\nn09613191 contestant\nn10439851 player, participant\nn09820263 athlete, jock\nn10072708 explorer, adventurer\nn10019552 diver, frogman, underwater diver\nn09626238 peer, equal, match, compeer\nn09816771 associate\nn10401829 participant\nn00002684 object, physical object\nn09287968 geological formation, formation\nn09366017 natural depression, depression\nn09366317 natural elevation, elevation\nn09359803 mountain, mount\nn09409512 ridge\nn09214060 bar\nn09406793 reef\nn09433442 shore\nn09443453 spring, fountain, outflow, outpouring, natural spring\nn00027167 location\nn08620061 point\nn08578706 geographic point, geographical point\nn04602044 workplace, work\nn00003553 whole, unit\nn00019128 natural object\nn09349797 mechanism\nn09214581 barrier\nn13086908 plant part, plant structure\nn13087625 plant organ\nn11675842 reproductive structure\nn13134947 fruit\nn13135832 seed\nn11689483 oilseed, oil-rich seed\nn12156819 grain, caryopsis\nn12157056 kernel\nn13138842 pome, false fruit\nn00004258 living thing, animate thing\nn00004475 
organism, being\nn00017222 plant, flora, plant life\nn13083586 vascular plant, tracheophyte\nn11552386 spermatophyte, phanerogam, seed plant\nn11665372 angiosperm, flowering plant\nn11669921 flower\nn12041446 orchid, orchidaceous plant\nn12056217 lady's slipper, lady-slipper, ladies' slipper, slipper orchid\nn12992868 fungus\nn12997654 basidiomycete, basidiomycetous fungi\nn00015388 animal, animate being, beast, brute, creature, fauna\nn01905661 invertebrate\nn01767661 arthropod\nn01769347 arachnid, arachnoid\nn01772222 spider\nn01776192 acarine\nn01974773 crustacean\nn01975687 malacostracan crustacean\nn01976146 decapod crustacean, decapod\nn01976957 crab\nn01982650 lobster\nn01983048 true lobster\nn02159955 insect\nn02164464 beetle\nn02171453 lamellicorn beetle\nn02171869 scarabaeid beetle, scarabaeid, scarabaean\nn02188699 dipterous insect, two-winged insects, dipteran, dipteron\nn02206270 hymenopterous insect, hymenopteran, hymenopteron, hymenopter\nn02226183 orthopterous insect, orthopteron, orthopteran\nn02231052 phasmid, phasmid insect\nn02232951 dictyopterous insect\nn02246011 homopterous insect, homopteran\nn02263378 neuropteron, neuropteran, neuropterous insect\nn02268148 odonate\nn02274024 lepidopterous insect, lepidopteron, lepidopteran\nn02274259 butterfly\nn02274822 nymphalid, nymphalid butterfly, brush-footed butterfly, four-footed butterfly\nn02279637 danaid, danaid butterfly\nn02280458 pierid, pierid butterfly\nn01909422 coelenterate, cnidarian\nn01914163 anthozoan, actinozoan\nn01915811 coral\nn01916925 stony coral, madrepore, madriporian coral\nn01922303 worm\nn01940736 mollusk, mollusc, shellfish\nn01942177 gastropod, univalve\nn01968315 cephalopod, cephalopod mollusk\nn02316707 echinoderm\nn01317541 domestic animal, domesticated animal\nn02121808 domestic cat, house cat, Felis domesticus, Felis catus\nn02084071 dog, domestic dog, Canis familiaris\nn02087122 hunting dog\nn02098550 sporting dog, gun dog\nn02099997 pointer, Spanish 
pointer\nn02100399 setter\nn02101108 spaniel\nn02102605 water spaniel\nn02101861 springer spaniel, springer\nn02099029 retriever\nn02092468 terrier\nn02095050 fox terrier\nn02095412 wirehair, wirehaired terrier, wire-haired terrier\nn02095727 Welsh terrier\nn02096756 schnauzer\nn02093056 bullterrier, bull terrier\nn02087551 hound, hound dog\nn02090475 wolfhound\nn02088839 coonhound\nn02089555 foxhound\nn02090827 greyhound\nn02112826 corgi, Welsh corgi\nn02113335 poodle, poodle dog\nn02112497 griffon, Brussels griffon, Belgian griffon\nn02103406 working dog\nn02104523 shepherd dog, sheepdog, sheep dog\nn02104882 Belgian sheepdog, Belgian shepherd\nn02103841 watchdog, guard dog\nn02106966 pinscher\nn02109811 sled dog, sledge dog\nn02107420 Sennenhunde\nn02108254 mastiff\nn02108672 bulldog, English bulldog\nn02111626 spitz\nn02085374 toy dog, toy\nn02086346 toy spaniel\nn02086478 English toy spaniel\nn02152991 game\nn02153203 game bird\nn01795088 grouse\nn01802721 phasianid\nn01803078 pheasant\nn01805801 peafowl, bird of Juno\nn02384858 racer\nn01466257 chordate\nn01471682 vertebrate, craniate\nn01861778 mammal, mammalian\nn01886756 placental, placental mammal, eutherian, eutherian mammal\nn02323449 lagomorph, gnawing mammal\nn02323902 leporid, leporid mammal\nn02324045 rabbit, coney, cony\nn02469914 primate\nn02496913 lemur\nn02470325 ape\nn02470899 anthropoid ape\nn02480153 great ape, pongid\nn02483092 lesser ape\nn02484322 monkey\nn02489589 New World monkey, platyrrhine, platyrrhinian\nn02484473 Old World monkey, catarrhine\nn02503127 proboscidean, proboscidian\nn02503517 elephant\nn02075296 carnivore\nn02131653 bear\nn02441326 musteline mammal, mustelid, musteline\nn02507649 procyonid\nn02134971 viverrine, viverrine mammal\nn02083346 canine, canid\nn02115335 wild dog\nn02118333 fox\nn02114100 wolf\nn02120997 feline, felid\nn02121620 cat, true cat\nn02124623 wildcat\nn02127808 big cat, cat\nn02062017 aquatic mammal\nn02073250 sea cow, sirenian mammal, 
sirenian\nn02075927 pinniped mammal, pinniped, pinnatiped\nn02076196 seal\nn02076779 eared seal\nn02062430 cetacean, cetacean mammal, blower\nn02062744 whale\nn02066707 toothed whale\nn02068974 dolphin\nn02063224 baleen whale, whalebone whale\nn02370806 ungulate, hoofed mammal\nn02373336 odd-toed ungulate, perissodactyl, perissodactyl mammal\nn02374149 equine, equid\nn02374451 horse, Equus caballus\nn02394477 even-toed ungulate, artiodactyl, artiodactyl mammal\nn02437136 camel\nn02399000 ruminant\nn02401031 bovid\nn02411705 sheep\nn02419796 antelope\nn02414578 wild sheep\nn02415435 mountain sheep\nn02416519 goat, caprine animal\nn02417534 wild goat\nn02407959 Old World buffalo, buffalo\nn02402010 bovine\nn02402425 cattle, cows, kine, oxen, Bos taurus\nn02395003 swine\nn02453611 edentate\nn02456962 sloth, tree sloth\nn02453108 pachyderm\nn02329401 rodent, gnawer\nn02364520 cavy\nn02355227 squirrel\nn02355477 tree squirrel\nn01871543 prototherian\nn01871875 monotreme, egg-laying mammal\nn01873982 metatherian\nn01874434 marsupial, pouched mammal\nn01881171 phalanger, opossum, possum\nn01877134 kangaroo\nn01503061 bird\nn01517565 ratite, ratite bird, flightless bird\nn01524359 passerine, passeriform bird\nn01525720 oscine, oscine bird\nn01529672 finch\nn01537134 bunting\nn01557185 thrush\nn01560105 nightingale, Luscinia megarhynchos\nn01578575 corvine bird\nn01591697 titmouse, tit\nn01604330 bird of prey, raptor, raptorial bird\nn01605630 hawk\nn01613294 eagle, bird of Jove\nn01621127 owl, bird of Minerva, bird of night, hooter\nn01789386 gallinaceous bird, gallinacean\nn01816887 parrot\nn01819115 cockatoo\nn01820348 lory\nn01822602 cuculiform bird\nn01823013 cuckoo\nn01825930 coraciiform bird\nn01831712 apodiform bird\nn01838038 piciform bird\nn01844917 aquatic bird\nn01845132 waterfowl, water bird, waterbird\nn01845477 anseriform bird\nn01846331 duck\nn01852861 sea duck\nn01854415 merganser, fish duck, sawbill, sheldrake\nn01858441 swan\nn02000954 wading bird, 
wader\nn02002075 stork\nn02008041 heron\nn02008796 egret\nn02014941 rail\nn02018027 coot\nn02022684 shorebird, shore bird, limicoline bird\nn02023341 plover\nn02025043 turnstone\nn02026059 sandpiper\nn02031934 snipe\nn02016358 gallinule, marsh hen, water hen, swamphen\nn02016956 purple gallinule\nn02021795 seabird, sea bird, seafowl\nn02051474 pelecaniform seabird\nn02055658 sphenisciform seabird\nn02055803 penguin\nn02057731 pelagic bird, oceanic bird\nn01661091 reptile, reptilian\nn01661592 anapsid, anapsid reptile\nn01662622 chelonian, chelonian reptile\nn01662784 turtle\nn01663401 sea turtle, marine turtle\nn01661818 diapsid, diapsid reptile\nn01674216 saurian\nn01674464 lizard\nn01674990 gecko\nn01676755 iguanid, iguanid lizard\nn01685439 teiid lizard, teiid\nn01687665 agamid, agamid lizard\nn01689411 anguid lizard\nn01691951 venomous lizard\nn01692864 lacertid lizard, lacertid\nn01693783 chameleon, chamaeleon\nn01694709 monitor, monitor lizard, varan\nn01695681 archosaur, archosaurian, archosaurian reptile\nn01699831 dinosaur\nn01700470 ornithischian, ornithischian dinosaur\nn01703569 ceratopsian, horned dinosaur\nn01696633 crocodilian reptile, crocodilian\nn01698434 alligator, gator\nn01697178 crocodile\nn01726692 snake, serpent, ophidian\nn01727646 colubrid snake, colubrid\nn01741562 constrictor\nn01741943 boa\nn01743605 python\nn01745125 elapid, elapid snake\nn01747885 cobra\nn01749582 mamba\nn01749742 black mamba, Dendroaspis augusticeps\nn01752165 viper\nn01753959 pit viper\nn01754876 rattlesnake, rattler\nn01627424 amphibian\nn01639765 frog, toad, toad frog, anuran, batrachian, salientian\nn01640846 true frog, ranid\nn01629276 salamander\nn01630284 newt, triton\nn01632047 ambystomid, ambystomid salamander\nn01473806 aquatic vertebrate\nn02512053 fish\nn01480516 cartilaginous fish, chondrichthian\nn01482071 elasmobranch, selachian\nn01482330 shark\nn01483522 mackerel shark\nn01488918 requiem shark\nn01495701 ray\nn02512938 food fish\nn02534734 
salmon\nn02514825 bony fish\nn02528163 teleost fish, teleost, teleostan\nn01428580 soft-finned fish, malacopterygian\nn01438208 cypriniform fish\nn01439121 cyprinid, cyprinid fish\nn02534559 salmonid\nn02552171 spiny-finned fish, acanthopterygian\nn02554730 percoid fish, percoid, percoidean\nn02605316 butterfly fish\nn02606384 damselfish, demoiselle\nn02642107 scorpaenoid, scorpaenoid fish\nn02642644 scorpaenid, scorpaenid fish\nn02652668 plectognath, plectognath fish\nn02638596 ganoid, ganoid fish\nn00021939 artifact, artefact\nn03575240 instrumentality, instrumentation\nn03183080 device\nn03800933 musical instrument, instrument\nn03614532 keyboard instrument\nn03928116 piano, pianoforte, forte-piano\nn03915437 percussion instrument, percussive instrument\nn04338517 stringed instrument\nn02880546 bowed stringed instrument, string\nn03025886 chordophone\nn03467517 guitar\nn04586932 wind instrument, wind\nn02891788 brass, brass instrument\nn03393324 free-reed instrument\nn03945615 pipe\nn04598582 woodwind, woodwind instrument, wood\nn02817799 beating-reed instrument, reed instrument, reed\nn03228016 double-reed instrument, double reed\nn04222847 single-reed instrument, single-reed woodwind\nn02676261 acoustic device\nn02688443 airfoil, aerofoil, control surface, surface\nn02730265 applicator, applier\nn02855089 blower\nn02895606 breathing device, breathing apparatus, breathing machine, ventilator\nn03269401 electrical device\nn04470953 transducer\nn03274561 electro-acoustic transducer\nn03277771 electronic device\nn03211117 display, video display\nn03320046 fan\nn03339643 filter\nn03508101 heater, warmer\nn03574816 instrument\nn03575691 instrument of execution\nn03733925 measuring instrument, measuring system, measuring device\nn03735637 measuring stick, measure, measuring rod\nn03753077 meter\nn04437953 timepiece, timekeeper, horologe\nn04555897 watch, ticker\nn03046257 clock\nn04134632 sandglass\nn04438304 timer\nn03739693 medical instrument\nn03813176 
navigational instrument\nn03080497 compass\nn03852280 optical instrument\nn04272054 spectacles, specs, eyeglasses, glasses\nn04147495 scientific instrument\nn03709206 magnifier\nn03760671 microscope\nn03667829 light microscope\nn03484931 hand glass, simple microscope, magnifying glass\nn04403638 telescope, scope\nn02751295 astronomical telescope\nn04565375 weapon, arm, weapon system\nn03467984 gun\nn03343853 firearm, piece, small-arm\nn02759963 autoloader, self-loader\nn02760429 automatic firearm, automatic gun, automatic weapon\nn02760855 automatic rifle, automatic, machine rifle\nn03701391 machine gun\nn03948459 pistol, handgun, side arm, shooting iron\nn03614007 keyboard\nn03664675 lifting device\nn03699975 machine\nn03997484 power tool\nn03996145 power saw, saw, sawing machine\nn02938886 calculator, calculating machine\nn03082979 computer, computing machine, computing device, data processor, electronic computer, information processing system\nn02708224 analog computer, analogue computer\nn03196324 digital computer\nn03918480 personal computer, PC, microcomputer\nn03985232 portable computer\nn03322940 farm machine\nn04004475 printer, printing machine\nn04243941 slot machine, coin machine\nn04417180 textile machine\nn03738472 mechanism\nn03096960 control, controller\nn03736970 mechanical device\nn03700963 machine, simple machine\nn04574999 wheel\nn04021798 pump\nn04088797 ride\nn04586421 winder\nn04110955 rotating mechanism\nn03032811 circle, round\nn03208556 disk, disc\nn03744840 memory device, storage device\nn03706653 magnetic disk, magnetic disc, disk, disc\nn03851341 optical device\nn03656484 lens, lense, lens system\nn03099771 converging lens, convex lens\nn04069276 reflector\nn03773035 mirror\nn04081844 restraint, constraint\nn02889425 brake\nn02891188 brake system, brakes\nn03551084 hydraulic brake, hydraulic brakes\nn03323703 fastener, fastening, holdfast, fixing\nn03043958 clip\nn03682487 lock\nn03940256 pin\nn04125853 safety belt, life belt, safety 
harness\nn04120093 runner\nn04217718 signaling device\nn04263760 source of illumination\nn03636248 lamp\nn03640988 lantern\nn03665366 light, light source\nn04336034 strengthener, reinforcement\nn02887209 brace\nn04359589 support\nn04038440 rack, stand\nn04341414 structural member\nn04515129 upright, vertical\nn03988170 post\nn04474466 trap\nn04568557 web, entanglement\nn03563967 implement\nn04451818 tool\nn03154446 cutting implement\nn03154073 cutter, cutlery, cutting tool\nn03265032 edge tool\nn02764044 ax, axe\nn03623556 knife\nn03239726 drill\nn03418242 garden tool, lawn tool\nn03489162 hand tool\nn03848348 opener\nn02877962 bottle opener\nn04516672 utensil\nn03621049 kitchen utensil\nn03101986 cooking utensil, cookware\nn03101156 cooker\nn03880531 pan, cooking pan\nn03990474 pot\nn04500060 turner, food turner\nn03039947 cleaning implement, cleaning device, cleaning equipment\nn02908217 brush\nn03294833 eraser\nn04185071 sharpener\nn03837422 oar\nn04100174 rod\nn04608567 writing implement\nn03906997 pen\nn04317420 stick\nn04296261 staff\nn02788689 bar\nn03659292 lever\nn03613592 key\nn04285622 sports implement\nn03094503 container\nn04576211 wheeled vehicle\nn02834778 bicycle, bike, wheel, cycle\nn02959942 car, railcar, railway car, railroad car\nn03484083 handcart, pushcart, cart, go-cart\nn04170037 self-propelled vehicle\nn02740533 armored vehicle, armoured vehicle\nn03684823 locomotive, engine, locomotive engine, railway locomotive\nn03791235 motor vehicle, automotive vehicle\nn02958343 car, auto, automobile, machine, motorcar\nn03790512 motorcycle, bike\nn03769722 minibike, motorbike\nn04490091 truck, motortruck\nn04520170 van\nn03896419 passenger van\nn04464852 tracked vehicle\nn04467099 trailer, house trailer\nn04543158 wagon, waggon\nn02970849 cart\nn02801938 basket, handbasket\nn02773037 bag\nn04284002 spoon\nn04139859 savings bank, coin bank, money box, bank\nn03206908 dish\nn02880940 bowl\nn02839910 bin\nn04183329 shaker\nn04531098 vessel\nn02801525 
basin\nn03593526 jar\nn02876657 bottle\nn03603722 jug\nn04388743 tank, storage tank\nn04078574 reservoir\nn03035510 cistern\nn03241496 drinking vessel\nn03797390 mug\nn02946921 can, tin, tin can\nn03438257 glass, drinking glass\nn04060904 receptacle\nn03210683 dispenser\nn03871083 package, parcel\nn02883344 box\nn04340750 strongbox, deedbox\nn03733644 measure\nn02974697 case\nn03294048 equipment\nn04285146 sports equipment\nn03446832 golf equipment\nn02799897 baseball equipment\nn03472232 gymnastic apparatus, exerciser\nn02802721 basketball equipment\nn03134853 croquet equipment\nn04571292 weight, free weight, exercising weight\nn03414162 game equipment\nn03413828 game\nn04028315 puzzle\nn02778669 ball\nn03926148 photographic equipment\nn02942699 camera, photographic camera\nn03430959 gear, paraphernalia, appurtenance\nn03619396 kit, outfit\nn04091097 rig\nn03241093 drill rig, drilling rig, oilrig, oil rig\nn04137444 satellite, artificial satellite, orbiter\nn04264914 spacecraft, ballistic capsule, space vehicle\nn03278248 electronic equipment\nn02757462 audio system, sound system\nn04077430 reproducer\nn04315948 stereo, stereophony, stereo system, stereophonic system\nn03916720 peripheral, computer peripheral, peripheral device\nn03163973 data input device, input device\nn04401088 telephone, phone, telephone set\nn04044498 radiotelephone, radiophone, wireless telephone\nn02727825 apparatus, setup\nn03257586 duplicator, copier\nn04077734 rescue equipment\nn04447443 toiletry, toilet articles\nn03128519 cream, ointment, emollient\nn03113152 cosmetic\nn03714235 makeup, make-up, war paint\nn03100490 conveyance, transport\nn04524313 vehicle\nn03125870 craft\nn02686568 aircraft\nn03510583 heavier-than-air craft\nn02691156 airplane, aeroplane, plane\nn03666917 lighter-than-air craft\nn04530566 vessel, watercraft\nn02858304 boat\nn03790230 motorboat, powerboat\nn04158807 sea boat\nn04244997 small boat\nn04128837 sailing vessel, sailing ship\nn04128499 sailboat, sailing 
boat\nn04194289 ship\nn02965300 cargo ship, cargo vessel\nn03896103 passenger ship\nn04552696 warship, war vessel, combat ship\nn04348184 submersible, submersible warship\nn03764276 military vehicle\nn04099429 rocket, projectile\nn04235291 sled, sledge, sleigh\nn03678362 litter\nn04019101 public transport\nn04468005 train, railroad train\nn03896233 passenger train\nn02924116 bus, autobus, coach, charabanc, double-decker, jitney, motorbus, motorcoach, omnibus, passenger vehicle\nn03091374 connection, connexion, connector, connecter, connective\nn02755352 attachment, bond\nn03664943 ligament\nn03405265 furnishing\nn03405725 furniture, piece of furniture, article of furniture\nn02766320 baby bed, baby's bed\nn02821943 bedroom furniture\nn02818832 bed\nn02933112 cabinet\nn03015254 chest of drawers, chest, bureau, dresser\nn03636649 lamp\nn03842156 office furniture\nn04161981 seat\nn02828884 bench\nn03001627 chair\nn03002210 chair of state\nn04256520 sofa, couch, lounge\nn03100346 convertible, sofa bed\nn04379243 table\nn04379964 table\nn04549122 wall unit\nn04118021 rug, carpet, carpeting\nn03151077 curtain, drape, drapery, mantle, pall\nn06254669 medium\nn06263609 print media\nn06263369 press, public press\nn06595351 magazine, mag\nn04377057 system\nn03078287 communication system\nn04400289 telecommunication system, telecom system, telecommunication equipment, telecom equipment\nn04341686 structure, construction\nn02733524 arch\nn02735688 area\nn02898711 bridge, span\nn02913152 building, edifice\nn03322570 farm building\nn03544360 house\nn04079244 residence\nn04073948 religious residence, cloister\nn03859280 outbuilding\nn04187547 shed\nn03953416 place of worship, house of prayer, house of God, house of worship\nn04210390 shrine\nn04417809 theater, theatre, house\nn02914991 building complex, complex\nn03956922 plant, works, industrial plant\nn03316406 factory, mill, manufacturing plant, manufactory\nn03074380 column, pillar\nn03171356 defensive structure, defense, 
defence\nn03385557 fortification, munition\nn03297735 establishment\nn03574555 institution\nn03907654 penal institution, penal facility\nn03111690 correctional institution\nn03953020 place of business, business establishment\nn03748162 mercantile establishment, retail store, sales outlet, outlet\nn03722288 marketplace, market place, mart, market\nn04202417 shop, store\nn03546340 housing, lodging, living accommodations\nn03259505 dwelling, home, domicile, abode, habitation, dwelling house\nn03638321 landing, landing place\nn03743902 memorial, monument\nn03839993 obstruction, obstructor, obstructer, impediment, impedimenta\nn02796623 barrier\nn03327234 fence, fencing\nn04046974 rail fence\nn03454707 grate, grating\nn03795580 movable barrier\nn03221720 door\nn03427296 gate\nn04191595 shelter\nn04411264 tent, collapsible shelter\nn04217882 signboard, sign\nn04361095 supporting structure\nn03391770 framework\nn04038727 rack\nn04360501 support\nn04460130 tower\nn03129123 creation\nn04007894 product, production\nn04599396 work, piece of work\nn06589574 publication\nn04188643 sheet, flat solid\nn02856463 board\nn03959936 plate\nn03309808 fabric, cloth, material, textile\nn03932670 piece of cloth, piece of material\nn04459362 towel\nn03122748 covering\nn04605726 wrapping, wrap, wrapper\nn03590306 jacket\nn04151940 screen, cover, covert, concealment\nn03380867 footwear, footgear\nn04199027 shoe\nn02872752 boot\nn03050026 cloth covering\nn03237639 dressing, medical dressing\nn02785648 bandage, patch\nn02680110 adhesive bandage\nn02820210 bedclothes, bed clothing, bedding\nn03366823 floor cover, floor covering\nn03727837 mat\nn03051540 clothing, article of clothing, vesture, wear, wearable, habiliment\nn02756098 attire, garb, dress\nn03206718 disguise\nn03476083 hairpiece, false hair, postiche\nn04015204 protective garment\nn02671780 accessory, accoutrement, accouterment\nn02827606 belt\nn03859495 outerwear, overclothes\nn03450516 gown, robe\nn03419014 garment\nn04531873 vest, 
waistcoat\nn04371563 swimsuit, swimwear, bathing suit, swimming costume, bathing costume\nn04489008 trouser, pant\nn04143897 scarf\nn03816005 neckwear\nn03815615 necktie, tie\nn04230808 skirt\nn03863923 overgarment, outer garment\nn03045337 cloak\nn03057021 coat\nn04049405 raincoat, waterproof\nn04370048 sweater, jumper\nn04021028 pullover, slipover\nn04097866 robe\nn04197391 shirt\nn04508163 undergarment, unmentionable\nn03490324 handwear, hand wear\nn03441112 glove\nn03502509 headdress, headgear\nn03513137 helmet\nn02954340 cap\nn03497657 hat, chapeau, lid\nn03381126 footwear\nn03540267 hosiery, hose\nn04434932 tights, leotards\nn04323819 stocking\nn04509592 uniform\nn03825080 nightwear, sleepwear, nightclothes\nn02728440 apparel, wearing apparel, dress, clothes\nn04603872 workwear\nn04596852 woman's clothing\nn03236735 dress, frock\nn04014297 protective covering, protective cover, protection\nn04192858 shield\nn03959701 plate, scale, shell\nn02740764 armor plate, armour plate, armor plating, plate armor, plate armour\nn03513376 helmet\nn02955065 cap\nn04105068 roof\nn02851099 blind, screen\nn04589190 window blind\nn04181718 shade\nn04151581 screen\nn02739668 armor, armour\nn02862048 body armor, body armour, suit of armor, suit of armour, coat of mail, cataphract\nn03725035 mask\nn03314378 face mask\nn04187061 sheath\nn04191943 shelter\nn02951843 canopy\nn04453910 top, cover\nn02954938 cap\nn03873064 padding, cushioning\nn03151500 cushion\nn03076708 commodity, trade good, good\nn03093574 consumer goods\nn03257877 durables, durable goods, consumer durables\nn02729837 appliance\nn03251766 dryer, drier\nn03528263 home appliance, household appliance\nn03620052 kitchen appliance\nn03063338 coffee maker\nn03862676 oven\nn04580493 white goods\nn03252064 drygoods, soft goods\nn04580298 white goods, household linen\nn03672352 linen\nn02807260 bath linen\nn03302121 excavation\nn03982060 pool\nn03169390 decoration, ornament, ornamentation\nn02681518 adornment\nn03597469 
jewelry, jewellery\nn03178782 design, pattern, figure\nn03282591 emblem\nn03964744 plaything, toy\nn00022903 article\nn04550840 ware\nn04597804 woodenware\nn04381994 tableware\nn03153375 cutlery, eating utensil\nn03133538 crockery, dishware\nn04362025 surface\nn03536348 horizontal surface, level\nn03961939 platform\nn03892891 part, portion\nn03932203 piece\nn00002137 abstraction, abstract entity\nn00024264 attribute\nn00027807 shape, form\nn13865483 round shape\nn13899200 sphere\nn13899404 ball, globe, orb\nn09289709 globule\nn00033020 communication\nn06793231 sign\nn06791372 signal, signaling, sign\nn06873571 visual signal\nn06874019 light\nn00031921 relation\nn13809207 part, portion, component part, component, constituent\n"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/__init__.py",
    "content": "from .fbresnet import *\nfrom .resnext import *\nfrom .inceptionv4 import *\nfrom .inceptionresnetv2 import *\nfrom .bninception import *\nfrom .torchvision import *\nfrom .nasnet import *"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/bninception.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.utils.model_zoo as model_zoo\nimport os\nimport sys\n\n__all__ = ['BNInception', 'bninception']\n\npretrained_settings = {\n    'bninception': {\n        'imagenet': {\n            # Was ported using python2 (may trigger warning)\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/bn_inception-9f5701afb96c8044.pth',\n            # 'url': 'http://yjxiong.me/others/bn_inception-9f5701afb96c8044.pth',\n            'input_space': 'BGR',\n            'input_size': [3, 224, 224],\n            'input_range': [0, 255],\n            'mean': [104, 117, 128],\n            'std': [1, 1, 1],\n            'num_classes': 1000\n        }\n    }\n}\n\nclass BNInception(nn.Module):\n\n    def __init__(self, num_classes=1000):\n        super(BNInception, self).__init__()\n        inplace = True\n        self.conv1_7x7_s2 = nn.Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3))\n        self.conv1_7x7_s2_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.conv1_relu_7x7 = nn.ReLU (inplace)\n        self.pool1_3x3_s2 = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True)\n        self.conv2_3x3_reduce = nn.Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.conv2_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.conv2_relu_3x3_reduce = nn.ReLU (inplace)\n        self.conv2_3x3 = nn.Conv2d(64, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.conv2_3x3_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.conv2_relu_3x3 = nn.ReLU (inplace)\n        self.pool2_3x3_s2 = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True)\n        self.inception_3a_1x1 = nn.Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3a_1x1_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        
self.inception_3a_relu_1x1 = nn.ReLU (inplace)\n        self.inception_3a_3x3_reduce = nn.Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3a_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3a_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_3a_3x3 = nn.Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_3a_3x3_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3a_relu_3x3 = nn.ReLU (inplace)\n        self.inception_3a_double_3x3_reduce = nn.Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3a_double_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3a_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_3a_double_3x3_1 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_3a_double_3x3_1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3a_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_3a_double_3x3_2 = nn.Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_3a_double_3x3_2_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3a_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_3a_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True)\n        self.inception_3a_pool_proj = nn.Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3a_pool_proj_bn = nn.BatchNorm2d(32, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3a_relu_pool_proj = nn.ReLU (inplace)\n        self.inception_3b_1x1 = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3b_1x1_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3b_relu_1x1 = nn.ReLU (inplace)\n        
self.inception_3b_3x3_reduce = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3b_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3b_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_3b_3x3 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_3b_3x3_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3b_relu_3x3 = nn.ReLU (inplace)\n        self.inception_3b_double_3x3_reduce = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3b_double_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3b_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_3b_double_3x3_1 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_3b_double_3x3_1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3b_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_3b_double_3x3_2 = nn.Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_3b_double_3x3_2_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3b_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_3b_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True)\n        self.inception_3b_pool_proj = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3b_pool_proj_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3b_relu_pool_proj = nn.ReLU (inplace)\n        self.inception_3c_3x3_reduce = nn.Conv2d(320, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3c_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3c_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_3c_3x3 = 
nn.Conv2d(128, 160, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n        self.inception_3c_3x3_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3c_relu_3x3 = nn.ReLU (inplace)\n        self.inception_3c_double_3x3_reduce = nn.Conv2d(320, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_3c_double_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3c_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_3c_double_3x3_1 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_3c_double_3x3_1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3c_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_3c_double_3x3_2 = nn.Conv2d(96, 96, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n        self.inception_3c_double_3x3_2_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_3c_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_3c_pool = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True)\n        self.inception_4a_1x1 = nn.Conv2d(576, 224, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4a_1x1_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4a_relu_1x1 = nn.ReLU (inplace)\n        self.inception_4a_3x3_reduce = nn.Conv2d(576, 64, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4a_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4a_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4a_3x3 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4a_3x3_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4a_relu_3x3 = nn.ReLU (inplace)\n        self.inception_4a_double_3x3_reduce = nn.Conv2d(576, 96, kernel_size=(1, 1), 
stride=(1, 1))\n        self.inception_4a_double_3x3_reduce_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4a_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4a_double_3x3_1 = nn.Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4a_double_3x3_1_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4a_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_4a_double_3x3_2 = nn.Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4a_double_3x3_2_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4a_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_4a_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True)\n        self.inception_4a_pool_proj = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4a_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4a_relu_pool_proj = nn.ReLU (inplace)\n        self.inception_4b_1x1 = nn.Conv2d(576, 192, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4b_1x1_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4b_relu_1x1 = nn.ReLU (inplace)\n        self.inception_4b_3x3_reduce = nn.Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4b_3x3_reduce_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4b_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4b_3x3 = nn.Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4b_3x3_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4b_relu_3x3 = nn.ReLU (inplace)\n        self.inception_4b_double_3x3_reduce = nn.Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1))\n        
self.inception_4b_double_3x3_reduce_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4b_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4b_double_3x3_1 = nn.Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4b_double_3x3_1_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4b_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_4b_double_3x3_2 = nn.Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4b_double_3x3_2_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4b_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_4b_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True)\n        self.inception_4b_pool_proj = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4b_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4b_relu_pool_proj = nn.ReLU (inplace)\n        self.inception_4c_1x1 = nn.Conv2d(576, 160, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4c_1x1_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4c_relu_1x1 = nn.ReLU (inplace)\n        self.inception_4c_3x3_reduce = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4c_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4c_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4c_3x3 = nn.Conv2d(128, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4c_3x3_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4c_relu_3x3 = nn.ReLU (inplace)\n        self.inception_4c_double_3x3_reduce = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4c_double_3x3_reduce_bn = 
nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4c_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4c_double_3x3_1 = nn.Conv2d(128, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4c_double_3x3_1_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4c_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_4c_double_3x3_2 = nn.Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4c_double_3x3_2_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4c_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_4c_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True)\n        self.inception_4c_pool_proj = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4c_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4c_relu_pool_proj = nn.ReLU (inplace)\n        self.inception_4d_1x1 = nn.Conv2d(608, 96, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4d_1x1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4d_relu_1x1 = nn.ReLU (inplace)\n        self.inception_4d_3x3_reduce = nn.Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4d_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4d_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4d_3x3 = nn.Conv2d(128, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4d_3x3_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4d_relu_3x3 = nn.ReLU (inplace)\n        self.inception_4d_double_3x3_reduce = nn.Conv2d(608, 160, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4d_double_3x3_reduce_bn = nn.BatchNorm2d(160, eps=1e-05, 
momentum=0.9, affine=True)\n        self.inception_4d_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4d_double_3x3_1 = nn.Conv2d(160, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4d_double_3x3_1_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4d_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_4d_double_3x3_2 = nn.Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4d_double_3x3_2_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4d_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_4d_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True)\n        self.inception_4d_pool_proj = nn.Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4d_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4d_relu_pool_proj = nn.ReLU (inplace)\n        self.inception_4e_3x3_reduce = nn.Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4e_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4e_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4e_3x3 = nn.Conv2d(128, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n        self.inception_4e_3x3_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4e_relu_3x3 = nn.ReLU (inplace)\n        self.inception_4e_double_3x3_reduce = nn.Conv2d(608, 192, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_4e_double_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4e_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_4e_double_3x3_1 = nn.Conv2d(192, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_4e_double_3x3_1_bn = nn.BatchNorm2d(256, 
eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4e_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_4e_double_3x3_2 = nn.Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n        self.inception_4e_double_3x3_2_bn = nn.BatchNorm2d(256, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_4e_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_4e_pool = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True)\n        self.inception_5a_1x1 = nn.Conv2d(1056, 352, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5a_1x1_bn = nn.BatchNorm2d(352, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5a_relu_1x1 = nn.ReLU (inplace)\n        self.inception_5a_3x3_reduce = nn.Conv2d(1056, 192, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5a_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5a_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_5a_3x3 = nn.Conv2d(192, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_5a_3x3_bn = nn.BatchNorm2d(320, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5a_relu_3x3 = nn.ReLU (inplace)\n        self.inception_5a_double_3x3_reduce = nn.Conv2d(1056, 160, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5a_double_3x3_reduce_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5a_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_5a_double_3x3_1 = nn.Conv2d(160, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_5a_double_3x3_1_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5a_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_5a_double_3x3_2 = nn.Conv2d(224, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_5a_double_3x3_2_bn = nn.BatchNorm2d(224, eps=1e-05, 
momentum=0.9, affine=True)\n        self.inception_5a_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_5a_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True)\n        self.inception_5a_pool_proj = nn.Conv2d(1056, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5a_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5a_relu_pool_proj = nn.ReLU (inplace)\n        self.inception_5b_1x1 = nn.Conv2d(1024, 352, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5b_1x1_bn = nn.BatchNorm2d(352, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5b_relu_1x1 = nn.ReLU (inplace)\n        self.inception_5b_3x3_reduce = nn.Conv2d(1024, 192, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5b_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5b_relu_3x3_reduce = nn.ReLU (inplace)\n        self.inception_5b_3x3 = nn.Conv2d(192, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_5b_3x3_bn = nn.BatchNorm2d(320, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5b_relu_3x3 = nn.ReLU (inplace)\n        self.inception_5b_double_3x3_reduce = nn.Conv2d(1024, 192, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5b_double_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5b_relu_double_3x3_reduce = nn.ReLU (inplace)\n        self.inception_5b_double_3x3_1 = nn.Conv2d(192, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_5b_double_3x3_1_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5b_relu_double_3x3_1 = nn.ReLU (inplace)\n        self.inception_5b_double_3x3_2 = nn.Conv2d(224, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n        self.inception_5b_double_3x3_2_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True)\n       
 self.inception_5b_relu_double_3x3_2 = nn.ReLU (inplace)\n        self.inception_5b_pool = nn.MaxPool2d ((3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), ceil_mode=True)\n        self.inception_5b_pool_proj = nn.Conv2d(1024, 128, kernel_size=(1, 1), stride=(1, 1))\n        self.inception_5b_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True)\n        self.inception_5b_relu_pool_proj = nn.ReLU (inplace)\n        self.global_pool = nn.AvgPool2d (7, stride=1, padding=0, ceil_mode=True, count_include_pad=True)\n        self.fc = nn.Linear (1024, 1000)\n\n    def features(self, input):\n        conv1_7x7_s2_out = self.conv1_7x7_s2(input)\n        conv1_7x7_s2_bn_out = self.conv1_7x7_s2_bn(conv1_7x7_s2_out)\n        conv1_relu_7x7_out = self.conv1_relu_7x7(conv1_7x7_s2_bn_out)\n        pool1_3x3_s2_out = self.pool1_3x3_s2(conv1_7x7_s2_bn_out)\n        conv2_3x3_reduce_out = self.conv2_3x3_reduce(pool1_3x3_s2_out)\n        conv2_3x3_reduce_bn_out = self.conv2_3x3_reduce_bn(conv2_3x3_reduce_out)\n        conv2_relu_3x3_reduce_out = self.conv2_relu_3x3_reduce(conv2_3x3_reduce_bn_out)\n        conv2_3x3_out = self.conv2_3x3(conv2_3x3_reduce_bn_out)\n        conv2_3x3_bn_out = self.conv2_3x3_bn(conv2_3x3_out)\n        conv2_relu_3x3_out = self.conv2_relu_3x3(conv2_3x3_bn_out)\n        pool2_3x3_s2_out = self.pool2_3x3_s2(conv2_3x3_bn_out)\n        inception_3a_1x1_out = self.inception_3a_1x1(pool2_3x3_s2_out)\n        inception_3a_1x1_bn_out = self.inception_3a_1x1_bn(inception_3a_1x1_out)\n        inception_3a_relu_1x1_out = self.inception_3a_relu_1x1(inception_3a_1x1_bn_out)\n        inception_3a_3x3_reduce_out = self.inception_3a_3x3_reduce(pool2_3x3_s2_out)\n        inception_3a_3x3_reduce_bn_out = self.inception_3a_3x3_reduce_bn(inception_3a_3x3_reduce_out)\n        inception_3a_relu_3x3_reduce_out = self.inception_3a_relu_3x3_reduce(inception_3a_3x3_reduce_bn_out)\n        inception_3a_3x3_out = 
self.inception_3a_3x3(inception_3a_3x3_reduce_bn_out)\n        inception_3a_3x3_bn_out = self.inception_3a_3x3_bn(inception_3a_3x3_out)\n        inception_3a_relu_3x3_out = self.inception_3a_relu_3x3(inception_3a_3x3_bn_out)\n        inception_3a_double_3x3_reduce_out = self.inception_3a_double_3x3_reduce(pool2_3x3_s2_out)\n        inception_3a_double_3x3_reduce_bn_out = self.inception_3a_double_3x3_reduce_bn(inception_3a_double_3x3_reduce_out)\n        inception_3a_relu_double_3x3_reduce_out = self.inception_3a_relu_double_3x3_reduce(inception_3a_double_3x3_reduce_bn_out)\n        inception_3a_double_3x3_1_out = self.inception_3a_double_3x3_1(inception_3a_double_3x3_reduce_bn_out)\n        inception_3a_double_3x3_1_bn_out = self.inception_3a_double_3x3_1_bn(inception_3a_double_3x3_1_out)\n        inception_3a_relu_double_3x3_1_out = self.inception_3a_relu_double_3x3_1(inception_3a_double_3x3_1_bn_out)\n        inception_3a_double_3x3_2_out = self.inception_3a_double_3x3_2(inception_3a_double_3x3_1_bn_out)\n        inception_3a_double_3x3_2_bn_out = self.inception_3a_double_3x3_2_bn(inception_3a_double_3x3_2_out)\n        inception_3a_relu_double_3x3_2_out = self.inception_3a_relu_double_3x3_2(inception_3a_double_3x3_2_bn_out)\n        inception_3a_pool_out = self.inception_3a_pool(pool2_3x3_s2_out)\n        inception_3a_pool_proj_out = self.inception_3a_pool_proj(inception_3a_pool_out)\n        inception_3a_pool_proj_bn_out = self.inception_3a_pool_proj_bn(inception_3a_pool_proj_out)\n        inception_3a_relu_pool_proj_out = self.inception_3a_relu_pool_proj(inception_3a_pool_proj_bn_out)\n        inception_3a_output_out = torch.cat([inception_3a_1x1_bn_out,inception_3a_3x3_bn_out,inception_3a_double_3x3_2_bn_out,inception_3a_pool_proj_bn_out], 1)\n        inception_3b_1x1_out = self.inception_3b_1x1(inception_3a_output_out)\n        inception_3b_1x1_bn_out = self.inception_3b_1x1_bn(inception_3b_1x1_out)\n        inception_3b_relu_1x1_out = 
self.inception_3b_relu_1x1(inception_3b_1x1_bn_out)\n        inception_3b_3x3_reduce_out = self.inception_3b_3x3_reduce(inception_3a_output_out)\n        inception_3b_3x3_reduce_bn_out = self.inception_3b_3x3_reduce_bn(inception_3b_3x3_reduce_out)\n        inception_3b_relu_3x3_reduce_out = self.inception_3b_relu_3x3_reduce(inception_3b_3x3_reduce_bn_out)\n        inception_3b_3x3_out = self.inception_3b_3x3(inception_3b_3x3_reduce_bn_out)\n        inception_3b_3x3_bn_out = self.inception_3b_3x3_bn(inception_3b_3x3_out)\n        inception_3b_relu_3x3_out = self.inception_3b_relu_3x3(inception_3b_3x3_bn_out)\n        inception_3b_double_3x3_reduce_out = self.inception_3b_double_3x3_reduce(inception_3a_output_out)\n        inception_3b_double_3x3_reduce_bn_out = self.inception_3b_double_3x3_reduce_bn(inception_3b_double_3x3_reduce_out)\n        inception_3b_relu_double_3x3_reduce_out = self.inception_3b_relu_double_3x3_reduce(inception_3b_double_3x3_reduce_bn_out)\n        inception_3b_double_3x3_1_out = self.inception_3b_double_3x3_1(inception_3b_double_3x3_reduce_bn_out)\n        inception_3b_double_3x3_1_bn_out = self.inception_3b_double_3x3_1_bn(inception_3b_double_3x3_1_out)\n        inception_3b_relu_double_3x3_1_out = self.inception_3b_relu_double_3x3_1(inception_3b_double_3x3_1_bn_out)\n        inception_3b_double_3x3_2_out = self.inception_3b_double_3x3_2(inception_3b_double_3x3_1_bn_out)\n        inception_3b_double_3x3_2_bn_out = self.inception_3b_double_3x3_2_bn(inception_3b_double_3x3_2_out)\n        inception_3b_relu_double_3x3_2_out = self.inception_3b_relu_double_3x3_2(inception_3b_double_3x3_2_bn_out)\n        inception_3b_pool_out = self.inception_3b_pool(inception_3a_output_out)\n        inception_3b_pool_proj_out = self.inception_3b_pool_proj(inception_3b_pool_out)\n        inception_3b_pool_proj_bn_out = self.inception_3b_pool_proj_bn(inception_3b_pool_proj_out)\n        inception_3b_relu_pool_proj_out = 
self.inception_3b_relu_pool_proj(inception_3b_pool_proj_bn_out)\n        inception_3b_output_out = torch.cat([inception_3b_1x1_bn_out,inception_3b_3x3_bn_out,inception_3b_double_3x3_2_bn_out,inception_3b_pool_proj_bn_out], 1)\n        inception_3c_3x3_reduce_out = self.inception_3c_3x3_reduce(inception_3b_output_out)\n        inception_3c_3x3_reduce_bn_out = self.inception_3c_3x3_reduce_bn(inception_3c_3x3_reduce_out)\n        inception_3c_relu_3x3_reduce_out = self.inception_3c_relu_3x3_reduce(inception_3c_3x3_reduce_bn_out)\n        inception_3c_3x3_out = self.inception_3c_3x3(inception_3c_3x3_reduce_bn_out)\n        inception_3c_3x3_bn_out = self.inception_3c_3x3_bn(inception_3c_3x3_out)\n        inception_3c_relu_3x3_out = self.inception_3c_relu_3x3(inception_3c_3x3_bn_out)\n        inception_3c_double_3x3_reduce_out = self.inception_3c_double_3x3_reduce(inception_3b_output_out)\n        inception_3c_double_3x3_reduce_bn_out = self.inception_3c_double_3x3_reduce_bn(inception_3c_double_3x3_reduce_out)\n        inception_3c_relu_double_3x3_reduce_out = self.inception_3c_relu_double_3x3_reduce(inception_3c_double_3x3_reduce_bn_out)\n        inception_3c_double_3x3_1_out = self.inception_3c_double_3x3_1(inception_3c_double_3x3_reduce_bn_out)\n        inception_3c_double_3x3_1_bn_out = self.inception_3c_double_3x3_1_bn(inception_3c_double_3x3_1_out)\n        inception_3c_relu_double_3x3_1_out = self.inception_3c_relu_double_3x3_1(inception_3c_double_3x3_1_bn_out)\n        inception_3c_double_3x3_2_out = self.inception_3c_double_3x3_2(inception_3c_double_3x3_1_bn_out)\n        inception_3c_double_3x3_2_bn_out = self.inception_3c_double_3x3_2_bn(inception_3c_double_3x3_2_out)\n        inception_3c_relu_double_3x3_2_out = self.inception_3c_relu_double_3x3_2(inception_3c_double_3x3_2_bn_out)\n        inception_3c_pool_out = self.inception_3c_pool(inception_3b_output_out)\n        inception_3c_output_out = 
torch.cat([inception_3c_3x3_bn_out,inception_3c_double_3x3_2_bn_out,inception_3c_pool_out], 1)\n        inception_4a_1x1_out = self.inception_4a_1x1(inception_3c_output_out)\n        inception_4a_1x1_bn_out = self.inception_4a_1x1_bn(inception_4a_1x1_out)\n        inception_4a_relu_1x1_out = self.inception_4a_relu_1x1(inception_4a_1x1_bn_out)\n        inception_4a_3x3_reduce_out = self.inception_4a_3x3_reduce(inception_3c_output_out)\n        inception_4a_3x3_reduce_bn_out = self.inception_4a_3x3_reduce_bn(inception_4a_3x3_reduce_out)\n        inception_4a_relu_3x3_reduce_out = self.inception_4a_relu_3x3_reduce(inception_4a_3x3_reduce_bn_out)\n        inception_4a_3x3_out = self.inception_4a_3x3(inception_4a_3x3_reduce_bn_out)\n        inception_4a_3x3_bn_out = self.inception_4a_3x3_bn(inception_4a_3x3_out)\n        inception_4a_relu_3x3_out = self.inception_4a_relu_3x3(inception_4a_3x3_bn_out)\n        inception_4a_double_3x3_reduce_out = self.inception_4a_double_3x3_reduce(inception_3c_output_out)\n        inception_4a_double_3x3_reduce_bn_out = self.inception_4a_double_3x3_reduce_bn(inception_4a_double_3x3_reduce_out)\n        inception_4a_relu_double_3x3_reduce_out = self.inception_4a_relu_double_3x3_reduce(inception_4a_double_3x3_reduce_bn_out)\n        inception_4a_double_3x3_1_out = self.inception_4a_double_3x3_1(inception_4a_double_3x3_reduce_bn_out)\n        inception_4a_double_3x3_1_bn_out = self.inception_4a_double_3x3_1_bn(inception_4a_double_3x3_1_out)\n        inception_4a_relu_double_3x3_1_out = self.inception_4a_relu_double_3x3_1(inception_4a_double_3x3_1_bn_out)\n        inception_4a_double_3x3_2_out = self.inception_4a_double_3x3_2(inception_4a_double_3x3_1_bn_out)\n        inception_4a_double_3x3_2_bn_out = self.inception_4a_double_3x3_2_bn(inception_4a_double_3x3_2_out)\n        inception_4a_relu_double_3x3_2_out = self.inception_4a_relu_double_3x3_2(inception_4a_double_3x3_2_bn_out)\n        inception_4a_pool_out = 
self.inception_4a_pool(inception_3c_output_out)\n        inception_4a_pool_proj_out = self.inception_4a_pool_proj(inception_4a_pool_out)\n        inception_4a_pool_proj_bn_out = self.inception_4a_pool_proj_bn(inception_4a_pool_proj_out)\n        inception_4a_relu_pool_proj_out = self.inception_4a_relu_pool_proj(inception_4a_pool_proj_bn_out)\n        inception_4a_output_out = torch.cat([inception_4a_1x1_bn_out,inception_4a_3x3_bn_out,inception_4a_double_3x3_2_bn_out,inception_4a_pool_proj_bn_out], 1)\n        inception_4b_1x1_out = self.inception_4b_1x1(inception_4a_output_out)\n        inception_4b_1x1_bn_out = self.inception_4b_1x1_bn(inception_4b_1x1_out)\n        inception_4b_relu_1x1_out = self.inception_4b_relu_1x1(inception_4b_1x1_bn_out)\n        inception_4b_3x3_reduce_out = self.inception_4b_3x3_reduce(inception_4a_output_out)\n        inception_4b_3x3_reduce_bn_out = self.inception_4b_3x3_reduce_bn(inception_4b_3x3_reduce_out)\n        inception_4b_relu_3x3_reduce_out = self.inception_4b_relu_3x3_reduce(inception_4b_3x3_reduce_bn_out)\n        inception_4b_3x3_out = self.inception_4b_3x3(inception_4b_3x3_reduce_bn_out)\n        inception_4b_3x3_bn_out = self.inception_4b_3x3_bn(inception_4b_3x3_out)\n        inception_4b_relu_3x3_out = self.inception_4b_relu_3x3(inception_4b_3x3_bn_out)\n        inception_4b_double_3x3_reduce_out = self.inception_4b_double_3x3_reduce(inception_4a_output_out)\n        inception_4b_double_3x3_reduce_bn_out = self.inception_4b_double_3x3_reduce_bn(inception_4b_double_3x3_reduce_out)\n        inception_4b_relu_double_3x3_reduce_out = self.inception_4b_relu_double_3x3_reduce(inception_4b_double_3x3_reduce_bn_out)\n        inception_4b_double_3x3_1_out = self.inception_4b_double_3x3_1(inception_4b_double_3x3_reduce_bn_out)\n        inception_4b_double_3x3_1_bn_out = self.inception_4b_double_3x3_1_bn(inception_4b_double_3x3_1_out)\n        inception_4b_relu_double_3x3_1_out = 
self.inception_4b_relu_double_3x3_1(inception_4b_double_3x3_1_bn_out)\n        inception_4b_double_3x3_2_out = self.inception_4b_double_3x3_2(inception_4b_double_3x3_1_bn_out)\n        inception_4b_double_3x3_2_bn_out = self.inception_4b_double_3x3_2_bn(inception_4b_double_3x3_2_out)\n        inception_4b_relu_double_3x3_2_out = self.inception_4b_relu_double_3x3_2(inception_4b_double_3x3_2_bn_out)\n        inception_4b_pool_out = self.inception_4b_pool(inception_4a_output_out)\n        inception_4b_pool_proj_out = self.inception_4b_pool_proj(inception_4b_pool_out)\n        inception_4b_pool_proj_bn_out = self.inception_4b_pool_proj_bn(inception_4b_pool_proj_out)\n        inception_4b_relu_pool_proj_out = self.inception_4b_relu_pool_proj(inception_4b_pool_proj_bn_out)\n        inception_4b_output_out = torch.cat([inception_4b_1x1_bn_out,inception_4b_3x3_bn_out,inception_4b_double_3x3_2_bn_out,inception_4b_pool_proj_bn_out], 1)\n        inception_4c_1x1_out = self.inception_4c_1x1(inception_4b_output_out)\n        inception_4c_1x1_bn_out = self.inception_4c_1x1_bn(inception_4c_1x1_out)\n        inception_4c_relu_1x1_out = self.inception_4c_relu_1x1(inception_4c_1x1_bn_out)\n        inception_4c_3x3_reduce_out = self.inception_4c_3x3_reduce(inception_4b_output_out)\n        inception_4c_3x3_reduce_bn_out = self.inception_4c_3x3_reduce_bn(inception_4c_3x3_reduce_out)\n        inception_4c_relu_3x3_reduce_out = self.inception_4c_relu_3x3_reduce(inception_4c_3x3_reduce_bn_out)\n        inception_4c_3x3_out = self.inception_4c_3x3(inception_4c_3x3_reduce_bn_out)\n        inception_4c_3x3_bn_out = self.inception_4c_3x3_bn(inception_4c_3x3_out)\n        inception_4c_relu_3x3_out = self.inception_4c_relu_3x3(inception_4c_3x3_bn_out)\n        inception_4c_double_3x3_reduce_out = self.inception_4c_double_3x3_reduce(inception_4b_output_out)\n        inception_4c_double_3x3_reduce_bn_out = self.inception_4c_double_3x3_reduce_bn(inception_4c_double_3x3_reduce_out)\n        
inception_4c_relu_double_3x3_reduce_out = self.inception_4c_relu_double_3x3_reduce(inception_4c_double_3x3_reduce_bn_out)\n        inception_4c_double_3x3_1_out = self.inception_4c_double_3x3_1(inception_4c_double_3x3_reduce_bn_out)\n        inception_4c_double_3x3_1_bn_out = self.inception_4c_double_3x3_1_bn(inception_4c_double_3x3_1_out)\n        inception_4c_relu_double_3x3_1_out = self.inception_4c_relu_double_3x3_1(inception_4c_double_3x3_1_bn_out)\n        inception_4c_double_3x3_2_out = self.inception_4c_double_3x3_2(inception_4c_double_3x3_1_bn_out)\n        inception_4c_double_3x3_2_bn_out = self.inception_4c_double_3x3_2_bn(inception_4c_double_3x3_2_out)\n        inception_4c_relu_double_3x3_2_out = self.inception_4c_relu_double_3x3_2(inception_4c_double_3x3_2_bn_out)\n        inception_4c_pool_out = self.inception_4c_pool(inception_4b_output_out)\n        inception_4c_pool_proj_out = self.inception_4c_pool_proj(inception_4c_pool_out)\n        inception_4c_pool_proj_bn_out = self.inception_4c_pool_proj_bn(inception_4c_pool_proj_out)\n        inception_4c_relu_pool_proj_out = self.inception_4c_relu_pool_proj(inception_4c_pool_proj_bn_out)\n        inception_4c_output_out = torch.cat([inception_4c_1x1_bn_out,inception_4c_3x3_bn_out,inception_4c_double_3x3_2_bn_out,inception_4c_pool_proj_bn_out], 1)\n        inception_4d_1x1_out = self.inception_4d_1x1(inception_4c_output_out)\n        inception_4d_1x1_bn_out = self.inception_4d_1x1_bn(inception_4d_1x1_out)\n        inception_4d_relu_1x1_out = self.inception_4d_relu_1x1(inception_4d_1x1_bn_out)\n        inception_4d_3x3_reduce_out = self.inception_4d_3x3_reduce(inception_4c_output_out)\n        inception_4d_3x3_reduce_bn_out = self.inception_4d_3x3_reduce_bn(inception_4d_3x3_reduce_out)\n        inception_4d_relu_3x3_reduce_out = self.inception_4d_relu_3x3_reduce(inception_4d_3x3_reduce_bn_out)\n        inception_4d_3x3_out = self.inception_4d_3x3(inception_4d_3x3_reduce_bn_out)\n        
inception_4d_3x3_bn_out = self.inception_4d_3x3_bn(inception_4d_3x3_out)\n        inception_4d_relu_3x3_out = self.inception_4d_relu_3x3(inception_4d_3x3_bn_out)\n        inception_4d_double_3x3_reduce_out = self.inception_4d_double_3x3_reduce(inception_4c_output_out)\n        inception_4d_double_3x3_reduce_bn_out = self.inception_4d_double_3x3_reduce_bn(inception_4d_double_3x3_reduce_out)\n        inception_4d_relu_double_3x3_reduce_out = self.inception_4d_relu_double_3x3_reduce(inception_4d_double_3x3_reduce_bn_out)\n        inception_4d_double_3x3_1_out = self.inception_4d_double_3x3_1(inception_4d_double_3x3_reduce_bn_out)\n        inception_4d_double_3x3_1_bn_out = self.inception_4d_double_3x3_1_bn(inception_4d_double_3x3_1_out)\n        inception_4d_relu_double_3x3_1_out = self.inception_4d_relu_double_3x3_1(inception_4d_double_3x3_1_bn_out)\n        inception_4d_double_3x3_2_out = self.inception_4d_double_3x3_2(inception_4d_double_3x3_1_bn_out)\n        inception_4d_double_3x3_2_bn_out = self.inception_4d_double_3x3_2_bn(inception_4d_double_3x3_2_out)\n        inception_4d_relu_double_3x3_2_out = self.inception_4d_relu_double_3x3_2(inception_4d_double_3x3_2_bn_out)\n        inception_4d_pool_out = self.inception_4d_pool(inception_4c_output_out)\n        inception_4d_pool_proj_out = self.inception_4d_pool_proj(inception_4d_pool_out)\n        inception_4d_pool_proj_bn_out = self.inception_4d_pool_proj_bn(inception_4d_pool_proj_out)\n        inception_4d_relu_pool_proj_out = self.inception_4d_relu_pool_proj(inception_4d_pool_proj_bn_out)\n        inception_4d_output_out = torch.cat([inception_4d_1x1_bn_out,inception_4d_3x3_bn_out,inception_4d_double_3x3_2_bn_out,inception_4d_pool_proj_bn_out], 1)\n        inception_4e_3x3_reduce_out = self.inception_4e_3x3_reduce(inception_4d_output_out)\n        inception_4e_3x3_reduce_bn_out = self.inception_4e_3x3_reduce_bn(inception_4e_3x3_reduce_out)\n        inception_4e_relu_3x3_reduce_out = 
self.inception_4e_relu_3x3_reduce(inception_4e_3x3_reduce_bn_out)\n        inception_4e_3x3_out = self.inception_4e_3x3(inception_4e_3x3_reduce_bn_out)\n        inception_4e_3x3_bn_out = self.inception_4e_3x3_bn(inception_4e_3x3_out)\n        inception_4e_relu_3x3_out = self.inception_4e_relu_3x3(inception_4e_3x3_bn_out)\n        inception_4e_double_3x3_reduce_out = self.inception_4e_double_3x3_reduce(inception_4d_output_out)\n        inception_4e_double_3x3_reduce_bn_out = self.inception_4e_double_3x3_reduce_bn(inception_4e_double_3x3_reduce_out)\n        inception_4e_relu_double_3x3_reduce_out = self.inception_4e_relu_double_3x3_reduce(inception_4e_double_3x3_reduce_bn_out)\n        inception_4e_double_3x3_1_out = self.inception_4e_double_3x3_1(inception_4e_double_3x3_reduce_bn_out)\n        inception_4e_double_3x3_1_bn_out = self.inception_4e_double_3x3_1_bn(inception_4e_double_3x3_1_out)\n        inception_4e_relu_double_3x3_1_out = self.inception_4e_relu_double_3x3_1(inception_4e_double_3x3_1_bn_out)\n        inception_4e_double_3x3_2_out = self.inception_4e_double_3x3_2(inception_4e_double_3x3_1_bn_out)\n        inception_4e_double_3x3_2_bn_out = self.inception_4e_double_3x3_2_bn(inception_4e_double_3x3_2_out)\n        inception_4e_relu_double_3x3_2_out = self.inception_4e_relu_double_3x3_2(inception_4e_double_3x3_2_bn_out)\n        inception_4e_pool_out = self.inception_4e_pool(inception_4d_output_out)\n        inception_4e_output_out = torch.cat([inception_4e_3x3_bn_out,inception_4e_double_3x3_2_bn_out,inception_4e_pool_out], 1)\n        inception_5a_1x1_out = self.inception_5a_1x1(inception_4e_output_out)\n        inception_5a_1x1_bn_out = self.inception_5a_1x1_bn(inception_5a_1x1_out)\n        inception_5a_relu_1x1_out = self.inception_5a_relu_1x1(inception_5a_1x1_bn_out)\n        inception_5a_3x3_reduce_out = self.inception_5a_3x3_reduce(inception_4e_output_out)\n        inception_5a_3x3_reduce_bn_out = 
self.inception_5a_3x3_reduce_bn(inception_5a_3x3_reduce_out)\n        inception_5a_relu_3x3_reduce_out = self.inception_5a_relu_3x3_reduce(inception_5a_3x3_reduce_bn_out)\n        inception_5a_3x3_out = self.inception_5a_3x3(inception_5a_3x3_reduce_bn_out)\n        inception_5a_3x3_bn_out = self.inception_5a_3x3_bn(inception_5a_3x3_out)\n        inception_5a_relu_3x3_out = self.inception_5a_relu_3x3(inception_5a_3x3_bn_out)\n        inception_5a_double_3x3_reduce_out = self.inception_5a_double_3x3_reduce(inception_4e_output_out)\n        inception_5a_double_3x3_reduce_bn_out = self.inception_5a_double_3x3_reduce_bn(inception_5a_double_3x3_reduce_out)\n        inception_5a_relu_double_3x3_reduce_out = self.inception_5a_relu_double_3x3_reduce(inception_5a_double_3x3_reduce_bn_out)\n        inception_5a_double_3x3_1_out = self.inception_5a_double_3x3_1(inception_5a_double_3x3_reduce_bn_out)\n        inception_5a_double_3x3_1_bn_out = self.inception_5a_double_3x3_1_bn(inception_5a_double_3x3_1_out)\n        inception_5a_relu_double_3x3_1_out = self.inception_5a_relu_double_3x3_1(inception_5a_double_3x3_1_bn_out)\n        inception_5a_double_3x3_2_out = self.inception_5a_double_3x3_2(inception_5a_double_3x3_1_bn_out)\n        inception_5a_double_3x3_2_bn_out = self.inception_5a_double_3x3_2_bn(inception_5a_double_3x3_2_out)\n        inception_5a_relu_double_3x3_2_out = self.inception_5a_relu_double_3x3_2(inception_5a_double_3x3_2_bn_out)\n        inception_5a_pool_out = self.inception_5a_pool(inception_4e_output_out)\n        inception_5a_pool_proj_out = self.inception_5a_pool_proj(inception_5a_pool_out)\n        inception_5a_pool_proj_bn_out = self.inception_5a_pool_proj_bn(inception_5a_pool_proj_out)\n        inception_5a_relu_pool_proj_out = self.inception_5a_relu_pool_proj(inception_5a_pool_proj_bn_out)\n        inception_5a_output_out = torch.cat([inception_5a_1x1_bn_out,inception_5a_3x3_bn_out,inception_5a_double_3x3_2_bn_out,inception_5a_pool_proj_bn_out], 1)\n   
     inception_5b_1x1_out = self.inception_5b_1x1(inception_5a_output_out)\n        inception_5b_1x1_bn_out = self.inception_5b_1x1_bn(inception_5b_1x1_out)\n        inception_5b_relu_1x1_out = self.inception_5b_relu_1x1(inception_5b_1x1_bn_out)\n        inception_5b_3x3_reduce_out = self.inception_5b_3x3_reduce(inception_5a_output_out)\n        inception_5b_3x3_reduce_bn_out = self.inception_5b_3x3_reduce_bn(inception_5b_3x3_reduce_out)\n        inception_5b_relu_3x3_reduce_out = self.inception_5b_relu_3x3_reduce(inception_5b_3x3_reduce_bn_out)\n        inception_5b_3x3_out = self.inception_5b_3x3(inception_5b_3x3_reduce_bn_out)\n        inception_5b_3x3_bn_out = self.inception_5b_3x3_bn(inception_5b_3x3_out)\n        inception_5b_relu_3x3_out = self.inception_5b_relu_3x3(inception_5b_3x3_bn_out)\n        inception_5b_double_3x3_reduce_out = self.inception_5b_double_3x3_reduce(inception_5a_output_out)\n        inception_5b_double_3x3_reduce_bn_out = self.inception_5b_double_3x3_reduce_bn(inception_5b_double_3x3_reduce_out)\n        inception_5b_relu_double_3x3_reduce_out = self.inception_5b_relu_double_3x3_reduce(inception_5b_double_3x3_reduce_bn_out)\n        inception_5b_double_3x3_1_out = self.inception_5b_double_3x3_1(inception_5b_double_3x3_reduce_bn_out)\n        inception_5b_double_3x3_1_bn_out = self.inception_5b_double_3x3_1_bn(inception_5b_double_3x3_1_out)\n        inception_5b_relu_double_3x3_1_out = self.inception_5b_relu_double_3x3_1(inception_5b_double_3x3_1_bn_out)\n        inception_5b_double_3x3_2_out = self.inception_5b_double_3x3_2(inception_5b_double_3x3_1_bn_out)\n        inception_5b_double_3x3_2_bn_out = self.inception_5b_double_3x3_2_bn(inception_5b_double_3x3_2_out)\n        inception_5b_relu_double_3x3_2_out = self.inception_5b_relu_double_3x3_2(inception_5b_double_3x3_2_bn_out)\n        inception_5b_pool_out = self.inception_5b_pool(inception_5a_output_out)\n        inception_5b_pool_proj_out = 
self.inception_5b_pool_proj(inception_5b_pool_out)\n        inception_5b_pool_proj_bn_out = self.inception_5b_pool_proj_bn(inception_5b_pool_proj_out)\n        inception_5b_relu_pool_proj_out = self.inception_5b_relu_pool_proj(inception_5b_pool_proj_bn_out)\n        inception_5b_output_out = torch.cat([inception_5b_1x1_bn_out,inception_5b_3x3_bn_out,inception_5b_double_3x3_2_bn_out,inception_5b_pool_proj_bn_out], 1)\n        global_pool_out = self.global_pool(inception_5b_output_out)\n        return global_pool_out\n\n    def classif(self, features):\n        fc_out = self.fc(features.view(features.size(0), -1))\n        return fc_out\n\n    def forward(self, input):\n        features_out = self.features(input)\n        classif_out = self.classif(features_out)\n        return classif_out\n\ndef bninception(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"BNInception model architecture from <https://arxiv.org/pdf/1502.03167.pdf>`_ paper.\n    \"\"\"\n    model = BNInception(num_classes=1000)\n    if pretrained is not None:\n        settings = pretrained_settings['bninception'][pretrained]\n        assert num_classes == settings['num_classes'], \\\n            \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n        model.load_state_dict(model_zoo.load_url(settings['url']))\n        model.input_space = settings['input_space']\n        model.input_size = settings['input_size']\n        model.input_range = settings['input_range']\n        model.mean = settings['mean']\n        model.std = settings['std']\n    return model\n\n\nif __name__ == '__main__':\n\n    model = bninception()"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/fbresnet/resnet152_dump.lua",
    "content": "require 'cutorch'\nrequire 'cunn'\nrequire 'cudnn'\nrequire 'image'\nvision=require 'torchnet-vision'\n\nnet=vision.models.resnet.load{filename='data/resnet152/net.t7',length=152}\nprint(net)\n\nrequire 'nn'\nnn.Module.parameters = function(self)\n   if self.weight and self.bias and self.running_mean and self.running_var then\n      return {self.weight, self.bias, self.running_mean, self.running_var}, {self.gradWeight, self.gradBias}\n\n   elseif self.weight and self.bias then\n      return {self.weight, self.bias}, {self.gradWeight, self.gradBias}\n   elseif self.weight then\n      return {self.weight}, {self.gradWeight}\n   elseif self.bias then\n      return {self.bias}, {self.gradBias}\n   else\n      return\n   end\nend\n\nnetparams, _ = net:parameters()\nprint(#netparams)\ntorch.save('data/resnet152/netparams.t7', netparams)\n\nnet=net:cuda()\nnet:evaluate()\n--p, gp = net:getParameters()\ninput = torch.ones(1,3,224,224)\ninput[{1,1,1,1}] = -1\ninput[1] = image.load('data/lena_224.png')\nprint(input:sum())\ninput = input:cuda()\noutput=net:forward(input)\n\nfor i=1, 11 do\n    torch.save('data/resnet152/output'..i..'.t7', net:get(i).output:float())\nend\n"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/fbresnet/resnet152_load.py",
    "content": "import torch.nn as nn\nimport math\nimport torch.utils.model_zoo as model_zoo\n\n\n__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',\n           'resnet152']\n\n\nmodel_urls = {\n    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',\n    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',\n    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',\n    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',\n    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',\n}\n\n\ndef conv3x3(in_planes, out_planes, stride=1):\n    \"3x3 convolution with padding\"\n    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,\n                     padding=1, bias=True)\n\n\nclass BasicBlock(nn.Module):\n    expansion = 1\n\n    def __init__(self, inplanes, planes, stride=1, downsample=None):\n        super(BasicBlock, self).__init__()\n        self.conv1 = conv3x3(inplanes, planes, stride)\n        self.bn1 = nn.BatchNorm2d(planes)\n        self.relu = nn.ReLU(inplace=True)\n        self.conv2 = conv3x3(planes, planes)\n        self.bn2 = nn.BatchNorm2d(planes)\n        self.downsample = downsample\n        self.stride = stride\n\n    def forward(self, x):\n        residual = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n\n        if self.downsample is not None:\n            residual = self.downsample(x)\n\n        out += residual\n        out = self.relu(out)\n\n        return out\n\n\nclass Bottleneck(nn.Module):\n    expansion = 4\n\n    def __init__(self, inplanes, planes, stride=1, downsample=None):\n        super(Bottleneck, self).__init__()\n        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=True)\n        self.bn1 = nn.BatchNorm2d(planes)\n        self.conv2 = nn.Conv2d(planes, planes, 
kernel_size=3, stride=stride,\n                               padding=1, bias=True)\n        self.bn2 = nn.BatchNorm2d(planes)\n        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=True)\n        self.bn3 = nn.BatchNorm2d(planes * 4)\n        self.relu = nn.ReLU(inplace=True)\n        self.downsample = downsample\n        self.stride = stride\n\n    def forward(self, x):\n        residual = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n        out = self.relu(out)\n\n        out = self.conv3(out)\n        out = self.bn3(out)\n\n        if self.downsample is not None:\n            residual = self.downsample(x)\n\n        out += residual\n        out = self.relu(out)\n\n        return out\n\nfrom torch.legacy import nn as nnl\n\nclass ResNet(nn.Module):\n\n    def __init__(self, block, layers, num_classes=1000):\n        self.inplanes = 64\n        super(ResNet, self).__init__()\n        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,\n                                bias=True)\n        #self.conv1 = nnl.SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3)\n        self.bn1 = nn.BatchNorm2d(64)\n        self.relu = nn.ReLU(inplace=True)\n        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)\n        self.layer1 = self._make_layer(block, 64, layers[0])\n        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)\n        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)\n        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)\n        self.avgpool = nn.AvgPool2d(7)\n        self.fc = nn.Linear(512 * block.expansion, num_classes)\n\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n                m.weight.data.normal_(0, math.sqrt(2. 
/ n))\n            elif isinstance(m, nn.BatchNorm2d):\n                m.weight.data.fill_(1)\n                m.bias.data.zero_()\n\n    def _make_layer(self, block, planes, blocks, stride=1):\n        downsample = None\n        if stride != 1 or self.inplanes != planes * block.expansion:\n            downsample = nn.Sequential(\n                nn.Conv2d(self.inplanes, planes * block.expansion,\n                          kernel_size=1, stride=stride, bias=True),\n                nn.BatchNorm2d(planes * block.expansion),\n            )\n\n        layers = []\n        layers.append(block(self.inplanes, planes, stride, downsample))\n        self.inplanes = planes * block.expansion\n        for i in range(1, blocks):\n            layers.append(block(self.inplanes, planes))\n\n        return nn.Sequential(*layers)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        self.conv1_input = x.clone()\n        x = self.bn1(x)\n        x = self.relu(x)\n        x = self.maxpool(x)\n\n        x = self.layer1(x)\n        x = self.layer2(x)\n        x = self.layer3(x)\n        x = self.layer4(x)\n\n        x = self.avgpool(x)\n        x = x.view(x.size(0), -1)\n        x = self.fc(x)\n\n        return x\n\n\ndef resnet18(pretrained=False, **kwargs):\n    \"\"\"Constructs a ResNet-18 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)\n    if pretrained:\n        model.load_state_dict(model_zoo.load_url(model_urls['resnet18']))\n    return model\n\n\ndef resnet34(pretrained=False, **kwargs):\n    \"\"\"Constructs a ResNet-34 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs)\n    if pretrained:\n        model.load_state_dict(model_zoo.load_url(model_urls['resnet34']))\n    return model\n\n\ndef resnet50(pretrained=False, **kwargs):\n    
\"\"\"Constructs a ResNet-50 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs)\n    if pretrained:\n        model.load_state_dict(model_zoo.load_url(model_urls['resnet50']))\n    return model\n\n\ndef resnet101(pretrained=False, **kwargs):\n    \"\"\"Constructs a ResNet-101 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs)\n    if pretrained:\n        model.load_state_dict(model_zoo.load_url(model_urls['resnet101']))\n    return model\n\n\ndef resnet152(pretrained=False, **kwargs):\n    \"\"\"Constructs a ResNet-152 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs)\n    if pretrained:\n        model.load_state_dict(model_zoo.load_url(model_urls['resnet152']))\n    return model\n\nimport torchfile\nfrom torch.utils.serialization import load_lua\nimport torch\nnetparams = torchfile.load('data/resnet152/netparams.t7')\n#netparams2 = load_lua('data/resnet152/netparams.t7')\n#import ipdb; ipdb.set_trace()\nnetoutputs = []\nfor i in range(1, 12):\n    path = 'data/resnet152/output{}.t7'.format(i)\n    out = load_lua(path)\n    #print(out.size())\n    if out.dim()==4:\n        pass#out.transpose_(2, 3)\n    netoutputs.append(out)\n\nnet = resnet152()\nstate_dict = net.state_dict()\n\nimport collections\ns = collections.OrderedDict()\n\n\ni=0\nfor key in state_dict.keys():\n    new = torch.from_numpy(netparams[i])\n    s[key] = new\n    if s[key].dim() == 4:\n        pass#s[key].transpose_(2,3)\n    i += 1\n\nnet.load_state_dict(s)\n\nnet.conv1.register_forward_hook(lambda self, input, output: \\\n    print('conv1', torch.dist(output.data, netoutputs[0])))\nnet.bn1.register_forward_hook(lambda self, input, output: \\\n    
print('bn1', torch.dist(output.data, netoutputs[1])))\nnet.relu.register_forward_hook(lambda self, input, output: \\\n    print('relu', torch.dist(output.data, netoutputs[2])))\nnet.maxpool.register_forward_hook(lambda self, input, output: \\\n    print('maxpool', torch.dist(output.data, netoutputs[3])))\nnet.layer1.register_forward_hook(lambda self, input, output: \\\n    print('layer1', torch.dist(output.data, netoutputs[4])))\nnet.layer2.register_forward_hook(lambda self, input, output: \\\n    print('layer2', torch.dist(output.data, netoutputs[5])))\nnet.layer3.register_forward_hook(lambda self, input, output: \\\n    print('layer3', torch.dist(output.data, netoutputs[6])))\nnet.layer4.register_forward_hook(lambda self, input, output: \\\n    print('layer4', torch.dist(output.data, netoutputs[7])))\nnet.avgpool.register_forward_hook(lambda self, input, output: \\\n    print('avgpool', torch.dist(output.data, netoutputs[8])))\nnet.fc.register_forward_hook(lambda self, input, output: \\\n    print('fc', torch.dist(output.data, netoutputs[10])))\n\nnet.eval()\ninput_data = torch.ones(1,3,224,224)\ninput_data[0][0][0][0] = -1\nfrom PIL import Image\nimport torchvision.transforms as transforms\ninput_data[0] = transforms.ToTensor()(Image.open('data/lena_224.png'))\nprint('lena sum', input_data.sum())\ninput = torch.autograd.Variable(input_data)\noutput = net.forward(input)\n\ntorch.save(s, 'data/resnet152.pth')\n\n\n"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/fbresnet.py",
    "content": "import torch.nn as nn\nimport math\nimport torch.utils.model_zoo as model_zoo\n\n\n__all__ = ['FBResNet',\n           'fbresnet18', 'fbresnet34', 'fbresnet50', 'fbresnet101',\n           'fbresnet152']\n\npretrained_settings = {\n    'fbresnet152': {\n        'imagenet': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/resnet152-c11d722e.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 224, 224],\n            'input_range': [0, 1],\n            'mean': [0.485, 0.456, 0.406],\n            'std': [0.229, 0.224, 0.225],\n            'num_classes': 1000\n        }\n    }\n}\n\n\ndef conv3x3(in_planes, out_planes, stride=1):\n    \"3x3 convolution with padding\"\n    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,\n                     padding=1, bias=True)\n\n\nclass BasicBlock(nn.Module):\n    expansion = 1\n\n    def __init__(self, inplanes, planes, stride=1, downsample=None):\n        super(BasicBlock, self).__init__()\n        self.conv1 = conv3x3(inplanes, planes, stride)\n        self.bn1 = nn.BatchNorm2d(planes)\n        self.relu = nn.ReLU(inplace=True)\n        self.conv2 = conv3x3(planes, planes)\n        self.bn2 = nn.BatchNorm2d(planes)\n        self.downsample = downsample\n        self.stride = stride\n\n    def forward(self, x):\n        residual = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n\n        if self.downsample is not None:\n            residual = self.downsample(x)\n\n        out += residual\n        out = self.relu(out)\n\n        return out\n\n\nclass Bottleneck(nn.Module):\n    expansion = 4\n\n    def __init__(self, inplanes, planes, stride=1, downsample=None):\n        super(Bottleneck, self).__init__()\n        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=True)\n        self.bn1 = nn.BatchNorm2d(planes)\n        
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,\n                               padding=1, bias=True)\n        self.bn2 = nn.BatchNorm2d(planes)\n        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=True)\n        self.bn3 = nn.BatchNorm2d(planes * 4)\n        self.relu = nn.ReLU(inplace=True)\n        self.downsample = downsample\n        self.stride = stride\n\n    def forward(self, x):\n        residual = x\n\n        out = self.conv1(x)\n        out = self.bn1(out)\n        out = self.relu(out)\n\n        out = self.conv2(out)\n        out = self.bn2(out)\n        out = self.relu(out)\n\n        out = self.conv3(out)\n        out = self.bn3(out)\n\n        if self.downsample is not None:\n            residual = self.downsample(x)\n\n        out += residual\n        out = self.relu(out)\n\n        return out\n\nclass FBResNet(nn.Module):\n\n    def __init__(self, block, layers, num_classes=1000):\n        self.inplanes = 64\n        # Special attributs\n        self.input_space = None\n        self.input_size = (299, 299, 3)\n        self.mean = None\n        self.std = None\n        super(FBResNet, self).__init__()\n        # Modules\n        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,\n                                bias=True)\n        self.bn1 = nn.BatchNorm2d(64)\n        self.relu = nn.ReLU(inplace=True)\n        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)\n        self.layer1 = self._make_layer(block, 64, layers[0])\n        self.layer2 = self._make_layer(block, 128, layers[1], stride=2)\n        self.layer3 = self._make_layer(block, 256, layers[2], stride=2)\n        self.layer4 = self._make_layer(block, 512, layers[3], stride=2)\n        self.avgpool = nn.AvgPool2d(7)\n        self.fc = nn.Linear(512 * block.expansion, num_classes)\n\n        for m in self.modules():\n            if isinstance(m, nn.Conv2d):\n                n = m.kernel_size[0] * m.kernel_size[1] * 
m.out_channels\n                m.weight.data.normal_(0, math.sqrt(2. / n))\n            elif isinstance(m, nn.BatchNorm2d):\n                m.weight.data.fill_(1)\n                m.bias.data.zero_()\n\n    def _make_layer(self, block, planes, blocks, stride=1):\n        downsample = None\n        if stride != 1 or self.inplanes != planes * block.expansion:\n            downsample = nn.Sequential(\n                nn.Conv2d(self.inplanes, planes * block.expansion,\n                          kernel_size=1, stride=stride, bias=True),\n                nn.BatchNorm2d(planes * block.expansion),\n            )\n\n        layers = []\n        layers.append(block(self.inplanes, planes, stride, downsample))\n        self.inplanes = planes * block.expansion\n        for i in range(1, blocks):\n            layers.append(block(self.inplanes, planes))\n\n        return nn.Sequential(*layers)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        self.conv1_input = x.clone()\n        x = self.bn1(x)\n        x = self.relu(x)\n        x = self.maxpool(x)\n\n        x = self.layer1(x)\n        x = self.layer2(x)\n        x = self.layer3(x)\n        x = self.layer4(x)\n\n        x = self.avgpool(x)\n        x = x.view(x.size(0), -1)\n        x = self.fc(x)\n\n        return x\n\n\ndef fbresnet18(num_classes=1000):\n    \"\"\"Constructs a ResNet-18 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = FBResNet(BasicBlock, [2, 2, 2, 2], num_classes=num_classes)\n    return model\n\n\ndef fbresnet34(num_classes=1000):\n    \"\"\"Constructs a ResNet-34 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = FBResNet(BasicBlock, [3, 4, 6, 3], num_classes=num_classes)\n    return model\n\n\ndef fbresnet50(num_classes=1000):\n    \"\"\"Constructs a ResNet-50 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on 
ImageNet\n    \"\"\"\n    model = FBResNet(Bottleneck, [3, 4, 6, 3], num_classes=num_classes)\n    return model\n\n\ndef fbresnet101(num_classes=1000):\n    \"\"\"Constructs a ResNet-101 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = FBResNet(Bottleneck, [3, 4, 23, 3], num_classes=num_classes)\n    return model\n\n\ndef fbresnet152(num_classes=1000, pretrained='imagenet'):\n    \"\"\"Constructs a ResNet-152 model.\n\n    Args:\n        pretrained (bool): If True, returns a model pre-trained on ImageNet\n    \"\"\"\n    model = FBResNet(Bottleneck, [3, 8, 36, 3], num_classes=num_classes)\n    if pretrained is not None:\n        settings = pretrained_settings['fbresnet152'][pretrained]\n        assert num_classes == settings['num_classes'], \\\n            \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n        model.load_state_dict(model_zoo.load_url(settings['url']))\n        model.input_space = settings['input_space']\n        model.input_size = settings['input_size']\n        model.input_range = settings['input_range']\n        model.mean = settings['mean']\n        model.std = settings['std']\n    return model\n\n\n"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/inceptionresnetv2.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.utils.model_zoo as model_zoo\nimport os\nimport sys\n\n__all__ = ['InceptionResNetV2', 'inceptionresnetv2']\n\npretrained_settings = {\n    'inceptionresnetv2': {\n        'imagenet': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionresnetv2-d579a627.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 299, 299],\n            'input_range': [0, 1],\n            'mean': [0.5, 0.5, 0.5],\n            'std': [0.5, 0.5, 0.5],\n            'num_classes': 1000\n        },\n        'imagenet+background': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionresnetv2-d579a627.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 299, 299],\n            'input_range': [0, 1],\n            'mean': [0.5, 0.5, 0.5],\n            'std': [0.5, 0.5, 0.5],\n            'num_classes': 1001\n        }\n    }\n}\n\n\nclass BasicConv2d(nn.Module):\n\n    def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):\n        super(BasicConv2d, self).__init__()\n        self.conv = nn.Conv2d(in_planes, out_planes,\n                              kernel_size=kernel_size, stride=stride,\n                              padding=padding, bias=False) # verify bias false\n        self.bn = nn.BatchNorm2d(out_planes,\n                                 eps=0.001, # value found in tensorflow\n                                 momentum=0.1, # default pytorch value\n                                 affine=True)\n        self.relu = nn.ReLU(inplace=False)\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.bn(x)\n        x = self.relu(x)\n        return x\n\n\nclass Mixed_5b(nn.Module):\n\n    def __init__(self):\n        super(Mixed_5b, self).__init__()\n\n        self.branch0 = BasicConv2d(192, 96, kernel_size=1, stride=1)\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(192, 48, kernel_size=1, stride=1),\n      
      BasicConv2d(48, 64, kernel_size=5, stride=1, padding=2)\n        ) \n\n        self.branch2 = nn.Sequential(\n            BasicConv2d(192, 64, kernel_size=1, stride=1),\n            BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1),\n            BasicConv2d(96, 96, kernel_size=3, stride=1, padding=1)\n        )\n\n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),\n            BasicConv2d(192, 64, kernel_size=1, stride=1)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass Block35(nn.Module):\n\n    def __init__(self, scale=1.0):\n        super(Block35, self).__init__()\n\n        self.scale = scale\n\n        self.branch0 = BasicConv2d(320, 32, kernel_size=1, stride=1)\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(320, 32, kernel_size=1, stride=1),\n            BasicConv2d(32, 32, kernel_size=3, stride=1, padding=1)\n        )\n\n        self.branch2 = nn.Sequential(\n            BasicConv2d(320, 32, kernel_size=1, stride=1),\n            BasicConv2d(32, 48, kernel_size=3, stride=1, padding=1),\n            BasicConv2d(48, 64, kernel_size=3, stride=1, padding=1)\n        )\n\n        self.conv2d = nn.Conv2d(128, 320, kernel_size=1, stride=1)\n        self.relu = nn.ReLU(inplace=False)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        out = self.conv2d(out)\n        out = out * self.scale + x\n        out = self.relu(out)\n        return out\n\n\nclass Mixed_6a(nn.Module):\n\n    def __init__(self):\n        super(Mixed_6a, self).__init__()\n        \n        self.branch0 = BasicConv2d(320, 384, kernel_size=3, stride=2)\n\n        self.branch1 = nn.Sequential(\n            
BasicConv2d(320, 256, kernel_size=1, stride=1),\n            BasicConv2d(256, 256, kernel_size=3, stride=1, padding=1),\n            BasicConv2d(256, 384, kernel_size=3, stride=2)\n        )\n\n        self.branch2 = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        return out\n\n\nclass Block17(nn.Module):\n\n    def __init__(self, scale=1.0):\n        super(Block17, self).__init__()\n\n        self.scale = scale\n\n        self.branch0 = BasicConv2d(1088, 192, kernel_size=1, stride=1)\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(1088, 128, kernel_size=1, stride=1),\n            BasicConv2d(128, 160, kernel_size=(1,7), stride=1, padding=(0,3)),\n            BasicConv2d(160, 192, kernel_size=(7,1), stride=1, padding=(3,0))\n        )\n\n        self.conv2d = nn.Conv2d(384, 1088, kernel_size=1, stride=1)\n        self.relu = nn.ReLU(inplace=False)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        out = torch.cat((x0, x1), 1)\n        out = self.conv2d(out)\n        out = out * self.scale + x\n        out = self.relu(out)\n        return out\n\n\nclass Mixed_7a(nn.Module):\n\n    def __init__(self):\n        super(Mixed_7a, self).__init__()\n        \n        self.branch0 = nn.Sequential(\n            BasicConv2d(1088, 256, kernel_size=1, stride=1),\n            BasicConv2d(256, 384, kernel_size=3, stride=2)\n        )\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(1088, 256, kernel_size=1, stride=1),\n            BasicConv2d(256, 288, kernel_size=3, stride=2)\n        )\n\n        self.branch2 = nn.Sequential(\n            BasicConv2d(1088, 256, kernel_size=1, stride=1),\n            BasicConv2d(256, 288, kernel_size=3, stride=1, padding=1),\n            BasicConv2d(288, 320, kernel_size=3, stride=2)\n        )\n\n        self.branch3 = 
nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass Block8(nn.Module):\n\n    def __init__(self, scale=1.0, noReLU=False):\n        super(Block8, self).__init__()\n\n        self.scale = scale\n        self.noReLU = noReLU\n\n        self.branch0 = BasicConv2d(2080, 192, kernel_size=1, stride=1)\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(2080, 192, kernel_size=1, stride=1),\n            BasicConv2d(192, 224, kernel_size=(1,3), stride=1, padding=(0,1)),\n            BasicConv2d(224, 256, kernel_size=(3,1), stride=1, padding=(1,0))\n        )\n\n        self.conv2d = nn.Conv2d(448, 2080, kernel_size=1, stride=1)\n        if not self.noReLU:\n            self.relu = nn.ReLU(inplace=False)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        out = torch.cat((x0, x1), 1)\n        out = self.conv2d(out)\n        out = out * self.scale + x\n        if not self.noReLU:\n            out = self.relu(out)\n        return out\n\n\nclass InceptionResNetV2(nn.Module):\n\n    def __init__(self, num_classes=1001):\n        super(InceptionResNetV2, self).__init__()\n        # Special attributs\n        self.input_space = None\n        self.input_size = (299, 299, 3)\n        self.mean = None\n        self.std = None\n        # Modules\n        self.conv2d_1a = BasicConv2d(3, 32, kernel_size=3, stride=2)\n        self.conv2d_2a = BasicConv2d(32, 32, kernel_size=3, stride=1)\n        self.conv2d_2b = BasicConv2d(32, 64, kernel_size=3, stride=1, padding=1)\n        self.maxpool_3a = nn.MaxPool2d(3, stride=2)\n        self.conv2d_3b = BasicConv2d(64, 80, kernel_size=1, stride=1)\n        self.conv2d_4a = BasicConv2d(80, 192, kernel_size=3, stride=1)\n        self.maxpool_5a = nn.MaxPool2d(3, stride=2)\n        
self.mixed_5b = Mixed_5b()\n        self.repeat = nn.Sequential(\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17),\n            Block35(scale=0.17)\n        )\n        self.mixed_6a = Mixed_6a()\n        self.repeat_1 = nn.Sequential(\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10),\n            Block17(scale=0.10)\n        )\n        self.mixed_7a = Mixed_7a()\n        self.repeat_2 = nn.Sequential(\n            Block8(scale=0.20),\n            Block8(scale=0.20),\n            Block8(scale=0.20),\n            Block8(scale=0.20),\n            Block8(scale=0.20),\n            Block8(scale=0.20),\n            Block8(scale=0.20),\n            Block8(scale=0.20),\n            Block8(scale=0.20)\n        )\n        self.block8 = Block8(noReLU=True)\n        self.conv2d_7b = BasicConv2d(2080, 1536, kernel_size=1, stride=1)\n        self.avgpool_1a = nn.AvgPool2d(8, count_include_pad=False)\n        self.classif = nn.Linear(1536, num_classes)\n\n    def forward(self, x):\n        x = self.conv2d_1a(x)\n        x = self.conv2d_2a(x)\n        x = self.conv2d_2b(x)\n        x = self.maxpool_3a(x)\n        x = self.conv2d_3b(x)\n        x 
= self.conv2d_4a(x)\n        x = self.maxpool_5a(x)\n        x = self.mixed_5b(x)\n        x = self.repeat(x)\n        x = self.mixed_6a(x)\n        x = self.repeat_1(x)\n        x = self.mixed_7a(x)\n        x = self.repeat_2(x)\n        x = self.block8(x)\n        x = self.conv2d_7b(x)\n        x = self.avgpool_1a(x)\n        x = x.view(x.size(0), -1)\n        x = self.classif(x) \n        return x\n\ndef inceptionresnetv2(num_classes=1001, pretrained='imagenet'):\n    r\"\"\"InceptionResNetV2 model architecture from the\n    `\"InceptionV4, Inception-ResNet...\" <https://arxiv.org/abs/1602.07261>`_ paper.\n    \"\"\"\n    if pretrained:\n        settings = pretrained_settings['inceptionresnetv2'][pretrained]\n        assert num_classes == settings['num_classes'], \\\n            \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n\n        # both 'imagenet'&'imagenet+background' are loaded from same parameters\n        model = InceptionResNetV2(num_classes=1001)\n        model.load_state_dict(model_zoo.load_url(settings['url']))\n        \n        if pretrained == 'imagenet':\n            new_classif = nn.Linear(1536, 1000)\n            new_classif.weight.data = model.classif.weight.data[1:]\n            new_classif.bias.data = model.classif.bias.data[1:]\n            model.classif = new_classif\n        \n        model.input_space = settings['input_space']\n        model.input_size = settings['input_size']\n        model.input_range = settings['input_range']\n        \n        model.mean = settings['mean']\n        model.std = settings['std']\n    else:\n        model = InceptionResNetV2(num_classes=num_classes)\n    return model\n\n'''\nTEST\nRun this code with:\n```\ncd $HOME/pretrained-models.pytorch\npython -m pretrainedmodels.inceptionresnetv2\n```\n'''\nif __name__ == '__main__':\n\n    assert inceptionresnetv2(num_classes=10, pretrained=None)\n    print('success')\n    assert inceptionresnetv2(num_classes=1000, 
pretrained='imagenet')\n    print('success')\n    assert inceptionresnetv2(num_classes=1001, pretrained='imagenet+background')\n    print('success')\n\n    # fail\n    assert inceptionresnetv2(num_classes=1001, pretrained='imagenet')"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/inceptionv4.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.utils.model_zoo as model_zoo\nimport os\nimport sys\n\n__all__ = ['InceptionV4', 'inceptionv4']\n\npretrained_settings = {\n    'inceptionv4': {\n        'imagenet': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionv4-97ef9c30.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 299, 299],\n            'input_range': [0, 1],\n            'mean': [0.5, 0.5, 0.5],\n            'std': [0.5, 0.5, 0.5],\n            'num_classes': 1000\n        },\n        'imagenet+background': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionv4-97ef9c30.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 299, 299],\n            'input_range': [0, 1],\n            'mean': [0.5, 0.5, 0.5],\n            'std': [0.5, 0.5, 0.5],\n            'num_classes': 1001\n        }\n    }\n}\n\n\nclass BasicConv2d(nn.Module):\n\n    def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):\n        super(BasicConv2d, self).__init__()\n        self.conv = nn.Conv2d(in_planes, out_planes,\n                              kernel_size=kernel_size, stride=stride,\n                              padding=padding, bias=False) # verify bias false\n        self.bn = nn.BatchNorm2d(out_planes,\n                                 eps=0.001, # value found in tensorflow\n                                 momentum=0.1, # default pytorch value\n                                 affine=True)\n        self.relu = nn.ReLU(inplace=True)\n\n    def forward(self, x):\n        x = self.conv(x)\n        x = self.bn(x)\n        x = self.relu(x)\n        return x\n\n\nclass Mixed_3a(nn.Module):\n\n    def __init__(self):\n        super(Mixed_3a, self).__init__()\n        self.maxpool = nn.MaxPool2d(3, stride=2)\n        self.conv = BasicConv2d(64, 96, kernel_size=3, stride=2)\n\n    def forward(self, x):\n        x0 = self.maxpool(x)\n        x1 = self.conv(x)\n        
out = torch.cat((x0, x1), 1)\n        return out\n\n\nclass Mixed_4a(nn.Module):\n\n    def __init__(self):\n        super(Mixed_4a, self).__init__()\n\n        self.branch0 = nn.Sequential(\n            BasicConv2d(160, 64, kernel_size=1, stride=1),\n            BasicConv2d(64, 96, kernel_size=3, stride=1)\n        )\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(160, 64, kernel_size=1, stride=1),\n            BasicConv2d(64, 64, kernel_size=(1,7), stride=1, padding=(0,3)),\n            BasicConv2d(64, 64, kernel_size=(7,1), stride=1, padding=(3,0)),\n            BasicConv2d(64, 96, kernel_size=(3,3), stride=1)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        out = torch.cat((x0, x1), 1)\n        return out\n\n\nclass Mixed_5a(nn.Module):\n\n    def __init__(self):\n        super(Mixed_5a, self).__init__()\n        self.conv = BasicConv2d(192, 192, kernel_size=3, stride=2)\n        self.maxpool = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.conv(x)\n        x1 = self.maxpool(x)\n        out = torch.cat((x0, x1), 1)\n        return out\n\n\nclass Inception_A(nn.Module):\n\n    def __init__(self):\n        super(Inception_A, self).__init__()\n        self.branch0 = BasicConv2d(384, 96, kernel_size=1, stride=1)\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(384, 64, kernel_size=1, stride=1),\n            BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1)\n        )\n\n        self.branch2 = nn.Sequential(\n            BasicConv2d(384, 64, kernel_size=1, stride=1),\n            BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1),\n            BasicConv2d(96, 96, kernel_size=3, stride=1, padding=1)\n        )\n\n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),\n            BasicConv2d(384, 96, kernel_size=1, stride=1)\n        )\n\n    def forward(self, x):\n        x0 = 
self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass Reduction_A(nn.Module):\n\n    def __init__(self):\n        super(Reduction_A, self).__init__()\n        self.branch0 = BasicConv2d(384, 384, kernel_size=3, stride=2)\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(384, 192, kernel_size=1, stride=1),\n            BasicConv2d(192, 224, kernel_size=3, stride=1, padding=1),\n            BasicConv2d(224, 256, kernel_size=3, stride=2)\n        )\n        \n        self.branch2 = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        return out\n\n\nclass Inception_B(nn.Module):\n\n    def __init__(self):\n        super(Inception_B, self).__init__()\n        self.branch0 = BasicConv2d(1024, 384, kernel_size=1, stride=1)\n        \n        self.branch1 = nn.Sequential(\n            BasicConv2d(1024, 192, kernel_size=1, stride=1),\n            BasicConv2d(192, 224, kernel_size=(1,7), stride=1, padding=(0,3)),\n            BasicConv2d(224, 256, kernel_size=(7,1), stride=1, padding=(3,0))\n        )\n\n        self.branch2 = nn.Sequential(\n            BasicConv2d(1024, 192, kernel_size=1, stride=1),\n            BasicConv2d(192, 192, kernel_size=(7,1), stride=1, padding=(3,0)),\n            BasicConv2d(192, 224, kernel_size=(1,7), stride=1, padding=(0,3)),\n            BasicConv2d(224, 224, kernel_size=(7,1), stride=1, padding=(3,0)),\n            BasicConv2d(224, 256, kernel_size=(1,7), stride=1, padding=(0,3))\n        )\n\n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),\n            BasicConv2d(1024, 128, kernel_size=1, stride=1)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = 
self.branch1(x)\n        x2 = self.branch2(x)\n        x3 = self.branch3(x)\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass Reduction_B(nn.Module):\n\n    def __init__(self):\n        super(Reduction_B, self).__init__()\n\n        self.branch0 = nn.Sequential(\n            BasicConv2d(1024, 192, kernel_size=1, stride=1),\n            BasicConv2d(192, 192, kernel_size=3, stride=2)\n        )\n\n        self.branch1 = nn.Sequential(\n            BasicConv2d(1024, 256, kernel_size=1, stride=1),\n            BasicConv2d(256, 256, kernel_size=(1,7), stride=1, padding=(0,3)),\n            BasicConv2d(256, 320, kernel_size=(7,1), stride=1, padding=(3,0)),\n            BasicConv2d(320, 320, kernel_size=3, stride=2)\n        )\n\n        self.branch2 = nn.MaxPool2d(3, stride=2)\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        x1 = self.branch1(x)\n        x2 = self.branch2(x)\n        out = torch.cat((x0, x1, x2), 1)\n        return out\n\n\nclass Inception_C(nn.Module):\n\n    def __init__(self):\n        super(Inception_C, self).__init__()\n\n        self.branch0 = BasicConv2d(1536, 256, kernel_size=1, stride=1)\n        \n        self.branch1_0 = BasicConv2d(1536, 384, kernel_size=1, stride=1)\n        self.branch1_1a = BasicConv2d(384, 256, kernel_size=(1,3), stride=1, padding=(0,1))\n        self.branch1_1b = BasicConv2d(384, 256, kernel_size=(3,1), stride=1, padding=(1,0))\n        \n        self.branch2_0 = BasicConv2d(1536, 384, kernel_size=1, stride=1)\n        self.branch2_1 = BasicConv2d(384, 448, kernel_size=(3,1), stride=1, padding=(1,0))\n        self.branch2_2 = BasicConv2d(448, 512, kernel_size=(1,3), stride=1, padding=(0,1))\n        self.branch2_3a = BasicConv2d(512, 256, kernel_size=(1,3), stride=1, padding=(0,1))\n        self.branch2_3b = BasicConv2d(512, 256, kernel_size=(3,1), stride=1, padding=(1,0))\n        \n        self.branch3 = nn.Sequential(\n            nn.AvgPool2d(3, stride=1, padding=1, 
count_include_pad=False),\n            BasicConv2d(1536, 256, kernel_size=1, stride=1)\n        )\n\n    def forward(self, x):\n        x0 = self.branch0(x)\n        \n        x1_0 = self.branch1_0(x)\n        x1_1a = self.branch1_1a(x1_0)\n        x1_1b = self.branch1_1b(x1_0)\n        x1 = torch.cat((x1_1a, x1_1b), 1)\n\n        x2_0 = self.branch2_0(x)\n        x2_1 = self.branch2_1(x2_0)\n        x2_2 = self.branch2_2(x2_1)\n        x2_3a = self.branch2_3a(x2_2)\n        x2_3b = self.branch2_3b(x2_2)\n        x2 = torch.cat((x2_3a, x2_3b), 1)\n\n        x3 = self.branch3(x)\n\n        out = torch.cat((x0, x1, x2, x3), 1)\n        return out\n\n\nclass InceptionV4(nn.Module):\n\n    def __init__(self, num_classes=1001):\n        super(InceptionV4, self).__init__()\n        # Special attributs\n        self.input_space = None\n        self.input_size = (299, 299, 3)\n        self.mean = None\n        self.std = None\n        # Modules\n        self.features = nn.Sequential(\n            BasicConv2d(3, 32, kernel_size=3, stride=2),\n            BasicConv2d(32, 32, kernel_size=3, stride=1),\n            BasicConv2d(32, 64, kernel_size=3, stride=1, padding=1),\n            Mixed_3a(),\n            Mixed_4a(),\n            Mixed_5a(),\n            Inception_A(),\n            Inception_A(),\n            Inception_A(),\n            Inception_A(),\n            Reduction_A(), # Mixed_6a\n            Inception_B(),\n            Inception_B(),\n            Inception_B(),\n            Inception_B(),\n            Inception_B(),\n            Inception_B(),\n            Inception_B(),\n            Reduction_B(), # Mixed_7a\n            Inception_C(),\n            Inception_C(),\n            Inception_C(),\n            nn.AvgPool2d(8, count_include_pad=False)\n        )\n        self.classif = nn.Linear(1536, num_classes)\n\n    def forward(self, x):\n        x = self.features(x)\n        x = x.view(x.size(0), -1)\n        x = self.classif(x) \n        return x\n\n\ndef 
inceptionv4(num_classes=1001, pretrained='imagenet'):\n    if pretrained:\n        settings = pretrained_settings['inceptionv4'][pretrained]\n        assert num_classes == settings['num_classes'], \\\n            \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n\n        # both 'imagenet'&'imagenet+background' are loaded from same parameters\n        model = InceptionV4(num_classes=1001)\n        model.load_state_dict(model_zoo.load_url(settings['url']))\n        \n        if pretrained == 'imagenet':\n            new_classif = nn.Linear(1536, 1000)\n            new_classif.weight.data = model.classif.weight.data[1:]\n            new_classif.bias.data = model.classif.bias.data[1:]\n            model.classif = new_classif\n        \n        model.input_space = settings['input_space']\n        model.input_size = settings['input_size']\n        model.input_range = settings['input_range']\n        model.mean = settings['mean']\n        model.std = settings['std']\n    else:\n        model = InceptionV4(num_classes=num_classes)\n    return model\n\n\n'''\nTEST\nRun this code with:\n```\ncd $HOME/pretrained-models.pytorch\npython -m pretrainedmodels.inceptionv4\n```\n'''\nif __name__ == '__main__':\n\n    assert inceptionv4(num_classes=10, pretrained=None)\n    print('success')\n    assert inceptionv4(num_classes=1000, pretrained='imagenet')\n    print('success')\n    assert inceptionv4(num_classes=1001, pretrained='imagenet+background')\n    print('success')\n\n    # fail\n    assert inceptionv4(num_classes=1001, pretrained='imagenet')"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/nasnet.py",
    "content": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.utils.model_zoo as model_zoo\nfrom torch.autograd import Variable\n\npretrained_settings = {\n    'nasnetalarge': {\n        'imagenet': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/nasnetalarge-dc8c1432.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 331, 331], # resize 354\n            'input_range': [0, 1],\n            'mean': [0.5, 0.5, 0.5],\n            'std': [0.5, 0.5, 0.5],\n            'num_classes': 1000\n        },\n        'imagenet+background': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/nasnetalarge-dc8c1432.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 331, 331], # resize 354\n            'input_range': [0, 1],\n            'mean': [0.5, 0.5, 0.5],\n            'std': [0.5, 0.5, 0.5],\n            'num_classes': 1001\n        }\n    }\n}\n\nclass MaxPoolPad(nn.Module):\n\n    def __init__(self):\n        super(MaxPoolPad, self).__init__()\n        self.pad = nn.ZeroPad2d((1, 0, 1, 0))\n        self.pool = nn.MaxPool2d(3, stride=2, padding=1)\n\n    def forward(self, x):\n        x = self.pad(x)\n        x = self.pool(x)\n        x = x[:, :, 1:, 1:]\n        return x\n\n\nclass AvgPoolPad(nn.Module):\n\n    def __init__(self, stride=2, padding=1):\n        super(AvgPoolPad, self).__init__()\n        self.pad = nn.ZeroPad2d((1, 0, 1, 0))\n        self.pool = nn.AvgPool2d(3, stride=stride, padding=padding, count_include_pad=False)\n\n    def forward(self, x):\n        x = self.pad(x)\n        x = self.pool(x)\n        x = x[:, :, 1:, 1:]\n        return x\n\n\nclass SeparableConv2d(nn.Module):\n\n    def __init__(self, in_channels, out_channels, dw_kernel, dw_stride, dw_padding, bias=False):\n        super(SeparableConv2d, self).__init__()\n        self.depthwise_conv2d = nn.Conv2d(in_channels, in_channels, dw_kernel,\n 
                                         stride=dw_stride,\n                                          padding=dw_padding,\n                                          bias=bias,\n                                          groups=in_channels)\n        self.pointwise_conv2d = nn.Conv2d(in_channels, out_channels, 1, stride=1, bias=bias)\n\n    def forward(self, x):\n        x = self.depthwise_conv2d(x)\n        x = self.pointwise_conv2d(x)\n        return x\n\n\nclass BranchSeparables(nn.Module):\n\n    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, bias=False):\n        super(BranchSeparables, self).__init__()\n        self.relu = nn.ReLU()\n        self.separable_1 = SeparableConv2d(in_channels, in_channels, kernel_size, stride, padding, bias=bias)\n        self.bn_sep_1 = nn.BatchNorm2d(in_channels, eps=0.001, momentum=0.1, affine=True)\n        self.relu1 = nn.ReLU()\n        self.separable_2 = SeparableConv2d(in_channels, out_channels, kernel_size, 1, padding, bias=bias)\n        self.bn_sep_2 = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1, affine=True)\n\n    def forward(self, x):\n        x = self.relu(x)\n        x = self.separable_1(x)\n        x = self.bn_sep_1(x)\n        x = self.relu1(x)\n        x = self.separable_2(x)\n        x = self.bn_sep_2(x)\n        return x\n\n\nclass BranchSeparablesStem(nn.Module):\n\n    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, bias=False):\n        super(BranchSeparablesStem, self).__init__()\n        self.relu = nn.ReLU()\n        self.separable_1 = SeparableConv2d(in_channels, out_channels, kernel_size, stride, padding, bias=bias)\n        self.bn_sep_1 = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1, affine=True)\n        self.relu1 = nn.ReLU()\n        self.separable_2 = SeparableConv2d(out_channels, out_channels, kernel_size, 1, padding, bias=bias)\n        self.bn_sep_2 = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1, affine=True)\n\n    
def forward(self, x):\n        x = self.relu(x)\n        x = self.separable_1(x)\n        x = self.bn_sep_1(x)\n        x = self.relu1(x)\n        x = self.separable_2(x)\n        x = self.bn_sep_2(x)\n        return x\n\n\nclass BranchSeparablesReduction(BranchSeparables):\n\n    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, z_padding=1, bias=False):\n        BranchSeparables.__init__(self, in_channels, out_channels, kernel_size, stride, padding, bias)\n        self.padding = nn.ZeroPad2d((z_padding, 0, z_padding, 0))\n\n    def forward(self, x):\n        x = self.relu(x)\n        x = self.padding(x)\n        x = self.separable_1(x)\n        x = x[:, :, 1:, 1:].contiguous()\n        x = self.bn_sep_1(x)\n        x = self.relu1(x)\n        x = self.separable_2(x)\n        x = self.bn_sep_2(x)\n        return x\n\n\nclass CellStem0(nn.Module):\n\n    def __init__(self):\n        super(CellStem0, self).__init__()\n        self.conv_1x1 = nn.Sequential()\n        self.conv_1x1.add_module('relu', nn.ReLU())\n        self.conv_1x1.add_module('conv', nn.Conv2d(96, 42, 1, stride=1, bias=False))\n        self.conv_1x1.add_module('bn', nn.BatchNorm2d(42, eps=0.001, momentum=0.1, affine=True))\n\n        self.comb_iter_0_left = BranchSeparables(42, 42, 5, 2, 2)\n        self.comb_iter_0_right = BranchSeparablesStem(96, 42, 7, 2, 3, bias=False)\n\n        self.comb_iter_1_left = nn.MaxPool2d(3, stride=2, padding=1)\n        self.comb_iter_1_right = BranchSeparablesStem(96, 42, 7, 2, 3, bias=False)\n\n        self.comb_iter_2_left = nn.AvgPool2d(3, stride=2, padding=1, count_include_pad=False)\n        self.comb_iter_2_right = BranchSeparablesStem(96, 42, 5, 2, 2, bias=False)\n\n        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n\n        self.comb_iter_4_left = BranchSeparables(42, 42, 3, 1, 1, bias=False)\n        self.comb_iter_4_right = nn.MaxPool2d(3, stride=2, padding=1)\n\n    def forward(self, 
x):\n        x1 = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x1)\n        x_comb_iter_0_right = self.comb_iter_0_right(x)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x1)\n        x_comb_iter_1_right = self.comb_iter_1_right(x)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x1)\n        x_comb_iter_2_right = self.comb_iter_2_right(x)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x1)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass CellStem1(nn.Module):\n\n    def __init__(self):\n        super(CellStem1, self).__init__()\n        self.conv_1x1 = nn.Sequential()\n        self.conv_1x1.add_module('relu', nn.ReLU())\n        self.conv_1x1.add_module('conv', nn.Conv2d(168, 84, 1, stride=1, bias=False))\n        self.conv_1x1.add_module('bn', nn.BatchNorm2d(84, eps=0.001, momentum=0.1, affine=True))\n\n        self.relu = nn.ReLU()\n        self.path_1 = nn.Sequential()\n        self.path_1.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_1.add_module('conv', nn.Conv2d(96, 42, 1, stride=1, bias=False))\n        self.path_2 = nn.ModuleList()\n        self.path_2.add_module('pad', nn.ZeroPad2d((0, 1, 0, 1)))\n        self.path_2.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_2.add_module('conv', nn.Conv2d(96, 42, 1, stride=1, bias=False))\n\n        self.final_path_bn = nn.BatchNorm2d(84, eps=0.001, 
momentum=0.1, affine=True)\n\n        self.comb_iter_0_left = BranchSeparables(84, 84, 5, 2, 2, bias=False)\n        self.comb_iter_0_right = BranchSeparables(84, 84, 7, 2, 3, bias=False)\n\n        self.comb_iter_1_left = nn.MaxPool2d(3, stride=2, padding=1)\n        self.comb_iter_1_right = BranchSeparables(84, 84, 7, 2, 3, bias=False)\n\n        self.comb_iter_2_left = nn.AvgPool2d(3, stride=2, padding=1, count_include_pad=False)\n        self.comb_iter_2_right = BranchSeparables(84, 84, 5, 2, 2, bias=False)\n\n        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n\n        self.comb_iter_4_left = BranchSeparables(84, 84, 3, 1, 1, bias=False)\n        self.comb_iter_4_right = nn.MaxPool2d(3, stride=2, padding=1)\n\n    def forward(self, x_conv0, x_stem_0):\n        x_left = self.conv_1x1(x_stem_0)\n\n        x_relu = self.relu(x_conv0)\n        # path 1\n        x_path1 = self.path_1(x_relu)\n        # path 2\n        x_path2 = self.path_2.pad(x_relu)\n        x_path2 = x_path2[:, :, 1:, 1:]\n        x_path2 = self.path_2.avgpool(x_path2)\n        x_path2 = self.path_2.conv(x_path2)\n        # final path\n        x_right = self.final_path_bn(torch.cat([x_path1, x_path2], 1))\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_left)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_right)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_left)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_right)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_left)\n        x_comb_iter_2_right = self.comb_iter_2_right(x_right)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1\n\n        x_comb_iter_4_left = 
self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x_left)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass FirstCell(nn.Module):\n\n    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):\n        super(FirstCell, self).__init__()\n        self.conv_1x1 = nn.Sequential()\n        self.conv_1x1.add_module('relu', nn.ReLU())\n        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))\n        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))\n\n        self.relu = nn.ReLU()\n        self.path_1 = nn.Sequential()\n        self.path_1.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))\n        self.path_2 = nn.ModuleList()\n        self.path_2.add_module('pad', nn.ZeroPad2d((0, 1, 0, 1)))\n        self.path_2.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))\n        self.path_2.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))\n\n        self.final_path_bn = nn.BatchNorm2d(out_channels_left * 2, eps=0.001, momentum=0.1, affine=True)\n\n        self.comb_iter_0_left = BranchSeparables(out_channels_right, out_channels_right, 5, 1, 2, bias=False)\n        self.comb_iter_0_right = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)\n\n        self.comb_iter_1_left = BranchSeparables(out_channels_right, out_channels_right, 5, 1, 2, bias=False)\n        self.comb_iter_1_right = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)\n\n        self.comb_iter_2_left = nn.AvgPool2d(3, stride=1, 
padding=1, count_include_pad=False)\n\n        self.comb_iter_3_left = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n\n        self.comb_iter_4_left = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)\n\n    def forward(self, x, x_prev):\n        x_relu = self.relu(x_prev)\n        # path 1\n        x_path1 = self.path_1(x_relu)\n        # path 2\n        x_path2 = self.path_2.pad(x_relu)\n        x_path2 = x_path2[:, :, 1:, 1:]\n        x_path2 = self.path_2.avgpool(x_path2)\n        x_path2 = self.path_2.conv(x_path2)\n        # final path\n        x_left = self.final_path_bn(torch.cat([x_path1, x_path2], 1))\n\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_left)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2 = x_comb_iter_2_left + x_left\n\n        x_comb_iter_3_left = self.comb_iter_3_left(x_left)\n        x_comb_iter_3_right = self.comb_iter_3_right(x_left)\n        x_comb_iter_3 = x_comb_iter_3_left + x_comb_iter_3_right\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_right\n\n        x_out = torch.cat([x_left, x_comb_iter_0, x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass NormalCell(nn.Module):\n\n    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):\n        super(NormalCell, self).__init__()\n        self.conv_prev_1x1 = nn.Sequential()\n        
self.conv_prev_1x1.add_module('relu', nn.ReLU())\n        self.conv_prev_1x1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))\n        self.conv_prev_1x1.add_module('bn', nn.BatchNorm2d(out_channels_left, eps=0.001, momentum=0.1, affine=True))\n\n        self.conv_1x1 = nn.Sequential()\n        self.conv_1x1.add_module('relu', nn.ReLU())\n        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))\n        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))\n\n        self.comb_iter_0_left = BranchSeparables(out_channels_right, out_channels_right, 5, 1, 2, bias=False)\n        self.comb_iter_0_right = BranchSeparables(out_channels_left, out_channels_left, 3, 1, 1, bias=False)\n\n        self.comb_iter_1_left = BranchSeparables(out_channels_left, out_channels_left, 5, 1, 2, bias=False)\n        self.comb_iter_1_right = BranchSeparables(out_channels_left, out_channels_left, 3, 1, 1, bias=False)\n\n        self.comb_iter_2_left = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n\n        self.comb_iter_3_left = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n\n        self.comb_iter_4_left = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)\n\n    def forward(self, x, x_prev):\n        x_left = self.conv_prev_1x1(x_prev)\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_left)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = 
self.comb_iter_2_left(x_right)\n        x_comb_iter_2 = x_comb_iter_2_left + x_left\n\n        x_comb_iter_3_left = self.comb_iter_3_left(x_left)\n        x_comb_iter_3_right = self.comb_iter_3_right(x_left)\n        x_comb_iter_3 = x_comb_iter_3_left + x_comb_iter_3_right\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_right\n\n        x_out = torch.cat([x_left, x_comb_iter_0, x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass ReductionCell0(nn.Module):\n\n    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):\n        super(ReductionCell0, self).__init__() \n        self.conv_prev_1x1 = nn.Sequential()\n        self.conv_prev_1x1.add_module('relu', nn.ReLU())\n        self.conv_prev_1x1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))\n        self.conv_prev_1x1.add_module('bn', nn.BatchNorm2d(out_channels_left, eps=0.001, momentum=0.1, affine=True))\n\n        self.conv_1x1 = nn.Sequential()\n        self.conv_1x1.add_module('relu', nn.ReLU())\n        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))\n        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))\n\n        self.comb_iter_0_left = BranchSeparablesReduction(out_channels_right, out_channels_right, 5, 2, 2, bias=False)\n        self.comb_iter_0_right = BranchSeparablesReduction(out_channels_right, out_channels_right, 7, 2, 3, bias=False)\n\n        self.comb_iter_1_left = MaxPoolPad()\n        self.comb_iter_1_right = BranchSeparablesReduction(out_channels_right, out_channels_right, 7, 2, 3, bias=False)\n\n        self.comb_iter_2_left = AvgPoolPad()\n        self.comb_iter_2_right = BranchSeparablesReduction(out_channels_right, out_channels_right, 5, 2, 2, bias=False)\n\n        
self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n\n        self.comb_iter_4_left = BranchSeparablesReduction(out_channels_right, out_channels_right, 3, 1, 1, bias=False)\n        self.comb_iter_4_right = MaxPoolPad()\n\n    def forward(self, x, x_prev):\n        x_left = self.conv_prev_1x1(x_prev)\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_right)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2_right = self.comb_iter_2_right(x_left)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass ReductionCell1(nn.Module):\n\n    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):\n        super(ReductionCell1, self).__init__()\n        self.conv_prev_1x1 = nn.Sequential()\n        self.conv_prev_1x1.add_module('relu', nn.ReLU())\n        self.conv_prev_1x1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))\n        self.conv_prev_1x1.add_module('bn', nn.BatchNorm2d(out_channels_left, eps=0.001, momentum=0.1, affine=True))\n\n        self.conv_1x1 = nn.Sequential()\n        
self.conv_1x1.add_module('relu', nn.ReLU())\n        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))\n        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))\n\n        self.comb_iter_0_left = BranchSeparables(out_channels_right, out_channels_right, 5, 2, 2, bias=False)\n        self.comb_iter_0_right = BranchSeparables(out_channels_right, out_channels_right, 7, 2, 3, bias=False)\n\n        self.comb_iter_1_left = nn.MaxPool2d(3, stride=2, padding=1)\n        self.comb_iter_1_right = BranchSeparables(out_channels_right, out_channels_right, 7, 2, 3, bias=False)\n\n        self.comb_iter_2_left = nn.AvgPool2d(3, stride=2, padding=1, count_include_pad=False)\n        self.comb_iter_2_right = BranchSeparables(out_channels_right, out_channels_right, 5, 2, 2, bias=False)\n\n        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)\n\n        self.comb_iter_4_left = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)\n        self.comb_iter_4_right = nn.MaxPool2d(3, stride=2, padding=1)\n\n    def forward(self, x, x_prev):\n        x_left = self.conv_prev_1x1(x_prev)\n        x_right = self.conv_1x1(x)\n\n        x_comb_iter_0_left = self.comb_iter_0_left(x_right)\n        x_comb_iter_0_right = self.comb_iter_0_right(x_left)\n        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right\n\n        x_comb_iter_1_left = self.comb_iter_1_left(x_right)\n        x_comb_iter_1_right = self.comb_iter_1_right(x_left)\n        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right\n\n        x_comb_iter_2_left = self.comb_iter_2_left(x_right)\n        x_comb_iter_2_right = self.comb_iter_2_right(x_left)\n        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right\n\n        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)\n        x_comb_iter_3 = x_comb_iter_3_right + 
x_comb_iter_1\n\n        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)\n        x_comb_iter_4_right = self.comb_iter_4_right(x_right)\n        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right\n\n        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)\n        return x_out\n\n\nclass NASNetALarge(nn.Module):\n\n    def __init__(self, num_classes=1001):\n        super(NASNetALarge, self).__init__()\n        self.num_classes = num_classes\n\n        self.conv0 = nn.Sequential()\n        self.conv0.add_module('conv', nn.Conv2d(in_channels=3, out_channels=96, kernel_size=3, padding=0, stride=2,\n                                                bias=False))\n        self.conv0.add_module('bn', nn.BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True))\n\n        self.cell_stem_0 = CellStem0()\n        self.cell_stem_1 = CellStem1()\n\n        self.cell_0 = FirstCell(in_channels_left=168, out_channels_left=84,\n                                in_channels_right=336, out_channels_right=168)\n        self.cell_1 = NormalCell(in_channels_left=336, out_channels_left=168,\n                                 in_channels_right=1008, out_channels_right=168)\n        self.cell_2 = NormalCell(in_channels_left=1008, out_channels_left=168,\n                                 in_channels_right=1008, out_channels_right=168)\n        self.cell_3 = NormalCell(in_channels_left=1008, out_channels_left=168,\n                                 in_channels_right=1008, out_channels_right=168)\n        self.cell_4 = NormalCell(in_channels_left=1008, out_channels_left=168,\n                                 in_channels_right=1008, out_channels_right=168)\n        self.cell_5 = NormalCell(in_channels_left=1008, out_channels_left=168,\n                                 in_channels_right=1008, out_channels_right=168)\n\n        self.reduction_cell_0 = ReductionCell0(in_channels_left=1008, out_channels_left=336,\n                                            
   in_channels_right=1008, out_channels_right=336)\n\n        self.cell_6 = FirstCell(in_channels_left=1008, out_channels_left=168,\n                                in_channels_right=1344, out_channels_right=336)\n        self.cell_7 = NormalCell(in_channels_left=1344, out_channels_left=336,\n                                 in_channels_right=2016, out_channels_right=336)\n        self.cell_8 = NormalCell(in_channels_left=2016, out_channels_left=336,\n                                 in_channels_right=2016, out_channels_right=336)\n        self.cell_9 = NormalCell(in_channels_left=2016, out_channels_left=336,\n                                 in_channels_right=2016, out_channels_right=336)\n        self.cell_10 = NormalCell(in_channels_left=2016, out_channels_left=336,\n                                  in_channels_right=2016, out_channels_right=336)\n        self.cell_11 = NormalCell(in_channels_left=2016, out_channels_left=336,\n                                  in_channels_right=2016, out_channels_right=336)\n\n        self.reduction_cell_1 = ReductionCell1(in_channels_left=2016, out_channels_left=672,\n                                               in_channels_right=2016, out_channels_right=672)\n\n        self.cell_12 = FirstCell(in_channels_left=2016, out_channels_left=336,\n                                 in_channels_right=2688, out_channels_right=672)\n        self.cell_13 = NormalCell(in_channels_left=2688, out_channels_left=672,\n                                  in_channels_right=4032, out_channels_right=672)\n        self.cell_14 = NormalCell(in_channels_left=4032, out_channels_left=672,\n                                  in_channels_right=4032, out_channels_right=672)\n        self.cell_15 = NormalCell(in_channels_left=4032, out_channels_left=672,\n                                  in_channels_right=4032, out_channels_right=672)\n        self.cell_16 = NormalCell(in_channels_left=4032, out_channels_left=672,\n                                  
in_channels_right=4032, out_channels_right=672)\n        self.cell_17 = NormalCell(in_channels_left=4032, out_channels_left=672,\n                                  in_channels_right=4032, out_channels_right=672)\n\n        self.relu = nn.ReLU()\n        self.avgpool = nn.AvgPool2d(11, stride=1, padding=0)\n        self.dropout = nn.Dropout()\n        self.linear = nn.Linear(4032, self.num_classes)\n\n    def features(self, x):\n        x_conv0 = self.conv0(x)\n        x_stem_0 = self.cell_stem_0(x_conv0)\n        x_stem_1 = self.cell_stem_1(x_conv0, x_stem_0)\n\n        x_cell_0 = self.cell_0(x_stem_1, x_stem_0)\n        x_cell_1 = self.cell_1(x_cell_0, x_stem_1)\n        x_cell_2 = self.cell_2(x_cell_1, x_cell_0)\n        x_cell_3 = self.cell_3(x_cell_2, x_cell_1)\n        x_cell_4 = self.cell_4(x_cell_3, x_cell_2)\n        x_cell_5 = self.cell_5(x_cell_4, x_cell_3)\n\n        x_reduction_cell_0 = self.reduction_cell_0(x_cell_5, x_cell_4)\n\n        x_cell_6 = self.cell_6(x_reduction_cell_0, x_cell_4)\n        x_cell_7 = self.cell_7(x_cell_6, x_reduction_cell_0)\n        x_cell_8 = self.cell_8(x_cell_7, x_cell_6)\n        x_cell_9 = self.cell_9(x_cell_8, x_cell_7)\n        x_cell_10 = self.cell_10(x_cell_9, x_cell_8)\n        x_cell_11 = self.cell_11(x_cell_10, x_cell_9)\n\n        x_reduction_cell_1 = self.reduction_cell_1(x_cell_11, x_cell_10)\n\n        x_cell_12 = self.cell_12(x_reduction_cell_1, x_cell_10)\n        x_cell_13 = self.cell_13(x_cell_12, x_reduction_cell_1)\n        x_cell_14 = self.cell_14(x_cell_13, x_cell_12)\n        x_cell_15 = self.cell_15(x_cell_14, x_cell_13)\n        x_cell_16 = self.cell_16(x_cell_15, x_cell_14)\n        x_cell_17 = self.cell_17(x_cell_16, x_cell_15)\n\n        return x_cell_17\n\n    def classifier(self, x):\n        x = self.relu(x)\n        x = self.avgpool(x)\n        x = x.view(x.size(0), -1)\n        x = self.dropout(x)\n        x = self.linear(x)\n        return x\n\n    def forward(self, x):\n        x = 
self.features(x)\n        x = self.classifier(x)\n        return x\n\n\ndef nasnetalarge(num_classes=1001, pretrained='imagenet'):\n    r\"\"\"NASNetALarge model architecture from the\n    `\"NASNet\" <https://arxiv.org/abs/1707.07012>`_ paper.\n    \"\"\"\n    if pretrained:\n        settings = pretrained_settings['nasnetalarge'][pretrained]\n        assert num_classes == settings['num_classes'], \\\n            \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n\n        # both 'imagenet'&'imagenet+background' are loaded from same parameters\n        model = NASNetALarge(num_classes=1001)\n        model.load_state_dict(model_zoo.load_url(settings['url']))\n\n        if pretrained == 'imagenet':\n            new_linear = nn.Linear(model.linear.in_features, 1000)\n            new_linear.weight.data = model.linear.weight.data[1:]\n            new_linear.bias.data = model.linear.bias.data[1:]\n            model.linear = new_linear\n\n        model.input_space = settings['input_space']\n        model.input_size = settings['input_size']\n        model.input_range = settings['input_range']\n\n        model.mean = settings['mean']\n        model.std = settings['std']\n    else:\n        model = NASNetALarge(num_classes=num_classes)\n    return model\n\n\n\nif __name__ == \"__main__\":\n\n    model = NASNetALarge()\n\n    input = Variable(torch.randn(2,3,331,331))\n    output = model(input)\n    print(output.size())\n\n\n"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/resnext.py",
    "content": "import os\nfrom os.path import expanduser\nimport collections\nimport torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nfrom .resnext_features import resnext101_32x4d_features\nfrom .resnext_features import resnext101_64x4d_features\n\n__all__ = ['ResNeXt101_32x4d', 'resnext101_32x4d',\n           'ResNeXt101_64x4d', 'resnext101_64x4d']\n\npretrained_settings = {\n    'resnext101_32x4d': {\n        'imagenet': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/resnext101_32x4d.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 224, 224],\n            'input_range': [0, 1],\n            'mean': [0.485, 0.456, 0.406],\n            'std': [0.229, 0.224, 0.225],\n            'num_classes': 1000\n        }\n    },\n    'resnext101_64x4d': {\n        'imagenet': {\n            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/resnext101_64x4d.pth',\n            'input_space': 'RGB',\n            'input_size': [3, 224, 224],\n            'input_range': [0, 1],\n            'mean': [0.485, 0.456, 0.406],\n            'std': [0.229, 0.224, 0.225],\n            'num_classes': 1000\n        }\n    }\n}\n\nclass ResNeXt101_32x4d(nn.Module):\n\n    def __init__(self, nb_classes=1000):\n        super(ResNeXt101_32x4d, self).__init__()\n        self.features = resnext101_32x4d_features\n        self.avgpool = nn.AvgPool2d((7, 7), (1, 1))\n        self.fc = nn.Linear(2048, nb_classes)\n\n    def forward(self, input):\n        x = self.features(input)\n        x = self.avgpool(x)\n        x = x.view(x.size(0), -1)\n        x = self.fc(x)\n        return x\n\n\nclass ResNeXt101_64x4d(nn.Module):\n\n    def __init__(self, nb_classes=1000):\n        super(ResNeXt101_64x4d, self).__init__()\n        self.features = resnext101_64x4d_features\n        self.avgpool = nn.AvgPool2d((7, 7), (1, 1))\n        self.fc = nn.Linear(2048, nb_classes)\n\n    def forward(self, input):\n  
      x = self.features(input)\n        x = self.avgpool(x)\n        x = x.view(x.size(0), -1)\n        x = self.fc(x)\n        return x\n\n\ndef resnext101_32x4d(num_classes=1000, pretrained='imagenet'):\n    model = ResNeXt101_32x4d()\n    if pretrained:\n        settings = pretrained_settings['resnext101_32x4d'][pretrained]\n        assert num_classes == settings['num_classes'], \\\n            \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n\n        dir_models = os.path.join(expanduser(\"~\"), '.torch/resnext')\n        path_pth = os.path.join(dir_models, 'resnext101_32x4d.pth')\n        if not os.path.isfile(path_pth):\n            os.system('mkdir -p ' + dir_models)\n            os.system('wget {} -O {}'.format(settings['url'], path_pth))\n        state_dict_features = torch.load(path_pth)\n        state_dict_fc = collections.OrderedDict()\n        state_dict_fc['weight'] = state_dict_features['10.1.weight']\n        state_dict_fc['bias']   = state_dict_features['10.1.bias']\n        del state_dict_features['10.1.weight']\n        del state_dict_features['10.1.bias']\n        model.features.load_state_dict(state_dict_features)\n        model.fc.load_state_dict(state_dict_fc)\n\n        model.input_space = settings['input_space']\n        model.input_size = settings['input_size']\n        model.input_range = settings['input_range']\n        model.mean = settings['mean']\n        model.std = settings['std']\n\n    return model\n\ndef resnext101_64x4d(num_classes=1000, pretrained='imagenet'):\n    model = ResNeXt101_64x4d()\n    if pretrained:\n        settings = pretrained_settings['resnext101_64x4d'][pretrained]\n        assert num_classes == settings['num_classes'], \\\n            \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n\n        dir_models = os.path.join(expanduser(\"~\"), '.torch/resnext')\n        path_pth = os.path.join(dir_models, 'resnext101_64x4d.pth')\n        if not os.path.isfile(path_pth):\n            
os.system('mkdir -p ' + dir_models)\n            os.system('wget {} -O {}'.format(settings['url'], path_pth))\n        state_dict_features = torch.load(path_pth)\n        state_dict_fc = collections.OrderedDict()\n        state_dict_fc['weight'] = state_dict_features['10.1.weight']\n        state_dict_fc['bias']   = state_dict_features['10.1.bias']\n        del state_dict_features['10.1.weight']\n        del state_dict_features['10.1.bias']\n        model.features.load_state_dict(state_dict_features)\n        model.fc.load_state_dict(state_dict_fc)\n\n        model.input_space = settings['input_space']\n        model.input_size = settings['input_size']\n        model.input_range = settings['input_range']\n        model.mean = settings['mean']\n        model.std = settings['std']\n\n    return model\n"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/resnext_features/__init__.py",
    "content": "from .resnext101_32x4d_features import resnext101_32x4d_features\nfrom .resnext101_64x4d_features import resnext101_64x4d_features"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/resnext_features/resnext101_32x4d_features.py",
    "content": "import torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nfrom functools import reduce\n\nclass LambdaBase(nn.Sequential):\n    def __init__(self, fn, *args):\n        super(LambdaBase, self).__init__(*args)\n        self.lambda_func = fn\n\n    def forward_prepare(self, input):\n        output = []\n        for module in self._modules.values():\n            output.append(module(input))\n        return output if output else input\n\nclass Lambda(LambdaBase):\n    def forward(self, input):\n        return self.lambda_func(self.forward_prepare(input))\n\nclass LambdaMap(LambdaBase):\n    def forward(self, input):\n        return list(map(self.lambda_func,self.forward_prepare(input)))\n\nclass LambdaReduce(LambdaBase):\n    def forward(self, input):\n        return reduce(self.lambda_func,self.forward_prepare(input))\n\nresnext101_32x4d_features = nn.Sequential( # Sequential,\n    nn.Conv2d(3,64,(7, 7),(2, 2),(3, 3),1,1,bias=False),\n    nn.BatchNorm2d(64),\n    nn.ReLU(),\n    nn.MaxPool2d((3, 3),(2, 2),(1, 1)),\n    nn.Sequential( # Sequential,\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(64,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(128),\n                        nn.ReLU(),\n                        nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(128),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(256),\n                ),\n                nn.Sequential( # Sequential,\n                    nn.Conv2d(64,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(256),\n                ),\n            ),\n            LambdaReduce(lambda 
x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(128),\n                        nn.ReLU(),\n                        nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(128),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(256),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(128),\n                        nn.ReLU(),\n                        nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(128),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(256),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n    ),\n    nn.Sequential( # Sequential,\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        
nn.Conv2d(256,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                        nn.Conv2d(256,256,(3, 3),(2, 2),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(512),\n                ),\n                nn.Sequential( # Sequential,\n                    nn.Conv2d(256,512,(1, 1),(2, 2),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(512),\n                ),\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                        nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(512),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                        nn.Conv2d(256,256,(3, 3),(1, 
1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(512),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                        nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(512),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n    ),\n    nn.Sequential( # Sequential,\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(512,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(2, 2),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n              
  nn.Sequential( # Sequential,\n                    nn.Conv2d(512,1024,(1, 1),(2, 2),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n      
      LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                     
   nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                
Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # 
Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                     
   nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            
nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        
nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n  
                  nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                
nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n    ),\n    nn.Sequential( # Sequential,\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(1024,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024,1024,(3, 3),(2, 2),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(2048),\n                ),\n                nn.Sequential( # Sequential,\n                    nn.Conv2d(1024,2048,(1, 1),(2, 2),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(2048),\n                ),\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(2048,1024,(1, 1),(1, 
1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(2048),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential( # Sequential,\n            LambdaMap(lambda x: x, # ConcatTable,\n                nn.Sequential( # Sequential,\n                    nn.Sequential( # Sequential,\n                        nn.Conv2d(2048,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False),\n                    nn.BatchNorm2d(2048),\n                ),\n                Lambda(lambda x: x), # Identity,\n            ),\n            LambdaReduce(lambda x,y: x+y), # CAddTable,\n            nn.ReLU(),\n        ),\n    )\n)"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/resnext_features/resnext101_64x4d_features.py",
    "content": "import torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nfrom functools import reduce\n\nclass LambdaBase(nn.Sequential):\n    def __init__(self, fn, *args):\n        super(LambdaBase, self).__init__(*args)\n        self.lambda_func = fn\n\n    def forward_prepare(self, input):\n        output = []\n        for module in self._modules.values():\n            output.append(module(input))\n        return output if output else input\n\nclass Lambda(LambdaBase):\n    def forward(self, input):\n        return self.lambda_func(self.forward_prepare(input))\n\nclass LambdaMap(LambdaBase):\n    def forward(self, input):\n        return list(map(self.lambda_func,self.forward_prepare(input)))\n\nclass LambdaReduce(LambdaBase):\n    def forward(self, input):\n        return reduce(self.lambda_func,self.forward_prepare(input))\n\nresnext101_64x4d_features = nn.Sequential(#Sequential,\n    nn.Conv2d(3, 64, (7, 7), (2, 2), (3, 3), 1, 1, bias = False),\n    nn.BatchNorm2d(64),\n    nn.ReLU(),\n    nn.MaxPool2d((3, 3), (2, 2), (1, 1)),\n    nn.Sequential(#Sequential,\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(64, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                        nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(256),\n                ),\n                nn.Sequential(#Sequential,\n                    nn.Conv2d(64, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(256),\n                ),\n            
),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                        nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(256),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                        nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(256),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(256),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n    ),\n    nn.Sequential(#Sequential,\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    
nn.Sequential(#Sequential,\n                        nn.Conv2d(256, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512, 512, (3, 3), (2, 2), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(512),\n                ),\n                nn.Sequential(#Sequential,\n                    nn.Conv2d(256, 512, (1, 1), (2, 2), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(512),\n                ),\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(512),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        
nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(512),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                        nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(512),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(512),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n    ),\n    nn.Sequential(#Sequential,\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(512, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (2, 2), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n              
      ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                nn.Sequential(#Sequential,\n                    nn.Conv2d(512, 1024, (1, 1), (2, 2), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    
nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: 
x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                       
 nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), 
(0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        
nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n          
              nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    
),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), 
#CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 
1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                        nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(1024),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(1024),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n    ),\n    nn.Sequential(#Sequential,\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(1024, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(2048),\n                        nn.ReLU(),\n                        nn.Conv2d(2048, 2048, (3, 3), (2, 2), (1, 1), 1, 64, bias = False),\n    
                    nn.BatchNorm2d(2048),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(2048),\n                ),\n                nn.Sequential(#Sequential,\n                    nn.Conv2d(1024, 2048, (1, 1), (2, 2), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(2048),\n                ),\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(2048),\n                        nn.ReLU(),\n                        nn.Conv2d(2048, 2048, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(2048),\n                        nn.ReLU(),\n                    ),\n                    nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(2048),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n        nn.Sequential(#Sequential,\n            LambdaMap(lambda x: x, #ConcatTable,\n                nn.Sequential(#Sequential,\n                    nn.Sequential(#Sequential,\n                        nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                        nn.BatchNorm2d(2048),\n                        nn.ReLU(),\n                        nn.Conv2d(2048, 2048, (3, 3), (1, 1), (1, 1), 1, 64, bias = False),\n                        nn.BatchNorm2d(2048),\n                        nn.ReLU(),\n                    ),\n                    
nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False),\n                    nn.BatchNorm2d(2048),\n                ),\n                Lambda(lambda x: x), #Identity,\n            ),\n            LambdaReduce(lambda x, y: x + y), #CAddTable,\n            nn.ReLU(),\n        ),\n    )\n)"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/torchvision.py",
    "content": "import torchvision.models as models\nimport torch.utils.model_zoo as model_zoo\n\n__all__ = [\n    'alexnet',\n    'densenet121', 'densenet169', 'densenet201', 'densenet161',\n    'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152',\n    'inceptionv3',\n    'squeezenet1_0', 'squeezenet1_1',\n    'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn',\n    'vgg19_bn', 'vgg19'\n]\n\nmodel_urls = {\n    'alexnet': 'https://download.pytorch.org/models/alexnet-owt-4df8aa71.pth',\n    'densenet121': 'https://download.pytorch.org/models/densenet121-241335ed.pth',\n    'densenet169': 'https://download.pytorch.org/models/densenet169-6f0f7f60.pth',\n    'densenet201': 'https://download.pytorch.org/models/densenet201-4c113574.pth',\n    'densenet161': 'https://download.pytorch.org/models/densenet161-17b70270.pth',   \n    'inceptionv3': 'https://download.pytorch.org/models/inception_v3_google-1a9a5a14.pth',\n    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',\n    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',\n    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',\n    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',\n    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',\n    'squeezenet1_0': 'https://download.pytorch.org/models/squeezenet1_0-a815701f.pth',\n    'squeezenet1_1': 'https://download.pytorch.org/models/squeezenet1_1-f364aa15.pth',\n    'vgg11': 'https://download.pytorch.org/models/vgg11-bbd30ac9.pth',\n    'vgg13': 'https://download.pytorch.org/models/vgg13-c768596a.pth',\n    'vgg16': 'https://download.pytorch.org/models/vgg16-397923af.pth',\n    'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth',\n    'vgg11_bn': 'https://download.pytorch.org/models/vgg11_bn-6002323d.pth',\n    'vgg13_bn': 'https://download.pytorch.org/models/vgg13_bn-abd245e5.pth',\n    'vgg16_bn': 
'https://download.pytorch.org/models/vgg16_bn-6c64b313.pth',\n    'vgg19_bn': 'https://download.pytorch.org/models/vgg19_bn-c79401a0.pth',\n    # 'vgg16_caffe': 'https://s3-us-west-2.amazonaws.com/jcjohns-models/vgg16-00b39a1b.pth',\n    # 'vgg19_caffe': 'https://s3-us-west-2.amazonaws.com/jcjohns-models/vgg19-d01eb7cb.pth'\n}\n\ninput_sizes = {}\nmeans = {}\nstds = {}\n\nfor model_name in __all__:\n    input_sizes[model_name] = [3, 224, 224]\n    means[model_name] = [0.485, 0.456, 0.406]\n    stds[model_name] = [0.229, 0.224, 0.225]\n\nfor model_name in ['inceptionv3']:\n    input_sizes[model_name] = [3, 299, 299]\n    means[model_name] = [0.5, 0.5, 0.5]\n    stds[model_name] = [0.5, 0.5, 0.5]\n\npretrained_settings = {}\n\nfor model_name in __all__:\n    pretrained_settings[model_name] = {\n        'imagenet': {\n            'url': model_urls[model_name],\n            'input_space': 'RGB',\n            'input_size': input_sizes[model_name],\n            'input_range': [0, 1],\n            'mean': means[model_name],\n            'std': stds[model_name],\n            'num_classes': 1000\n        }\n    }\n\n# for model_name in ['vgg16', 'vgg19']:\n#     pretrained_settings[model_name]['imagenet_caffe'] = {\n#         'url': model_urls[model_name + '_caffe'],\n#         'input_space': 'BGR',\n#         'input_size': input_sizes[model_name],\n#         'input_range': [0, 255],\n#         'mean': [103.939, 116.779, 123.68],\n#         'std': [1., 1., 1.],\n#         'num_classes': 1000\n#     }\n\ndef load_pretrained(model, num_classes, settings):\n    assert num_classes == settings['num_classes'], \\\n        \"num_classes should be {}, but is {}\".format(settings['num_classes'], num_classes)\n    model.load_state_dict(model_zoo.load_url(settings['url']))\n    model.input_space = settings['input_space']\n    model.input_size = settings['input_size']\n    model.input_range = settings['input_range']\n    model.mean = settings['mean']\n    model.std = settings['std']\n  
  return model\n\n\ndef alexnet(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"AlexNet model architecture from the\n    `\"One weird trick...\" <https://arxiv.org/abs/1404.5997>`_ paper.\n    \"\"\"\n    model = models.alexnet(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['alexnet'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef densenet121(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"Densenet-121 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model = models.densenet121(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['densenet121'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef densenet169(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"Densenet-169 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model = models.densenet169(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['densenet169'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef densenet201(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"Densenet-201 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model = models.densenet201(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['densenet201'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef densenet161(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"Densenet-161 model from\n    `\"Densely Connected Convolutional Networks\" <https://arxiv.org/pdf/1608.06993.pdf>`\n    \"\"\"\n    model = models.densenet161(pretrained=False)\n    if pretrained is 
not None:\n        settings = pretrained_settings['densenet161'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef inceptionv3(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"Inception v3 model architecture from\n    `\"Rethinking the Inception Architecture for Computer Vision\" <http://arxiv.org/abs/1512.00567>`_.\n    \"\"\"\n    model = models.inception_v3(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['inceptionv3'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef resnet18(num_classes=1000, pretrained='imagenet'):\n    \"\"\"Constructs a ResNet-18 model.\n    \"\"\"\n    model = models.resnet18(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['resnet18'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\ndef resnet34(num_classes=1000, pretrained='imagenet'):\n    \"\"\"Constructs a ResNet-34 model.\n    \"\"\"\n    model = models.resnet34(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['resnet34'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\ndef resnet50(num_classes=1000, pretrained='imagenet'):\n    \"\"\"Constructs a ResNet-50 model.\n    \"\"\"\n    model = models.resnet50(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['resnet50'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\ndef resnet101(num_classes=1000, pretrained='imagenet'):\n    \"\"\"Constructs a ResNet-101 model.\n    \"\"\"\n    model = models.resnet101(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['resnet101'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\ndef resnet152(num_classes=1000, 
pretrained='imagenet'):\n    \"\"\"Constructs a ResNet-152 model.\n    \"\"\"\n    model = models.resnet152(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['resnet152'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef squeezenet1_0(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"SqueezeNet model architecture from the `\"SqueezeNet: AlexNet-level\n    accuracy with 50x fewer parameters and <0.5MB model size\"\n    <https://arxiv.org/abs/1602.07360>`_ paper.\n    \"\"\"\n    model = models.squeezenet1_0(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['squeezenet1_0'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\ndef squeezenet1_1(num_classes=1000, pretrained='imagenet'):\n    r\"\"\"SqueezeNet 1.1 model from the `official SqueezeNet repo\n    <https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>`_.\n    SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters\n    than SqueezeNet 1.0, without sacrificing accuracy.\n    \"\"\"\n    model = models.squeezenet1_1(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['squeezenet1_1'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg11(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 11-layer model (configuration \"A\")\n    \"\"\"\n    model = models.vgg11(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['vgg11'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg11_bn(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 11-layer model (configuration \"A\") with batch normalization\n    \"\"\"\n    model = models.vgg11_bn(pretrained=False)\n    if pretrained is not None:\n        settings = 
pretrained_settings['vgg11_bn'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg13(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 13-layer model (configuration \"B\")\n    \"\"\"\n    model = models.vgg13(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['vgg13'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg13_bn(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 13-layer model (configuration \"B\") with batch normalization\n    \"\"\"\n    model = models.vgg13_bn(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['vgg13_bn'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg16(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 16-layer model (configuration \"D\")\n    \"\"\"\n    model = models.vgg16(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['vgg16'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg16_bn(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 16-layer model (configuration \"D\") with batch normalization\n    \"\"\"\n    model = models.vgg16_bn(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['vgg16_bn'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg19(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 19-layer model (configuration \"E\")\n    \"\"\"\n    model = models.vgg19(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['vgg19'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n\n\ndef vgg19_bn(num_classes=1000, pretrained='imagenet'):\n    \"\"\"VGG 19-layer model (configuration 'E') 
with batch normalization\n    \"\"\"\n    model = models.vgg19_bn(pretrained=False)\n    if pretrained is not None:\n        settings = pretrained_settings['vgg19_bn'][pretrained]\n        model = load_pretrained(model, num_classes, settings)\n    return model\n"
  },
  {
    "path": "pretrained_models_pytorch/pretrainedmodels/wideresnet.py",
    "content": "import os\nfrom os.path import expanduser\nimport hickle as hkl\nimport torch\nimport torch.nn.functional as F\nfrom torch.autograd import Variable\n\n__all__ = ['wideresnet50']\n\nmodel_urls = {\n    'wideresnet152': 'https://s3.amazonaws.com/pytorch/h5models/wide-resnet-50-2-export.hkl'\n}\n\ndef define_model(params):\n    def conv2d(input, params, base, stride=1, pad=0):\n        return F.conv2d(input, params[base + '.weight'],\n                        params[base + '.bias'], stride, pad)\n\n    def group(input, params, base, stride, n):\n        o = input\n        for i in range(0,n):\n            b_base = ('%s.block%d.conv') % (base, i)\n            x = o\n            o = conv2d(x, params, b_base + '0')\n            o = F.relu(o)\n            o = conv2d(o, params, b_base + '1', stride=i==0 and stride or 1, pad=1)\n            o = F.relu(o)\n            o = conv2d(o, params, b_base + '2')\n            if i == 0:\n                o += conv2d(x, params, b_base + '_dim', stride=stride)\n            else:\n                o += x\n            o = F.relu(o)\n        return o\n\n    # determine network size by parameters\n    blocks = [sum([re.match('group%d.block\\d+.conv0.weight'%j, k) is not None\n                   for k in params.keys()]) for j in range(4)]\n\n    def f(input, params, pooling_classif=True):\n        o = F.conv2d(input, params['conv0.weight'], params['conv0.bias'], 2, 3)\n        o = F.relu(o)\n        o = F.max_pool2d(o, 3, 2, 1)\n        o_g0 = group(o, params, 'group0', 1, blocks[0])\n        o_g1 = group(o_g0, params, 'group1', 2, blocks[1])\n        o_g2 = group(o_g1, params, 'group2', 2, blocks[2])\n        o_g3 = group(o_g2, params, 'group3', 2, blocks[3])\n        if pooling_classif:\n            o = F.avg_pool2d(o_g3, 7, 1, 0)\n            o = o.view(o.size(0), -1)\n            o = F.linear(o, params['fc.weight'], params['fc.bias'])\n        return o\n\n    return f\n\n\nclass WideResNet(nn.Module):\n\n    def 
__init__(self, pooling):\n        super(WideResNet, self).__init__()\n        self.pooling = pooling\n        self.params = params\n\n    def forward(self, x):\n        x = f(x, self.params, self.pooling)\n        return x\n\n\ndef wideresnet50(pooling):\n    dir_models = os.path.join(expanduser(\"~\"), '.torch/wideresnet')\n    path_hkl = os.path.join(dir_models, 'wideresnet50.hkl')\n    if os.path.isfile(path_hkl):\n        params = hkl.load(path_hkl)\n        # convert numpy arrays to torch Variables\n        for k,v in sorted(params.items()):\n            print k, v.shape\n            params[k] = Variable(torch.from_numpy(v), requires_grad=True)\n    else:\n        os.system('mkdir -p ' + dir_models)\n        os.system('wget {} -O {}'.format(model_urls['wideresnet50'], path_hkl))\n    f = define_model(params)\n    model = WideResNet(pooling)\n    return model\n\n\n"
  },
  {
    "path": "pretrained_models_pytorch/test/imagenet.py",
    "content": "import argparse\nimport os\nimport shutil\nimport time\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.parallel\nimport torch.backends.cudnn as cudnn\nimport torch.optim\nimport torch.utils.data\nimport torchvision.transforms as transforms\nimport torchvision.datasets as datasets\n\n#import torchvision.models as models\n\nimport sys\nsys.path.append('.')\nimport pretrainedmodels as models\n\n# models.__dict__['fbresnet152'] = pretrainedmodels.__dict__['fbresnet152']\n# models.__dict__['resnext101_32x4d'] = pretrainedmodels.__dict__['resnext101_32x4d']\n# models.__dict__['resnext101_64x4d'] = pretrainedmodels.__dict__['resnext101_64x4d']\n\nmodel_names = sorted(name for name in models.__dict__\n    if not name.startswith(\"__\")\n    and callable(models.__dict__[name]))\n\nparser = argparse.ArgumentParser(description='PyTorch ImageNet Training')\nparser.add_argument('data', metavar='DIR',\n                    help='path to dataset')\nparser.add_argument('--arch', '-a', metavar='ARCH', default='fbresnet152',\n                    choices=model_names,\n                    help='model architecture: ' +\n                        ' | '.join(model_names) +\n                        ' (default: fbresnet152)')\nparser.add_argument('-j', '--workers', default=4, type=int, metavar='N',\n                    help='number of data loading workers (default: 4)')\nparser.add_argument('--epochs', default=90, type=int, metavar='N',\n                    help='number of total epochs to run')\nparser.add_argument('--start-epoch', default=0, type=int, metavar='N',\n                    help='manual epoch number (useful on restarts)')\nparser.add_argument('-b', '--batch-size', default=256, type=int,\n                    metavar='N', help='mini-batch size (default: 256)')\nparser.add_argument('--lr', '--learning-rate', default=0.1, type=float,\n                    metavar='LR', help='initial learning rate')\nparser.add_argument('--momentum', default=0.9, type=float, 
metavar='M',\n                    help='momentum')\nparser.add_argument('--weight-decay', '--wd', default=1e-4, type=float,\n                    metavar='W', help='weight decay (default: 1e-4)')\nparser.add_argument('--print-freq', '-p', default=10, type=int,\n                    metavar='N', help='print frequency (default: 10)')\nparser.add_argument('--resume', default='', type=str, metavar='PATH',\n                    help='path to latest checkpoint (default: none)')\nparser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',\n                    help='evaluate model on validation set')\nparser.add_argument('--pretrained', default='imagenet', help='use pre-trained model')\n\nbest_prec1 = 0\n\n\nclass ToSpaceBGR(object):\n\n    def __init__(self, is_bgr):\n        self.is_bgr = is_bgr\n\n    def __call__(self, tensor):\n        if self.is_bgr:\n            new_tensor = tensor.clone()\n            new_tensor[0] = tensor[2]\n            new_tensor[2] = tensor[0]\n            tensor = new_tensor\n        return tensor\n\nclass ToRange255(object):\n\n    def __init__(self, is_255):\n        self.is_255 = is_255\n\n    def __call__(self, tensor):\n        if self.is_255:\n            tensor.mul_(255)\n        return tensor\n\ndef main():\n    global args, best_prec1\n    args = parser.parse_args()\n\n    # create model\n    print(\"=> creating model '{}'\".format(args.arch))\n    if args.pretrained.lower() not in ['false', 'none', 'not', 'no', '0']:\n        print(\"=> using pre-trained parameters '{}'\".format(args.pretrained))\n        model = models.__dict__[args.arch](num_classes=1000, pretrained=args.pretrained)\n    else:\n        model = models.__dict__[args.arch]()\n\n    # if args.arch.startswith('alexnet') or args.arch.startswith('vgg'):\n    #     model.features = torch.nn.DataParallel(model.features)\n    #     model.cuda()\n    # else:\n\n    # optionally resume from a checkpoint\n    if args.resume:\n        if 
os.path.isfile(args.resume):\n            print(\"=> loading checkpoint '{}'\".format(args.resume))\n            checkpoint = torch.load(args.resume)\n            args.start_epoch = checkpoint['epoch']\n            best_prec1 = checkpoint['best_prec1']\n            model.load_state_dict(checkpoint['state_dict'])\n            print(\"=> loaded checkpoint '{}' (epoch {})\"\n                  .format(args.resume, checkpoint['epoch']))\n        else:\n            print(\"=> no checkpoint found at '{}'\".format(args.resume))\n\n    cudnn.benchmark = True\n\n    # Data loading code\n    traindir = os.path.join(args.data, 'train')\n    valdir = os.path.join(args.data, 'val')\n    normalize = transforms.Normalize(mean=model.mean,\n                                     std=model.std)\n\n    # train_loader = torch.utils.data.DataLoader(\n    #     datasets.ImageFolder(traindir, transforms.Compose([\n    #         transforms.RandomSizedCrop(max(model.input_size)),\n    #         transforms.RandomHorizontalFlip(),\n    #         transforms.ToTensor(),\n    #         normalize,\n    #     ])),\n    #     batch_size=args.batch_size, shuffle=True,\n    #     num_workers=args.workers, pin_memory=True)\n\n    print('Images transformed from size {} to {}'.format(\n        round(max(model.input_size)*1.050),\n        model.input_size))\n\n    val_loader = torch.utils.data.DataLoader(\n        datasets.ImageFolder(valdir, transforms.Compose([\n            transforms.Scale(round(max(model.input_size)*1.050)),\n            transforms.CenterCrop(max(model.input_size)),\n            transforms.ToTensor(),\n            ToSpaceBGR(model.input_space=='BGR'),\n            ToRange255(max(model.input_range)==255),\n            normalize,\n        ])),\n        batch_size=args.batch_size, shuffle=False,\n        num_workers=args.workers, pin_memory=True)\n\n    # define loss function (criterion) and optimizer\n    criterion = nn.CrossEntropyLoss().cuda()\n\n    optimizer = 
torch.optim.SGD(model.parameters(), args.lr,\n                                momentum=args.momentum,\n                                weight_decay=args.weight_decay)\n\n    model = torch.nn.DataParallel(model).cuda()\n\n    if args.evaluate:\n        validate(val_loader, model, criterion)\n        return\n\n    for epoch in range(args.start_epoch, args.epochs):\n        adjust_learning_rate(optimizer, epoch)\n\n        # train for one epoch\n        train(train_loader, model, criterion, optimizer, epoch)\n\n        # evaluate on validation set\n        prec1 = validate(val_loader, model, criterion)\n\n        # remember best prec@1 and save checkpoint\n        is_best = prec1 > best_prec1\n        best_prec1 = max(prec1, best_prec1)\n        save_checkpoint({\n            'epoch': epoch + 1,\n            'arch': args.arch,\n            'state_dict': model.state_dict(),\n            'best_prec1': best_prec1,\n        }, is_best)\n\n\ndef train(train_loader, model, criterion, optimizer, epoch):\n    batch_time = AverageMeter()\n    data_time = AverageMeter()\n    losses = AverageMeter()\n    top1 = AverageMeter()\n    top5 = AverageMeter()\n\n    # switch to train mode\n    model.train()\n\n    end = time.time()\n    for i, (input, target) in enumerate(train_loader):\n        # measure data loading time\n        data_time.update(time.time() - end)\n\n        target = target.cuda(async=True)\n        input_var = torch.autograd.Variable(input)\n        target_var = torch.autograd.Variable(target)\n\n        # compute output\n        output = model(input_var)\n        loss = criterion(output, target_var)\n\n        # measure accuracy and record loss\n        prec1, prec5 = accuracy(output.data, target, topk=(1, 5))\n        losses.update(loss.data[0], input.size(0))\n        top1.update(prec1[0], input.size(0))\n        top5.update(prec5[0], input.size(0))\n\n        # compute gradient and do SGD step\n        optimizer.zero_grad()\n        loss.backward()\n        
optimizer.step()\n\n        # measure elapsed time\n        batch_time.update(time.time() - end)\n        end = time.time()\n\n        if i % args.print_freq == 0:\n            print('Epoch: [{0}][{1}/{2}]\\t'\n                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\\t'\n                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\\t'\n                  'Loss {loss.val:.4f} ({loss.avg:.4f})\\t'\n                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\\t'\n                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(\n                   epoch, i, len(train_loader), batch_time=batch_time,\n                   data_time=data_time, loss=losses, top1=top1, top5=top5))\n\n\ndef validate(val_loader, model, criterion):\n    batch_time = AverageMeter()\n    losses = AverageMeter()\n    top1 = AverageMeter()\n    top5 = AverageMeter()\n\n    # switch to evaluate mode\n    model.eval()\n\n    end = time.time()\n    for i, (input, target) in enumerate(val_loader):\n        target = target.cuda(async=True)\n        input_var = torch.autograd.Variable(input, volatile=True)\n        target_var = torch.autograd.Variable(target, volatile=True)\n\n        # compute output\n        output = model(input_var)\n        loss = criterion(output, target_var)\n\n        # measure accuracy and record loss\n        prec1, prec5 = accuracy(output.data, target, topk=(1, 5))\n        losses.update(loss.data[0], input.size(0))\n        top1.update(prec1[0], input.size(0))\n        top5.update(prec5[0], input.size(0))\n\n        # measure elapsed time\n        batch_time.update(time.time() - end)\n        end = time.time()\n\n        if i % args.print_freq == 0:\n            print('Test: [{0}/{1}]\\t'\n                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\\t'\n                  'Loss {loss.val:.4f} ({loss.avg:.4f})\\t'\n                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\\t'\n                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(\n             
      i, len(val_loader), batch_time=batch_time, loss=losses,\n                   top1=top1, top5=top5))\n\n    print(' * Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'\n          .format(top1=top1, top5=top5))\n\n    return top1.avg\n\n\ndef save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):\n    torch.save(state, filename)\n    if is_best:\n        shutil.copyfile(filename, 'model_best.pth.tar')\n\n\nclass AverageMeter(object):\n    \"\"\"Computes and stores the average and current value\"\"\"\n    def __init__(self):\n        self.reset()\n\n    def reset(self):\n        self.val = 0\n        self.avg = 0\n        self.sum = 0\n        self.count = 0\n\n    def update(self, val, n=1):\n        self.val = val\n        self.sum += val * n\n        self.count += n\n        self.avg = self.sum / self.count\n\n\ndef adjust_learning_rate(optimizer, epoch):\n    \"\"\"Sets the learning rate to the initial LR decayed by 10 every 30 epochs\"\"\"\n    lr = args.lr * (0.1 ** (epoch // 30))\n    for param_group in optimizer.param_groups:\n        param_group['lr'] = lr\n\n\ndef accuracy(output, target, topk=(1,)):\n    \"\"\"Computes the precision@k for the specified values of k\"\"\"\n    maxk = max(topk)\n    batch_size = target.size(0)\n\n    _, pred = output.topk(maxk, 1, True, True)\n    pred = pred.t()\n    correct = pred.eq(target.view(1, -1).expand_as(pred))\n\n    res = []\n    for k in topk:\n        correct_k = correct[:k].view(-1).float().sum(0)\n        res.append(correct_k.mul_(100.0 / batch_size))\n    return res\n\n\nif __name__ == '__main__':\n    main()"
  },
  {
    "path": "pretrained_models_pytorch/test/toy-example.py",
    "content": "import argparse\n\nfrom PIL import Image\nimport torch\nimport torchvision.transforms as transforms\n\nimport sys\nsys.path.append('../pretrained-models.pytorch')\nimport pretrainedmodels\n\nmodel_names = sorted(name for name in pretrainedmodels.__dict__\n    if not name.startswith(\"__\")\n    and callable(pretrainedmodels.__dict__[name]))\n\nparser = argparse.ArgumentParser(description='PyTorch ImageNet Training')\nparser.add_argument('--arch', '-a', metavar='ARCH', default='fbresnet152',\n                    choices=model_names,\n                    help='model architecture: ' +\n                        ' | '.join(model_names) +\n                        ' (default: fbresnet152)')\nargs = parser.parse_args()\n\n# Load Model\nmodel = pretrainedmodels.__dict__[args.arch](num_classes=1000,\n                                             pretrained='imagenet')\nmodel.eval()\n\n# Load One Input Image\npath_img = 'data/ILSVRC2012_val_00002147.JPEG'\nwith open(path_img, 'rb') as f:\n    with Image.open(f) as img:\n        input_data = img.convert(model.input_space)\n\ntf = transforms.Compose([\n    transforms.Scale(round(max(model.input_size)*1.143)),\n    transforms.CenterCrop(max(model.input_size)),\n    transforms.ToTensor(),\n    transforms.Normalize(mean=model.mean,\n                         std=model.std)\n])\n\ninput_data = tf(input_data)          # 3x400x225 -> 3x299x299\ninput_data = input_data.unsqueeze(0) # 3x299x299 -> 1x3x299x299\ninput = torch.autograd.Variable(input_data)\nprint(input)\nexit()\n# Load Imagenet Synsets\nwith open('data/imagenet_synsets.txt', 'r') as f:\n    synsets = f.readlines()\n\n# len(synsets)==1001\n# sysnets[0] == background\nsynsets = [x.strip() for x in synsets]\nsplits = [line.split(' ') for line in synsets]\nkey_to_classname = {spl[0]:' '.join(spl[1:]) for spl in splits}\n\nwith open('data/imagenet_classes.txt', 'r') as f:\n    class_id_to_key = f.readlines()\n\nclass_id_to_key = [x.strip() for x in 
class_id_to_key]\n\n# Make predictions\noutput = model(input) # size(1, 1000)\nmax, argmax = output.data.squeeze().max(0)\nclass_id = argmax[0]\nclass_key = class_id_to_key[class_id]\nclassname = key_to_classname[class_key]\n\nprint(path_img, 'is a', classname) \n"
  },
  {
    "path": "utils.py",
    "content": "import os\nimport sys\nimport time\nimport math\nimport numpy as np\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.init as init\nfrom torch.autograd import Variable\n\nfrom scipy.ndimage.interpolation import rotate\n\n_, term_width = os.popen('stty size', 'r').read().split()\nterm_width = int(term_width)\n\nTOTAL_BAR_LENGTH = 35.\nlast_time = time.time()\nbegin_time = last_time\ndef progress_bar(current, total, msg=None):\n    global last_time, begin_time\n    if current == 0:\n        begin_time = time.time()  # Reset for new bar.\n\n    cur_len = int(TOTAL_BAR_LENGTH*current/total)\n    rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1\n\n    sys.stdout.write(' [')\n    for i in range(cur_len):\n        sys.stdout.write('=')\n    sys.stdout.write('>')\n    for i in range(rest_len):\n        sys.stdout.write('.')\n    sys.stdout.write(']')\n\n    cur_time = time.time()\n    step_time = cur_time - last_time\n    last_time = cur_time\n    tot_time = cur_time - begin_time\n\n    L = []\n    if msg:\n        L.append(' ' + msg)\n    L.append(' | Step: %s' % format_time(step_time))\n    L.append(' | Tot: %s' % format_time(tot_time))\n\n    msg = ''.join(L)\n    sys.stdout.write(msg)\n    for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):\n        sys.stdout.write(' ')\n\n    # Go back to the center of the bar.\n    for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2):\n        sys.stdout.write('\\b')\n    sys.stdout.write(' %d/%d ' % (current+1, total))\n\n    if current < total-1:\n        sys.stdout.write('\\r')\n    else:\n        sys.stdout.write('\\n')\n    sys.stdout.flush()\n\ndef format_time(seconds):\n    days = int(seconds / 3600/24)\n    seconds = seconds - days*3600*24\n    hours = int(seconds / 3600)\n    seconds = seconds - hours*3600\n    minutes = int(seconds / 60)\n    seconds = seconds - minutes*60\n    secondsf = int(seconds)\n    seconds = seconds - secondsf\n    millis = int(seconds*1000)\n\n    f = ''\n    i = 1\n   
 if days > 0:\n        f += str(days) + 'D'\n        i += 1\n    if hours > 0 and i <= 2:\n        f += str(hours) + 'h'\n        i += 1\n    if minutes > 0 and i <= 2:\n        f += str(minutes) + 'm'\n        i += 1\n    if secondsf > 0 and i <= 2:\n        f += str(secondsf) + 's'\n        i += 1\n    if millis > 0 and i <= 2:\n        f += str(millis) + 'ms'\n        i += 1\n    if f == '':\n        f = '0ms'\n    return f\n\n\ndef submatrix(arr):\n    x, y = np.nonzero(arr)\n    # Using the smallest and largest x and y indices of nonzero elements, \n    # we can find the desired rectangular bounds.  \n    # And don't forget to add 1 to the top bound to avoid the fencepost problem.\n    return arr[x.min():x.max()+1, y.min():y.max()+1]\n\n\nclass ToSpaceBGR(object):\n    def __init__(self, is_bgr):\n        self.is_bgr = is_bgr\n    def __call__(self, tensor):\n        if self.is_bgr:\n            new_tensor = tensor.clone()\n            new_tensor[0] = tensor[2]\n            new_tensor[2] = tensor[0]\n            tensor = new_tensor\n        return tensor\n\n\nclass ToRange255(object):\n    def __init__(self, is_255):\n        self.is_255 = is_255\n    def __call__(self, tensor):\n        if self.is_255:\n            tensor.mul_(255)\n        return tensor\n\n\ndef init_patch_circle(image_size, patch_size):\n    image_size = image_size**2\n    noise_size = int(image_size*patch_size)\n    radius = int(math.sqrt(noise_size/math.pi))\n    patch = np.zeros((1, 3, radius*2, radius*2))    \n    for i in range(3):\n        a = np.zeros((radius*2, radius*2))    \n        cx, cy = radius, radius # The center of circle \n        y, x = np.ogrid[-radius: radius, -radius: radius]\n        index = x**2 + y**2 <= radius**2\n        a[cy-radius:cy+radius, cx-radius:cx+radius][index] = np.random.rand()\n        idx = np.flatnonzero((a == 0).all((1)))\n        a = np.delete(a, idx, axis=0)\n        patch[0][i] = np.delete(a, idx, axis=1)\n    return patch, patch.shape\n\n\ndef 
circle_transform(patch, data_shape, patch_shape, image_size):\n    # get dummy image \n    x = np.zeros(data_shape)\n   \n    # get shape\n    m_size = patch_shape[-1]\n    \n    for i in range(x.shape[0]):\n\n        # random rotation\n        rot = np.random.choice(360)\n        for j in range(patch[i].shape[0]):\n            patch[i][j] = rotate(patch[i][j], angle=rot, reshape=False)\n        \n        # random location\n        random_x = np.random.choice(image_size)\n        if random_x + m_size > x.shape[-1]:\n            while random_x + m_size > x.shape[-1]:\n                random_x = np.random.choice(image_size)\n        random_y = np.random.choice(image_size)\n        if random_y + m_size > x.shape[-1]:\n            while random_y + m_size > x.shape[-1]:\n                random_y = np.random.choice(image_size)\n       \n        # apply patch to dummy image  \n        x[i][0][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][0]\n        x[i][1][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][1]\n        x[i][2][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][2]\n    \n    mask = np.copy(x)\n    mask[mask != 0] = 1.0\n    \n    return x, mask, patch.shape\n\n\ndef init_patch_square(image_size, patch_size):\n    # get mask\n    image_size = image_size**2\n    noise_size = image_size*patch_size\n    noise_dim = int(noise_size**(0.5))\n    patch = np.random.rand(1,3,noise_dim,noise_dim)\n    return patch, patch.shape\n\n\ndef square_transform(patch, data_shape, patch_shape, image_size):\n    # get dummy image \n    x = np.zeros(data_shape)\n    \n    # get shape\n    m_size = patch_shape[-1]\n    \n    for i in range(x.shape[0]):\n\n        # random rotation\n        rot = np.random.choice(4)\n        for j in range(patch[i].shape[0]):\n            patch[i][j] = np.rot90(patch[i][j], rot)\n        \n        # random location\n        random_x = 
np.random.choice(image_size)\n        if random_x + m_size > x.shape[-1]:\n            while random_x + m_size > x.shape[-1]:\n                random_x = np.random.choice(image_size)\n        random_y = np.random.choice(image_size)\n        if random_y + m_size > x.shape[-1]:\n            while random_y + m_size > x.shape[-1]:\n                random_y = np.random.choice(image_size)\n       \n        # apply patch to dummy image  \n        x[i][0][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][0]\n        x[i][1][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][1]\n        x[i][2][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][2]\n    \n    mask = np.copy(x)\n    mask[mask != 0] = 1.0\n    \n    return x, mask\n\n\n\n"
  }
]