Repository: jhayes14/adversarial-patch Branch: master Commit: 4d78d1e7a232 Files: 25 Total size: 299.9 KB Directory structure: gitextract_1oq_ggrm/ ├── README.md ├── make_patch.py ├── pretrained_models_pytorch/ │ ├── LICENSE │ ├── README.md │ ├── data/ │ │ ├── imagenet_classes.txt │ │ └── imagenet_synsets.txt │ ├── pretrainedmodels/ │ │ ├── .torchvision.py.swp │ │ ├── __init__.py │ │ ├── bninception.py │ │ ├── fbresnet/ │ │ │ ├── resnet152_dump.lua │ │ │ └── resnet152_load.py │ │ ├── fbresnet.py │ │ ├── inceptionresnetv2.py │ │ ├── inceptionv4.py │ │ ├── nasnet.py │ │ ├── resnext.py │ │ ├── resnext_features/ │ │ │ ├── __init__.py │ │ │ ├── resnext101_32x4d_features.py │ │ │ └── resnext101_64x4d_features.py │ │ ├── torchvision.py │ │ └── wideresnet.py │ └── test/ │ ├── .toy-example.py.swp │ ├── imagenet.py │ └── toy-example.py └── utils.py ================================================ FILE CONTENTS ================================================ ================================================ FILE: README.md ================================================ # adversarial-patch PyTorch implementation of adversarial patch This is an implementation of the Adversarial Patch paper. Not official and likely to have bugs/errors. ## How to run: Data set-up: - Follow instructions https://github.com/amd/OpenCL-caffe/wiki/Instructions-to-create-ImageNet-2012-data . The validation set should be in path `./imagenet/val/`. There should be 1000 directories, each with 50 images. Run attack: - `python make_patch.py --cuda --netClassifier inceptionv3 --max_count 500 --image_size 299 --patch_type circle --outf log` ## Results: Using patch shapes of both circles and squares gave good results (both achieved 100% success on the training set and eventually > 90% success on test set) I managed to recreate the toaster example in the original paper. It looks slightly different but it is evidently a toaster. 
![Alt text](1981_859_adversarial.png?raw=true "") This is a toaster Square patches are a little more homogenous due to that I only rotate by multiples of 90 degrees. ![Alt text](1978_859_adversarial.png?raw=true "") This is also a toaster ## Issues: - Cannot make a perfect circle with numpy/pytorch. The hack I came up with makes the boundary slightly hexagonal. - Rather slow if max_count and conf_target are large. - Probably lots of redundant calls and variables. ================================================ FILE: make_patch.py ================================================ import argparse import os import random import numpy as np import torch import torch.nn as nn import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim as optim import torch.utils.data import torch.nn.functional as F import torchvision.datasets as dset import torchvision.transforms as transforms import torchvision.utils as vutils from torch.autograd import Variable from torch.utils.data.sampler import SubsetRandomSampler from pretrained_models_pytorch import pretrainedmodels from utils import * parser = argparse.ArgumentParser() parser.add_argument('--workers', type=int, help='number of data loading workers', default=2) parser.add_argument('--epochs', type=int, default=20, help='number of epochs to train for') parser.add_argument('--cuda', action='store_true', help='enables cuda') parser.add_argument('--target', type=int, default=859, help='The target class: 859 == toaster') parser.add_argument('--conf_target', type=float, default=0.9, help='Stop attack on image when target classifier reaches this value for target class') parser.add_argument('--max_count', type=int, default=1000, help='max number of iterations to find adversarial example') parser.add_argument('--patch_type', type=str, default='circle', help='patch type: circle or square') parser.add_argument('--patch_size', type=float, default=0.05, help='patch size. E.g. 
0.05 ~= 5% of image ') parser.add_argument('--train_size', type=int, default=2000, help='Number of training images') parser.add_argument('--test_size', type=int, default=2000, help='Number of test images') parser.add_argument('--image_size', type=int, default=299, help='the height / width of the input image to network') parser.add_argument('--plot_all', type=int, default=1, help='1 == plot all successful adversarial images') parser.add_argument('--netClassifier', default='inceptionv3', help="The target classifier") parser.add_argument('--outf', default='./logs', help='folder to output images and model checkpoints') parser.add_argument('--manualSeed', type=int, default=1338, help='manual seed') opt = parser.parse_args() print(opt) try: os.makedirs(opt.outf) except OSError: pass if opt.manualSeed is None: opt.manualSeed = random.randint(1, 10000) print("Random Seed: ", opt.manualSeed) random.seed(opt.manualSeed) np.random.seed(opt.manualSeed) torch.manual_seed(opt.manualSeed) if opt.cuda: torch.cuda.manual_seed_all(opt.manualSeed) cudnn.benchmark = True if torch.cuda.is_available() and not opt.cuda: print("WARNING: You have a CUDA device, so you should probably run with --cuda") target = opt.target conf_target = opt.conf_target max_count = opt.max_count patch_type = opt.patch_type patch_size = opt.patch_size image_size = opt.image_size train_size = opt.train_size test_size = opt.test_size plot_all = opt.plot_all assert train_size + test_size <= 50000, "Traing set size + Test set size > Total dataset size" print("=> creating model ") netClassifier = pretrainedmodels.__dict__[opt.netClassifier](num_classes=1000, pretrained='imagenet') if opt.cuda: netClassifier.cuda() print('==> Preparing data..') normalize = transforms.Normalize(mean=netClassifier.mean, std=netClassifier.std) idx = np.arange(50000) np.random.shuffle(idx) training_idx = idx[:train_size] test_idx = idx[train_size:test_size] train_loader = torch.utils.data.DataLoader( 
dset.ImageFolder('./imagenetdata/val', transforms.Compose([ transforms.Scale(round(max(netClassifier.input_size)*1.050)), transforms.CenterCrop(max(netClassifier.input_size)), transforms.ToTensor(), ToSpaceBGR(netClassifier.input_space=='BGR'), ToRange255(max(netClassifier.input_range)==255), normalize, ])), batch_size=1, shuffle=False, sampler=SubsetRandomSampler(training_idx), num_workers=opt.workers, pin_memory=True) test_loader = torch.utils.data.DataLoader( dset.ImageFolder('./imagenetdata/val', transforms.Compose([ transforms.Scale(round(max(netClassifier.input_size)*1.050)), transforms.CenterCrop(max(netClassifier.input_size)), transforms.ToTensor(), ToSpaceBGR(netClassifier.input_space=='BGR'), ToRange255(max(netClassifier.input_range)==255), normalize, ])), batch_size=1, shuffle=False, sampler=SubsetRandomSampler(test_idx), num_workers=opt.workers, pin_memory=True) min_in, max_in = netClassifier.input_range[0], netClassifier.input_range[1] min_in, max_in = np.array([min_in, min_in, min_in]), np.array([max_in, max_in, max_in]) mean, std = np.array(netClassifier.mean), np.array(netClassifier.std) min_out, max_out = np.min((min_in-mean)/std), np.max((max_in-mean)/std) def train(epoch, patch, patch_shape): netClassifier.eval() success = 0 total = 0 recover_time = 0 for batch_idx, (data, labels) in enumerate(train_loader): if opt.cuda: data = data.cuda() labels = labels.cuda() data, labels = Variable(data), Variable(labels) prediction = netClassifier(data) # only computer adversarial examples on examples that are originally classified correctly if prediction.data.max(1)[1][0] != labels.data[0]: continue total += 1 # transform path data_shape = data.data.cpu().numpy().shape if patch_type == 'circle': patch, mask, patch_shape = circle_transform(patch, data_shape, patch_shape, image_size) elif patch_type == 'square': patch, mask = square_transform(patch, data_shape, patch_shape, image_size) patch, mask = torch.FloatTensor(patch), torch.FloatTensor(mask) if 
opt.cuda: patch, mask = patch.cuda(), mask.cuda() patch, mask = Variable(patch), Variable(mask) adv_x, mask, patch = attack(data, patch, mask) adv_label = netClassifier(adv_x).data.max(1)[1][0] ori_label = labels.data[0] if adv_label == target: success += 1 if plot_all == 1: # plot source image vutils.save_image(data.data, "./%s/%d_%d_original.png" %(opt.outf, batch_idx, ori_label), normalize=True) # plot adversarial image vutils.save_image(adv_x.data, "./%s/%d_%d_adversarial.png" %(opt.outf, batch_idx, adv_label), normalize=True) masked_patch = torch.mul(mask, patch) patch = masked_patch.data.cpu().numpy() new_patch = np.zeros(patch_shape) for i in range(new_patch.shape[0]): for j in range(new_patch.shape[1]): new_patch[i][j] = submatrix(patch[i][j]) patch = new_patch # log to file progress_bar(batch_idx, len(train_loader), "Train Patch Success: {:.3f}".format(success/total)) return patch def test(epoch, patch, patch_shape): netClassifier.eval() success = 0 total = 0 for batch_idx, (data, labels) in enumerate(test_loader): if opt.cuda: data = data.cuda() labels = labels.cuda() data, labels = Variable(data), Variable(labels) prediction = netClassifier(data) # only computer adversarial examples on examples that are originally classified correctly if prediction.data.max(1)[1][0] != labels.data[0]: continue total += 1 # transform path data_shape = data.data.cpu().numpy().shape if patch_type == 'circle': patch, mask, patch_shape = circle_transform(patch, data_shape, patch_shape, image_size) elif patch_type == 'square': patch, mask = square_transform(patch, data_shape, patch_shape, image_size) patch, mask = torch.FloatTensor(patch), torch.FloatTensor(mask) if opt.cuda: patch, mask = patch.cuda(), mask.cuda() patch, mask = Variable(patch), Variable(mask) adv_x = torch.mul((1-mask),data) + torch.mul(mask,patch) adv_x = torch.clamp(adv_x, min_out, max_out) adv_label = netClassifier(adv_x).data.max(1)[1][0] ori_label = labels.data[0] if adv_label == target: success += 1 
masked_patch = torch.mul(mask, patch) patch = masked_patch.data.cpu().numpy() new_patch = np.zeros(patch_shape) for i in range(new_patch.shape[0]): for j in range(new_patch.shape[1]): new_patch[i][j] = submatrix(patch[i][j]) patch = new_patch # log to file progress_bar(batch_idx, len(test_loader), "Test Success: {:.3f}".format(success/total)) def attack(x, patch, mask): netClassifier.eval() x_out = F.softmax(netClassifier(x)) target_prob = x_out.data[0][target] adv_x = torch.mul((1-mask),x) + torch.mul(mask,patch) count = 0 while conf_target > target_prob: count += 1 adv_x = Variable(adv_x.data, requires_grad=True) adv_out = F.log_softmax(netClassifier(adv_x)) adv_out_probs, adv_out_labels = adv_out.max(1) Loss = -adv_out[0][target] Loss.backward() adv_grad = adv_x.grad.clone() adv_x.grad.data.zero_() patch -= adv_grad adv_x = torch.mul((1-mask),x) + torch.mul(mask,patch) adv_x = torch.clamp(adv_x, min_out, max_out) out = F.softmax(netClassifier(adv_x)) target_prob = out.data[0][target] #y_argmax_prob = out.data.max(1)[0][0] #print(count, conf_target, target_prob, y_argmax_prob) if count >= opt.max_count: break return adv_x, mask, patch if __name__ == '__main__': if patch_type == 'circle': patch, patch_shape = init_patch_circle(image_size, patch_size) elif patch_type == 'square': patch, patch_shape = init_patch_square(image_size, patch_size) else: sys.exit("Please choose a square or circle patch") for epoch in range(1, opt.epochs + 1): patch = train(epoch, patch, patch_shape) test(epoch, patch, patch_shape) ================================================ FILE: pretrained_models_pytorch/LICENSE ================================================ BSD 3-Clause License Copyright (c) 2017, Remi Cadene All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ================================================ FILE: pretrained_models_pytorch/README.md ================================================ # Pretrained models for Pytorch (Work in progress) The goal of this repo is: - to help to reproduce research papers results (transfer learning setups for instance), - to access pretrained ConvNets with a unique interface/API inspired by torchvision. News: - 16/11/2017: nasnet-a-large pretrained model ported by T. Durand and R. 
Cadene - 22/07/2017: torchvision pretrained models - 22/07/2017: momentum in inceptionv4 and inceptionresnetv2 to 0.1 - 17/07/2017: model.input_range attribut - 17/07/2017: BNInception pretrained on Imagenet ## Summary - [Installation](https://github.com/Cadene/pretrained-models.pytorch#installation) - [Toy example](https://github.com/Cadene/pretrained-models.pytorch#toy-example) - [Evaluation on ImageNet](https://github.com/Cadene/pretrained-models.pytorch#evaluation-on-imagenet) - [Accuracy on valset](https://github.com/Cadene/pretrained-models.pytorch#accuracy-on-validation-set) - [Reproducing results](https://github.com/Cadene/pretrained-models.pytorch#reproducing-results) - [Documentation](https://github.com/Cadene/pretrained-models.pytorch#documentation) - [Available models](https://github.com/Cadene/pretrained-models.pytorch#available-models) - [NasNetLarge](https://github.com/Cadene/pretrained-models.pytorch#nasnet) - [BNInception](https://github.com/Cadene/pretrained-models.pytorch#bninception) - [InceptionV3](https://github.com/Cadene/pretrained-models.pytorch#inception) - [InceptionV4](https://github.com/Cadene/pretrained-models.pytorch#inception) - [InceptionResNetV2](https://github.com/Cadene/pretrained-models.pytorch#inception) - [ResNeXt101_64x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext) - [ResNeXt101_32x4d](https://github.com/Cadene/pretrained-models.pytorch#resnext) - [ResNet18](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [ResNet34](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [ResNet50](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [ResNet101](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [ResNet152](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [FBResNet152](https://github.com/Cadene/pretrained-models.pytorch#facebook-resnet) - [DenseNet121](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - 
[DenseNet161](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [DenseNet169](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [DenseNet201](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [SqueezeNet1_0](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [SqueezeNet1_1](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [AlexNet](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG11](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG13](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG16](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG19](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG11_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG13_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG16_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [VGG19_BN](https://github.com/Cadene/pretrained-models.pytorch#torchvision) - [Model API](https://github.com/Cadene/pretrained-models.pytorch#model-api) - [model.input_size](https://github.com/Cadene/pretrained-models.pytorch#modelinput_size) - [model.input_space](https://github.com/Cadene/pretrained-models.pytorch#modelinput_space) - [model.input_range](https://github.com/Cadene/pretrained-models.pytorch#modelinput_range) - [model.mean](https://github.com/Cadene/pretrained-models.pytorch#modelmean) - [model.std](https://github.com/Cadene/pretrained-models.pytorch#modelstd) - [model.features](https://github.com/Cadene/pretrained-models.pytorch#modelfeatures) - [model.classif](https://github.com/Cadene/pretrained-models.pytorch#modelclassif) - [model.forward](https://github.com/Cadene/pretrained-models.pytorch#modelforward) - [Reproducing porting](https://github.com/Cadene/pretrained-models.pytorch#reproducing) - 
[ResNet*](https://github.com/Cadene/pretrained-models.pytorch#hand-porting-of-resnet152) - [ResNeXt*](https://github.com/Cadene/pretrained-models.pytorch#automatic-porting-of-resnext) - [Inception*](https://github.com/Cadene/pretrained-models.pytorch#hand-porting-of-inceptionv4-and-inceptionresnetv2) ## Installation 1. [python3 with anaconda](https://www.continuum.io/downloads) 2. [pytorch with/out CUDA](http://pytorch.org) 3. `git clone https://github.com/Cadene/pretrained-models.pytorch.git` ## Toy example - See [test/toy-example.py](https://github.com/Cadene/pretrained-models.pytorch/blob/master/test/toy-example.py) to compute logits of classes appearance with pretrained models on imagenet. `python test/toy-example.py -a fbresnet152` ```python from PIL import Image import torch import torchvision.transforms as transforms import sys sys.path.append('yourdir/pretrained-models.pytorch') # if needed import pretrainedmodels # Load Model model_name = 'inceptionresnetv4' #fbresnet152 model = pretrainedmodels.__dict__[model_name](num_classes=1000, pretrained='imagenet') model.eval() # Load One Input Image path_img = 'data/cat.jpg' with open(path_img, 'rb') as f: with Image.open(f) as img: input_data = img.convert(model.input_space) tf = transforms.Compose([ transforms.Scale(round(max(model.input_size)*1.143)), transforms.CenterCrop(max(model.input_size)), transforms.ToTensor(), transforms.Normalize(mean=model.mean, std=model.std) ]) input_data = tf(input_data) # 3x400x225 -> 3x299x299 input_data = input_data.unsqueeze(0) # 3x299x299 -> 1x3x299x299 input = torch.autograd.Variable(input_data) # Load Imagenet Synsets with open('data/imagenet_synsets.txt', 'r') as f: synsets = f.readlines() # len(synsets)==1001 # sysnets[0] == background synsets = [x.strip() for x in synsets] splits = [line.split(' ') for line in synsets] key_to_classname = {spl[0]:' '.join(spl[1:]) for spl in splits} with open('data/imagenet_classes.txt', 'r') as f: class_id_to_key = f.readlines() 
class_id_to_key = [x.strip() for x in class_id_to_key] # Make predictions output = model(input) # size(1, 1000) max, argmax = output.data.squeeze().max(0) class_id = argmax[0] class_key = class_id_to_key[class_id] classname = key_to_classname[class_key] print(path_img, 'is a', classname) ``` - See also [test/imagenet.py](https://github.com/Cadene/pretrained-models.pytorch/blob/master/test/imagenet.py) to evaluate pretrained models on imagenet. ## Evaluation on imagenet ### Accuracy on validation set (single model) Model | Version | Acc@1 | Acc@5 --- | --- | --- | --- NASNet-A-Large | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 82.693 | 96.163 NASNet-A-Large | Our porting | 82.566 | 96.086 InceptionResNetV2 | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 80.4 | 95.3 InceptionV4 | [Tensorflow](https://github.com/tensorflow/models/tree/master/slim) | 80.2 | 95.3 InceptionResNetV2 | Our porting | 80.170 | 95.234 InceptionV4 | Our porting | 80.062 | 94.926 ResNeXt101_64x4d | [Torch7](https://github.com/facebookresearch/ResNeXt) | 79.6 | 94.7 ResNeXt101_64x4d | Our porting | 78.956 | 94.252 ResNeXt101_32x4d | [Torch7](https://github.com/facebookresearch/ResNeXt) | 78.8 | 94.4 ResNet152 | [Pytorch](https://github.com/pytorch/vision#models) | 78.428 | 94.110 ResNeXt101_32x4d | Our porting | 78.188 | 93.886 FBResNet152 | [Torch7](https://github.com/facebook/fb.resnet.torch) | 77.84 | 93.84 DenseNet161 | [Pytorch](https://github.com/pytorch/vision#models) | 77.560 | 93.798 FBResNet152 | Our porting | 77.386 | 93.594 InceptionV3 | [Pytorch](https://github.com/pytorch/vision#models) | 77.294 | 93.454 DenseNet201 | [Pytorch](https://github.com/pytorch/vision#models) | 77.152 | 93.548 ResNet101 | [Pytorch](https://github.com/pytorch/vision#models) | 77.438 | 93.672 DenseNet169 | [Pytorch](https://github.com/pytorch/vision#models) | 76.026 | 92.992 ResNet50 | [Pytorch](https://github.com/pytorch/vision#models) | 76.002 | 92.980 
DenseNet121 | [Pytorch](https://github.com/pytorch/vision#models) | 74.646 | 92.136 VGG19_BN | [Pytorch](https://github.com/pytorch/vision#models) | 74.266 | 92.066 ResNet34 | [Pytorch](https://github.com/pytorch/vision#models) | 73.554 | 91.456 BNInception | [Caffe](https://github.com/Cadene/tensorflow-model-zoo.torch/pull/2) | 73.522 | 91.560 VGG16_BN | [Pytorch](https://github.com/pytorch/vision#models) | 73.518 | 91.608 VGG19 | [Pytorch](https://github.com/pytorch/vision#models) | 72.080 | 90.822 VGG16 | [Pytorch](https://github.com/pytorch/vision#models) | 71.636 | 90.354 VGG13_BN | [Pytorch](https://github.com/pytorch/vision#models) | 71.508 | 90.494 VGG11_BN | [Pytorch](https://github.com/pytorch/vision#models) | 70.452 | 89.818 ResNet18 | [Pytorch](https://github.com/pytorch/vision#models) | 70.142 | 89.274 VGG13 | [Pytorch](https://github.com/pytorch/vision#models) | 69.662 | 89.264 VGG11 | [Pytorch](https://github.com/pytorch/vision#models) | 68.970 | 88.746 SqueezeNet1_1 | [Pytorch](https://github.com/pytorch/vision#models) | 58.250 | 80.800 SqueezeNet1_0 | [Pytorch](https://github.com/pytorch/vision#models) | 58.108 | 80.428 Alexnet | [Pytorch](https://github.com/pytorch/vision#models) | 56.432 | 79.194 Note: the Pytorch version of ResNet152 is not a porting of the Torch7 but has been retrained by facebook. Beware, the accuracy reported here is not always representative of the transferable capacity of the network on other tasks and datasets. You must try them all! 
:P ### Reproducing results Download the ImageNet dataset and move validation images to labeled subfolders ``` python test/imagenet.py /local/data/imagenet_2012/images --arch resnext101_32x4d -e ``` ## Documentation ### Available models #### NASNet* Source: [TensorFlow Slim repo](https://github.com/tensorflow/models/tree/master/slim) - `nasnetlarge(num_classes=1000, pretrained='imagenet')` - `nasnetlarge(num_classes=1001, pretrained='imagenet+background')` #### FaceBook ResNet* Source: [Torch7 repo of FaceBook](https://github.com/facebook/fb.resnet.torch) There are a bit different from the ResNet* of torchvision. ResNet152 is currently the only one available. - `fbresnet152(num_classes=1000, pretrained='imagenet')` #### Inception* Source: [TensorFlow Slim repo](https://github.com/tensorflow/models/tree/master/slim) and [Pytorch/Vision repo](https://github.com/pytorch/vision/tree/master/torchvision) for `inceptionv3` - `inceptionresnetv2(num_classes=1000, pretrained='imagenet')` - `inceptionresnetv2(num_classes=1001, pretrained='imagenet+background')` - `inceptionv4(num_classes=1000, pretrained='imagenet')` - `inceptionv4(num_classes=1001, pretrained='imagenet+background')` - `inceptionv3(num_classes=1000, pretrained='imagenet')` #### BNInception Source: [Trained with Caffe](https://github.com/Cadene/tensorflow-model-zoo.torch/pull/2) by [Xiong Yuanjun](http://yjxiong.me) - `bninception(num_classes=1000, pretrained='imagenet')` #### ResNeXt* Source: [ResNeXt repo of FaceBook](https://github.com/facebookresearch/ResNeXt) - `resnext101_32x4d(num_classes=1000, pretrained='imagenet')` - `resnext101_62x4d(num_classes=1000, pretrained='imagenet')` #### TorchVision Source: [Pytorch/Vision repo](https://github.com/pytorch/vision/tree/master/torchvision) (`inceptionv3` included in [Inception*](https://github.com/Cadene/pretrained-models.pytorch#inception)) - `resnet18(num_classes=1000, pretrained='imagenet')` - `resnet34(num_classes=1000, pretrained='imagenet')` - 
`resnet50(num_classes=1000, pretrained='imagenet')` - `resnet101(num_classes=1000, pretrained='imagenet')` - `resnet152(num_classes=1000, pretrained='imagenet')` - `densenet121(num_classes=1000, pretrained='imagenet')` - `densenet161(num_classes=1000, pretrained='imagenet')` - `densenet169(num_classes=1000, pretrained='imagenet')` - `densenet201(num_classes=1000, pretrained='imagenet')` - `squeezenet1_0(num_classes=1000, pretrained='imagenet')` - `squeezenet1_1(num_classes=1000, pretrained='imagenet')` - `alexnet(num_classes=1000, pretrained='imagenet')` - `vgg11(num_classes=1000, pretrained='imagenet')` - `vgg13(num_classes=1000, pretrained='imagenet')` - `vgg16(num_classes=1000, pretrained='imagenet')` - `vgg19(num_classes=1000, pretrained='imagenet')` - `vgg11_bn(num_classes=1000, pretrained='imagenet')` - `vgg13_bn(num_classes=1000, pretrained='imagenet')` - `vgg16_bn(num_classes=1000, pretrained='imagenet')` - `vgg19_bn(num_classes=1000, pretrained='imagenet')` ### Model API Once a pretrained model has been loaded, you can use it that way. **Important note**: All image must be loaded using `PIL` which scales the pixel values between 0 and 1. #### `model.input_size` Attribut of type `list` composed of 3 numbers: - number of color channels, - height of the input image, - width of the input image. Example: - `[3, 299, 299]` for inception* networks, - `[3, 224, 224]` for resnet* networks. #### `model.input_space` Attribut of type `str` representating the color space of the image. Can be `RGB` or `BGR`. #### `model.input_range` Attribut of type `list` composed of 2 numbers: - min pixel value, - max pixel value. Example: - `[0, 1]` for resnet* and inception* networks, - `[0, 255]` for bninception network. #### `model.mean` Attribut of type `list` composed of 3 numbers which are used to normalize the input image (substract "color-channel-wise"). Example: - `[0.5, 0.5, 0.5]` for inception* networks, - `[0.485, 0.456, 0.406]` for resnet* networks. 
#### `model.std` Attribut of type `list` composed of 3 numbers which are used to normalize the input image (divide "color-channel-wise"). Example: - `[0.5, 0.5, 0.5]` for inception* networks, - `[0.229, 0.224, 0.225]` for resnet* networks. #### `model.features` /!\ work in progress (may not be available) Method which is used to extract the features from the image. Example when the model is loaded using `fbresnet152`: ```python print(input_224.size()) # (1,3,224,224) output = model.features(input_224) print(output.size()) # (1,2048,1,1) # print(input_448.size()) # (1,3,448,448) output = model.features(input_448) # print(output.size()) # (1,2048,7,7) ``` #### `model.classif` /!\ work in progress (may not be available) Method which is used to classify the features from the image. Example when the model is loaded using `fbresnet152`: ```python output = model.features(input_224) output = output.view(1,-1) print(output.size()) # (1,2048) output = model.classif(output) print(output.size()) # (1,1000) ``` #### `model.forward` Method used to call `model.features` and `model.classif`. It can be overwritten as desired. **Important note**: A good practice is to use `model.__call__` as your function of choice to forward an input to your model. See the example bellow. ```python # Without model.__call__ output = model.forward(input_224) print(output.size()) # (1,1000) # With model.__call__ output = model(input_224) print(output.size()) # (1,1000) ``` ## Reproducing ### Hand porting of ResNet152 ``` th pretrainedmodels/fbresnet/resnet152_dump.lua python pretrainedmodels/fbresnet/resnet152_load.py ``` ### Automatic porting of ResNeXt https://github.com/clcarwin/convert_torch_to_pytorch ### Hand porting of NASNet, InceptionV4 and InceptionResNetV2 https://github.com/Cadene/tensorflow-model-zoo.torch ## Acknowledgement Thanks to the deep learning community and especially to the contributers of the pytorch ecosystem. 
================================================ FILE: pretrained_models_pytorch/data/imagenet_classes.txt ================================================ n01440764 n01443537 n01484850 n01491361 n01494475 n01496331 n01498041 n01514668 n01514859 n01518878 n01530575 n01531178 n01532829 n01534433 n01537544 n01558993 n01560419 n01580077 n01582220 n01592084 n01601694 n01608432 n01614925 n01616318 n01622779 n01629819 n01630670 n01631663 n01632458 n01632777 n01641577 n01644373 n01644900 n01664065 n01665541 n01667114 n01667778 n01669191 n01675722 n01677366 n01682714 n01685808 n01687978 n01688243 n01689811 n01692333 n01693334 n01694178 n01695060 n01697457 n01698640 n01704323 n01728572 n01728920 n01729322 n01729977 n01734418 n01735189 n01737021 n01739381 n01740131 n01742172 n01744401 n01748264 n01749939 n01751748 n01753488 n01755581 n01756291 n01768244 n01770081 n01770393 n01773157 n01773549 n01773797 n01774384 n01774750 n01775062 n01776313 n01784675 n01795545 n01796340 n01797886 n01798484 n01806143 n01806567 n01807496 n01817953 n01818515 n01819313 n01820546 n01824575 n01828970 n01829413 n01833805 n01843065 n01843383 n01847000 n01855032 n01855672 n01860187 n01871265 n01872401 n01873310 n01877812 n01882714 n01883070 n01910747 n01914609 n01917289 n01924916 n01930112 n01943899 n01944390 n01945685 n01950731 n01955084 n01968897 n01978287 n01978455 n01980166 n01981276 n01983481 n01984695 n01985128 n01986214 n01990800 n02002556 n02002724 n02006656 n02007558 n02009229 n02009912 n02011460 n02012849 n02013706 n02017213 n02018207 n02018795 n02025239 n02027492 n02028035 n02033041 n02037110 n02051845 n02056570 n02058221 n02066245 n02071294 n02074367 n02077923 n02085620 n02085782 n02085936 n02086079 n02086240 n02086646 n02086910 n02087046 n02087394 n02088094 n02088238 n02088364 n02088466 n02088632 n02089078 n02089867 n02089973 n02090379 n02090622 n02090721 n02091032 n02091134 n02091244 n02091467 n02091635 n02091831 n02092002 n02092339 n02093256 n02093428 n02093647 n02093754 n02093859 
n02093991 n02094114 n02094258 n02094433 n02095314 n02095570 n02095889 n02096051 n02096177 n02096294 n02096437 n02096585 n02097047 n02097130 n02097209 n02097298 n02097474 n02097658 n02098105 n02098286 n02098413 n02099267 n02099429 n02099601 n02099712 n02099849 n02100236 n02100583 n02100735 n02100877 n02101006 n02101388 n02101556 n02102040 n02102177 n02102318 n02102480 n02102973 n02104029 n02104365 n02105056 n02105162 n02105251 n02105412 n02105505 n02105641 n02105855 n02106030 n02106166 n02106382 n02106550 n02106662 n02107142 n02107312 n02107574 n02107683 n02107908 n02108000 n02108089 n02108422 n02108551 n02108915 n02109047 n02109525 n02109961 n02110063 n02110185 n02110341 n02110627 n02110806 n02110958 n02111129 n02111277 n02111500 n02111889 n02112018 n02112137 n02112350 n02112706 n02113023 n02113186 n02113624 n02113712 n02113799 n02113978 n02114367 n02114548 n02114712 n02114855 n02115641 n02115913 n02116738 n02117135 n02119022 n02119789 n02120079 n02120505 n02123045 n02123159 n02123394 n02123597 n02124075 n02125311 n02127052 n02128385 n02128757 n02128925 n02129165 n02129604 n02130308 n02132136 n02133161 n02134084 n02134418 n02137549 n02138441 n02165105 n02165456 n02167151 n02168699 n02169497 n02172182 n02174001 n02177972 n02190166 n02206856 n02219486 n02226429 n02229544 n02231487 n02233338 n02236044 n02256656 n02259212 n02264363 n02268443 n02268853 n02276258 n02277742 n02279972 n02280649 n02281406 n02281787 n02317335 n02319095 n02321529 n02325366 n02326432 n02328150 n02342885 n02346627 n02356798 n02361337 n02363005 n02364673 n02389026 n02391049 n02395406 n02396427 n02397096 n02398521 n02403003 n02408429 n02410509 n02412080 n02415577 n02417914 n02422106 n02422699 n02423022 n02437312 n02437616 n02441942 n02442845 n02443114 n02443484 n02444819 n02445715 n02447366 n02454379 n02457408 n02480495 n02480855 n02481823 n02483362 n02483708 n02484975 n02486261 n02486410 n02487347 n02488291 n02488702 n02489166 n02490219 n02492035 n02492660 n02493509 n02493793 n02494079 n02497673 
n02500267 n02504013 n02504458 n02509815 n02510455 n02514041 n02526121 n02536864 n02606052 n02607072 n02640242 n02641379 n02643566 n02655020 n02666196 n02667093 n02669723 n02672831 n02676566 n02687172 n02690373 n02692877 n02699494 n02701002 n02704792 n02708093 n02727426 n02730930 n02747177 n02749479 n02769748 n02776631 n02777292 n02782093 n02783161 n02786058 n02787622 n02788148 n02790996 n02791124 n02791270 n02793495 n02794156 n02795169 n02797295 n02799071 n02802426 n02804414 n02804610 n02807133 n02808304 n02808440 n02814533 n02814860 n02815834 n02817516 n02823428 n02823750 n02825657 n02834397 n02835271 n02837789 n02840245 n02841315 n02843684 n02859443 n02860847 n02865351 n02869837 n02870880 n02871525 n02877765 n02879718 n02883205 n02892201 n02892767 n02894605 n02895154 n02906734 n02909870 n02910353 n02916936 n02917067 n02927161 n02930766 n02939185 n02948072 n02950826 n02951358 n02951585 n02963159 n02965783 n02966193 n02966687 n02971356 n02974003 n02977058 n02978881 n02979186 n02980441 n02981792 n02988304 n02992211 n02992529 n02999410 n03000134 n03000247 n03000684 n03014705 n03016953 n03017168 n03018349 n03026506 n03028079 n03032252 n03041632 n03042490 n03045698 n03047690 n03062245 n03063599 n03063689 n03065424 n03075370 n03085013 n03089624 n03095699 n03100240 n03109150 n03110669 n03124043 n03124170 n03125729 n03126707 n03127747 n03127925 n03131574 n03133878 n03134739 n03141823 n03146219 n03160309 n03179701 n03180011 n03187595 n03188531 n03196217 n03197337 n03201208 n03207743 n03207941 n03208938 n03216828 n03218198 n03220513 n03223299 n03240683 n03249569 n03250847 n03255030 n03259280 n03271574 n03272010 n03272562 n03290653 n03291819 n03297495 n03314780 n03325584 n03337140 n03344393 n03345487 n03347037 n03355925 n03372029 n03376595 n03379051 n03384352 n03388043 n03388183 n03388549 n03393912 n03394916 n03400231 n03404251 n03417042 n03424325 n03425413 n03443371 n03444034 n03445777 n03445924 n03447447 n03447721 n03450230 n03452741 n03457902 n03459775 n03461385 n03467068 
n03476684 n03476991 n03478589 n03481172 n03482405 n03483316 n03485407 n03485794 n03492542 n03494278 n03495258 n03496892 n03498962 n03527444 n03529860 n03530642 n03532672 n03534580 n03535780 n03538406 n03544143 n03584254 n03584829 n03590841 n03594734 n03594945 n03595614 n03598930 n03599486 n03602883 n03617480 n03623198 n03627232 n03630383 n03633091 n03637318 n03642806 n03649909 n03657121 n03658185 n03661043 n03662601 n03666591 n03670208 n03673027 n03676483 n03680355 n03690938 n03691459 n03692522 n03697007 n03706229 n03709823 n03710193 n03710637 n03710721 n03717622 n03720891 n03721384 n03724870 n03729826 n03733131 n03733281 n03733805 n03742115 n03743016 n03759954 n03761084 n03763968 n03764736 n03769881 n03770439 n03770679 n03773504 n03775071 n03775546 n03776460 n03777568 n03777754 n03781244 n03782006 n03785016 n03786901 n03787032 n03788195 n03788365 n03791053 n03792782 n03792972 n03793489 n03794056 n03796401 n03803284 n03804744 n03814639 n03814906 n03825788 n03832673 n03837869 n03838899 n03840681 n03841143 n03843555 n03854065 n03857828 n03866082 n03868242 n03868863 n03871628 n03873416 n03874293 n03874599 n03876231 n03877472 n03877845 n03884397 n03887697 n03888257 n03888605 n03891251 n03891332 n03895866 n03899768 n03902125 n03903868 n03908618 n03908714 n03916031 n03920288 n03924679 n03929660 n03929855 n03930313 n03930630 n03933933 n03935335 n03937543 n03938244 n03942813 n03944341 n03947888 n03950228 n03954731 n03956157 n03958227 n03961711 n03967562 n03970156 n03976467 n03976657 n03977966 n03980874 n03982430 n03983396 n03991062 n03992509 n03995372 n03998194 n04004767 n04005630 n04008634 n04009552 n04019541 n04023962 n04026417 n04033901 n04033995 n04037443 n04039381 n04040759 n04041544 n04044716 n04049303 n04065272 n04067472 n04069434 n04070727 n04074963 n04081281 n04086273 n04090263 n04099969 n04111531 n04116512 n04118538 n04118776 n04120489 n04125021 n04127249 n04131690 n04133789 n04136333 n04141076 n04141327 n04141975 n04146614 n04147183 n04149813 n04152593 n04153751 
n04154565 n04162706 n04179913 n04192698 n04200800 n04201297 n04204238 n04204347 n04208210 n04209133 n04209239 n04228054 n04229816 n04235860 n04238763 n04239074 n04243546 n04251144 n04252077 n04252225 n04254120 n04254680 n04254777 n04258138 n04259630 n04263257 n04264628 n04265275 n04266014 n04270147 n04273569 n04275548 n04277352 n04285008 n04286575 n04296562 n04310018 n04311004 n04311174 n04317175 n04325704 n04326547 n04328186 n04330267 n04332243 n04335435 n04336792 n04344873 n04346328 n04347754 n04350905 n04355338 n04355933 n04356056 n04357314 n04366367 n04367480 n04370456 n04371430 n04371774 n04372370 n04376876 n04380533 n04389033 n04392985 n04398044 n04399382 n04404412 n04409515 n04417672 n04418357 n04423845 n04428191 n04429376 n04435653 n04442312 n04443257 n04447861 n04456115 n04458633 n04461696 n04462240 n04465501 n04467665 n04476259 n04479046 n04482393 n04483307 n04485082 n04486054 n04487081 n04487394 n04493381 n04501370 n04505470 n04507155 n04509417 n04515003 n04517823 n04522168 n04523525 n04525038 n04525305 n04532106 n04532670 n04536866 n04540053 n04542943 n04548280 n04548362 n04550184 n04552348 n04553703 n04554684 n04557648 n04560804 n04562935 n04579145 n04579432 n04584207 n04589890 n04590129 n04591157 n04591713 n04592741 n04596742 n04597913 n04599235 n04604644 n04606251 n04612504 n04613696 n06359193 n06596364 n06785654 n06794110 n06874185 n07248320 n07565083 n07579787 n07583066 n07584110 n07590611 n07613480 n07614500 n07615774 n07684084 n07693725 n07695742 n07697313 n07697537 n07711569 n07714571 n07714990 n07715103 n07716358 n07716906 n07717410 n07717556 n07718472 n07718747 n07720875 n07730033 n07734744 n07742313 n07745940 n07747607 n07749582 n07753113 n07753275 n07753592 n07754684 n07760859 n07768694 n07802026 n07831146 n07836838 n07860988 n07871810 n07873807 n07875152 n07880968 n07892512 n07920052 n07930864 n07932039 n09193705 n09229709 n09246464 n09256479 n09288635 n09332890 n09399592 n09421951 n09428293 n09468604 n09472597 n09835506 n10148035 n10565667 
n11879895 n11939491 n12057211 n12144580 n12267677 n12620546 n12768682 n12985857 n12998815 n13037406 n13040303 n13044778 n13052670 n13054560 n13133613 n15075141 ================================================ FILE: pretrained_models_pytorch/data/imagenet_synsets.txt ================================================ ????????? dummy class for index 0 n02119789 kit fox, Vulpes macrotis n02100735 English setter n02110185 Siberian husky n02096294 Australian terrier n02102040 English springer, English springer spaniel n02066245 grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus n02509815 lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens n02124075 Egyptian cat n02417914 ibex, Capra ibex n02123394 Persian cat n02125311 cougar, puma, catamount, mountain lion, painter, panther, Felis concolor n02423022 gazelle n02346627 porcupine, hedgehog n02077923 sea lion n02110063 malamute, malemute, Alaskan malamute n02447366 badger n02109047 Great Dane n02089867 Walker hound, Walker foxhound n02102177 Welsh springer spaniel n02091134 whippet n02092002 Scottish deerhound, deerhound n02071294 killer whale, killer, orca, grampus, sea wolf, Orcinus orca n02442845 mink n02504458 African elephant, Loxodonta africana n02092339 Weimaraner n02098105 soft-coated wheaten terrier n02096437 Dandie Dinmont, Dandie Dinmont terrier n02114712 red wolf, maned wolf, Canis rufus, Canis niger n02105641 Old English sheepdog, bobtail n02128925 jaguar, panther, Panthera onca, Felis onca n02091635 otterhound, otter hound n02088466 bloodhound, sleuthhound n02096051 Airedale, Airedale terrier n02117135 hyena, hyaena n02138441 meerkat, mierkat n02097130 giant schnauzer n02493509 titi, titi monkey n02457408 three-toed sloth, ai, Bradypus tridactylus n02389026 sorrel n02443484 black-footed ferret, ferret, Mustela nigripes n02110341 dalmatian, coach dog, carriage dog n02089078 black-and-tan coonhound n02086910 papillon n02445715 skunk, polecat, wood pussy n02093256 
Staffordshire bullterrier, Staffordshire bull terrier n02113978 Mexican hairless n02106382 Bouvier des Flandres, Bouviers des Flandres n02441942 weasel n02113712 miniature poodle n02113186 Cardigan, Cardigan Welsh corgi n02105162 malinois n02415577 bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis n02356798 fox squirrel, eastern fox squirrel, Sciurus niger n02488702 colobus, colobus monkey n02123159 tiger cat n02098413 Lhasa, Lhasa apso n02422699 impala, Aepyceros melampus n02114855 coyote, prairie wolf, brush wolf, Canis latrans n02094433 Yorkshire terrier n02111277 Newfoundland, Newfoundland dog n02132136 brown bear, bruin, Ursus arctos n02119022 red fox, Vulpes vulpes n02091467 Norwegian elkhound, elkhound n02106550 Rottweiler n02422106 hartebeest n02091831 Saluki, gazelle hound n02120505 grey fox, gray fox, Urocyon cinereoargenteus n02104365 schipperke n02086079 Pekinese, Pekingese, Peke n02112706 Brabancon griffon n02098286 West Highland white terrier n02095889 Sealyham terrier, Sealyham n02484975 guenon, guenon monkey n02137549 mongoose n02500267 indri, indris, Indri indri, Indri brevicaudatus n02129604 tiger, Panthera tigris n02090721 Irish wolfhound n02396427 wild boar, boar, Sus scrofa n02108000 EntleBucher n02391049 zebra n02412080 ram, tup n02108915 French bulldog n02480495 orangutan, orang, orangutang, Pongo pygmaeus n02110806 basenji n02128385 leopard, Panthera pardus n02107683 Bernese mountain dog n02085936 Maltese dog, Maltese terrier, Maltese n02094114 Norfolk terrier n02087046 toy terrier n02100583 vizsla, Hungarian pointer n02096177 cairn, cairn terrier n02494079 squirrel monkey, Saimiri sciureus n02105056 groenendael n02101556 clumber, clumber spaniel n02123597 Siamese cat, Siamese n02481823 chimpanzee, chimp, Pan troglodytes n02105505 komondor n02088094 Afghan hound, Afghan n02085782 Japanese spaniel n02489166 proboscis monkey, Nasalis larvatus n02364673 guinea pig, Cavia cobaya n02114548 white wolf, 
Arctic wolf, Canis lupus tundrarum n02134084 ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus n02480855 gorilla, Gorilla gorilla n02090622 borzoi, Russian wolfhound n02113624 toy poodle n02093859 Kerry blue terrier n02403003 ox n02097298 Scotch terrier, Scottish terrier, Scottie n02108551 Tibetan mastiff n02493793 spider monkey, Ateles geoffroyi n02107142 Doberman, Doberman pinscher n02096585 Boston bull, Boston terrier n02107574 Greater Swiss Mountain dog n02107908 Appenzeller n02086240 Shih-Tzu n02102973 Irish water spaniel n02112018 Pomeranian n02093647 Bedlington terrier n02397096 warthog n02437312 Arabian camel, dromedary, Camelus dromedarius n02483708 siamang, Hylobates syndactylus, Symphalangus syndactylus n02097047 miniature schnauzer n02106030 collie n02099601 golden retriever n02093991 Irish terrier n02110627 affenpinscher, monkey pinscher, monkey dog n02106166 Border collie n02326432 hare n02108089 boxer n02097658 silky terrier, Sydney silky n02088364 beagle n02111129 Leonberg n02100236 German short-haired pointer n02486261 patas, hussar monkey, Erythrocebus patas n02115913 dhole, Cuon alpinus n02486410 baboon n02487347 macaque n02099849 Chesapeake Bay retriever n02108422 bull mastiff n02104029 kuvasz n02492035 capuchin, ringtail, Cebus capucinus n02110958 pug, pug-dog n02099429 curly-coated retriever n02094258 Norwich terrier n02099267 flat-coated retriever n02395406 hog, pig, grunter, squealer, Sus scrofa n02112350 keeshond n02109961 Eskimo dog, husky n02101388 Brittany spaniel n02113799 standard poodle n02095570 Lakeland terrier n02128757 snow leopard, ounce, Panthera uncia n02101006 Gordon setter n02115641 dingo, warrigal, warragal, Canis dingo n02097209 standard schnauzer n02342885 hamster n02097474 Tibetan terrier, chrysanthemum dog n02120079 Arctic fox, white fox, Alopex lagopus n02095314 wire-haired fox terrier n02088238 basset, basset hound n02408429 water buffalo, water ox, Asiatic buffalo, Bubalus bubalis n02133161 American black 
bear, black bear, Ursus americanus, Euarctos americanus n02328150 Angora, Angora rabbit n02410509 bison n02492660 howler monkey, howler n02398521 hippopotamus, hippo, river horse, Hippopotamus amphibius n02112137 chow, chow chow n02510455 giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca n02093428 American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier n02105855 Shetland sheepdog, Shetland sheep dog, Shetland n02111500 Great Pyrenees n02085620 Chihuahua n02123045 tabby, tabby cat n02490219 marmoset n02099712 Labrador retriever n02109525 Saint Bernard, St Bernard n02454379 armadillo n02111889 Samoyed, Samoyede n02088632 bluetick n02090379 redbone n02443114 polecat, fitch, foulmart, foumart, Mustela putorius n02361337 marmot n02105412 kelpie n02483362 gibbon, Hylobates lar n02437616 llama n02107312 miniature pinscher n02325366 wood rabbit, cottontail, cottontail rabbit n02091032 Italian greyhound n02129165 lion, king of beasts, Panthera leo n02102318 cocker spaniel, English cocker spaniel, cocker n02100877 Irish setter, red setter n02074367 dugong, Dugong dugon n02504013 Indian elephant, Elephas maximus n02363005 beaver n02102480 Sussex spaniel n02113023 Pembroke, Pembroke Welsh corgi n02086646 Blenheim spaniel n02497673 Madagascar cat, ring-tailed lemur, Lemur catta n02087394 Rhodesian ridgeback n02127052 lynx, catamount n02116738 African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus n02488291 langur n02091244 Ibizan hound, Ibizan Podenco n02114367 timber wolf, grey wolf, gray wolf, Canis lupus n02130308 cheetah, chetah, Acinonyx jubatus n02089973 English foxhound n02105251 briard n02134418 sloth bear, Melursus ursinus, Ursus ursinus n02093754 Border terrier n02106662 German shepherd, German shepherd dog, German police dog, alsatian n02444819 otter n01882714 koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus n01871265 tusker n01872401 echidna, spiny anteater, anteater n01877812 
wallaby, brush kangaroo n01873310 platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus n01883070 wombat n04086273 revolver, six-gun, six-shooter n04507155 umbrella n04147183 schooner n04254680 soccer ball n02672831 accordion, piano accordion, squeeze box n02219486 ant, emmet, pismire n02317335 starfish, sea star n01968897 chambered nautilus, pearly nautilus, nautilus n03452741 grand piano, grand n03642806 laptop, laptop computer n07745940 strawberry n02690373 airliner n04552348 warplane, military plane n02692877 airship, dirigible n02782093 balloon n04266014 space shuttle n03344393 fireboat n03447447 gondola n04273569 speedboat n03662601 lifeboat n02951358 canoe n04612504 yawl n02981792 catamaran n04483307 trimaran n03095699 container ship, containership, container vessel n03673027 liner, ocean liner n03947888 pirate, pirate ship n02687172 aircraft carrier, carrier, flattop, attack aircraft carrier n04347754 submarine, pigboat, sub, U-boat n04606251 wreck n03478589 half track n04389033 tank, army tank, armored combat vehicle, armoured combat vehicle n03773504 missile n02860847 bobsled, bobsleigh, bob n03218198 dogsled, dog sled, dog sleigh n02835271 bicycle-built-for-two, tandem bicycle, tandem n03792782 mountain bike, all-terrain bike, off-roader n03393912 freight car n03895866 passenger car, coach, carriage n02797295 barrow, garden cart, lawn cart, wheelbarrow n04204347 shopping cart n03791053 motor scooter, scooter n03384352 forklift n03272562 electric locomotive n04310018 steam locomotive n02704792 amphibian, amphibious vehicle n02701002 ambulance n02814533 beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon n02930766 cab, hack, taxi, taxicab n03100240 convertible n03594945 jeep, landrover n03670208 limousine, limo n03770679 minivan n03777568 Model T n04037443 racer, race car, racing car n04285008 sports car, sport car n03444034 go-kart n03445924 golfcart, golf cart n03785016 moped n04252225 
snowplow, snowplough n03345487 fire engine, fire truck n03417042 garbage truck, dustcart n03930630 pickup, pickup truck n04461696 tow truck, tow car, wrecker n04467665 trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi n03796401 moving van n03977966 police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria n04065272 recreational vehicle, RV, R.V. n04335435 streetcar, tram, tramcar, trolley, trolley car n04252077 snowmobile n04465501 tractor n03776460 mobile home, manufactured home n04482393 tricycle, trike, velocipede n04509417 unicycle, monocycle n03538406 horse cart, horse-cart n03599486 jinrikisha, ricksha, rickshaw n03868242 oxcart n02804414 bassinet n03125729 cradle n03131574 crib, cot n03388549 four-poster n02870880 bookcase n03018349 china cabinet, china closet n03742115 medicine chest, medicine cabinet n03016953 chiffonier, commode n04380533 table lamp n03337140 file, file cabinet, filing cabinet n03891251 park bench n02791124 barber chair n04429376 throne n03376595 folding chair n04099969 rocking chair, rocker n04344873 studio couch, day bed n04447861 toilet seat n03179701 desk n03982430 pool table, billiard table, snooker table n03201208 dining table, board n03290653 entertainment center n04550184 wardrobe, closet, press n07742313 Granny Smith n07747607 orange n07749582 lemon n07753113 fig n07753275 pineapple, ananas n07753592 banana n07754684 jackfruit, jak, jack n07760859 custard apple n07768694 pomegranate n12267677 acorn n12620546 hip, rose hip, rosehip n13133613 ear, spike, capitulum n11879895 rapeseed n12144580 corn n12768682 buckeye, horse chestnut, conker n03854065 organ, pipe organ n04515003 upright, upright piano n03017168 chime, bell, gong n03249569 drum, membranophone, tympan n03447721 gong, tam-tam n03720891 maraca n03721384 marimba, xylophone n04311174 steel drum n02787622 banjo n02992211 cello, violoncello n04536866 violin, fiddle n03495258 harp n02676566 acoustic guitar n03272010 electric guitar 
n03110669 cornet, horn, trumpet, trump n03394916 French horn, horn n04487394 trombone n03494278 harmonica, mouth organ, harp, mouth harp n03840681 ocarina, sweet potato n03884397 panpipe, pandean pipe, syrinx n02804610 bassoon n03838899 oboe, hautboy, hautbois n04141076 sax, saxophone n03372029 flute, transverse flute n11939491 daisy n12057211 yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum n09246464 cliff, drop, drop-off n09468604 valley, vale n09193705 alp n09472597 volcano n09399592 promontory, headland, head, foreland n09421951 sandbar, sand bar n09256479 coral reef n09332890 lakeside, lakeshore n09428293 seashore, coast, seacoast, sea-coast n09288635 geyser n03498962 hatchet n03041632 cleaver, meat cleaver, chopper n03658185 letter opener, paper knife, paperknife n03954731 plane, carpenter's plane, woodworking plane n03995372 power drill n03649909 lawn mower, mower n03481172 hammer n03109150 corkscrew, bottle screw n02951585 can opener, tin opener n03970156 plunger, plumber's helper n04154565 screwdriver n04208210 shovel n03967562 plow, plough n03000684 chain saw, chainsaw n01514668 cock n01514859 hen n01518878 ostrich, Struthio camelus n01530575 brambling, Fringilla montifringilla n01531178 goldfinch, Carduelis carduelis n01532829 house finch, linnet, Carpodacus mexicanus n01534433 junco, snowbird n01537544 indigo bunting, indigo finch, indigo bird, Passerina cyanea n01558993 robin, American robin, Turdus migratorius n01560419 bulbul n01580077 jay n01582220 magpie n01592084 chickadee n01601694 water ouzel, dipper n01608432 kite n01614925 bald eagle, American eagle, Haliaeetus leucocephalus n01616318 vulture n01622779 great grey owl, great gray owl, Strix nebulosa n01795545 black grouse n01796340 ptarmigan n01797886 ruffed grouse, partridge, Bonasa umbellus n01798484 prairie chicken, prairie grouse, prairie fowl n01806143 peacock n01806567 quail n01807496 partridge n01817953 African grey, African gray, Psittacus 
erithacus n01818515 macaw n01819313 sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita n01820546 lorikeet n01824575 coucal n01828970 bee eater n01829413 hornbill n01833805 hummingbird n01843065 jacamar n01843383 toucan n01847000 drake n01855032 red-breasted merganser, Mergus serrator n01855672 goose n01860187 black swan, Cygnus atratus n02002556 white stork, Ciconia ciconia n02002724 black stork, Ciconia nigra n02006656 spoonbill n02007558 flamingo n02009912 American egret, great white heron, Egretta albus n02009229 little blue heron, Egretta caerulea n02011460 bittern n02012849 crane n02013706 limpkin, Aramus pictus n02018207 American coot, marsh hen, mud hen, water hen, Fulica americana n02018795 bustard n02025239 ruddy turnstone, Arenaria interpres n02027492 red-backed sandpiper, dunlin, Erolia alpina n02028035 redshank, Tringa totanus n02033041 dowitcher n02037110 oystercatcher, oyster catcher n02017213 European gallinule, Porphyrio porphyrio n02051845 pelican n02056570 king penguin, Aptenodytes patagonica n02058221 albatross, mollymawk n01484850 great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias n01491361 tiger shark, Galeocerdo cuvieri n01494475 hammerhead, hammerhead shark n01496331 electric ray, crampfish, numbfish, torpedo n01498041 stingray n02514041 barracouta, snoek n02536864 coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch n01440764 tench, Tinca tinca n01443537 goldfish, Carassius auratus n02526121 eel n02606052 rock beauty, Holocanthus tricolor n02607072 anemone fish n02643566 lionfish n02655020 puffer, pufferfish, blowfish, globefish n02640242 sturgeon n02641379 gar, garfish, garpike, billfish, Lepisosteus osseus n01664065 loggerhead, loggerhead turtle, Caretta caretta n01665541 leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea n01667114 mud turtle n01667778 terrapin n01669191 box turtle, box tortoise n01675722 banded gecko n01677366 common iguana, iguana, Iguana 
iguana n01682714 American chameleon, anole, Anolis carolinensis n01685808 whiptail, whiptail lizard n01687978 agama n01688243 frilled lizard, Chlamydosaurus kingi n01689811 alligator lizard n01692333 Gila monster, Heloderma suspectum n01693334 green lizard, Lacerta viridis n01694178 African chameleon, Chamaeleo chamaeleon n01695060 Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis n01704323 triceratops n01697457 African crocodile, Nile crocodile, Crocodylus niloticus n01698640 American alligator, Alligator mississipiensis n01728572 thunder snake, worm snake, Carphophis amoenus n01728920 ringneck snake, ring-necked snake, ring snake n01729322 hognose snake, puff adder, sand viper n01729977 green snake, grass snake n01734418 king snake, kingsnake n01735189 garter snake, grass snake n01737021 water snake n01739381 vine snake n01740131 night snake, Hypsiglena torquata n01742172 boa constrictor, Constrictor constrictor n01744401 rock python, rock snake, Python sebae n01748264 Indian cobra, Naja naja n01749939 green mamba n01751748 sea snake n01753488 horned viper, cerastes, sand viper, horned asp, Cerastes cornutus n01755581 diamondback, diamondback rattlesnake, Crotalus adamanteus n01756291 sidewinder, horned rattlesnake, Crotalus cerastes n01629819 European fire salamander, Salamandra salamandra n01630670 common newt, Triturus vulgaris n01631663 eft n01632458 spotted salamander, Ambystoma maculatum n01632777 axolotl, mud puppy, Ambystoma mexicanum n01641577 bullfrog, Rana catesbeiana n01644373 tree frog, tree-frog n01644900 tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui n04579432 whistle n04592741 wing n03876231 paintbrush n03483316 hand blower, blow dryer, blow drier, hair dryer, hair drier n03868863 oxygen mask n04251144 snorkel n03691459 loudspeaker, speaker, speaker unit, loudspeaker system, speaker system n03759954 microphone, mike n04152593 screen, CRT screen n03793489 mouse, computer mouse n03271574 electric fan, 
blower n03843555 oil filter n04332243 strainer n04265275 space heater n04330267 stove n03467068 guillotine n02794156 barometer n04118776 rule, ruler n03841143 odometer, hodometer, mileometer, milometer n04141975 scale, weighing machine n02708093 analog clock n03196217 digital clock n04548280 wall clock n03544143 hourglass n04355338 sundial n03891332 parking meter n04328186 stopwatch, stop watch n03197337 digital watch n04317175 stethoscope n04376876 syringe n03706229 magnetic compass n02841315 binoculars, field glasses, opera glasses n04009552 projector n04356056 sunglasses, dark glasses, shades n03692522 loupe, jeweler's loupe n04044716 radio telescope, radio reflector n02879718 bow n02950826 cannon n02749479 assault rifle, assault gun n04090263 rifle n04008634 projectile, missile n03085013 computer keyboard, keypad n04505470 typewriter keyboard n03126707 crane n03666591 lighter, light, igniter, ignitor n02666196 abacus n02977058 cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM n04238763 slide rule, slipstick n03180011 desktop computer n03485407 hand-held computer, hand-held microcomputer n03832673 notebook, notebook computer n06359193 web site, website, internet site, site n03496892 harvester, reaper n04428191 thresher, thrasher, threshing machine n04004767 printer n04243546 slot, one-armed bandit n04525305 vending machine n04179913 sewing machine n03602883 joystick n04372370 switch, electric switch, electrical switch n03532672 hook, claw n02974003 car wheel n03874293 paddlewheel, paddle wheel n03944341 pinwheel n03992509 potter's wheel n03425413 gas pump, gasoline pump, petrol pump, island dispenser n02966193 carousel, carrousel, merry-go-round, roundabout, whirligig n04371774 swing n04067472 reel n04040759 radiator n04019541 puck, hockey puck n03492542 hard disc, hard disk, fixed disk n04355933 sunglass n03929660 pick, plectrum, plectron n02965783 car mirror n04258138 solar dish, solar 
collector, solar furnace n04074963 remote control, remote n03208938 disk brake, disc brake n02910353 buckle n03476684 hair slide n03627232 knot n03075370 combination lock n03874599 padlock n03804744 nail n04127249 safety pin n04153751 screw n03803284 muzzle n04162706 seat belt, seatbelt n04228054 ski n02948072 candle, taper, wax light n03590841 jack-o'-lantern n04286575 spotlight, spot n04456115 torch n03814639 neck brace n03933933 pier n04485082 tripod n03733131 maypole n03794056 mousetrap n04275548 spider web, spider's web n01768244 trilobite n01770081 harvestman, daddy longlegs, Phalangium opilio n01770393 scorpion n01773157 black and gold garden spider, Argiope aurantia n01773549 barn spider, Araneus cavaticus n01773797 garden spider, Aranea diademata n01774384 black widow, Latrodectus mactans n01774750 tarantula n01775062 wolf spider, hunting spider n01776313 tick n01784675 centipede n01990800 isopod n01978287 Dungeness crab, Cancer magister n01978455 rock crab, Cancer irroratus n01980166 fiddler crab n01981276 king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica n01983481 American lobster, Northern lobster, Maine lobster, Homarus americanus n01984695 spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish n01985128 crayfish, crawfish, crawdad, crawdaddy n01986214 hermit crab n02165105 tiger beetle n02165456 ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle n02167151 ground beetle, carabid beetle n02168699 long-horned beetle, longicorn, longicorn beetle n02169497 leaf beetle, chrysomelid n02172182 dung beetle n02174001 rhinoceros beetle n02177972 weevil n02190166 fly n02206856 bee n02226429 grasshopper, hopper n02229544 cricket n02231487 walking stick, walkingstick, stick insect n02233338 cockroach, roach n02236044 mantis, mantid n02256656 cicada, cicala n02259212 leafhopper n02264363 lacewing, lacewing fly n02268443 dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, 
snake doctor, mosquito hawk, skeeter hawk n02268853 damselfly n02276258 admiral n02277742 ringlet, ringlet butterfly n02279972 monarch, monarch butterfly, milkweed butterfly, Danaus plexippus n02280649 cabbage butterfly n02281406 sulphur butterfly, sulfur butterfly n02281787 lycaenid, lycaenid butterfly n01910747 jellyfish n01914609 sea anemone, anemone n01917289 brain coral n01924916 flatworm, platyhelminth n01930112 nematode, nematode worm, roundworm n01943899 conch n01944390 snail n01945685 slug n01950731 sea slug, nudibranch n01955084 chiton, coat-of-mail shell, sea cradle, polyplacophore n02319095 sea urchin n02321529 sea cucumber, holothurian n03584829 iron, smoothing iron n03297495 espresso maker n03761084 microwave, microwave oven n03259280 Dutch oven n04111531 rotisserie n04442312 toaster n04542943 waffle iron n04517823 vacuum, vacuum cleaner n03207941 dishwasher, dish washer, dishwashing machine n04070727 refrigerator, icebox n04554684 washer, automatic washer, washing machine n03133878 Crock Pot n03400231 frying pan, frypan, skillet n04596742 wok n02939185 caldron, cauldron n03063689 coffeepot n04398044 teapot n04270147 spatula n02699494 altar n04486054 triumphal arch n03899768 patio, terrace n04311004 steel arch bridge n04366367 suspension bridge n04532670 viaduct n02793495 barn n03457902 greenhouse, nursery, glasshouse n03877845 palace n03781244 monastery n03661043 library n02727426 apiary, bee house n02859443 boathouse n03028079 church, church building n03788195 mosque n04346328 stupa, tope n03956157 planetarium n04081281 restaurant, eating house, eating place, eatery n03032252 cinema, movie theater, movie theatre, movie house, picture palace n03529860 home theater, home theatre n03697007 lumbermill, sawmill n03065424 coil, spiral, volute, whorl, helix n03837869 obelisk n04458633 totem pole n02980441 castle n04005630 prison, prison house n03461385 grocery store, grocery, food market, market n02776631 bakery, bakeshop, bakehouse n02791270 barbershop 
n02871525 bookshop, bookstore, bookstall n02927161 butcher shop, meat market n03089624 confectionery, confectionary, candy store n04200800 shoe shop, shoe-shop, shoe store n04443257 tobacco shop, tobacconist shop, tobacconist n04462240 toyshop n03388043 fountain n03042490 cliff dwelling n04613696 yurt n03216828 dock, dockage, docking facility n02892201 brass, memorial tablet, plaque n03743016 megalith, megalithic structure n02788148 bannister, banister, balustrade, balusters, handrail n02894605 breakwater, groin, groyne, mole, bulwark, seawall, jetty n03160309 dam, dike, dyke n03000134 chainlink fence n03930313 picket fence, paling n04604644 worm fence, snake fence, snake-rail fence, Virginia fence n04326547 stone wall n03459775 grille, radiator grille n04239074 sliding door n04501370 turnstile n03792972 mountain tent n04149813 scoreboard n03530642 honeycomb n03961711 plate rack n03903868 pedestal, plinth, footstall n02814860 beacon, lighthouse, beacon light, pharos n07711569 mashed potato n07720875 bell pepper n07714571 head cabbage n07714990 broccoli n07715103 cauliflower n07716358 zucchini, courgette n07716906 spaghetti squash n07717410 acorn squash n07717556 butternut squash n07718472 cucumber, cuke n07718747 artichoke, globe artichoke n07730033 cardoon n07734744 mushroom n04209239 shower curtain n03594734 jean, blue jean, denim n02971356 carton n03485794 handkerchief, hankie, hanky, hankey n04133789 sandal n02747177 ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin n04125021 safe n07579787 plate n03814906 necklace n03134739 croquet ball n03404251 fur coat n04423845 thimble n03877472 pajama, pyjama, pj's, jammies n04120489 running shoe n03062245 cocktail shaker n03014705 chest n03717622 manhole cover n03777754 modem n04493381 tub, vat n04476259 tray n02777292 balance beam, beam n07693725 bagel, beigel n03998194 prayer rug, prayer mat n03617480 kimono n07590611 hot pot, hotpot n04579145 whiskey jug n03623198 
knee pad n07248320 book jacket, dust cover, dust jacket, dust wrapper n04277352 spindle n04229816 ski mask n02823428 beer bottle n03127747 crash helmet n02877765 bottlecap n04435653 tile roof n03724870 mask n03710637 maillot n03920288 Petri dish n03379051 football helmet n02807133 bathing cap, swimming cap n04399382 teddy, teddy bear n03527444 holster n03983396 pop bottle, soda bottle n03924679 photocopier n04532106 vestment n06785654 crossword puzzle, crossword n03445777 golf ball n07613480 trifle n04350905 suit, suit of clothes n04562935 water tower n03325584 feather boa, boa n03045698 cloak n07892512 red wine n03250847 drumstick n04192698 shield, buckler n03026506 Christmas stocking n03534580 hoopskirt, crinoline n07565083 menu n04296562 stage n02869837 bonnet, poke bonnet n07871810 meat loaf, meatloaf n02799071 baseball n03314780 face powder n04141327 scabbard n04357314 sunscreen, sunblock, sun blocker n02823750 beer glass n13052670 hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa n07583066 guacamole n03637318 lampshade, lamp shade n04599235 wool, woolen, woollen n07802026 hay n02883205 bow tie, bow-tie, bowtie n03709823 mailbag, postbag n04560804 water jug n02909870 bucket, pail n03207743 dishrag, dishcloth n04263257 soup bowl n07932039 eggnog n03786901 mortar n04479046 trench coat n03873416 paddle, boat paddle n02999410 chain n04367480 swab, swob, mop n03775546 mixing bowl n07875152 potpie n04591713 wine bottle n04201297 shoji n02916936 bulletproof vest n03240683 drilling platform, offshore rig n02840245 binder, ring-binder n02963159 cardigan n04370456 sweatshirt n03991062 pot, flowerpot n02843684 birdhouse n03482405 hamper n03942813 ping-pong ball n03908618 pencil box, pencil case n03902125 pay-phone, pay-station n07584110 consomme n02730930 apron n04023962 punching bag, punch bag, punching ball, punchball n02769748 backpack, back pack, knapsack, packsack, rucksack, haversack n10148035 groom, bridegroom n02817516 bearskin, busby, 
shako n03908714 pencil sharpener n02906734 broom n03788365 mosquito net n02667093 abaya n03787032 mortarboard n03980874 poncho n03141823 crutch n03976467 Polaroid camera, Polaroid Land camera n04264628 space bar n07930864 cup n04039381 racket, racquet n06874185 traffic light, traffic signal, stoplight n04033901 quill, quill pen n04041544 radio, wireless n07860988 dough n03146219 cuirass n03763968 military uniform n03676483 lipstick, lip rouge n04209133 shower cap n03782006 monitor n03857828 oscilloscope, scope, cathode-ray oscilloscope, CRO n03775071 mitten n02892767 brassiere, bra, bandeau n07684084 French loaf n04522168 vase n03764736 milk can n04118538 rugby ball n03887697 paper towel n13044778 earthstar n03291819 envelope n03770439 miniskirt, mini n03124170 cowboy hat, ten-gallon hat n04487081 trolleybus, trolley coach, trackless trolley n03916031 perfume, essence n02808440 bathtub, bathing tub, bath, tub n07697537 hotdog, hot dog, red hot n12985857 coral fungus n02917067 bullet train, bullet n03938244 pillow n15075141 toilet tissue, toilet paper, bathroom tissue n02978881 cassette n02966687 carpenter's kit, tool kit n03633091 ladle n13040303 stinkhorn, carrion fungus n03690938 lotion n03476991 hair spray n02669723 academic gown, academic robe, judge's robe n03220513 dome n03127925 crate n04584207 wig n07880968 burrito n03937543 pill bottle n03000247 chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour n04418357 theater curtain, theatre curtain n04590129 window shade n02795169 barrel, cask n04553703 washbasin, handbasin, washbowl, lavabo, wash-hand basin n02783161 ballpoint, ballpoint pen, ballpen, Biro n02802426 basketball n02808304 bath towel n03124043 cowboy boot n03450230 gown n04589890 window screen n12998815 agaric n02992529 cellular telephone, cellular phone, cellphone, cell, mobile phone n03825788 nipple n02790996 barbell n03710193 mailbox, letter box n03630383 lab coat, laboratory coat n03347037 fire screen, fireguard 
n03769881 minibus n03871628 packet n03733281 maze, labyrinth n03976657 pole n03535780 horizontal bar, high bar n04259630 sombrero n03929855 pickelhaube n04049303 rain barrel n04548362 wallet, billfold, notecase, pocketbook n02979186 cassette player n06596364 comic book n03935335 piggy bank, penny bank n06794110 street sign n02825657 bell cote, bell cot n03388183 fountain pen n04591157 Windsor tie n04540053 volleyball n03866082 overskirt n04136333 sarong n04026417 purse n02865351 bolo tie, bolo, bola tie, bola n02834397 bib n03888257 parachute, chute n04235860 sleeping bag n04404412 television, television system n04371430 swimming trunks, bathing trunks n03733805 measuring cup n07920052 espresso n07873807 pizza, pizza pie n02895154 breastplate, aegis, egis n04204238 shopping basket n04597913 wooden spoon n04131690 saltshaker, salt shaker n07836838 chocolate sauce, chocolate syrup n09835506 ballplayer, baseball player n03443371 goblet n13037406 gyromitra n04336792 stretcher n04557648 water bottle n03187595 dial telephone, dial phone n04254120 soap dispenser n03595614 jersey, T-shirt, tee shirt n04146614 school bus n03598930 jigsaw puzzle n03958227 plastic bag n04069434 reflex camera n03188531 diaper, nappy, napkin n02786058 Band Aid n07615774 ice lolly, lolly, lollipop, popsicle n04525038 velvet n04409515 tennis ball n03424325 gasmask, respirator, gas helmet n03223299 doormat, welcome mat n03680355 Loafer n07614500 ice cream, icecream n07695742 pretzel n04033995 quilt, comforter, comfort, puff n03710721 maillot, tank suit n04392985 tape player n03047690 clog, geta, patten, sabot n03584254 iPod n13054560 bolete n10565667 scuba diver n03950228 pitcher, ewer n03729826 matchstick n02837789 bikini, two-piece n04254777 sock n02988304 CD player n03657121 lens cap, lens cover n04417672 thatch, thatched roof n04523525 vault n02815834 beaker n09229709 bubble n07697313 cheeseburger n03888605 parallel bars, bars n03355925 flagpole, flagstaff n03063599 coffee mug n04116512 rubber 
eraser, rubber, pencil eraser n04325704 stole n07831146 carbonara n03255030 dumbbell n00001740 entity n00001930 physical entity n00020827 matter n00020090 substance n00021265 food, nutrient n07566340 foodstuff, food product n07566863 starches n07710616 potato, white potato, Irish potato, murphy, spud, tater n07679356 bread, breadstuff, staff of life n07683786 loaf of bread, loaf n07681926 cracker n07680932 bun, roll n07809096 ingredient, fixings n07809368 flavorer, flavourer, flavoring, flavouring, seasoner, seasoning n07810907 condiment n07582609 dip n07829412 sauce n07838233 spaghetti sauce, pasta sauce n07882497 concoction, mixture, intermixture n07560652 fare n07570720 nutriment, nourishment, nutrition, sustenance, aliment, alimentation, victuals n07557434 dish n07588947 stew n07583197 soup n07712382 snack food n07695965 sandwich n07697100 hamburger, beefburger, burger n07556970 course n07579575 entree, main course n07609840 dessert, sweet, afters n07611358 frozen dessert n07612996 pudding, pud n07800091 feed, provender n07800740 fodder n07881800 beverage, drink, drinkable, potable n07929519 coffee, java n07884567 alcohol, alcoholic drink, alcoholic beverage, intoxicant, inebriant n07891726 wine, vino n07911371 mixed drink n07930554 punch n14778436 agent n03247620 drug n03248958 drug of abuse, street drug n03740161 medicine, medication, medicament, medicinal drug n03994008 powder n00019613 substance n14580897 material, stuff n14974264 paper n15074962 tissue, tissue paper n14939900 fluid n14940386 liquid n15046900 solid n07555863 food, solid food n07622061 baked goods n07705711 produce, green goods, green groceries, garden truck n07705931 edible fruit n07739125 apple n07739506 eating apple, dessert apple n07742704 berry n07747055 citrus, citrus fruit, citrous fruit n07707451 vegetable, veggie, veg n07710007 solanaceous vegetable n07720442 pepper n07720615 sweet pepper n07710283 root vegetable n07713395 cruciferous vegetable n07713895 cabbage, chou n07715561 
squash n07715721 summer squash n07717070 winter squash n00007347 causal agent, cause, causal agency n00007846 person, individual, someone, somebody, mortal, soul n09613191 contestant n10439851 player, participant n09820263 athlete, jock n10072708 explorer, adventurer n10019552 diver, frogman, underwater diver n09626238 peer, equal, match, compeer n09816771 associate n10401829 participant n00002684 object, physical object n09287968 geological formation, formation n09366017 natural depression, depression n09366317 natural elevation, elevation n09359803 mountain, mount n09409512 ridge n09214060 bar n09406793 reef n09433442 shore n09443453 spring, fountain, outflow, outpouring, natural spring n00027167 location n08620061 point n08578706 geographic point, geographical point n04602044 workplace, work n00003553 whole, unit n00019128 natural object n09349797 mechanism n09214581 barrier n13086908 plant part, plant structure n13087625 plant organ n11675842 reproductive structure n13134947 fruit n13135832 seed n11689483 oilseed, oil-rich seed n12156819 grain, caryopsis n12157056 kernel n13138842 pome, false fruit n00004258 living thing, animate thing n00004475 organism, being n00017222 plant, flora, plant life n13083586 vascular plant, tracheophyte n11552386 spermatophyte, phanerogam, seed plant n11665372 angiosperm, flowering plant n11669921 flower n12041446 orchid, orchidaceous plant n12056217 lady's slipper, lady-slipper, ladies' slipper, slipper orchid n12992868 fungus n12997654 basidiomycete, basidiomycetous fungi n00015388 animal, animate being, beast, brute, creature, fauna n01905661 invertebrate n01767661 arthropod n01769347 arachnid, arachnoid n01772222 spider n01776192 acarine n01974773 crustacean n01975687 malacostracan crustacean n01976146 decapod crustacean, decapod n01976957 crab n01982650 lobster n01983048 true lobster n02159955 insect n02164464 beetle n02171453 lamellicorn beetle n02171869 scarabaeid beetle, scarabaeid, scarabaean n02188699 dipterous insect, 
two-winged insects, dipteran, dipteron n02206270 hymenopterous insect, hymenopteran, hymenopteron, hymenopter n02226183 orthopterous insect, orthopteron, orthopteran n02231052 phasmid, phasmid insect n02232951 dictyopterous insect n02246011 homopterous insect, homopteran n02263378 neuropteron, neuropteran, neuropterous insect n02268148 odonate n02274024 lepidopterous insect, lepidopteron, lepidopteran n02274259 butterfly n02274822 nymphalid, nymphalid butterfly, brush-footed butterfly, four-footed butterfly n02279637 danaid, danaid butterfly n02280458 pierid, pierid butterfly n01909422 coelenterate, cnidarian n01914163 anthozoan, actinozoan n01915811 coral n01916925 stony coral, madrepore, madriporian coral n01922303 worm n01940736 mollusk, mollusc, shellfish n01942177 gastropod, univalve n01968315 cephalopod, cephalopod mollusk n02316707 echinoderm n01317541 domestic animal, domesticated animal n02121808 domestic cat, house cat, Felis domesticus, Felis catus n02084071 dog, domestic dog, Canis familiaris n02087122 hunting dog n02098550 sporting dog, gun dog n02099997 pointer, Spanish pointer n02100399 setter n02101108 spaniel n02102605 water spaniel n02101861 springer spaniel, springer n02099029 retriever n02092468 terrier n02095050 fox terrier n02095412 wirehair, wirehaired terrier, wire-haired terrier n02095727 Welsh terrier n02096756 schnauzer n02093056 bullterrier, bull terrier n02087551 hound, hound dog n02090475 wolfhound n02088839 coonhound n02089555 foxhound n02090827 greyhound n02112826 corgi, Welsh corgi n02113335 poodle, poodle dog n02112497 griffon, Brussels griffon, Belgian griffon n02103406 working dog n02104523 shepherd dog, sheepdog, sheep dog n02104882 Belgian sheepdog, Belgian shepherd n02103841 watchdog, guard dog n02106966 pinscher n02109811 sled dog, sledge dog n02107420 Sennenhunde n02108254 mastiff n02108672 bulldog, English bulldog n02111626 spitz n02085374 toy dog, toy n02086346 toy spaniel n02086478 English toy spaniel n02152991 game 
n02153203 game bird n01795088 grouse n01802721 phasianid n01803078 pheasant n01805801 peafowl, bird of Juno n02384858 racer n01466257 chordate n01471682 vertebrate, craniate n01861778 mammal, mammalian n01886756 placental, placental mammal, eutherian, eutherian mammal n02323449 lagomorph, gnawing mammal n02323902 leporid, leporid mammal n02324045 rabbit, coney, cony n02469914 primate n02496913 lemur n02470325 ape n02470899 anthropoid ape n02480153 great ape, pongid n02483092 lesser ape n02484322 monkey n02489589 New World monkey, platyrrhine, platyrrhinian n02484473 Old World monkey, catarrhine n02503127 proboscidean, proboscidian n02503517 elephant n02075296 carnivore n02131653 bear n02441326 musteline mammal, mustelid, musteline n02507649 procyonid n02134971 viverrine, viverrine mammal n02083346 canine, canid n02115335 wild dog n02118333 fox n02114100 wolf n02120997 feline, felid n02121620 cat, true cat n02124623 wildcat n02127808 big cat, cat n02062017 aquatic mammal n02073250 sea cow, sirenian mammal, sirenian n02075927 pinniped mammal, pinniped, pinnatiped n02076196 seal n02076779 eared seal n02062430 cetacean, cetacean mammal, blower n02062744 whale n02066707 toothed whale n02068974 dolphin n02063224 baleen whale, whalebone whale n02370806 ungulate, hoofed mammal n02373336 odd-toed ungulate, perissodactyl, perissodactyl mammal n02374149 equine, equid n02374451 horse, Equus caballus n02394477 even-toed ungulate, artiodactyl, artiodactyl mammal n02437136 camel n02399000 ruminant n02401031 bovid n02411705 sheep n02419796 antelope n02414578 wild sheep n02415435 mountain sheep n02416519 goat, caprine animal n02417534 wild goat n02407959 Old World buffalo, buffalo n02402010 bovine n02402425 cattle, cows, kine, oxen, Bos taurus n02395003 swine n02453611 edentate n02456962 sloth, tree sloth n02453108 pachyderm n02329401 rodent, gnawer n02364520 cavy n02355227 squirrel n02355477 tree squirrel n01871543 prototherian n01871875 monotreme, egg-laying mammal n01873982 
metatherian n01874434 marsupial, pouched mammal n01881171 phalanger, opossum, possum n01877134 kangaroo n01503061 bird n01517565 ratite, ratite bird, flightless bird n01524359 passerine, passeriform bird n01525720 oscine, oscine bird n01529672 finch n01537134 bunting n01557185 thrush n01560105 nightingale, Luscinia megarhynchos n01578575 corvine bird n01591697 titmouse, tit n01604330 bird of prey, raptor, raptorial bird n01605630 hawk n01613294 eagle, bird of Jove n01621127 owl, bird of Minerva, bird of night, hooter n01789386 gallinaceous bird, gallinacean n01816887 parrot n01819115 cockatoo n01820348 lory n01822602 cuculiform bird n01823013 cuckoo n01825930 coraciiform bird n01831712 apodiform bird n01838038 piciform bird n01844917 aquatic bird n01845132 waterfowl, water bird, waterbird n01845477 anseriform bird n01846331 duck n01852861 sea duck n01854415 merganser, fish duck, sawbill, sheldrake n01858441 swan n02000954 wading bird, wader n02002075 stork n02008041 heron n02008796 egret n02014941 rail n02018027 coot n02022684 shorebird, shore bird, limicoline bird n02023341 plover n02025043 turnstone n02026059 sandpiper n02031934 snipe n02016358 gallinule, marsh hen, water hen, swamphen n02016956 purple gallinule n02021795 seabird, sea bird, seafowl n02051474 pelecaniform seabird n02055658 sphenisciform seabird n02055803 penguin n02057731 pelagic bird, oceanic bird n01661091 reptile, reptilian n01661592 anapsid, anapsid reptile n01662622 chelonian, chelonian reptile n01662784 turtle n01663401 sea turtle, marine turtle n01661818 diapsid, diapsid reptile n01674216 saurian n01674464 lizard n01674990 gecko n01676755 iguanid, iguanid lizard n01685439 teiid lizard, teiid n01687665 agamid, agamid lizard n01689411 anguid lizard n01691951 venomous lizard n01692864 lacertid lizard, lacertid n01693783 chameleon, chamaeleon n01694709 monitor, monitor lizard, varan n01695681 archosaur, archosaurian, archosaurian reptile n01699831 dinosaur n01700470 ornithischian, ornithischian 
dinosaur n01703569 ceratopsian, horned dinosaur n01696633 crocodilian reptile, crocodilian n01698434 alligator, gator n01697178 crocodile n01726692 snake, serpent, ophidian n01727646 colubrid snake, colubrid n01741562 constrictor n01741943 boa n01743605 python n01745125 elapid, elapid snake n01747885 cobra n01749582 mamba n01749742 black mamba, Dendroaspis augusticeps n01752165 viper n01753959 pit viper n01754876 rattlesnake, rattler n01627424 amphibian n01639765 frog, toad, toad frog, anuran, batrachian, salientian n01640846 true frog, ranid n01629276 salamander n01630284 newt, triton n01632047 ambystomid, ambystomid salamander n01473806 aquatic vertebrate n02512053 fish n01480516 cartilaginous fish, chondrichthian n01482071 elasmobranch, selachian n01482330 shark n01483522 mackerel shark n01488918 requiem shark n01495701 ray n02512938 food fish n02534734 salmon n02514825 bony fish n02528163 teleost fish, teleost, teleostan n01428580 soft-finned fish, malacopterygian n01438208 cypriniform fish n01439121 cyprinid, cyprinid fish n02534559 salmonid n02552171 spiny-finned fish, acanthopterygian n02554730 percoid fish, percoid, percoidean n02605316 butterfly fish n02606384 damselfish, demoiselle n02642107 scorpaenoid, scorpaenoid fish n02642644 scorpaenid, scorpaenid fish n02652668 plectognath, plectognath fish n02638596 ganoid, ganoid fish n00021939 artifact, artefact n03575240 instrumentality, instrumentation n03183080 device n03800933 musical instrument, instrument n03614532 keyboard instrument n03928116 piano, pianoforte, forte-piano n03915437 percussion instrument, percussive instrument n04338517 stringed instrument n02880546 bowed stringed instrument, string n03025886 chordophone n03467517 guitar n04586932 wind instrument, wind n02891788 brass, brass instrument n03393324 free-reed instrument n03945615 pipe n04598582 woodwind, woodwind instrument, wood n02817799 beating-reed instrument, reed instrument, reed n03228016 double-reed instrument, double reed n04222847 
single-reed instrument, single-reed woodwind n02676261 acoustic device n02688443 airfoil, aerofoil, control surface, surface n02730265 applicator, applier n02855089 blower n02895606 breathing device, breathing apparatus, breathing machine, ventilator n03269401 electrical device n04470953 transducer n03274561 electro-acoustic transducer n03277771 electronic device n03211117 display, video display n03320046 fan n03339643 filter n03508101 heater, warmer n03574816 instrument n03575691 instrument of execution n03733925 measuring instrument, measuring system, measuring device n03735637 measuring stick, measure, measuring rod n03753077 meter n04437953 timepiece, timekeeper, horologe n04555897 watch, ticker n03046257 clock n04134632 sandglass n04438304 timer n03739693 medical instrument n03813176 navigational instrument n03080497 compass n03852280 optical instrument n04272054 spectacles, specs, eyeglasses, glasses n04147495 scientific instrument n03709206 magnifier n03760671 microscope n03667829 light microscope n03484931 hand glass, simple microscope, magnifying glass n04403638 telescope, scope n02751295 astronomical telescope n04565375 weapon, arm, weapon system n03467984 gun n03343853 firearm, piece, small-arm n02759963 autoloader, self-loader n02760429 automatic firearm, automatic gun, automatic weapon n02760855 automatic rifle, automatic, machine rifle n03701391 machine gun n03948459 pistol, handgun, side arm, shooting iron n03614007 keyboard n03664675 lifting device n03699975 machine n03997484 power tool n03996145 power saw, saw, sawing machine n02938886 calculator, calculating machine n03082979 computer, computing machine, computing device, data processor, electronic computer, information processing system n02708224 analog computer, analogue computer n03196324 digital computer n03918480 personal computer, PC, microcomputer n03985232 portable computer n03322940 farm machine n04004475 printer, printing machine n04243941 slot machine, coin machine n04417180 textile 
machine n03738472 mechanism n03096960 control, controller n03736970 mechanical device n03700963 machine, simple machine n04574999 wheel n04021798 pump n04088797 ride n04586421 winder n04110955 rotating mechanism n03032811 circle, round n03208556 disk, disc n03744840 memory device, storage device n03706653 magnetic disk, magnetic disc, disk, disc n03851341 optical device n03656484 lens, lense, lens system n03099771 converging lens, convex lens n04069276 reflector n03773035 mirror n04081844 restraint, constraint n02889425 brake n02891188 brake system, brakes n03551084 hydraulic brake, hydraulic brakes n03323703 fastener, fastening, holdfast, fixing n03043958 clip n03682487 lock n03940256 pin n04125853 safety belt, life belt, safety harness n04120093 runner n04217718 signaling device n04263760 source of illumination n03636248 lamp n03640988 lantern n03665366 light, light source n04336034 strengthener, reinforcement n02887209 brace n04359589 support n04038440 rack, stand n04341414 structural member n04515129 upright, vertical n03988170 post n04474466 trap n04568557 web, entanglement n03563967 implement n04451818 tool n03154446 cutting implement n03154073 cutter, cutlery, cutting tool n03265032 edge tool n02764044 ax, axe n03623556 knife n03239726 drill n03418242 garden tool, lawn tool n03489162 hand tool n03848348 opener n02877962 bottle opener n04516672 utensil n03621049 kitchen utensil n03101986 cooking utensil, cookware n03101156 cooker n03880531 pan, cooking pan n03990474 pot n04500060 turner, food turner n03039947 cleaning implement, cleaning device, cleaning equipment n02908217 brush n03294833 eraser n04185071 sharpener n03837422 oar n04100174 rod n04608567 writing implement n03906997 pen n04317420 stick n04296261 staff n02788689 bar n03659292 lever n03613592 key n04285622 sports implement n03094503 container n04576211 wheeled vehicle n02834778 bicycle, bike, wheel, cycle n02959942 car, railcar, railway car, railroad car n03484083 handcart, pushcart, cart, 
go-cart n04170037 self-propelled vehicle n02740533 armored vehicle, armoured vehicle n03684823 locomotive, engine, locomotive engine, railway locomotive n03791235 motor vehicle, automotive vehicle n02958343 car, auto, automobile, machine, motorcar n03790512 motorcycle, bike n03769722 minibike, motorbike n04490091 truck, motortruck n04520170 van n03896419 passenger van n04464852 tracked vehicle n04467099 trailer, house trailer n04543158 wagon, waggon n02970849 cart n02801938 basket, handbasket n02773037 bag n04284002 spoon n04139859 savings bank, coin bank, money box, bank n03206908 dish n02880940 bowl n02839910 bin n04183329 shaker n04531098 vessel n02801525 basin n03593526 jar n02876657 bottle n03603722 jug n04388743 tank, storage tank n04078574 reservoir n03035510 cistern n03241496 drinking vessel n03797390 mug n02946921 can, tin, tin can n03438257 glass, drinking glass n04060904 receptacle n03210683 dispenser n03871083 package, parcel n02883344 box n04340750 strongbox, deedbox n03733644 measure n02974697 case n03294048 equipment n04285146 sports equipment n03446832 golf equipment n02799897 baseball equipment n03472232 gymnastic apparatus, exerciser n02802721 basketball equipment n03134853 croquet equipment n04571292 weight, free weight, exercising weight n03414162 game equipment n03413828 game n04028315 puzzle n02778669 ball n03926148 photographic equipment n02942699 camera, photographic camera n03430959 gear, paraphernalia, appurtenance n03619396 kit, outfit n04091097 rig n03241093 drill rig, drilling rig, oilrig, oil rig n04137444 satellite, artificial satellite, orbiter n04264914 spacecraft, ballistic capsule, space vehicle n03278248 electronic equipment n02757462 audio system, sound system n04077430 reproducer n04315948 stereo, stereophony, stereo system, stereophonic system n03916720 peripheral, computer peripheral, peripheral device n03163973 data input device, input device n04401088 telephone, phone, telephone set n04044498 radiotelephone, radiophone, 
wireless telephone n02727825 apparatus, setup n03257586 duplicator, copier n04077734 rescue equipment n04447443 toiletry, toilet articles n03128519 cream, ointment, emollient n03113152 cosmetic n03714235 makeup, make-up, war paint n03100490 conveyance, transport n04524313 vehicle n03125870 craft n02686568 aircraft n03510583 heavier-than-air craft n02691156 airplane, aeroplane, plane n03666917 lighter-than-air craft n04530566 vessel, watercraft n02858304 boat n03790230 motorboat, powerboat n04158807 sea boat n04244997 small boat n04128837 sailing vessel, sailing ship n04128499 sailboat, sailing boat n04194289 ship n02965300 cargo ship, cargo vessel n03896103 passenger ship n04552696 warship, war vessel, combat ship n04348184 submersible, submersible warship n03764276 military vehicle n04099429 rocket, projectile n04235291 sled, sledge, sleigh n03678362 litter n04019101 public transport n04468005 train, railroad train n03896233 passenger train n02924116 bus, autobus, coach, charabanc, double-decker, jitney, motorbus, motorcoach, omnibus, passenger vehicle n03091374 connection, connexion, connector, connecter, connective n02755352 attachment, bond n03664943 ligament n03405265 furnishing n03405725 furniture, piece of furniture, article of furniture n02766320 baby bed, baby's bed n02821943 bedroom furniture n02818832 bed n02933112 cabinet n03015254 chest of drawers, chest, bureau, dresser n03636649 lamp n03842156 office furniture n04161981 seat n02828884 bench n03001627 chair n03002210 chair of state n04256520 sofa, couch, lounge n03100346 convertible, sofa bed n04379243 table n04379964 table n04549122 wall unit n04118021 rug, carpet, carpeting n03151077 curtain, drape, drapery, mantle, pall n06254669 medium n06263609 print media n06263369 press, public press n06595351 magazine, mag n04377057 system n03078287 communication system n04400289 telecommunication system, telecom system, telecommunication equipment, telecom equipment n04341686 structure, construction n02733524 
arch n02735688 area n02898711 bridge, span n02913152 building, edifice n03322570 farm building n03544360 house n04079244 residence n04073948 religious residence, cloister n03859280 outbuilding n04187547 shed n03953416 place of worship, house of prayer, house of God, house of worship n04210390 shrine n04417809 theater, theatre, house n02914991 building complex, complex n03956922 plant, works, industrial plant n03316406 factory, mill, manufacturing plant, manufactory n03074380 column, pillar n03171356 defensive structure, defense, defence n03385557 fortification, munition n03297735 establishment n03574555 institution n03907654 penal institution, penal facility n03111690 correctional institution n03953020 place of business, business establishment n03748162 mercantile establishment, retail store, sales outlet, outlet n03722288 marketplace, market place, mart, market n04202417 shop, store n03546340 housing, lodging, living accommodations n03259505 dwelling, home, domicile, abode, habitation, dwelling house n03638321 landing, landing place n03743902 memorial, monument n03839993 obstruction, obstructor, obstructer, impediment, impedimenta n02796623 barrier n03327234 fence, fencing n04046974 rail fence n03454707 grate, grating n03795580 movable barrier n03221720 door n03427296 gate n04191595 shelter n04411264 tent, collapsible shelter n04217882 signboard, sign n04361095 supporting structure n03391770 framework n04038727 rack n04360501 support n04460130 tower n03129123 creation n04007894 product, production n04599396 work, piece of work n06589574 publication n04188643 sheet, flat solid n02856463 board n03959936 plate n03309808 fabric, cloth, material, textile n03932670 piece of cloth, piece of material n04459362 towel n03122748 covering n04605726 wrapping, wrap, wrapper n03590306 jacket n04151940 screen, cover, covert, concealment n03380867 footwear, footgear n04199027 shoe n02872752 boot n03050026 cloth covering n03237639 dressing, medical dressing n02785648 bandage, patch 
n02680110 adhesive bandage n02820210 bedclothes, bed clothing, bedding n03366823 floor cover, floor covering n03727837 mat n03051540 clothing, article of clothing, vesture, wear, wearable, habiliment n02756098 attire, garb, dress n03206718 disguise n03476083 hairpiece, false hair, postiche n04015204 protective garment n02671780 accessory, accoutrement, accouterment n02827606 belt n03859495 outerwear, overclothes n03450516 gown, robe n03419014 garment n04531873 vest, waistcoat n04371563 swimsuit, swimwear, bathing suit, swimming costume, bathing costume n04489008 trouser, pant n04143897 scarf n03816005 neckwear n03815615 necktie, tie n04230808 skirt n03863923 overgarment, outer garment n03045337 cloak n03057021 coat n04049405 raincoat, waterproof n04370048 sweater, jumper n04021028 pullover, slipover n04097866 robe n04197391 shirt n04508163 undergarment, unmentionable n03490324 handwear, hand wear n03441112 glove n03502509 headdress, headgear n03513137 helmet n02954340 cap n03497657 hat, chapeau, lid n03381126 footwear n03540267 hosiery, hose n04434932 tights, leotards n04323819 stocking n04509592 uniform n03825080 nightwear, sleepwear, nightclothes n02728440 apparel, wearing apparel, dress, clothes n04603872 workwear n04596852 woman's clothing n03236735 dress, frock n04014297 protective covering, protective cover, protection n04192858 shield n03959701 plate, scale, shell n02740764 armor plate, armour plate, armor plating, plate armor, plate armour n03513376 helmet n02955065 cap n04105068 roof n02851099 blind, screen n04589190 window blind n04181718 shade n04151581 screen n02739668 armor, armour n02862048 body armor, body armour, suit of armor, suit of armour, coat of mail, cataphract n03725035 mask n03314378 face mask n04187061 sheath n04191943 shelter n02951843 canopy n04453910 top, cover n02954938 cap n03873064 padding, cushioning n03151500 cushion n03076708 commodity, trade good, good n03093574 consumer goods n03257877 durables, durable goods, consumer durables 
n02729837 appliance n03251766 dryer, drier n03528263 home appliance, household appliance n03620052 kitchen appliance n03063338 coffee maker n03862676 oven n04580493 white goods n03252064 drygoods, soft goods n04580298 white goods, household linen n03672352 linen n02807260 bath linen n03302121 excavation n03982060 pool n03169390 decoration, ornament, ornamentation n02681518 adornment n03597469 jewelry, jewellery n03178782 design, pattern, figure n03282591 emblem n03964744 plaything, toy n00022903 article n04550840 ware n04597804 woodenware n04381994 tableware n03153375 cutlery, eating utensil n03133538 crockery, dishware n04362025 surface n03536348 horizontal surface, level n03961939 platform n03892891 part, portion n03932203 piece n00002137 abstraction, abstract entity n00024264 attribute n00027807 shape, form n13865483 round shape n13899200 sphere n13899404 ball, globe, orb n09289709 globule n00033020 communication n06793231 sign n06791372 signal, signaling, sign n06873571 visual signal n06874019 light n00031921 relation n13809207 part, portion, component part, component, constituent ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/__init__.py ================================================ from .fbresnet import * from .resnext import * from .inceptionv4 import * from .inceptionresnetv2 import * from .bninception import * from .torchvision import * from .nasnet import * ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/bninception.py ================================================ import torch import torch.nn as nn import torch.utils.model_zoo as model_zoo import os import sys __all__ = ['BNInception', 'bninception'] pretrained_settings = { 'bninception': { 'imagenet': { # Was ported using python2 (may trigger warning) 'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/bn_inception-9f5701afb96c8044.pth', # 'url': 
'http://yjxiong.me/others/bn_inception-9f5701afb96c8044.pth', 'input_space': 'BGR', 'input_size': [3, 224, 224], 'input_range': [0, 255], 'mean': [104, 117, 128], 'std': [1, 1, 1], 'num_classes': 1000 } } } class BNInception(nn.Module): def __init__(self, num_classes=1000): super(BNInception, self).__init__() inplace = True self.conv1_7x7_s2 = nn.Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3)) self.conv1_7x7_s2_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.conv1_relu_7x7 = nn.ReLU (inplace) self.pool1_3x3_s2 = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True) self.conv2_3x3_reduce = nn.Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1)) self.conv2_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.conv2_relu_3x3_reduce = nn.ReLU (inplace) self.conv2_3x3 = nn.Conv2d(64, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.conv2_3x3_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.conv2_relu_3x3 = nn.ReLU (inplace) self.pool2_3x3_s2 = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True) self.inception_3a_1x1 = nn.Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3a_1x1_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3a_relu_1x1 = nn.ReLU (inplace) self.inception_3a_3x3_reduce = nn.Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3a_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3a_relu_3x3_reduce = nn.ReLU (inplace) self.inception_3a_3x3 = nn.Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_3a_3x3_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3a_relu_3x3 = nn.ReLU (inplace) self.inception_3a_double_3x3_reduce = nn.Conv2d(192, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3a_double_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) 
self.inception_3a_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_3a_double_3x3_1 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_3a_double_3x3_1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_3a_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_3a_double_3x3_2 = nn.Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_3a_double_3x3_2_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_3a_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_3a_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True) self.inception_3a_pool_proj = nn.Conv2d(192, 32, kernel_size=(1, 1), stride=(1, 1)) self.inception_3a_pool_proj_bn = nn.BatchNorm2d(32, eps=1e-05, momentum=0.9, affine=True) self.inception_3a_relu_pool_proj = nn.ReLU (inplace) self.inception_3b_1x1 = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3b_1x1_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3b_relu_1x1 = nn.ReLU (inplace) self.inception_3b_3x3_reduce = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3b_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3b_relu_3x3_reduce = nn.ReLU (inplace) self.inception_3b_3x3 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_3b_3x3_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_3b_relu_3x3 = nn.ReLU (inplace) self.inception_3b_double_3x3_reduce = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3b_double_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3b_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_3b_double_3x3_1 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_3b_double_3x3_1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, 
affine=True) self.inception_3b_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_3b_double_3x3_2 = nn.Conv2d(96, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_3b_double_3x3_2_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_3b_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_3b_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True) self.inception_3b_pool_proj = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3b_pool_proj_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3b_relu_pool_proj = nn.ReLU (inplace) self.inception_3c_3x3_reduce = nn.Conv2d(320, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_3c_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_3c_relu_3x3_reduce = nn.ReLU (inplace) self.inception_3c_3x3 = nn.Conv2d(128, 160, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) self.inception_3c_3x3_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True) self.inception_3c_relu_3x3 = nn.ReLU (inplace) self.inception_3c_double_3x3_reduce = nn.Conv2d(320, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_3c_double_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_3c_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_3c_double_3x3_1 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_3c_double_3x3_1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_3c_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_3c_double_3x3_2 = nn.Conv2d(96, 96, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) self.inception_3c_double_3x3_2_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_3c_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_3c_pool = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True) self.inception_4a_1x1 = nn.Conv2d(576, 
224, kernel_size=(1, 1), stride=(1, 1)) self.inception_4a_1x1_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True) self.inception_4a_relu_1x1 = nn.ReLU (inplace) self.inception_4a_3x3_reduce = nn.Conv2d(576, 64, kernel_size=(1, 1), stride=(1, 1)) self.inception_4a_3x3_reduce_bn = nn.BatchNorm2d(64, eps=1e-05, momentum=0.9, affine=True) self.inception_4a_relu_3x3_reduce = nn.ReLU (inplace) self.inception_4a_3x3 = nn.Conv2d(64, 96, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4a_3x3_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_4a_relu_3x3 = nn.ReLU (inplace) self.inception_4a_double_3x3_reduce = nn.Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1)) self.inception_4a_double_3x3_reduce_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_4a_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_4a_double_3x3_1 = nn.Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4a_double_3x3_1_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4a_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_4a_double_3x3_2 = nn.Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4a_double_3x3_2_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4a_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_4a_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True) self.inception_4a_pool_proj = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4a_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4a_relu_pool_proj = nn.ReLU (inplace) self.inception_4b_1x1 = nn.Conv2d(576, 192, kernel_size=(1, 1), stride=(1, 1)) self.inception_4b_1x1_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_4b_relu_1x1 = nn.ReLU (inplace) self.inception_4b_3x3_reduce = nn.Conv2d(576, 96, kernel_size=(1, 1), 
stride=(1, 1)) self.inception_4b_3x3_reduce_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_4b_relu_3x3_reduce = nn.ReLU (inplace) self.inception_4b_3x3 = nn.Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4b_3x3_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4b_relu_3x3 = nn.ReLU (inplace) self.inception_4b_double_3x3_reduce = nn.Conv2d(576, 96, kernel_size=(1, 1), stride=(1, 1)) self.inception_4b_double_3x3_reduce_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_4b_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_4b_double_3x3_1 = nn.Conv2d(96, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4b_double_3x3_1_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4b_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_4b_double_3x3_2 = nn.Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4b_double_3x3_2_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4b_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_4b_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True) self.inception_4b_pool_proj = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4b_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4b_relu_pool_proj = nn.ReLU (inplace) self.inception_4c_1x1 = nn.Conv2d(576, 160, kernel_size=(1, 1), stride=(1, 1)) self.inception_4c_1x1_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True) self.inception_4c_relu_1x1 = nn.ReLU (inplace) self.inception_4c_3x3_reduce = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4c_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4c_relu_3x3_reduce = nn.ReLU (inplace) self.inception_4c_3x3 = nn.Conv2d(128, 160, kernel_size=(3, 3), stride=(1, 1), 
padding=(1, 1)) self.inception_4c_3x3_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True) self.inception_4c_relu_3x3 = nn.ReLU (inplace) self.inception_4c_double_3x3_reduce = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4c_double_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4c_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_4c_double_3x3_1 = nn.Conv2d(128, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4c_double_3x3_1_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True) self.inception_4c_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_4c_double_3x3_2 = nn.Conv2d(160, 160, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4c_double_3x3_2_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True) self.inception_4c_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_4c_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True) self.inception_4c_pool_proj = nn.Conv2d(576, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4c_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4c_relu_pool_proj = nn.ReLU (inplace) self.inception_4d_1x1 = nn.Conv2d(608, 96, kernel_size=(1, 1), stride=(1, 1)) self.inception_4d_1x1_bn = nn.BatchNorm2d(96, eps=1e-05, momentum=0.9, affine=True) self.inception_4d_relu_1x1 = nn.ReLU (inplace) self.inception_4d_3x3_reduce = nn.Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4d_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4d_relu_3x3_reduce = nn.ReLU (inplace) self.inception_4d_3x3 = nn.Conv2d(128, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4d_3x3_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_4d_relu_3x3 = nn.ReLU (inplace) self.inception_4d_double_3x3_reduce = nn.Conv2d(608, 160, kernel_size=(1, 1), stride=(1, 
1)) self.inception_4d_double_3x3_reduce_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True) self.inception_4d_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_4d_double_3x3_1 = nn.Conv2d(160, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4d_double_3x3_1_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_4d_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_4d_double_3x3_2 = nn.Conv2d(192, 192, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4d_double_3x3_2_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_4d_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_4d_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True) self.inception_4d_pool_proj = nn.Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4d_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4d_relu_pool_proj = nn.ReLU (inplace) self.inception_4e_3x3_reduce = nn.Conv2d(608, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_4e_3x3_reduce_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_4e_relu_3x3_reduce = nn.ReLU (inplace) self.inception_4e_3x3 = nn.Conv2d(128, 192, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) self.inception_4e_3x3_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_4e_relu_3x3 = nn.ReLU (inplace) self.inception_4e_double_3x3_reduce = nn.Conv2d(608, 192, kernel_size=(1, 1), stride=(1, 1)) self.inception_4e_double_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_4e_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_4e_double_3x3_1 = nn.Conv2d(192, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_4e_double_3x3_1_bn = nn.BatchNorm2d(256, eps=1e-05, momentum=0.9, affine=True) self.inception_4e_relu_double_3x3_1 = nn.ReLU (inplace) 
self.inception_4e_double_3x3_2 = nn.Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) self.inception_4e_double_3x3_2_bn = nn.BatchNorm2d(256, eps=1e-05, momentum=0.9, affine=True) self.inception_4e_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_4e_pool = nn.MaxPool2d ((3, 3), stride=(2, 2), dilation=(1, 1), ceil_mode=True) self.inception_5a_1x1 = nn.Conv2d(1056, 352, kernel_size=(1, 1), stride=(1, 1)) self.inception_5a_1x1_bn = nn.BatchNorm2d(352, eps=1e-05, momentum=0.9, affine=True) self.inception_5a_relu_1x1 = nn.ReLU (inplace) self.inception_5a_3x3_reduce = nn.Conv2d(1056, 192, kernel_size=(1, 1), stride=(1, 1)) self.inception_5a_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_5a_relu_3x3_reduce = nn.ReLU (inplace) self.inception_5a_3x3 = nn.Conv2d(192, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_5a_3x3_bn = nn.BatchNorm2d(320, eps=1e-05, momentum=0.9, affine=True) self.inception_5a_relu_3x3 = nn.ReLU (inplace) self.inception_5a_double_3x3_reduce = nn.Conv2d(1056, 160, kernel_size=(1, 1), stride=(1, 1)) self.inception_5a_double_3x3_reduce_bn = nn.BatchNorm2d(160, eps=1e-05, momentum=0.9, affine=True) self.inception_5a_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_5a_double_3x3_1 = nn.Conv2d(160, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_5a_double_3x3_1_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True) self.inception_5a_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_5a_double_3x3_2 = nn.Conv2d(224, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_5a_double_3x3_2_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True) self.inception_5a_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_5a_pool = nn.AvgPool2d (3, stride=1, padding=1, ceil_mode=True, count_include_pad=True) self.inception_5a_pool_proj = nn.Conv2d(1056, 128, kernel_size=(1, 1), stride=(1, 1)) 
self.inception_5a_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_5a_relu_pool_proj = nn.ReLU (inplace) self.inception_5b_1x1 = nn.Conv2d(1024, 352, kernel_size=(1, 1), stride=(1, 1)) self.inception_5b_1x1_bn = nn.BatchNorm2d(352, eps=1e-05, momentum=0.9, affine=True) self.inception_5b_relu_1x1 = nn.ReLU (inplace) self.inception_5b_3x3_reduce = nn.Conv2d(1024, 192, kernel_size=(1, 1), stride=(1, 1)) self.inception_5b_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_5b_relu_3x3_reduce = nn.ReLU (inplace) self.inception_5b_3x3 = nn.Conv2d(192, 320, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_5b_3x3_bn = nn.BatchNorm2d(320, eps=1e-05, momentum=0.9, affine=True) self.inception_5b_relu_3x3 = nn.ReLU (inplace) self.inception_5b_double_3x3_reduce = nn.Conv2d(1024, 192, kernel_size=(1, 1), stride=(1, 1)) self.inception_5b_double_3x3_reduce_bn = nn.BatchNorm2d(192, eps=1e-05, momentum=0.9, affine=True) self.inception_5b_relu_double_3x3_reduce = nn.ReLU (inplace) self.inception_5b_double_3x3_1 = nn.Conv2d(192, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_5b_double_3x3_1_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True) self.inception_5b_relu_double_3x3_1 = nn.ReLU (inplace) self.inception_5b_double_3x3_2 = nn.Conv2d(224, 224, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) self.inception_5b_double_3x3_2_bn = nn.BatchNorm2d(224, eps=1e-05, momentum=0.9, affine=True) self.inception_5b_relu_double_3x3_2 = nn.ReLU (inplace) self.inception_5b_pool = nn.MaxPool2d ((3, 3), stride=(1, 1), padding=(1, 1), dilation=(1, 1), ceil_mode=True) self.inception_5b_pool_proj = nn.Conv2d(1024, 128, kernel_size=(1, 1), stride=(1, 1)) self.inception_5b_pool_proj_bn = nn.BatchNorm2d(128, eps=1e-05, momentum=0.9, affine=True) self.inception_5b_relu_pool_proj = nn.ReLU (inplace) self.global_pool = nn.AvgPool2d (7, stride=1, padding=0, ceil_mode=True, 
count_include_pad=True) self.fc = nn.Linear (1024, 1000) def features(self, input): conv1_7x7_s2_out = self.conv1_7x7_s2(input) conv1_7x7_s2_bn_out = self.conv1_7x7_s2_bn(conv1_7x7_s2_out) conv1_relu_7x7_out = self.conv1_relu_7x7(conv1_7x7_s2_bn_out) pool1_3x3_s2_out = self.pool1_3x3_s2(conv1_7x7_s2_bn_out) conv2_3x3_reduce_out = self.conv2_3x3_reduce(pool1_3x3_s2_out) conv2_3x3_reduce_bn_out = self.conv2_3x3_reduce_bn(conv2_3x3_reduce_out) conv2_relu_3x3_reduce_out = self.conv2_relu_3x3_reduce(conv2_3x3_reduce_bn_out) conv2_3x3_out = self.conv2_3x3(conv2_3x3_reduce_bn_out) conv2_3x3_bn_out = self.conv2_3x3_bn(conv2_3x3_out) conv2_relu_3x3_out = self.conv2_relu_3x3(conv2_3x3_bn_out) pool2_3x3_s2_out = self.pool2_3x3_s2(conv2_3x3_bn_out) inception_3a_1x1_out = self.inception_3a_1x1(pool2_3x3_s2_out) inception_3a_1x1_bn_out = self.inception_3a_1x1_bn(inception_3a_1x1_out) inception_3a_relu_1x1_out = self.inception_3a_relu_1x1(inception_3a_1x1_bn_out) inception_3a_3x3_reduce_out = self.inception_3a_3x3_reduce(pool2_3x3_s2_out) inception_3a_3x3_reduce_bn_out = self.inception_3a_3x3_reduce_bn(inception_3a_3x3_reduce_out) inception_3a_relu_3x3_reduce_out = self.inception_3a_relu_3x3_reduce(inception_3a_3x3_reduce_bn_out) inception_3a_3x3_out = self.inception_3a_3x3(inception_3a_3x3_reduce_bn_out) inception_3a_3x3_bn_out = self.inception_3a_3x3_bn(inception_3a_3x3_out) inception_3a_relu_3x3_out = self.inception_3a_relu_3x3(inception_3a_3x3_bn_out) inception_3a_double_3x3_reduce_out = self.inception_3a_double_3x3_reduce(pool2_3x3_s2_out) inception_3a_double_3x3_reduce_bn_out = self.inception_3a_double_3x3_reduce_bn(inception_3a_double_3x3_reduce_out) inception_3a_relu_double_3x3_reduce_out = self.inception_3a_relu_double_3x3_reduce(inception_3a_double_3x3_reduce_bn_out) inception_3a_double_3x3_1_out = self.inception_3a_double_3x3_1(inception_3a_double_3x3_reduce_bn_out) inception_3a_double_3x3_1_bn_out = self.inception_3a_double_3x3_1_bn(inception_3a_double_3x3_1_out) 
inception_3a_relu_double_3x3_1_out = self.inception_3a_relu_double_3x3_1(inception_3a_double_3x3_1_bn_out) inception_3a_double_3x3_2_out = self.inception_3a_double_3x3_2(inception_3a_double_3x3_1_bn_out) inception_3a_double_3x3_2_bn_out = self.inception_3a_double_3x3_2_bn(inception_3a_double_3x3_2_out) inception_3a_relu_double_3x3_2_out = self.inception_3a_relu_double_3x3_2(inception_3a_double_3x3_2_bn_out) inception_3a_pool_out = self.inception_3a_pool(pool2_3x3_s2_out) inception_3a_pool_proj_out = self.inception_3a_pool_proj(inception_3a_pool_out) inception_3a_pool_proj_bn_out = self.inception_3a_pool_proj_bn(inception_3a_pool_proj_out) inception_3a_relu_pool_proj_out = self.inception_3a_relu_pool_proj(inception_3a_pool_proj_bn_out) inception_3a_output_out = torch.cat([inception_3a_1x1_bn_out,inception_3a_3x3_bn_out,inception_3a_double_3x3_2_bn_out,inception_3a_pool_proj_bn_out], 1) inception_3b_1x1_out = self.inception_3b_1x1(inception_3a_output_out) inception_3b_1x1_bn_out = self.inception_3b_1x1_bn(inception_3b_1x1_out) inception_3b_relu_1x1_out = self.inception_3b_relu_1x1(inception_3b_1x1_bn_out) inception_3b_3x3_reduce_out = self.inception_3b_3x3_reduce(inception_3a_output_out) inception_3b_3x3_reduce_bn_out = self.inception_3b_3x3_reduce_bn(inception_3b_3x3_reduce_out) inception_3b_relu_3x3_reduce_out = self.inception_3b_relu_3x3_reduce(inception_3b_3x3_reduce_bn_out) inception_3b_3x3_out = self.inception_3b_3x3(inception_3b_3x3_reduce_bn_out) inception_3b_3x3_bn_out = self.inception_3b_3x3_bn(inception_3b_3x3_out) inception_3b_relu_3x3_out = self.inception_3b_relu_3x3(inception_3b_3x3_bn_out) inception_3b_double_3x3_reduce_out = self.inception_3b_double_3x3_reduce(inception_3a_output_out) inception_3b_double_3x3_reduce_bn_out = self.inception_3b_double_3x3_reduce_bn(inception_3b_double_3x3_reduce_out) inception_3b_relu_double_3x3_reduce_out = self.inception_3b_relu_double_3x3_reduce(inception_3b_double_3x3_reduce_bn_out) inception_3b_double_3x3_1_out = 
self.inception_3b_double_3x3_1(inception_3b_double_3x3_reduce_bn_out) inception_3b_double_3x3_1_bn_out = self.inception_3b_double_3x3_1_bn(inception_3b_double_3x3_1_out) inception_3b_relu_double_3x3_1_out = self.inception_3b_relu_double_3x3_1(inception_3b_double_3x3_1_bn_out) inception_3b_double_3x3_2_out = self.inception_3b_double_3x3_2(inception_3b_double_3x3_1_bn_out) inception_3b_double_3x3_2_bn_out = self.inception_3b_double_3x3_2_bn(inception_3b_double_3x3_2_out) inception_3b_relu_double_3x3_2_out = self.inception_3b_relu_double_3x3_2(inception_3b_double_3x3_2_bn_out) inception_3b_pool_out = self.inception_3b_pool(inception_3a_output_out) inception_3b_pool_proj_out = self.inception_3b_pool_proj(inception_3b_pool_out) inception_3b_pool_proj_bn_out = self.inception_3b_pool_proj_bn(inception_3b_pool_proj_out) inception_3b_relu_pool_proj_out = self.inception_3b_relu_pool_proj(inception_3b_pool_proj_bn_out) inception_3b_output_out = torch.cat([inception_3b_1x1_bn_out,inception_3b_3x3_bn_out,inception_3b_double_3x3_2_bn_out,inception_3b_pool_proj_bn_out], 1) inception_3c_3x3_reduce_out = self.inception_3c_3x3_reduce(inception_3b_output_out) inception_3c_3x3_reduce_bn_out = self.inception_3c_3x3_reduce_bn(inception_3c_3x3_reduce_out) inception_3c_relu_3x3_reduce_out = self.inception_3c_relu_3x3_reduce(inception_3c_3x3_reduce_bn_out) inception_3c_3x3_out = self.inception_3c_3x3(inception_3c_3x3_reduce_bn_out) inception_3c_3x3_bn_out = self.inception_3c_3x3_bn(inception_3c_3x3_out) inception_3c_relu_3x3_out = self.inception_3c_relu_3x3(inception_3c_3x3_bn_out) inception_3c_double_3x3_reduce_out = self.inception_3c_double_3x3_reduce(inception_3b_output_out) inception_3c_double_3x3_reduce_bn_out = self.inception_3c_double_3x3_reduce_bn(inception_3c_double_3x3_reduce_out) inception_3c_relu_double_3x3_reduce_out = self.inception_3c_relu_double_3x3_reduce(inception_3c_double_3x3_reduce_bn_out) inception_3c_double_3x3_1_out = 
self.inception_3c_double_3x3_1(inception_3c_double_3x3_reduce_bn_out) inception_3c_double_3x3_1_bn_out = self.inception_3c_double_3x3_1_bn(inception_3c_double_3x3_1_out) inception_3c_relu_double_3x3_1_out = self.inception_3c_relu_double_3x3_1(inception_3c_double_3x3_1_bn_out) inception_3c_double_3x3_2_out = self.inception_3c_double_3x3_2(inception_3c_double_3x3_1_bn_out) inception_3c_double_3x3_2_bn_out = self.inception_3c_double_3x3_2_bn(inception_3c_double_3x3_2_out) inception_3c_relu_double_3x3_2_out = self.inception_3c_relu_double_3x3_2(inception_3c_double_3x3_2_bn_out) inception_3c_pool_out = self.inception_3c_pool(inception_3b_output_out) inception_3c_output_out = torch.cat([inception_3c_3x3_bn_out,inception_3c_double_3x3_2_bn_out,inception_3c_pool_out], 1) inception_4a_1x1_out = self.inception_4a_1x1(inception_3c_output_out) inception_4a_1x1_bn_out = self.inception_4a_1x1_bn(inception_4a_1x1_out) inception_4a_relu_1x1_out = self.inception_4a_relu_1x1(inception_4a_1x1_bn_out) inception_4a_3x3_reduce_out = self.inception_4a_3x3_reduce(inception_3c_output_out) inception_4a_3x3_reduce_bn_out = self.inception_4a_3x3_reduce_bn(inception_4a_3x3_reduce_out) inception_4a_relu_3x3_reduce_out = self.inception_4a_relu_3x3_reduce(inception_4a_3x3_reduce_bn_out) inception_4a_3x3_out = self.inception_4a_3x3(inception_4a_3x3_reduce_bn_out) inception_4a_3x3_bn_out = self.inception_4a_3x3_bn(inception_4a_3x3_out) inception_4a_relu_3x3_out = self.inception_4a_relu_3x3(inception_4a_3x3_bn_out) inception_4a_double_3x3_reduce_out = self.inception_4a_double_3x3_reduce(inception_3c_output_out) inception_4a_double_3x3_reduce_bn_out = self.inception_4a_double_3x3_reduce_bn(inception_4a_double_3x3_reduce_out) inception_4a_relu_double_3x3_reduce_out = self.inception_4a_relu_double_3x3_reduce(inception_4a_double_3x3_reduce_bn_out) inception_4a_double_3x3_1_out = self.inception_4a_double_3x3_1(inception_4a_double_3x3_reduce_bn_out) inception_4a_double_3x3_1_bn_out = 
self.inception_4a_double_3x3_1_bn(inception_4a_double_3x3_1_out) inception_4a_relu_double_3x3_1_out = self.inception_4a_relu_double_3x3_1(inception_4a_double_3x3_1_bn_out) inception_4a_double_3x3_2_out = self.inception_4a_double_3x3_2(inception_4a_double_3x3_1_bn_out) inception_4a_double_3x3_2_bn_out = self.inception_4a_double_3x3_2_bn(inception_4a_double_3x3_2_out) inception_4a_relu_double_3x3_2_out = self.inception_4a_relu_double_3x3_2(inception_4a_double_3x3_2_bn_out) inception_4a_pool_out = self.inception_4a_pool(inception_3c_output_out) inception_4a_pool_proj_out = self.inception_4a_pool_proj(inception_4a_pool_out) inception_4a_pool_proj_bn_out = self.inception_4a_pool_proj_bn(inception_4a_pool_proj_out) inception_4a_relu_pool_proj_out = self.inception_4a_relu_pool_proj(inception_4a_pool_proj_bn_out) inception_4a_output_out = torch.cat([inception_4a_1x1_bn_out,inception_4a_3x3_bn_out,inception_4a_double_3x3_2_bn_out,inception_4a_pool_proj_bn_out], 1) inception_4b_1x1_out = self.inception_4b_1x1(inception_4a_output_out) inception_4b_1x1_bn_out = self.inception_4b_1x1_bn(inception_4b_1x1_out) inception_4b_relu_1x1_out = self.inception_4b_relu_1x1(inception_4b_1x1_bn_out) inception_4b_3x3_reduce_out = self.inception_4b_3x3_reduce(inception_4a_output_out) inception_4b_3x3_reduce_bn_out = self.inception_4b_3x3_reduce_bn(inception_4b_3x3_reduce_out) inception_4b_relu_3x3_reduce_out = self.inception_4b_relu_3x3_reduce(inception_4b_3x3_reduce_bn_out) inception_4b_3x3_out = self.inception_4b_3x3(inception_4b_3x3_reduce_bn_out) inception_4b_3x3_bn_out = self.inception_4b_3x3_bn(inception_4b_3x3_out) inception_4b_relu_3x3_out = self.inception_4b_relu_3x3(inception_4b_3x3_bn_out) inception_4b_double_3x3_reduce_out = self.inception_4b_double_3x3_reduce(inception_4a_output_out) inception_4b_double_3x3_reduce_bn_out = self.inception_4b_double_3x3_reduce_bn(inception_4b_double_3x3_reduce_out) inception_4b_relu_double_3x3_reduce_out = 
self.inception_4b_relu_double_3x3_reduce(inception_4b_double_3x3_reduce_bn_out) inception_4b_double_3x3_1_out = self.inception_4b_double_3x3_1(inception_4b_double_3x3_reduce_bn_out) inception_4b_double_3x3_1_bn_out = self.inception_4b_double_3x3_1_bn(inception_4b_double_3x3_1_out) inception_4b_relu_double_3x3_1_out = self.inception_4b_relu_double_3x3_1(inception_4b_double_3x3_1_bn_out) inception_4b_double_3x3_2_out = self.inception_4b_double_3x3_2(inception_4b_double_3x3_1_bn_out) inception_4b_double_3x3_2_bn_out = self.inception_4b_double_3x3_2_bn(inception_4b_double_3x3_2_out) inception_4b_relu_double_3x3_2_out = self.inception_4b_relu_double_3x3_2(inception_4b_double_3x3_2_bn_out) inception_4b_pool_out = self.inception_4b_pool(inception_4a_output_out) inception_4b_pool_proj_out = self.inception_4b_pool_proj(inception_4b_pool_out) inception_4b_pool_proj_bn_out = self.inception_4b_pool_proj_bn(inception_4b_pool_proj_out) inception_4b_relu_pool_proj_out = self.inception_4b_relu_pool_proj(inception_4b_pool_proj_bn_out) inception_4b_output_out = torch.cat([inception_4b_1x1_bn_out,inception_4b_3x3_bn_out,inception_4b_double_3x3_2_bn_out,inception_4b_pool_proj_bn_out], 1) inception_4c_1x1_out = self.inception_4c_1x1(inception_4b_output_out) inception_4c_1x1_bn_out = self.inception_4c_1x1_bn(inception_4c_1x1_out) inception_4c_relu_1x1_out = self.inception_4c_relu_1x1(inception_4c_1x1_bn_out) inception_4c_3x3_reduce_out = self.inception_4c_3x3_reduce(inception_4b_output_out) inception_4c_3x3_reduce_bn_out = self.inception_4c_3x3_reduce_bn(inception_4c_3x3_reduce_out) inception_4c_relu_3x3_reduce_out = self.inception_4c_relu_3x3_reduce(inception_4c_3x3_reduce_bn_out) inception_4c_3x3_out = self.inception_4c_3x3(inception_4c_3x3_reduce_bn_out) inception_4c_3x3_bn_out = self.inception_4c_3x3_bn(inception_4c_3x3_out) inception_4c_relu_3x3_out = self.inception_4c_relu_3x3(inception_4c_3x3_bn_out) inception_4c_double_3x3_reduce_out = 
self.inception_4c_double_3x3_reduce(inception_4b_output_out) inception_4c_double_3x3_reduce_bn_out = self.inception_4c_double_3x3_reduce_bn(inception_4c_double_3x3_reduce_out) inception_4c_relu_double_3x3_reduce_out = self.inception_4c_relu_double_3x3_reduce(inception_4c_double_3x3_reduce_bn_out) inception_4c_double_3x3_1_out = self.inception_4c_double_3x3_1(inception_4c_double_3x3_reduce_bn_out) inception_4c_double_3x3_1_bn_out = self.inception_4c_double_3x3_1_bn(inception_4c_double_3x3_1_out) inception_4c_relu_double_3x3_1_out = self.inception_4c_relu_double_3x3_1(inception_4c_double_3x3_1_bn_out) inception_4c_double_3x3_2_out = self.inception_4c_double_3x3_2(inception_4c_double_3x3_1_bn_out) inception_4c_double_3x3_2_bn_out = self.inception_4c_double_3x3_2_bn(inception_4c_double_3x3_2_out) inception_4c_relu_double_3x3_2_out = self.inception_4c_relu_double_3x3_2(inception_4c_double_3x3_2_bn_out) inception_4c_pool_out = self.inception_4c_pool(inception_4b_output_out) inception_4c_pool_proj_out = self.inception_4c_pool_proj(inception_4c_pool_out) inception_4c_pool_proj_bn_out = self.inception_4c_pool_proj_bn(inception_4c_pool_proj_out) inception_4c_relu_pool_proj_out = self.inception_4c_relu_pool_proj(inception_4c_pool_proj_bn_out) inception_4c_output_out = torch.cat([inception_4c_1x1_bn_out,inception_4c_3x3_bn_out,inception_4c_double_3x3_2_bn_out,inception_4c_pool_proj_bn_out], 1) inception_4d_1x1_out = self.inception_4d_1x1(inception_4c_output_out) inception_4d_1x1_bn_out = self.inception_4d_1x1_bn(inception_4d_1x1_out) inception_4d_relu_1x1_out = self.inception_4d_relu_1x1(inception_4d_1x1_bn_out) inception_4d_3x3_reduce_out = self.inception_4d_3x3_reduce(inception_4c_output_out) inception_4d_3x3_reduce_bn_out = self.inception_4d_3x3_reduce_bn(inception_4d_3x3_reduce_out) inception_4d_relu_3x3_reduce_out = self.inception_4d_relu_3x3_reduce(inception_4d_3x3_reduce_bn_out) inception_4d_3x3_out = self.inception_4d_3x3(inception_4d_3x3_reduce_bn_out) 
inception_4d_3x3_bn_out = self.inception_4d_3x3_bn(inception_4d_3x3_out) inception_4d_relu_3x3_out = self.inception_4d_relu_3x3(inception_4d_3x3_bn_out) inception_4d_double_3x3_reduce_out = self.inception_4d_double_3x3_reduce(inception_4c_output_out) inception_4d_double_3x3_reduce_bn_out = self.inception_4d_double_3x3_reduce_bn(inception_4d_double_3x3_reduce_out) inception_4d_relu_double_3x3_reduce_out = self.inception_4d_relu_double_3x3_reduce(inception_4d_double_3x3_reduce_bn_out) inception_4d_double_3x3_1_out = self.inception_4d_double_3x3_1(inception_4d_double_3x3_reduce_bn_out) inception_4d_double_3x3_1_bn_out = self.inception_4d_double_3x3_1_bn(inception_4d_double_3x3_1_out) inception_4d_relu_double_3x3_1_out = self.inception_4d_relu_double_3x3_1(inception_4d_double_3x3_1_bn_out) inception_4d_double_3x3_2_out = self.inception_4d_double_3x3_2(inception_4d_double_3x3_1_bn_out) inception_4d_double_3x3_2_bn_out = self.inception_4d_double_3x3_2_bn(inception_4d_double_3x3_2_out) inception_4d_relu_double_3x3_2_out = self.inception_4d_relu_double_3x3_2(inception_4d_double_3x3_2_bn_out) inception_4d_pool_out = self.inception_4d_pool(inception_4c_output_out) inception_4d_pool_proj_out = self.inception_4d_pool_proj(inception_4d_pool_out) inception_4d_pool_proj_bn_out = self.inception_4d_pool_proj_bn(inception_4d_pool_proj_out) inception_4d_relu_pool_proj_out = self.inception_4d_relu_pool_proj(inception_4d_pool_proj_bn_out) inception_4d_output_out = torch.cat([inception_4d_1x1_bn_out,inception_4d_3x3_bn_out,inception_4d_double_3x3_2_bn_out,inception_4d_pool_proj_bn_out], 1) inception_4e_3x3_reduce_out = self.inception_4e_3x3_reduce(inception_4d_output_out) inception_4e_3x3_reduce_bn_out = self.inception_4e_3x3_reduce_bn(inception_4e_3x3_reduce_out) inception_4e_relu_3x3_reduce_out = self.inception_4e_relu_3x3_reduce(inception_4e_3x3_reduce_bn_out) inception_4e_3x3_out = self.inception_4e_3x3(inception_4e_3x3_reduce_bn_out) inception_4e_3x3_bn_out = 
self.inception_4e_3x3_bn(inception_4e_3x3_out) inception_4e_relu_3x3_out = self.inception_4e_relu_3x3(inception_4e_3x3_bn_out) inception_4e_double_3x3_reduce_out = self.inception_4e_double_3x3_reduce(inception_4d_output_out) inception_4e_double_3x3_reduce_bn_out = self.inception_4e_double_3x3_reduce_bn(inception_4e_double_3x3_reduce_out) inception_4e_relu_double_3x3_reduce_out = self.inception_4e_relu_double_3x3_reduce(inception_4e_double_3x3_reduce_bn_out) inception_4e_double_3x3_1_out = self.inception_4e_double_3x3_1(inception_4e_double_3x3_reduce_bn_out) inception_4e_double_3x3_1_bn_out = self.inception_4e_double_3x3_1_bn(inception_4e_double_3x3_1_out) inception_4e_relu_double_3x3_1_out = self.inception_4e_relu_double_3x3_1(inception_4e_double_3x3_1_bn_out) inception_4e_double_3x3_2_out = self.inception_4e_double_3x3_2(inception_4e_double_3x3_1_bn_out) inception_4e_double_3x3_2_bn_out = self.inception_4e_double_3x3_2_bn(inception_4e_double_3x3_2_out) inception_4e_relu_double_3x3_2_out = self.inception_4e_relu_double_3x3_2(inception_4e_double_3x3_2_bn_out) inception_4e_pool_out = self.inception_4e_pool(inception_4d_output_out) inception_4e_output_out = torch.cat([inception_4e_3x3_bn_out,inception_4e_double_3x3_2_bn_out,inception_4e_pool_out], 1) inception_5a_1x1_out = self.inception_5a_1x1(inception_4e_output_out) inception_5a_1x1_bn_out = self.inception_5a_1x1_bn(inception_5a_1x1_out) inception_5a_relu_1x1_out = self.inception_5a_relu_1x1(inception_5a_1x1_bn_out) inception_5a_3x3_reduce_out = self.inception_5a_3x3_reduce(inception_4e_output_out) inception_5a_3x3_reduce_bn_out = self.inception_5a_3x3_reduce_bn(inception_5a_3x3_reduce_out) inception_5a_relu_3x3_reduce_out = self.inception_5a_relu_3x3_reduce(inception_5a_3x3_reduce_bn_out) inception_5a_3x3_out = self.inception_5a_3x3(inception_5a_3x3_reduce_bn_out) inception_5a_3x3_bn_out = self.inception_5a_3x3_bn(inception_5a_3x3_out) inception_5a_relu_3x3_out = self.inception_5a_relu_3x3(inception_5a_3x3_bn_out) 
inception_5a_double_3x3_reduce_out = self.inception_5a_double_3x3_reduce(inception_4e_output_out) inception_5a_double_3x3_reduce_bn_out = self.inception_5a_double_3x3_reduce_bn(inception_5a_double_3x3_reduce_out) inception_5a_relu_double_3x3_reduce_out = self.inception_5a_relu_double_3x3_reduce(inception_5a_double_3x3_reduce_bn_out) inception_5a_double_3x3_1_out = self.inception_5a_double_3x3_1(inception_5a_double_3x3_reduce_bn_out) inception_5a_double_3x3_1_bn_out = self.inception_5a_double_3x3_1_bn(inception_5a_double_3x3_1_out) inception_5a_relu_double_3x3_1_out = self.inception_5a_relu_double_3x3_1(inception_5a_double_3x3_1_bn_out) inception_5a_double_3x3_2_out = self.inception_5a_double_3x3_2(inception_5a_double_3x3_1_bn_out) inception_5a_double_3x3_2_bn_out = self.inception_5a_double_3x3_2_bn(inception_5a_double_3x3_2_out) inception_5a_relu_double_3x3_2_out = self.inception_5a_relu_double_3x3_2(inception_5a_double_3x3_2_bn_out) inception_5a_pool_out = self.inception_5a_pool(inception_4e_output_out) inception_5a_pool_proj_out = self.inception_5a_pool_proj(inception_5a_pool_out) inception_5a_pool_proj_bn_out = self.inception_5a_pool_proj_bn(inception_5a_pool_proj_out) inception_5a_relu_pool_proj_out = self.inception_5a_relu_pool_proj(inception_5a_pool_proj_bn_out) inception_5a_output_out = torch.cat([inception_5a_1x1_bn_out,inception_5a_3x3_bn_out,inception_5a_double_3x3_2_bn_out,inception_5a_pool_proj_bn_out], 1) inception_5b_1x1_out = self.inception_5b_1x1(inception_5a_output_out) inception_5b_1x1_bn_out = self.inception_5b_1x1_bn(inception_5b_1x1_out) inception_5b_relu_1x1_out = self.inception_5b_relu_1x1(inception_5b_1x1_bn_out) inception_5b_3x3_reduce_out = self.inception_5b_3x3_reduce(inception_5a_output_out) inception_5b_3x3_reduce_bn_out = self.inception_5b_3x3_reduce_bn(inception_5b_3x3_reduce_out) inception_5b_relu_3x3_reduce_out = self.inception_5b_relu_3x3_reduce(inception_5b_3x3_reduce_bn_out) inception_5b_3x3_out = 
self.inception_5b_3x3(inception_5b_3x3_reduce_bn_out) inception_5b_3x3_bn_out = self.inception_5b_3x3_bn(inception_5b_3x3_out) inception_5b_relu_3x3_out = self.inception_5b_relu_3x3(inception_5b_3x3_bn_out) inception_5b_double_3x3_reduce_out = self.inception_5b_double_3x3_reduce(inception_5a_output_out) inception_5b_double_3x3_reduce_bn_out = self.inception_5b_double_3x3_reduce_bn(inception_5b_double_3x3_reduce_out) inception_5b_relu_double_3x3_reduce_out = self.inception_5b_relu_double_3x3_reduce(inception_5b_double_3x3_reduce_bn_out) inception_5b_double_3x3_1_out = self.inception_5b_double_3x3_1(inception_5b_double_3x3_reduce_bn_out) inception_5b_double_3x3_1_bn_out = self.inception_5b_double_3x3_1_bn(inception_5b_double_3x3_1_out) inception_5b_relu_double_3x3_1_out = self.inception_5b_relu_double_3x3_1(inception_5b_double_3x3_1_bn_out) inception_5b_double_3x3_2_out = self.inception_5b_double_3x3_2(inception_5b_double_3x3_1_bn_out) inception_5b_double_3x3_2_bn_out = self.inception_5b_double_3x3_2_bn(inception_5b_double_3x3_2_out) inception_5b_relu_double_3x3_2_out = self.inception_5b_relu_double_3x3_2(inception_5b_double_3x3_2_bn_out) inception_5b_pool_out = self.inception_5b_pool(inception_5a_output_out) inception_5b_pool_proj_out = self.inception_5b_pool_proj(inception_5b_pool_out) inception_5b_pool_proj_bn_out = self.inception_5b_pool_proj_bn(inception_5b_pool_proj_out) inception_5b_relu_pool_proj_out = self.inception_5b_relu_pool_proj(inception_5b_pool_proj_bn_out) inception_5b_output_out = torch.cat([inception_5b_1x1_bn_out,inception_5b_3x3_bn_out,inception_5b_double_3x3_2_bn_out,inception_5b_pool_proj_bn_out], 1) global_pool_out = self.global_pool(inception_5b_output_out) return global_pool_out def classif(self, features): fc_out = self.fc(features.view(features.size(0), -1)) return fc_out def forward(self, input): features_out = self.features(input) classif_out = self.classif(features_out) return classif_out def bninception(num_classes=1000, 
pretrained='imagenet'): r"""BNInception model architecture from `_ paper. """ model = BNInception(num_classes=1000) if pretrained is not None: settings = pretrained_settings['bninception'][pretrained] assert num_classes == settings['num_classes'], \ "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes) model.load_state_dict(model_zoo.load_url(settings['url'])) model.input_space = settings['input_space'] model.input_size = settings['input_size'] model.input_range = settings['input_range'] model.mean = settings['mean'] model.std = settings['std'] return model if __name__ == '__main__': model = bninception() ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/fbresnet/resnet152_dump.lua ================================================ require 'cutorch' require 'cunn' require 'cudnn' require 'image' vision=require 'torchnet-vision' net=vision.models.resnet.load{filename='data/resnet152/net.t7',length=152} print(net) require 'nn' nn.Module.parameters = function(self) if self.weight and self.bias and self.running_mean and self.running_var then return {self.weight, self.bias, self.running_mean, self.running_var}, {self.gradWeight, self.gradBias} elseif self.weight and self.bias then return {self.weight, self.bias}, {self.gradWeight, self.gradBias} elseif self.weight then return {self.weight}, {self.gradWeight} elseif self.bias then return {self.bias}, {self.gradBias} else return end end netparams, _ = net:parameters() print(#netparams) torch.save('data/resnet152/netparams.t7', netparams) net=net:cuda() net:evaluate() --p, gp = net:getParameters() input = torch.ones(1,3,224,224) input[{1,1,1,1}] = -1 input[1] = image.load('data/lena_224.png') print(input:sum()) input = input:cuda() output=net:forward(input) for i=1, 11 do torch.save('data/resnet152/output'..i..'.t7', net:get(i).output:float()) end ================================================ FILE: 
pretrained_models_pytorch/pretrainedmodels/fbresnet/resnet152_load.py ================================================
# Porting/verification script: rebuilds a torchvision-style ResNet in
# PyTorch and (further down in this file) loads parameters dumped from a
# Torch7 ResNet-152 checkpoint, comparing intermediate outputs.
import torch.nn as nn
import math
import torch.utils.model_zoo as model_zoo

__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101', 'resnet152']

# Standard torchvision checkpoint URLs for the non-ported variants.
model_urls = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}


def conv3x3(in_planes, out_planes, stride=1):
    "3x3 convolution with padding"
    # NOTE(review): bias=True even though a BatchNorm follows — presumably
    # kept so the parameter list lines up with the Torch7 dump; confirm
    # before "fixing" to bias=False.
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=1, bias=True)


class BasicBlock(nn.Module):
    # Two 3x3 conv layers with a residual (identity or downsampled) skip.
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample  # optional projection for the skip path
        self.stride = stride

    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        # Project the skip connection when shape/stride changed.
        if self.downsample is not None:
            residual = self.downsample(x)
        out += residual
        out = self.relu(out)
        return out


class Bottleneck(nn.Module):
    # 1x1 reduce -> 3x3 -> 1x1 expand (x4) bottleneck with residual skip.
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        # bias=True throughout, matching the Torch7 parameter layout (see
        # the note on conv3x3 above).
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=True)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=True)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=True)
        self.bn3 = nn.BatchNorm2d(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
self.stride = stride def forward(self, x): residual = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) out = self.relu(out) out = self.conv3(out) out = self.bn3(out) if self.downsample is not None: residual = self.downsample(x) out += residual out = self.relu(out) return out from torch.legacy import nn as nnl class ResNet(nn.Module): def __init__(self, block, layers, num_classes=1000): self.inplanes = 64 super(ResNet, self).__init__() self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=True) #self.conv1 = nnl.SpatialConvolution(3, 64, 7, 7, 2, 2, 3, 3) self.bn1 = nn.BatchNorm2d(64) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer(block, 128, layers[1], stride=2) self.layer3 = self._make_layer(block, 256, layers[2], stride=2) self.layer4 = self._make_layer(block, 512, layers[3], stride=2) self.avgpool = nn.AvgPool2d(7) self.fc = nn.Linear(512 * block.expansion, num_classes) for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. 
/ n)) elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _make_layer(self, block, planes, blocks, stride=1): downsample = None if stride != 1 or self.inplanes != planes * block.expansion: downsample = nn.Sequential( nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=True), nn.BatchNorm2d(planes * block.expansion), ) layers = [] layers.append(block(self.inplanes, planes, stride, downsample)) self.inplanes = planes * block.expansion for i in range(1, blocks): layers.append(block(self.inplanes, planes)) return nn.Sequential(*layers) def forward(self, x): x = self.conv1(x) self.conv1_input = x.clone() x = self.bn1(x) x = self.relu(x) x = self.maxpool(x) x = self.layer1(x) x = self.layer2(x) x = self.layer3(x) x = self.layer4(x) x = self.avgpool(x) x = x.view(x.size(0), -1) x = self.fc(x) return x def resnet18(pretrained=False, **kwargs): """Constructs a ResNet-18 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['resnet18'])) return model def resnet34(pretrained=False, **kwargs): """Constructs a ResNet-34 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['resnet34'])) return model def resnet50(pretrained=False, **kwargs): """Constructs a ResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = ResNet(Bottleneck, [3, 4, 6, 3], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['resnet50'])) return model def resnet101(pretrained=False, **kwargs): """Constructs a ResNet-101 model. 
Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = ResNet(Bottleneck, [3, 4, 23, 3], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['resnet101'])) return model def resnet152(pretrained=False, **kwargs): """Constructs a ResNet-152 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = ResNet(Bottleneck, [3, 8, 36, 3], **kwargs) if pretrained: model.load_state_dict(model_zoo.load_url(model_urls['resnet152'])) return model import torchfile from torch.utils.serialization import load_lua import torch netparams = torchfile.load('data/resnet152/netparams.t7') #netparams2 = load_lua('data/resnet152/netparams.t7') #import ipdb; ipdb.set_trace() netoutputs = [] for i in range(1, 12): path = 'data/resnet152/output{}.t7'.format(i) out = load_lua(path) #print(out.size()) if out.dim()==4: pass#out.transpose_(2, 3) netoutputs.append(out) net = resnet152() state_dict = net.state_dict() import collections s = collections.OrderedDict() i=0 for key in state_dict.keys(): new = torch.from_numpy(netparams[i]) s[key] = new if s[key].dim() == 4: pass#s[key].transpose_(2,3) i += 1 net.load_state_dict(s) net.conv1.register_forward_hook(lambda self, input, output: \ print('conv1', torch.dist(output.data, netoutputs[0]))) net.bn1.register_forward_hook(lambda self, input, output: \ print('bn1', torch.dist(output.data, netoutputs[1]))) net.relu.register_forward_hook(lambda self, input, output: \ print('relu', torch.dist(output.data, netoutputs[2]))) net.maxpool.register_forward_hook(lambda self, input, output: \ print('maxpool', torch.dist(output.data, netoutputs[3]))) net.layer1.register_forward_hook(lambda self, input, output: \ print('layer1', torch.dist(output.data, netoutputs[4]))) net.layer2.register_forward_hook(lambda self, input, output: \ print('layer2', torch.dist(output.data, netoutputs[5]))) net.layer3.register_forward_hook(lambda self, input, output: \ print('layer3', 
torch.dist(output.data, netoutputs[6]))) net.layer4.register_forward_hook(lambda self, input, output: \ print('layer4', torch.dist(output.data, netoutputs[7]))) net.avgpool.register_forward_hook(lambda self, input, output: \ print('avgpool', torch.dist(output.data, netoutputs[8]))) net.fc.register_forward_hook(lambda self, input, output: \ print('fc', torch.dist(output.data, netoutputs[10]))) net.eval() input_data = torch.ones(1,3,224,224) input_data[0][0][0][0] = -1 from PIL import Image import torchvision.transforms as transforms input_data[0] = transforms.ToTensor()(Image.open('data/lena_224.png')) print('lena sum', input_data.sum()) input = torch.autograd.Variable(input_data) output = net.forward(input) torch.save(s, 'data/resnet152.pth') ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/fbresnet.py ================================================ import torch.nn as nn import math import torch.utils.model_zoo as model_zoo __all__ = ['FBResNet', 'fbresnet18', 'fbresnet34', 'fbresnet50', 'fbresnet101', 'fbresnet152'] pretrained_settings = { 'fbresnet152': { 'imagenet': { 'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/resnet152-c11d722e.pth', 'input_space': 'RGB', 'input_size': [3, 224, 224], 'input_range': [0, 1], 'mean': [0.485, 0.456, 0.406], 'std': [0.229, 0.224, 0.225], 'num_classes': 1000 } } } def conv3x3(in_planes, out_planes, stride=1): "3x3 convolution with padding" return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=True) class BasicBlock(nn.Module): expansion = 1 def __init__(self, inplanes, planes, stride=1, downsample=None): super(BasicBlock, self).__init__() self.conv1 = conv3x3(inplanes, planes, stride) self.bn1 = nn.BatchNorm2d(planes) self.relu = nn.ReLU(inplace=True) self.conv2 = conv3x3(planes, planes) self.bn2 = nn.BatchNorm2d(planes) self.downsample = downsample self.stride = stride def forward(self, x): residual = x out = self.conv1(x) 
out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) if self.downsample is not None: residual = self.downsample(x) out += residual out = self.relu(out) return out class Bottleneck(nn.Module): expansion = 4 def __init__(self, inplanes, planes, stride=1, downsample=None): super(Bottleneck, self).__init__() self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=True) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=True) self.bn2 = nn.BatchNorm2d(planes) self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=True) self.bn3 = nn.BatchNorm2d(planes * 4) self.relu = nn.ReLU(inplace=True) self.downsample = downsample self.stride = stride def forward(self, x): residual = x out = self.conv1(x) out = self.bn1(out) out = self.relu(out) out = self.conv2(out) out = self.bn2(out) out = self.relu(out) out = self.conv3(out) out = self.bn3(out) if self.downsample is not None: residual = self.downsample(x) out += residual out = self.relu(out) return out class FBResNet(nn.Module): def __init__(self, block, layers, num_classes=1000): self.inplanes = 64 # Special attributs self.input_space = None self.input_size = (299, 299, 3) self.mean = None self.std = None super(FBResNet, self).__init__() # Modules self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=True) self.bn1 = nn.BatchNorm2d(64) self.relu = nn.ReLU(inplace=True) self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.layer1 = self._make_layer(block, 64, layers[0]) self.layer2 = self._make_layer(block, 128, layers[1], stride=2) self.layer3 = self._make_layer(block, 256, layers[2], stride=2) self.layer4 = self._make_layer(block, 512, layers[3], stride=2) self.avgpool = nn.AvgPool2d(7) self.fc = nn.Linear(512 * block.expansion, num_classes) for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, 
math.sqrt(2. / n)) elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() def _make_layer(self, block, planes, blocks, stride=1): downsample = None if stride != 1 or self.inplanes != planes * block.expansion: downsample = nn.Sequential( nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=True), nn.BatchNorm2d(planes * block.expansion), ) layers = [] layers.append(block(self.inplanes, planes, stride, downsample)) self.inplanes = planes * block.expansion for i in range(1, blocks): layers.append(block(self.inplanes, planes)) return nn.Sequential(*layers) def forward(self, x): x = self.conv1(x) self.conv1_input = x.clone() x = self.bn1(x) x = self.relu(x) x = self.maxpool(x) x = self.layer1(x) x = self.layer2(x) x = self.layer3(x) x = self.layer4(x) x = self.avgpool(x) x = x.view(x.size(0), -1) x = self.fc(x) return x def fbresnet18(num_classes=1000): """Constructs a ResNet-18 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = FBResNet(BasicBlock, [2, 2, 2, 2], num_classes=num_classes) return model def fbresnet34(num_classes=1000): """Constructs a ResNet-34 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = FBResNet(BasicBlock, [3, 4, 6, 3], num_classes=num_classes) return model def fbresnet50(num_classes=1000): """Constructs a ResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = FBResNet(Bottleneck, [3, 4, 6, 3], num_classes=num_classes) return model def fbresnet101(num_classes=1000): """Constructs a ResNet-101 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = FBResNet(Bottleneck, [3, 4, 23, 3], num_classes=num_classes) return model def fbresnet152(num_classes=1000, pretrained='imagenet'): """Constructs a ResNet-152 model. 
Args: pretrained (bool): If True, returns a model pre-trained on ImageNet """ model = FBResNet(Bottleneck, [3, 8, 36, 3], num_classes=num_classes) if pretrained is not None: settings = pretrained_settings['fbresnet152'][pretrained] assert num_classes == settings['num_classes'], \ "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes) model.load_state_dict(model_zoo.load_url(settings['url'])) model.input_space = settings['input_space'] model.input_size = settings['input_size'] model.input_range = settings['input_range'] model.mean = settings['mean'] model.std = settings['std'] return model ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/inceptionresnetv2.py ================================================ import torch import torch.nn as nn import torch.utils.model_zoo as model_zoo import os import sys __all__ = ['InceptionResNetV2', 'inceptionresnetv2'] pretrained_settings = { 'inceptionresnetv2': { 'imagenet': { 'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionresnetv2-d579a627.pth', 'input_space': 'RGB', 'input_size': [3, 299, 299], 'input_range': [0, 1], 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'num_classes': 1000 }, 'imagenet+background': { 'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionresnetv2-d579a627.pth', 'input_space': 'RGB', 'input_size': [3, 299, 299], 'input_range': [0, 1], 'mean': [0.5, 0.5, 0.5], 'std': [0.5, 0.5, 0.5], 'num_classes': 1001 } } } class BasicConv2d(nn.Module): def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0): super(BasicConv2d, self).__init__() self.conv = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, bias=False) # verify bias false self.bn = nn.BatchNorm2d(out_planes, eps=0.001, # value found in tensorflow momentum=0.1, # default pytorch value affine=True) self.relu = nn.ReLU(inplace=False) def forward(self, x): x = self.conv(x) x = self.bn(x) x = self.relu(x) return 
x class Mixed_5b(nn.Module): def __init__(self): super(Mixed_5b, self).__init__() self.branch0 = BasicConv2d(192, 96, kernel_size=1, stride=1) self.branch1 = nn.Sequential( BasicConv2d(192, 48, kernel_size=1, stride=1), BasicConv2d(48, 64, kernel_size=5, stride=1, padding=2) ) self.branch2 = nn.Sequential( BasicConv2d(192, 64, kernel_size=1, stride=1), BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1), BasicConv2d(96, 96, kernel_size=3, stride=1, padding=1) ) self.branch3 = nn.Sequential( nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False), BasicConv2d(192, 64, kernel_size=1, stride=1) ) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = self.branch2(x) x3 = self.branch3(x) out = torch.cat((x0, x1, x2, x3), 1) return out class Block35(nn.Module): def __init__(self, scale=1.0): super(Block35, self).__init__() self.scale = scale self.branch0 = BasicConv2d(320, 32, kernel_size=1, stride=1) self.branch1 = nn.Sequential( BasicConv2d(320, 32, kernel_size=1, stride=1), BasicConv2d(32, 32, kernel_size=3, stride=1, padding=1) ) self.branch2 = nn.Sequential( BasicConv2d(320, 32, kernel_size=1, stride=1), BasicConv2d(32, 48, kernel_size=3, stride=1, padding=1), BasicConv2d(48, 64, kernel_size=3, stride=1, padding=1) ) self.conv2d = nn.Conv2d(128, 320, kernel_size=1, stride=1) self.relu = nn.ReLU(inplace=False) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = self.branch2(x) out = torch.cat((x0, x1, x2), 1) out = self.conv2d(out) out = out * self.scale + x out = self.relu(out) return out class Mixed_6a(nn.Module): def __init__(self): super(Mixed_6a, self).__init__() self.branch0 = BasicConv2d(320, 384, kernel_size=3, stride=2) self.branch1 = nn.Sequential( BasicConv2d(320, 256, kernel_size=1, stride=1), BasicConv2d(256, 256, kernel_size=3, stride=1, padding=1), BasicConv2d(256, 384, kernel_size=3, stride=2) ) self.branch2 = nn.MaxPool2d(3, stride=2) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = 
self.branch2(x) out = torch.cat((x0, x1, x2), 1) return out class Block17(nn.Module): def __init__(self, scale=1.0): super(Block17, self).__init__() self.scale = scale self.branch0 = BasicConv2d(1088, 192, kernel_size=1, stride=1) self.branch1 = nn.Sequential( BasicConv2d(1088, 128, kernel_size=1, stride=1), BasicConv2d(128, 160, kernel_size=(1,7), stride=1, padding=(0,3)), BasicConv2d(160, 192, kernel_size=(7,1), stride=1, padding=(3,0)) ) self.conv2d = nn.Conv2d(384, 1088, kernel_size=1, stride=1) self.relu = nn.ReLU(inplace=False) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) out = torch.cat((x0, x1), 1) out = self.conv2d(out) out = out * self.scale + x out = self.relu(out) return out class Mixed_7a(nn.Module): def __init__(self): super(Mixed_7a, self).__init__() self.branch0 = nn.Sequential( BasicConv2d(1088, 256, kernel_size=1, stride=1), BasicConv2d(256, 384, kernel_size=3, stride=2) ) self.branch1 = nn.Sequential( BasicConv2d(1088, 256, kernel_size=1, stride=1), BasicConv2d(256, 288, kernel_size=3, stride=2) ) self.branch2 = nn.Sequential( BasicConv2d(1088, 256, kernel_size=1, stride=1), BasicConv2d(256, 288, kernel_size=3, stride=1, padding=1), BasicConv2d(288, 320, kernel_size=3, stride=2) ) self.branch3 = nn.MaxPool2d(3, stride=2) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = self.branch2(x) x3 = self.branch3(x) out = torch.cat((x0, x1, x2, x3), 1) return out class Block8(nn.Module): def __init__(self, scale=1.0, noReLU=False): super(Block8, self).__init__() self.scale = scale self.noReLU = noReLU self.branch0 = BasicConv2d(2080, 192, kernel_size=1, stride=1) self.branch1 = nn.Sequential( BasicConv2d(2080, 192, kernel_size=1, stride=1), BasicConv2d(192, 224, kernel_size=(1,3), stride=1, padding=(0,1)), BasicConv2d(224, 256, kernel_size=(3,1), stride=1, padding=(1,0)) ) self.conv2d = nn.Conv2d(448, 2080, kernel_size=1, stride=1) if not self.noReLU: self.relu = nn.ReLU(inplace=False) def forward(self, x): x0 = 
self.branch0(x) x1 = self.branch1(x) out = torch.cat((x0, x1), 1) out = self.conv2d(out) out = out * self.scale + x if not self.noReLU: out = self.relu(out) return out class InceptionResNetV2(nn.Module): def __init__(self, num_classes=1001): super(InceptionResNetV2, self).__init__() # Special attributs self.input_space = None self.input_size = (299, 299, 3) self.mean = None self.std = None # Modules self.conv2d_1a = BasicConv2d(3, 32, kernel_size=3, stride=2) self.conv2d_2a = BasicConv2d(32, 32, kernel_size=3, stride=1) self.conv2d_2b = BasicConv2d(32, 64, kernel_size=3, stride=1, padding=1) self.maxpool_3a = nn.MaxPool2d(3, stride=2) self.conv2d_3b = BasicConv2d(64, 80, kernel_size=1, stride=1) self.conv2d_4a = BasicConv2d(80, 192, kernel_size=3, stride=1) self.maxpool_5a = nn.MaxPool2d(3, stride=2) self.mixed_5b = Mixed_5b() self.repeat = nn.Sequential( Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17), Block35(scale=0.17) ) self.mixed_6a = Mixed_6a() self.repeat_1 = nn.Sequential( Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10), Block17(scale=0.10) ) self.mixed_7a = Mixed_7a() self.repeat_2 = nn.Sequential( Block8(scale=0.20), Block8(scale=0.20), Block8(scale=0.20), Block8(scale=0.20), Block8(scale=0.20), Block8(scale=0.20), Block8(scale=0.20), Block8(scale=0.20), Block8(scale=0.20) ) self.block8 = Block8(noReLU=True) self.conv2d_7b = BasicConv2d(2080, 1536, kernel_size=1, stride=1) self.avgpool_1a = nn.AvgPool2d(8, count_include_pad=False) self.classif = nn.Linear(1536, num_classes) 
# --- InceptionResNetV2.forward (the class header appears earlier in this dump) ---
    def forward(self, x):
        """Run the full Inception-ResNet-v2 pipeline and return logits."""
        # Stem: plain convolutions and max-pooling.
        x = self.conv2d_1a(x)
        x = self.conv2d_2a(x)
        x = self.conv2d_2b(x)
        x = self.maxpool_3a(x)
        x = self.conv2d_3b(x)
        x = self.conv2d_4a(x)
        x = self.maxpool_5a(x)
        # Inception / residual stages with reduction blocks in between.
        x = self.mixed_5b(x)
        x = self.repeat(x)
        x = self.mixed_6a(x)
        x = self.repeat_1(x)
        x = self.mixed_7a(x)
        x = self.repeat_2(x)
        x = self.block8(x)
        x = self.conv2d_7b(x)
        # Global average pool, flatten, then the linear classifier.
        x = self.avgpool_1a(x)
        x = x.view(x.size(0), -1)
        x = self.classif(x)
        return x


def inceptionresnetv2(num_classes=1001, pretrained='imagenet'):
    r"""InceptionResNetV2 model architecture from the
    "Inception-v4, Inception-ResNet and the Impact of Residual
    Connections on Learning" paper.

    ``pretrained`` selects a key of ``pretrained_settings`` or may be
    falsy/None for a randomly initialised model with ``num_classes``
    outputs.
    """
    if pretrained:
        settings = pretrained_settings['inceptionresnetv2'][pretrained]
        assert num_classes == settings['num_classes'], \
            "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes)
        # both 'imagenet'&'imagenet+background' are loaded from same parameters
        # The checkpoint has 1001 outputs; index 0 is the extra
        # "background" class (per the settings key naming).
        model = InceptionResNetV2(num_classes=1001)
        model.load_state_dict(model_zoo.load_url(settings['url']))
        if pretrained == 'imagenet':
            # Drop the background row/bias to get a 1000-class head.
            new_classif = nn.Linear(1536, 1000)
            new_classif.weight.data = model.classif.weight.data[1:]
            new_classif.bias.data = model.classif.bias.data[1:]
            model.classif = new_classif
        # Preprocessing metadata for callers building input pipelines.
        model.input_space = settings['input_space']
        model.input_size = settings['input_size']
        model.input_range = settings['input_range']
        model.mean = settings['mean']
        model.std = settings['std']
    else:
        model = InceptionResNetV2(num_classes=num_classes)
    return model


'''
TEST
Run this code with:
```
cd $HOME/pretrained-models.pytorch
python -m pretrainedmodels.inceptionresnetv2
```
'''
if __name__ == '__main__':
    assert inceptionresnetv2(num_classes=10, pretrained=None)
    print('success')
    assert inceptionresnetv2(num_classes=1000, pretrained='imagenet')
    print('success')
    assert inceptionresnetv2(num_classes=1001, pretrained='imagenet+background')
    print('success')
    # fail
    assert inceptionresnetv2(num_classes=1001, pretrained='imagenet')
================================================ FILE:
pretrained_models_pytorch/pretrainedmodels/inceptionv4.py ================================================
# Inception-v4: stem + Inception-A/B/C stages separated by Reduction
# blocks (classes defined below and on the following lines of this dump).
import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
import os
import sys

__all__ = ['InceptionV4', 'inceptionv4']

# Checkpoint URLs and preprocessing metadata.  Both keys point at the
# same 1001-class weights; 'imagenet' later strips the background class.
pretrained_settings = {
    'inceptionv4': {
        'imagenet': {
            'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionv4-97ef9c30.pth',
            'input_space': 'RGB',
            'input_size': [3, 299, 299],
            'input_range': [0, 1],
            'mean': [0.5, 0.5, 0.5],
            'std': [0.5, 0.5, 0.5],
            'num_classes': 1000
        },
        'imagenet+background': {
            'url': 'http://webia.lip6.fr/~cadene/Downloads/inceptionv4-97ef9c30.pth',
            'input_space': 'RGB',
            'input_size': [3, 299, 299],
            'input_range': [0, 1],
            'mean': [0.5, 0.5, 0.5],
            'std': [0.5, 0.5, 0.5],
            'num_classes': 1001
        }
    }
}


class BasicConv2d(nn.Module):
    # Conv -> BatchNorm -> ReLU building block used by every Inception cell.
    def __init__(self, in_planes, out_planes, kernel_size, stride, padding=0):
        super(BasicConv2d, self).__init__()
        self.conv = nn.Conv2d(in_planes, out_planes,
                              kernel_size=kernel_size, stride=stride,
                              padding=padding, bias=False)  # verify bias false
        self.bn = nn.BatchNorm2d(out_planes,
                                 eps=0.001,  # value found in tensorflow
                                 momentum=0.1,  # default pytorch value
                                 affine=True)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        return x


class Mixed_3a(nn.Module):
    # Stem reduction: parallel max-pool and strided conv, concatenated.
    def __init__(self):
        super(Mixed_3a, self).__init__()
        self.maxpool = nn.MaxPool2d(3, stride=2)
        self.conv = BasicConv2d(64, 96, kernel_size=3, stride=2)

    def forward(self, x):
        x0 = self.maxpool(x)
        x1 = self.conv(x)
        out = torch.cat((x0, x1), 1)  # concat along the channel axis
        return out


class Mixed_4a(nn.Module):
    # Stem cell: a short 1x1->3x3 branch and a factorised 1x7/7x1 branch.
    def __init__(self):
        super(Mixed_4a, self).__init__()
        self.branch0 = nn.Sequential(
            BasicConv2d(160, 64, kernel_size=1, stride=1),
            BasicConv2d(64, 96, kernel_size=3, stride=1)
        )
        self.branch1 = nn.Sequential(
            BasicConv2d(160, 64, kernel_size=1, stride=1),
            BasicConv2d(64, 64, kernel_size=(1,7), stride=1, padding=(0,3)),
            BasicConv2d(64, 64, kernel_size=(7,1), stride=1, padding=(3,0)),
            BasicConv2d(64, 96,
kernel_size=(3,3), stride=1) ) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) out = torch.cat((x0, x1), 1) return out class Mixed_5a(nn.Module): def __init__(self): super(Mixed_5a, self).__init__() self.conv = BasicConv2d(192, 192, kernel_size=3, stride=2) self.maxpool = nn.MaxPool2d(3, stride=2) def forward(self, x): x0 = self.conv(x) x1 = self.maxpool(x) out = torch.cat((x0, x1), 1) return out class Inception_A(nn.Module): def __init__(self): super(Inception_A, self).__init__() self.branch0 = BasicConv2d(384, 96, kernel_size=1, stride=1) self.branch1 = nn.Sequential( BasicConv2d(384, 64, kernel_size=1, stride=1), BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1) ) self.branch2 = nn.Sequential( BasicConv2d(384, 64, kernel_size=1, stride=1), BasicConv2d(64, 96, kernel_size=3, stride=1, padding=1), BasicConv2d(96, 96, kernel_size=3, stride=1, padding=1) ) self.branch3 = nn.Sequential( nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False), BasicConv2d(384, 96, kernel_size=1, stride=1) ) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = self.branch2(x) x3 = self.branch3(x) out = torch.cat((x0, x1, x2, x3), 1) return out class Reduction_A(nn.Module): def __init__(self): super(Reduction_A, self).__init__() self.branch0 = BasicConv2d(384, 384, kernel_size=3, stride=2) self.branch1 = nn.Sequential( BasicConv2d(384, 192, kernel_size=1, stride=1), BasicConv2d(192, 224, kernel_size=3, stride=1, padding=1), BasicConv2d(224, 256, kernel_size=3, stride=2) ) self.branch2 = nn.MaxPool2d(3, stride=2) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = self.branch2(x) out = torch.cat((x0, x1, x2), 1) return out class Inception_B(nn.Module): def __init__(self): super(Inception_B, self).__init__() self.branch0 = BasicConv2d(1024, 384, kernel_size=1, stride=1) self.branch1 = nn.Sequential( BasicConv2d(1024, 192, kernel_size=1, stride=1), BasicConv2d(192, 224, kernel_size=(1,7), stride=1, padding=(0,3)), 
BasicConv2d(224, 256, kernel_size=(7,1), stride=1, padding=(3,0)) ) self.branch2 = nn.Sequential( BasicConv2d(1024, 192, kernel_size=1, stride=1), BasicConv2d(192, 192, kernel_size=(7,1), stride=1, padding=(3,0)), BasicConv2d(192, 224, kernel_size=(1,7), stride=1, padding=(0,3)), BasicConv2d(224, 224, kernel_size=(7,1), stride=1, padding=(3,0)), BasicConv2d(224, 256, kernel_size=(1,7), stride=1, padding=(0,3)) ) self.branch3 = nn.Sequential( nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False), BasicConv2d(1024, 128, kernel_size=1, stride=1) ) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = self.branch2(x) x3 = self.branch3(x) out = torch.cat((x0, x1, x2, x3), 1) return out class Reduction_B(nn.Module): def __init__(self): super(Reduction_B, self).__init__() self.branch0 = nn.Sequential( BasicConv2d(1024, 192, kernel_size=1, stride=1), BasicConv2d(192, 192, kernel_size=3, stride=2) ) self.branch1 = nn.Sequential( BasicConv2d(1024, 256, kernel_size=1, stride=1), BasicConv2d(256, 256, kernel_size=(1,7), stride=1, padding=(0,3)), BasicConv2d(256, 320, kernel_size=(7,1), stride=1, padding=(3,0)), BasicConv2d(320, 320, kernel_size=3, stride=2) ) self.branch2 = nn.MaxPool2d(3, stride=2) def forward(self, x): x0 = self.branch0(x) x1 = self.branch1(x) x2 = self.branch2(x) out = torch.cat((x0, x1, x2), 1) return out class Inception_C(nn.Module): def __init__(self): super(Inception_C, self).__init__() self.branch0 = BasicConv2d(1536, 256, kernel_size=1, stride=1) self.branch1_0 = BasicConv2d(1536, 384, kernel_size=1, stride=1) self.branch1_1a = BasicConv2d(384, 256, kernel_size=(1,3), stride=1, padding=(0,1)) self.branch1_1b = BasicConv2d(384, 256, kernel_size=(3,1), stride=1, padding=(1,0)) self.branch2_0 = BasicConv2d(1536, 384, kernel_size=1, stride=1) self.branch2_1 = BasicConv2d(384, 448, kernel_size=(3,1), stride=1, padding=(1,0)) self.branch2_2 = BasicConv2d(448, 512, kernel_size=(1,3), stride=1, padding=(0,1)) self.branch2_3a = 
BasicConv2d(512, 256, kernel_size=(1,3), stride=1, padding=(0,1))
        self.branch2_3b = BasicConv2d(512, 256, kernel_size=(3,1), stride=1, padding=(1,0))

        self.branch3 = nn.Sequential(
            nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False),
            BasicConv2d(1536, 256, kernel_size=1, stride=1)
        )

    def forward(self, x):
        x0 = self.branch0(x)

        x1_0 = self.branch1_0(x)
        # Parallel 1x3 / 3x1 convs over the same 1x1 output, then concat.
        x1_1a = self.branch1_1a(x1_0)
        x1_1b = self.branch1_1b(x1_0)
        x1 = torch.cat((x1_1a, x1_1b), 1)

        x2_0 = self.branch2_0(x)
        x2_1 = self.branch2_1(x2_0)
        x2_2 = self.branch2_2(x2_1)
        x2_3a = self.branch2_3a(x2_2)
        x2_3b = self.branch2_3b(x2_2)
        x2 = torch.cat((x2_3a, x2_3b), 1)

        x3 = self.branch3(x)

        out = torch.cat((x0, x1, x2, x3), 1)
        return out


class InceptionV4(nn.Module):
    """Inception-v4 backbone: stem, 4x Inception-A, Reduction-A, 7x Inception-B,
    Reduction-B, 3x Inception-C, global average pool, linear classifier."""

    def __init__(self, num_classes=1001):
        super(InceptionV4, self).__init__()
        # Special attributes (filled in by the `inceptionv4` factory when
        # loading pretrained weights).
        self.input_space = None
        self.input_size = (299, 299, 3)
        self.mean = None
        self.std = None
        # Modules
        self.features = nn.Sequential(
            BasicConv2d(3, 32, kernel_size=3, stride=2),
            BasicConv2d(32, 32, kernel_size=3, stride=1),
            BasicConv2d(32, 64, kernel_size=3, stride=1, padding=1),
            Mixed_3a(),
            Mixed_4a(),
            Mixed_5a(),
            Inception_A(),
            Inception_A(),
            Inception_A(),
            Inception_A(),
            Reduction_A(), # Mixed_6a
            Inception_B(),
            Inception_B(),
            Inception_B(),
            Inception_B(),
            Inception_B(),
            Inception_B(),
            Inception_B(),
            Reduction_B(), # Mixed_7a
            Inception_C(),
            Inception_C(),
            Inception_C(),
            nn.AvgPool2d(8, count_include_pad=False)
        )
        self.classif = nn.Linear(1536, num_classes)

    def forward(self, x):
        x = self.features(x)
        x = x.view(x.size(0), -1)
        x = self.classif(x)
        return x


def inceptionv4(num_classes=1001, pretrained='imagenet'):
    """Factory for InceptionV4; optionally loads pretrained ImageNet weights
    and attaches preprocessing metadata from `pretrained_settings`."""
    if pretrained:
        settings = pretrained_settings['inceptionv4'][pretrained]
        assert num_classes == settings['num_classes'], \
            "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes)

        # both 'imagenet'&'imagenet+background' are loaded from same parameters
        model = InceptionV4(num_classes=1001)
        model.load_state_dict(model_zoo.load_url(settings['url']))

        if pretrained == 'imagenet':
            # Drop the extra "background" class (index 0) from the 1001-way
            # classifier so outputs align with the 1000 ImageNet classes.
            new_classif = nn.Linear(1536, 1000)
            new_classif.weight.data = model.classif.weight.data[1:]
            new_classif.bias.data = model.classif.bias.data[1:]
            model.classif = new_classif

        model.input_space = settings['input_space']
        model.input_size = settings['input_size']
        model.input_range = settings['input_range']
        model.mean = settings['mean']
        model.std = settings['std']
    else:
        model = InceptionV4(num_classes=num_classes)
    return model


'''
TEST
Run this code with:
```
cd $HOME/pretrained-models.pytorch
python -m pretrainedmodels.inceptionv4
```
'''
if __name__ == '__main__':
    # Smoke tests: each factory call must return a truthy model object.
    assert inceptionv4(num_classes=10, pretrained=None)
    print('success')
    assert inceptionv4(num_classes=1000, pretrained='imagenet')
    print('success')
    assert inceptionv4(num_classes=1001, pretrained='imagenet+background')
    print('success')

    # fail
    assert inceptionv4(num_classes=1001, pretrained='imagenet')
================================================ FILE: pretrained_models_pytorch/pretrainedmodels/nasnet.py ================================================
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from torch.autograd import Variable

# Download URLs plus preprocessing metadata for the pretrained checkpoints.
pretrained_settings = {
    'nasnetalarge': {
        'imagenet': {
            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/nasnetalarge-dc8c1432.pth',
            'input_space': 'RGB',
            'input_size': [3, 331, 331], # resize 354
            'input_range': [0, 1],
            'mean': [0.5, 0.5, 0.5],
            'std': [0.5, 0.5, 0.5],
            'num_classes': 1000
        },
        'imagenet+background': {
            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/nasnetalarge-dc8c1432.pth',
            'input_space': 'RGB',
            'input_size': [3, 331, 331], # resize 354
            'input_range': [0, 1],
            'mean': [0.5, 0.5, 0.5],
            'std': [0.5, 0.5, 0.5],
            'num_classes': 1001
        }
    }
}


class MaxPoolPad(nn.Module):
    """Max-pool with asymmetric (top/left) zero padding; the padded row and
    column are sliced off afterwards to mimic TF 'SAME' pooling."""

    def __init__(self):
        super(MaxPoolPad, self).__init__()
        self.pad = nn.ZeroPad2d((1, 0, 1, 0))
        self.pool = \
nn.MaxPool2d(3, stride=2, padding=1)

    def forward(self, x):
        x = self.pad(x)
        x = self.pool(x)
        # Drop the extra top row / left column added by the asymmetric pad.
        x = x[:, :, 1:, 1:]
        return x


class AvgPoolPad(nn.Module):
    """Average-pool counterpart of MaxPoolPad (asymmetric pad, then crop)."""

    def __init__(self, stride=2, padding=1):
        super(AvgPoolPad, self).__init__()
        self.pad = nn.ZeroPad2d((1, 0, 1, 0))
        self.pool = nn.AvgPool2d(3, stride=stride, padding=padding, count_include_pad=False)

    def forward(self, x):
        x = self.pad(x)
        x = self.pool(x)
        x = x[:, :, 1:, 1:]
        return x


class SeparableConv2d(nn.Module):
    """Depthwise separable convolution: per-channel (grouped) conv followed
    by a 1x1 pointwise conv."""

    def __init__(self, in_channels, out_channels, dw_kernel, dw_stride, dw_padding, bias=False):
        super(SeparableConv2d, self).__init__()
        # groups=in_channels makes the first conv depthwise.
        self.depthwise_conv2d = nn.Conv2d(in_channels, in_channels, dw_kernel,
                                          stride=dw_stride,
                                          padding=dw_padding,
                                          bias=bias,
                                          groups=in_channels)
        self.pointwise_conv2d = nn.Conv2d(in_channels, out_channels, 1, stride=1, bias=bias)

    def forward(self, x):
        x = self.depthwise_conv2d(x)
        x = self.pointwise_conv2d(x)
        return x


class BranchSeparables(nn.Module):
    """NASNet branch: ReLU -> sep-conv -> BN, twice; the first sep-conv keeps
    the channel count and applies the stride, the second maps to out_channels."""

    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, bias=False):
        super(BranchSeparables, self).__init__()
        self.relu = nn.ReLU()
        self.separable_1 = SeparableConv2d(in_channels, in_channels, kernel_size, stride, padding, bias=bias)
        self.bn_sep_1 = nn.BatchNorm2d(in_channels, eps=0.001, momentum=0.1, affine=True)
        self.relu1 = nn.ReLU()
        self.separable_2 = SeparableConv2d(in_channels, out_channels, kernel_size, 1, padding, bias=bias)
        self.bn_sep_2 = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1, affine=True)

    def forward(self, x):
        x = self.relu(x)
        x = self.separable_1(x)
        x = self.bn_sep_1(x)
        x = self.relu1(x)
        x = self.separable_2(x)
        x = self.bn_sep_2(x)
        return x


class BranchSeparablesStem(nn.Module):
    """Stem variant of BranchSeparables: the first separable conv already maps
    in_channels -> out_channels (the stem input has few channels)."""

    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, bias=False):
        super(BranchSeparablesStem, self).__init__()
        self.relu = nn.ReLU()
        self.separable_1 = SeparableConv2d(in_channels, out_channels, kernel_size, stride, padding, bias=bias)
        self.bn_sep_1 = nn.BatchNorm2d(out_channels, eps=0.001,
momentum=0.1, affine=True)
        self.relu1 = nn.ReLU()
        self.separable_2 = SeparableConv2d(out_channels, out_channels, kernel_size, 1, padding, bias=bias)
        self.bn_sep_2 = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.1, affine=True)

    def forward(self, x):
        x = self.relu(x)
        x = self.separable_1(x)
        x = self.bn_sep_1(x)
        x = self.relu1(x)
        x = self.separable_2(x)
        x = self.bn_sep_2(x)
        return x


class BranchSeparablesReduction(BranchSeparables):
    """BranchSeparables with an extra asymmetric zero pad before the first
    separable conv (cropped afterwards) — used inside reduction cells."""

    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, z_padding=1, bias=False):
        BranchSeparables.__init__(self, in_channels, out_channels, kernel_size, stride, padding, bias)
        self.padding = nn.ZeroPad2d((z_padding, 0, z_padding, 0))

    def forward(self, x):
        x = self.relu(x)
        x = self.padding(x)
        x = self.separable_1(x)
        # Crop the padded row/column; .contiguous() so BN sees a dense tensor.
        x = x[:, :, 1:, 1:].contiguous()
        x = self.bn_sep_1(x)
        x = self.relu1(x)
        x = self.separable_2(x)
        x = self.bn_sep_2(x)
        return x


class CellStem0(nn.Module):
    """First NASNet stem cell: operates on the 96-channel conv0 output and
    produces a 168-channel (4 x 42) reduced feature map."""

    def __init__(self):
        super(CellStem0, self).__init__()
        self.conv_1x1 = nn.Sequential()
        self.conv_1x1.add_module('relu', nn.ReLU())
        self.conv_1x1.add_module('conv', nn.Conv2d(96, 42, 1, stride=1, bias=False))
        self.conv_1x1.add_module('bn', nn.BatchNorm2d(42, eps=0.001, momentum=0.1, affine=True))

        self.comb_iter_0_left = BranchSeparables(42, 42, 5, 2, 2)
        self.comb_iter_0_right = BranchSeparablesStem(96, 42, 7, 2, 3, bias=False)

        self.comb_iter_1_left = nn.MaxPool2d(3, stride=2, padding=1)
        self.comb_iter_1_right = BranchSeparablesStem(96, 42, 7, 2, 3, bias=False)

        self.comb_iter_2_left = nn.AvgPool2d(3, stride=2, padding=1, count_include_pad=False)
        self.comb_iter_2_right = BranchSeparablesStem(96, 42, 5, 2, 2, bias=False)

        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_4_left = BranchSeparables(42, 42, 3, 1, 1, bias=False)
        self.comb_iter_4_right = nn.MaxPool2d(3, stride=2, padding=1)

    def forward(self, x):
        x1 = self.conv_1x1(x)

        x_comb_iter_0_left = self.comb_iter_0_left(x1)
        x_comb_iter_0_right = \
self.comb_iter_0_right(x)
        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right

        x_comb_iter_1_left = self.comb_iter_1_left(x1)
        x_comb_iter_1_right = self.comb_iter_1_right(x)
        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right

        x_comb_iter_2_left = self.comb_iter_2_left(x1)
        x_comb_iter_2_right = self.comb_iter_2_right(x)
        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right

        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)
        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1

        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)
        x_comb_iter_4_right = self.comb_iter_4_right(x1)
        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right

        # comb_iter_0 feeds later iterations but is not part of the output.
        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)
        return x_out


class CellStem1(nn.Module):
    """Second NASNet stem cell: combines conv0 output (via two shifted
    average-pool paths) with the CellStem0 output."""

    def __init__(self):
        super(CellStem1, self).__init__()
        self.conv_1x1 = nn.Sequential()
        self.conv_1x1.add_module('relu', nn.ReLU())
        self.conv_1x1.add_module('conv', nn.Conv2d(168, 84, 1, stride=1, bias=False))
        self.conv_1x1.add_module('bn', nn.BatchNorm2d(84, eps=0.001, momentum=0.1, affine=True))

        self.relu = nn.ReLU()
        self.path_1 = nn.Sequential()
        self.path_1.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))
        self.path_1.add_module('conv', nn.Conv2d(96, 42, 1, stride=1, bias=False))
        # NOTE(review): path_2 is an nn.ModuleList populated via add_module and
        # later used by attribute access (.pad/.avgpool/.conv) instead of being
        # called — works, but an nn.Sequential would be the conventional choice.
        self.path_2 = nn.ModuleList()
        self.path_2.add_module('pad', nn.ZeroPad2d((0, 1, 0, 1)))
        self.path_2.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))
        self.path_2.add_module('conv', nn.Conv2d(96, 42, 1, stride=1, bias=False))

        self.final_path_bn = nn.BatchNorm2d(84, eps=0.001, momentum=0.1, affine=True)

        self.comb_iter_0_left = BranchSeparables(84, 84, 5, 2, 2, bias=False)
        self.comb_iter_0_right = BranchSeparables(84, 84, 7, 2, 3, bias=False)

        self.comb_iter_1_left = nn.MaxPool2d(3, stride=2, padding=1)
        self.comb_iter_1_right = BranchSeparables(84, 84, 7, 2, 3, bias=False)

        self.comb_iter_2_left = nn.AvgPool2d(3, stride=2, padding=1, count_include_pad=False)
        self.comb_iter_2_right = BranchSeparables(84, 84, 5, 2, 2, bias=False)

        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_4_left = BranchSeparables(84, 84, 3, 1, 1, bias=False)
        self.comb_iter_4_right = nn.MaxPool2d(3, stride=2, padding=1)

    def forward(self, x_conv0, x_stem_0):
        x_left = self.conv_1x1(x_stem_0)

        x_relu = self.relu(x_conv0)
        # path 1
        x_path1 = self.path_1(x_relu)
        # path 2 (spatially shifted by one pixel before pooling)
        x_path2 = self.path_2.pad(x_relu)
        x_path2 = x_path2[:, :, 1:, 1:]
        x_path2 = self.path_2.avgpool(x_path2)
        x_path2 = self.path_2.conv(x_path2)
        # final path
        x_right = self.final_path_bn(torch.cat([x_path1, x_path2], 1))

        x_comb_iter_0_left = self.comb_iter_0_left(x_left)
        x_comb_iter_0_right = self.comb_iter_0_right(x_right)
        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right

        x_comb_iter_1_left = self.comb_iter_1_left(x_left)
        x_comb_iter_1_right = self.comb_iter_1_right(x_right)
        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right

        x_comb_iter_2_left = self.comb_iter_2_left(x_left)
        x_comb_iter_2_right = self.comb_iter_2_right(x_right)
        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right

        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)
        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1

        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)
        x_comb_iter_4_right = self.comb_iter_4_right(x_left)
        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right

        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)
        return x_out


class FirstCell(nn.Module):
    """First normal cell after a resolution change: the previous-cell input is
    brought to the current resolution via two shifted average-pool paths."""

    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):
        super(FirstCell, self).__init__()
        self.conv_1x1 = nn.Sequential()
        self.conv_1x1.add_module('relu', nn.ReLU())
        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))
        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))

        self.relu = nn.ReLU()
        self.path_1 = \
nn.Sequential()
        self.path_1.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))
        self.path_1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))
        # NOTE(review): ModuleList used via add_module and attribute access,
        # mirroring CellStem1 — behavior kept as-is.
        self.path_2 = nn.ModuleList()
        self.path_2.add_module('pad', nn.ZeroPad2d((0, 1, 0, 1)))
        self.path_2.add_module('avgpool', nn.AvgPool2d(1, stride=2, count_include_pad=False))
        self.path_2.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))

        self.final_path_bn = nn.BatchNorm2d(out_channels_left * 2, eps=0.001, momentum=0.1, affine=True)

        self.comb_iter_0_left = BranchSeparables(out_channels_right, out_channels_right, 5, 1, 2, bias=False)
        self.comb_iter_0_right = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)

        self.comb_iter_1_left = BranchSeparables(out_channels_right, out_channels_right, 5, 1, 2, bias=False)
        self.comb_iter_1_right = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)

        self.comb_iter_2_left = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_3_left = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)
        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_4_left = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)

    def forward(self, x, x_prev):
        x_relu = self.relu(x_prev)
        # path 1
        x_path1 = self.path_1(x_relu)
        # path 2 (shifted by one pixel before pooling)
        x_path2 = self.path_2.pad(x_relu)
        x_path2 = x_path2[:, :, 1:, 1:]
        x_path2 = self.path_2.avgpool(x_path2)
        x_path2 = self.path_2.conv(x_path2)
        # final path
        x_left = self.final_path_bn(torch.cat([x_path1, x_path2], 1))

        x_right = self.conv_1x1(x)

        x_comb_iter_0_left = self.comb_iter_0_left(x_right)
        x_comb_iter_0_right = self.comb_iter_0_right(x_left)
        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right

        x_comb_iter_1_left = self.comb_iter_1_left(x_left)
        x_comb_iter_1_right = self.comb_iter_1_right(x_left)
        x_comb_iter_1 = \
x_comb_iter_1_left + x_comb_iter_1_right

        x_comb_iter_2_left = self.comb_iter_2_left(x_right)
        x_comb_iter_2 = x_comb_iter_2_left + x_left

        x_comb_iter_3_left = self.comb_iter_3_left(x_left)
        x_comb_iter_3_right = self.comb_iter_3_right(x_left)
        x_comb_iter_3 = x_comb_iter_3_left + x_comb_iter_3_right

        x_comb_iter_4_left = self.comb_iter_4_left(x_right)
        x_comb_iter_4 = x_comb_iter_4_left + x_right

        # Normal cells also pass x_left through to the output concat.
        x_out = torch.cat([x_left, x_comb_iter_0, x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)
        return x_out


class NormalCell(nn.Module):
    """Standard NASNet-A normal cell: both inputs are reduced by a 1x1 conv,
    then five "comb" iterations are computed and concatenated (6-way)."""

    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):
        super(NormalCell, self).__init__()
        self.conv_prev_1x1 = nn.Sequential()
        self.conv_prev_1x1.add_module('relu', nn.ReLU())
        self.conv_prev_1x1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))
        self.conv_prev_1x1.add_module('bn', nn.BatchNorm2d(out_channels_left, eps=0.001, momentum=0.1, affine=True))

        self.conv_1x1 = nn.Sequential()
        self.conv_1x1.add_module('relu', nn.ReLU())
        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))
        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))

        self.comb_iter_0_left = BranchSeparables(out_channels_right, out_channels_right, 5, 1, 2, bias=False)
        self.comb_iter_0_right = BranchSeparables(out_channels_left, out_channels_left, 3, 1, 1, bias=False)

        self.comb_iter_1_left = BranchSeparables(out_channels_left, out_channels_left, 5, 1, 2, bias=False)
        self.comb_iter_1_right = BranchSeparables(out_channels_left, out_channels_left, 3, 1, 1, bias=False)

        self.comb_iter_2_left = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_3_left = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)
        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_4_left = BranchSeparables(out_channels_right,
                                                out_channels_right, 3, 1, 1, bias=False)

    def forward(self, x, x_prev):
        x_left = self.conv_prev_1x1(x_prev)
        x_right = self.conv_1x1(x)

        x_comb_iter_0_left = self.comb_iter_0_left(x_right)
        x_comb_iter_0_right = self.comb_iter_0_right(x_left)
        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right

        x_comb_iter_1_left = self.comb_iter_1_left(x_left)
        x_comb_iter_1_right = self.comb_iter_1_right(x_left)
        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right

        x_comb_iter_2_left = self.comb_iter_2_left(x_right)
        x_comb_iter_2 = x_comb_iter_2_left + x_left

        x_comb_iter_3_left = self.comb_iter_3_left(x_left)
        x_comb_iter_3_right = self.comb_iter_3_right(x_left)
        x_comb_iter_3 = x_comb_iter_3_left + x_comb_iter_3_right

        x_comb_iter_4_left = self.comb_iter_4_left(x_right)
        x_comb_iter_4 = x_comb_iter_4_left + x_right

        x_out = torch.cat([x_left, x_comb_iter_0, x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)
        return x_out


class ReductionCell0(nn.Module):
    """First NASNet reduction cell; uses the padded/cropped reduction
    branches (BranchSeparablesReduction, MaxPoolPad, AvgPoolPad)."""

    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):
        super(ReductionCell0, self).__init__()
        self.conv_prev_1x1 = nn.Sequential()
        self.conv_prev_1x1.add_module('relu', nn.ReLU())
        self.conv_prev_1x1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))
        self.conv_prev_1x1.add_module('bn', nn.BatchNorm2d(out_channels_left, eps=0.001, momentum=0.1, affine=True))

        self.conv_1x1 = nn.Sequential()
        self.conv_1x1.add_module('relu', nn.ReLU())
        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))
        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))

        self.comb_iter_0_left = BranchSeparablesReduction(out_channels_right, out_channels_right, 5, 2, 2, bias=False)
        self.comb_iter_0_right = BranchSeparablesReduction(out_channels_right, out_channels_right, 7, 2, 3, bias=False)

        self.comb_iter_1_left = MaxPoolPad()
        self.comb_iter_1_right = \
BranchSeparablesReduction(out_channels_right, out_channels_right, 7, 2, 3, bias=False)

        self.comb_iter_2_left = AvgPoolPad()
        self.comb_iter_2_right = BranchSeparablesReduction(out_channels_right, out_channels_right, 5, 2, 2, bias=False)

        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_4_left = BranchSeparablesReduction(out_channels_right, out_channels_right, 3, 1, 1, bias=False)
        self.comb_iter_4_right = MaxPoolPad()

    def forward(self, x, x_prev):
        x_left = self.conv_prev_1x1(x_prev)
        x_right = self.conv_1x1(x)

        x_comb_iter_0_left = self.comb_iter_0_left(x_right)
        x_comb_iter_0_right = self.comb_iter_0_right(x_left)
        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right

        x_comb_iter_1_left = self.comb_iter_1_left(x_right)
        x_comb_iter_1_right = self.comb_iter_1_right(x_left)
        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right

        x_comb_iter_2_left = self.comb_iter_2_left(x_right)
        x_comb_iter_2_right = self.comb_iter_2_right(x_left)
        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right

        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)
        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1

        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)
        x_comb_iter_4_right = self.comb_iter_4_right(x_right)
        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right

        # comb_iter_0 only feeds later iterations; not concatenated.
        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)
        return x_out


class ReductionCell1(nn.Module):
    """Second NASNet reduction cell; plain strided pools instead of the
    padded/cropped variants used in ReductionCell0."""

    def __init__(self, in_channels_left, out_channels_left, in_channels_right, out_channels_right):
        super(ReductionCell1, self).__init__()
        self.conv_prev_1x1 = nn.Sequential()
        self.conv_prev_1x1.add_module('relu', nn.ReLU())
        self.conv_prev_1x1.add_module('conv', nn.Conv2d(in_channels_left, out_channels_left, 1, stride=1, bias=False))
        self.conv_prev_1x1.add_module('bn', nn.BatchNorm2d(out_channels_left, eps=0.001, momentum=0.1, affine=True))

        self.conv_1x1 = nn.Sequential()
        self.conv_1x1.add_module('relu', nn.ReLU())
        self.conv_1x1.add_module('conv', nn.Conv2d(in_channels_right, out_channels_right, 1, stride=1, bias=False))
        self.conv_1x1.add_module('bn', nn.BatchNorm2d(out_channels_right, eps=0.001, momentum=0.1, affine=True))

        self.comb_iter_0_left = BranchSeparables(out_channels_right, out_channels_right, 5, 2, 2, bias=False)
        self.comb_iter_0_right = BranchSeparables(out_channels_right, out_channels_right, 7, 2, 3, bias=False)

        self.comb_iter_1_left = nn.MaxPool2d(3, stride=2, padding=1)
        self.comb_iter_1_right = BranchSeparables(out_channels_right, out_channels_right, 7, 2, 3, bias=False)

        self.comb_iter_2_left = nn.AvgPool2d(3, stride=2, padding=1, count_include_pad=False)
        self.comb_iter_2_right = BranchSeparables(out_channels_right, out_channels_right, 5, 2, 2, bias=False)

        self.comb_iter_3_right = nn.AvgPool2d(3, stride=1, padding=1, count_include_pad=False)

        self.comb_iter_4_left = BranchSeparables(out_channels_right, out_channels_right, 3, 1, 1, bias=False)
        self.comb_iter_4_right = nn.MaxPool2d(3, stride=2, padding=1)

    def forward(self, x, x_prev):
        x_left = self.conv_prev_1x1(x_prev)
        x_right = self.conv_1x1(x)

        x_comb_iter_0_left = self.comb_iter_0_left(x_right)
        x_comb_iter_0_right = self.comb_iter_0_right(x_left)
        x_comb_iter_0 = x_comb_iter_0_left + x_comb_iter_0_right

        x_comb_iter_1_left = self.comb_iter_1_left(x_right)
        x_comb_iter_1_right = self.comb_iter_1_right(x_left)
        x_comb_iter_1 = x_comb_iter_1_left + x_comb_iter_1_right

        x_comb_iter_2_left = self.comb_iter_2_left(x_right)
        x_comb_iter_2_right = self.comb_iter_2_right(x_left)
        x_comb_iter_2 = x_comb_iter_2_left + x_comb_iter_2_right

        x_comb_iter_3_right = self.comb_iter_3_right(x_comb_iter_0)
        x_comb_iter_3 = x_comb_iter_3_right + x_comb_iter_1

        x_comb_iter_4_left = self.comb_iter_4_left(x_comb_iter_0)
        x_comb_iter_4_right = self.comb_iter_4_right(x_right)
        x_comb_iter_4 = x_comb_iter_4_left + x_comb_iter_4_right

        x_out = torch.cat([x_comb_iter_1, x_comb_iter_2, x_comb_iter_3, x_comb_iter_4], 1)
        return x_out


class \
NASNetALarge(nn.Module):
    """NASNet-A Large: conv stem, two stem cells, then three stages of six
    cells (FirstCell + 5x NormalCell) separated by reduction cells."""

    def __init__(self, num_classes=1001):
        super(NASNetALarge, self).__init__()
        self.num_classes = num_classes

        self.conv0 = nn.Sequential()
        self.conv0.add_module('conv', nn.Conv2d(in_channels=3, out_channels=96, kernel_size=3, padding=0, stride=2, bias=False))
        self.conv0.add_module('bn', nn.BatchNorm2d(96, eps=0.001, momentum=0.1, affine=True))

        self.cell_stem_0 = CellStem0()
        self.cell_stem_1 = CellStem1()

        # Stage 1 (168 filters per cell).
        self.cell_0 = FirstCell(in_channels_left=168, out_channels_left=84, in_channels_right=336, out_channels_right=168)
        self.cell_1 = NormalCell(in_channels_left=336, out_channels_left=168, in_channels_right=1008, out_channels_right=168)
        self.cell_2 = NormalCell(in_channels_left=1008, out_channels_left=168, in_channels_right=1008, out_channels_right=168)
        self.cell_3 = NormalCell(in_channels_left=1008, out_channels_left=168, in_channels_right=1008, out_channels_right=168)
        self.cell_4 = NormalCell(in_channels_left=1008, out_channels_left=168, in_channels_right=1008, out_channels_right=168)
        self.cell_5 = NormalCell(in_channels_left=1008, out_channels_left=168, in_channels_right=1008, out_channels_right=168)

        self.reduction_cell_0 = ReductionCell0(in_channels_left=1008, out_channels_left=336, in_channels_right=1008, out_channels_right=336)

        # Stage 2 (336 filters per cell).
        self.cell_6 = FirstCell(in_channels_left=1008, out_channels_left=168, in_channels_right=1344, out_channels_right=336)
        self.cell_7 = NormalCell(in_channels_left=1344, out_channels_left=336, in_channels_right=2016, out_channels_right=336)
        self.cell_8 = NormalCell(in_channels_left=2016, out_channels_left=336, in_channels_right=2016, out_channels_right=336)
        self.cell_9 = NormalCell(in_channels_left=2016, out_channels_left=336, in_channels_right=2016, out_channels_right=336)
        self.cell_10 = NormalCell(in_channels_left=2016, out_channels_left=336, in_channels_right=2016, out_channels_right=336)
        self.cell_11 = NormalCell(in_channels_left=2016, out_channels_left=336, in_channels_right=2016, out_channels_right=336)

        self.reduction_cell_1 = ReductionCell1(in_channels_left=2016, out_channels_left=672, in_channels_right=2016, out_channels_right=672)

        # Stage 3 (672 filters per cell).
        self.cell_12 = FirstCell(in_channels_left=2016, out_channels_left=336, in_channels_right=2688, out_channels_right=672)
        self.cell_13 = NormalCell(in_channels_left=2688, out_channels_left=672, in_channels_right=4032, out_channels_right=672)
        self.cell_14 = NormalCell(in_channels_left=4032, out_channels_left=672, in_channels_right=4032, out_channels_right=672)
        self.cell_15 = NormalCell(in_channels_left=4032, out_channels_left=672, in_channels_right=4032, out_channels_right=672)
        self.cell_16 = NormalCell(in_channels_left=4032, out_channels_left=672, in_channels_right=4032, out_channels_right=672)
        self.cell_17 = NormalCell(in_channels_left=4032, out_channels_left=672, in_channels_right=4032, out_channels_right=672)

        self.relu = nn.ReLU()
        # 11x11 global pool matches the 331x331 input resolution.
        self.avgpool = nn.AvgPool2d(11, stride=1, padding=0)
        self.dropout = nn.Dropout()
        self.linear = nn.Linear(4032, self.num_classes)

    def features(self, x):
        # Each cell receives the current features and the previous cell's
        # output (skip connection), per the NASNet-A wiring.
        x_conv0 = self.conv0(x)
        x_stem_0 = self.cell_stem_0(x_conv0)
        x_stem_1 = self.cell_stem_1(x_conv0, x_stem_0)

        x_cell_0 = self.cell_0(x_stem_1, x_stem_0)
        x_cell_1 = self.cell_1(x_cell_0, x_stem_1)
        x_cell_2 = self.cell_2(x_cell_1, x_cell_0)
        x_cell_3 = self.cell_3(x_cell_2, x_cell_1)
        x_cell_4 = self.cell_4(x_cell_3, x_cell_2)
        x_cell_5 = self.cell_5(x_cell_4, x_cell_3)

        x_reduction_cell_0 = self.reduction_cell_0(x_cell_5, x_cell_4)

        x_cell_6 = self.cell_6(x_reduction_cell_0, x_cell_4)
        x_cell_7 = self.cell_7(x_cell_6, x_reduction_cell_0)
        x_cell_8 = self.cell_8(x_cell_7, x_cell_6)
        x_cell_9 = self.cell_9(x_cell_8, x_cell_7)
        x_cell_10 = self.cell_10(x_cell_9, x_cell_8)
        x_cell_11 = self.cell_11(x_cell_10, x_cell_9)

        x_reduction_cell_1 = self.reduction_cell_1(x_cell_11, x_cell_10)

        x_cell_12 = self.cell_12(x_reduction_cell_1, x_cell_10)
        x_cell_13 = self.cell_13(x_cell_12, x_reduction_cell_1)
        x_cell_14 = self.cell_14(x_cell_13, x_cell_12)
        x_cell_15 = self.cell_15(x_cell_14,
x_cell_13) x_cell_16 = self.cell_16(x_cell_15, x_cell_14) x_cell_17 = self.cell_17(x_cell_16, x_cell_15) return x_cell_17 def classifier(self, x): x = self.relu(x) x = self.avgpool(x) x = x.view(x.size(0), -1) x = self.dropout(x) x = self.linear(x) return x def forward(self, x): x = self.features(x) x = self.classifier(x) return x def nasnetalarge(num_classes=1001, pretrained='imagenet'): r"""NASNetALarge model architecture from the `"NASNet" `_ paper. """ if pretrained: settings = pretrained_settings['nasnetalarge'][pretrained] assert num_classes == settings['num_classes'], \ "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes) # both 'imagenet'&'imagenet+background' are loaded from same parameters model = NASNetALarge(num_classes=1001) model.load_state_dict(model_zoo.load_url(settings['url'])) if pretrained == 'imagenet': new_linear = nn.Linear(model.linear.in_features, 1000) new_linear.weight.data = model.linear.weight.data[1:] new_linear.bias.data = model.linear.bias.data[1:] model.linear = new_linear model.input_space = settings['input_space'] model.input_size = settings['input_size'] model.input_range = settings['input_range'] model.mean = settings['mean'] model.std = settings['std'] else: model = NASNetALarge(num_classes=num_classes) return model if __name__ == "__main__": model = NasNetALarge() input = Variable(torch.randn(2,3,331,331)) output = model(input) print(output.size()) ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/resnext.py ================================================ import os from os.path import expanduser import collections import torch import torch.nn as nn from torch.autograd import Variable from .resnext_features import resnext101_32x4d_features from .resnext_features import resnext101_64x4d_features __all__ = ['ResNeXt101_32x4d', 'resnext101_32x4d', 'ResNeXt101_64x4d', 'resnext101_64x4d'] pretrained_settings = { 'resnext101_32x4d': { 'imagenet': { 'url': 
'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/resnext101_32x4d.pth',
            'input_space': 'RGB',
            'input_size': [3, 224, 224],
            'input_range': [0, 1],
            'mean': [0.485, 0.456, 0.406],
            'std': [0.229, 0.224, 0.225],
            'num_classes': 1000
        }
    },
    'resnext101_64x4d': {
        'imagenet': {
            'url': 'http://webia.lip6.fr/~cadene/Downloads/pretrained-models.pytorch/resnext101_64x4d.pth',
            'input_space': 'RGB',
            'input_size': [3, 224, 224],
            'input_range': [0, 1],
            'mean': [0.485, 0.456, 0.406],
            'std': [0.229, 0.224, 0.225],
            'num_classes': 1000
        }
    }
}


class ResNeXt101_32x4d(nn.Module):
    """ResNeXt-101 (32x4d): Torch7-converted feature extractor plus a
    7x7 average pool and a linear classifier."""

    def __init__(self, nb_classes=1000):
        super(ResNeXt101_32x4d, self).__init__()
        self.features = resnext101_32x4d_features
        self.avgpool = nn.AvgPool2d((7, 7), (1, 1))
        self.fc = nn.Linear(2048, nb_classes)

    def forward(self, input):
        x = self.features(input)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x


class ResNeXt101_64x4d(nn.Module):
    """ResNeXt-101 (64x4d) variant of ResNeXt101_32x4d."""

    def __init__(self, nb_classes=1000):
        super(ResNeXt101_64x4d, self).__init__()
        self.features = resnext101_64x4d_features
        self.avgpool = nn.AvgPool2d((7, 7), (1, 1))
        self.fc = nn.Linear(2048, nb_classes)

    def forward(self, input):
        x = self.features(input)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x


def resnext101_32x4d(num_classes=1000, pretrained='imagenet'):
    """Factory for ResNeXt101_32x4d; downloads the Torch7-converted weights
    on first use and splits the final linear layer out of the feature dict."""
    model = ResNeXt101_32x4d()
    if pretrained:
        settings = pretrained_settings['resnext101_32x4d'][pretrained]
        assert num_classes == settings['num_classes'], \
            "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes)
        dir_models = os.path.join(expanduser("~"), '.torch/resnext')
        path_pth = os.path.join(dir_models, 'resnext101_32x4d.pth')
        if not os.path.isfile(path_pth):
            os.system('mkdir -p ' + dir_models)
            os.system('wget {} -O {}'.format(settings['url'], path_pth))
        state_dict_features = torch.load(path_pth)
        # Key '10.1' is the Torch7-exported final linear layer; move it from
        # the features state dict to the fc module.
        state_dict_fc = collections.OrderedDict()
        state_dict_fc['weight'] = state_dict_features['10.1.weight']
        state_dict_fc['bias'] = \
state_dict_features['10.1.bias'] del state_dict_features['10.1.weight'] del state_dict_features['10.1.bias'] model.features.load_state_dict(state_dict_features) model.fc.load_state_dict(state_dict_fc) model.input_space = settings['input_space'] model.input_size = settings['input_size'] model.mean = settings['mean'] model.std = settings['std'] return model def resnext101_64x4d(num_classes=1000, pretrained='imagenet'): model = ResNeXt101_64x4d() if pretrained: settings = pretrained_settings['resnext101_64x4d'][pretrained] assert num_classes == settings['num_classes'], \ "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes) dir_models = os.path.join(expanduser("~"), '.torch/resnext') path_pth = os.path.join(dir_models, 'resnext101_64x4d.pth') if not os.path.isfile(path_pth): os.system('mkdir -p ' + dir_models) os.system('wget {} -O {}'.format(settings['url'], path_pth)) state_dict_features = torch.load(path_pth) state_dict_fc = collections.OrderedDict() state_dict_fc['weight'] = state_dict_features['10.1.weight'] state_dict_fc['bias'] = state_dict_features['10.1.bias'] del state_dict_features['10.1.weight'] del state_dict_features['10.1.bias'] model.features.load_state_dict(state_dict_features) model.fc.load_state_dict(state_dict_fc) model.input_space = settings['input_space'] model.input_size = settings['input_size'] model.input_range = settings['input_range'] model.mean = settings['mean'] model.std = settings['std'] return model ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/resnext_features/__init__.py ================================================ from .resnext101_32x4d_features import resnext101_32x4d_features from .resnext101_64x4d_features import resnext101_64x4d_features ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/resnext_features/resnext101_32x4d_features.py ================================================ import torch import 
torch.nn as nn
from torch.autograd import Variable
from functools import reduce


class LambdaBase(nn.Sequential):
    """Base for Torch7-style functional wrappers: applies every child module
    to the same input and collects the results (or passes the input through
    unchanged when there are no children)."""

    def __init__(self, fn, *args):
        super(LambdaBase, self).__init__(*args)
        self.lambda_func = fn

    def forward_prepare(self, input):
        output = []
        for module in self._modules.values():
            output.append(module(input))
        return output if output else input


class Lambda(LambdaBase):
    # Apply lambda_func to the (single) prepared result.
    def forward(self, input):
        return self.lambda_func(self.forward_prepare(input))


class LambdaMap(LambdaBase):
    # Torch7 ConcatTable analogue: map lambda_func over each child's output.
    def forward(self, input):
        return list(map(self.lambda_func,self.forward_prepare(input)))


class LambdaReduce(LambdaBase):
    # Torch7 CAddTable analogue: fold the child outputs with lambda_func.
    def forward(self, input):
        return reduce(self.lambda_func,self.forward_prepare(input))


# Auto-converted from a Torch7 ResNeXt-101 (32x4d) model; each residual block
# is a ConcatTable (main path + identity/projection shortcut) followed by an
# element-wise add (CAddTable) and ReLU.
resnext101_32x4d_features = nn.Sequential( # Sequential,
    nn.Conv2d(3,64,(7, 7),(2, 2),(3, 3),1,1,bias=False),
    nn.BatchNorm2d(64),
    nn.ReLU(),
    nn.MaxPool2d((3, 3),(2, 2),(1, 1)),
    nn.Sequential( # Sequential,
        nn.Sequential( # Sequential,
            LambdaMap(lambda x: x, # ConcatTable,
                nn.Sequential( # Sequential,
                    nn.Sequential( # Sequential,
                        nn.Conv2d(64,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
                        nn.BatchNorm2d(128),
                        nn.ReLU(),
                        nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
                        nn.BatchNorm2d(128),
                        nn.ReLU(),
                    ),
                    nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
                    nn.BatchNorm2d(256),
                ),
                nn.Sequential( # Sequential,
                    nn.Conv2d(64,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
                    nn.BatchNorm2d(256),
                ),
            ),
            LambdaReduce(lambda x,y: x+y), # CAddTable,
            nn.ReLU(),
        ),
        nn.Sequential( # Sequential,
            LambdaMap(lambda x: x, # ConcatTable,
                nn.Sequential( # Sequential,
                    nn.Sequential( # Sequential,
                        nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False),
                        nn.BatchNorm2d(128),
                        nn.ReLU(),
                        nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False),
                        nn.BatchNorm2d(128),
                        nn.ReLU(),
                    ),
                    nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False),
                    nn.BatchNorm2d(256),
                ),
                Lambda(lambda x: x), # Identity,
            ),
            LambdaReduce(lambda x,y: x+y), # CAddTable,
            nn.ReLU(),
        ),
        nn.Sequential( # Sequential,
            LambdaMap(lambda x: x, # ConcatTable,
nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(256,128,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(128), nn.ReLU(), nn.Conv2d(128,128,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(128), nn.ReLU(), ), nn.Conv2d(128,256,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(256), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), ), nn.Sequential( # Sequential, nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(256,256,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256,256,(3, 3),(2, 2),(1, 1),1,32,bias=False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), ), nn.Sequential( # Sequential, nn.Conv2d(256,512,(1, 1),(2, 2),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), ), ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, 
LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(512,256,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256,256,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), ), nn.Sequential( # Sequential, nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(512,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(2, 2),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), nn.Sequential( # Sequential, nn.Conv2d(512,1024,(1, 1),(2, 2),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, 
nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # 
ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, 
nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), 
nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 
1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,512,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512,512,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), ), nn.Sequential( # Sequential, nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(1024,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024,1024,(3, 3),(2, 2),(1, 1),1,32,bias=False), 
nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(2048), ), nn.Sequential( # Sequential, nn.Conv2d(1024,2048,(1, 1),(2, 2),(0, 0),1,1,bias=False), nn.BatchNorm2d(2048), ), ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(2048,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(2048), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), nn.Sequential( # Sequential, LambdaMap(lambda x: x, # ConcatTable, nn.Sequential( # Sequential, nn.Sequential( # Sequential, nn.Conv2d(2048,1024,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024,1024,(3, 3),(1, 1),(1, 1),1,32,bias=False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024,2048,(1, 1),(1, 1),(0, 0),1,1,bias=False), nn.BatchNorm2d(2048), ), Lambda(lambda x: x), # Identity, ), LambdaReduce(lambda x,y: x+y), # CAddTable, nn.ReLU(), ), ) ) ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/resnext_features/resnext101_64x4d_features.py ================================================ import torch import torch.nn as nn from torch.autograd import Variable from functools import reduce class LambdaBase(nn.Sequential): def __init__(self, fn, *args): super(LambdaBase, self).__init__(*args) self.lambda_func = fn def forward_prepare(self, input): output = [] for module in self._modules.values(): output.append(module(input)) return output if output else input class Lambda(LambdaBase): def forward(self, input): return self.lambda_func(self.forward_prepare(input)) class LambdaMap(LambdaBase): def forward(self, input): 
return list(map(self.lambda_func,self.forward_prepare(input))) class LambdaReduce(LambdaBase): def forward(self, input): return reduce(self.lambda_func,self.forward_prepare(input)) resnext101_64x4d_features = nn.Sequential(#Sequential, nn.Conv2d(3, 64, (7, 7), (2, 2), (3, 3), 1, 1, bias = False), nn.BatchNorm2d(64), nn.ReLU(), nn.MaxPool2d((3, 3), (2, 2), (1, 1)), nn.Sequential(#Sequential, nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(64, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(256), ), nn.Sequential(#Sequential, nn.Conv2d(64, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(256), ), ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(256), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(256), nn.ReLU(), nn.Conv2d(256, 256, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(256), nn.ReLU(), ), nn.Conv2d(256, 256, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(256), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), ), 
nn.Sequential(#Sequential, nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(256, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (2, 2), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), ), nn.Sequential(#Sequential, nn.Conv2d(256, 512, (1, 1), (2, 2), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), ), ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(512), nn.ReLU(), nn.Conv2d(512, 512, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(512), nn.ReLU(), ), nn.Conv2d(512, 512, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), 
nn.BatchNorm2d(512), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), ), nn.Sequential(#Sequential, nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(512, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (2, 2), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), nn.Sequential(#Sequential, nn.Conv2d(512, 1024, (1, 1), (2, 2), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 
1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), 
nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), 
(1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, 
nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, 
#ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), nn.Conv2d(1024, 1024, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(1024), nn.ReLU(), ), nn.Conv2d(1024, 1024, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(1024), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), ), 
nn.Sequential(#Sequential, nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(1024, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(2048), nn.ReLU(), nn.Conv2d(2048, 2048, (3, 3), (2, 2), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(2048), nn.ReLU(), ), nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(2048), ), nn.Sequential(#Sequential, nn.Conv2d(1024, 2048, (1, 1), (2, 2), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(2048), ), ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(2048), nn.ReLU(), nn.Conv2d(2048, 2048, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(2048), nn.ReLU(), ), nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(2048), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), nn.Sequential(#Sequential, LambdaMap(lambda x: x, #ConcatTable, nn.Sequential(#Sequential, nn.Sequential(#Sequential, nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(2048), nn.ReLU(), nn.Conv2d(2048, 2048, (3, 3), (1, 1), (1, 1), 1, 64, bias = False), nn.BatchNorm2d(2048), nn.ReLU(), ), nn.Conv2d(2048, 2048, (1, 1), (1, 1), (0, 0), 1, 1, bias = False), nn.BatchNorm2d(2048), ), Lambda(lambda x: x), #Identity, ), LambdaReduce(lambda x, y: x + y), #CAddTable, nn.ReLU(), ), ) ) ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/torchvision.py ================================================ import torchvision.models as models import torch.utils.model_zoo as model_zoo __all__ = [ 'alexnet', 'densenet121', 'densenet169', 'densenet201', 'densenet161', 'resnet18', 'resnet34', 
'resnet50', 'resnet101', 'resnet152', 'inceptionv3', 'squeezenet1_0', 'squeezenet1_1', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn', 'vgg19_bn', 'vgg19' ] model_urls = { 'alexnet': 'https://download.pytorch.org/models/alexnet-owt-4df8aa71.pth', 'densenet121': 'https://download.pytorch.org/models/densenet121-241335ed.pth', 'densenet169': 'https://download.pytorch.org/models/densenet169-6f0f7f60.pth', 'densenet201': 'https://download.pytorch.org/models/densenet201-4c113574.pth', 'densenet161': 'https://download.pytorch.org/models/densenet161-17b70270.pth', 'inceptionv3': 'https://download.pytorch.org/models/inception_v3_google-1a9a5a14.pth', 'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth', 'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth', 'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth', 'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth', 'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth', 'squeezenet1_0': 'https://download.pytorch.org/models/squeezenet1_0-a815701f.pth', 'squeezenet1_1': 'https://download.pytorch.org/models/squeezenet1_1-f364aa15.pth', 'vgg11': 'https://download.pytorch.org/models/vgg11-bbd30ac9.pth', 'vgg13': 'https://download.pytorch.org/models/vgg13-c768596a.pth', 'vgg16': 'https://download.pytorch.org/models/vgg16-397923af.pth', 'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth', 'vgg11_bn': 'https://download.pytorch.org/models/vgg11_bn-6002323d.pth', 'vgg13_bn': 'https://download.pytorch.org/models/vgg13_bn-abd245e5.pth', 'vgg16_bn': 'https://download.pytorch.org/models/vgg16_bn-6c64b313.pth', 'vgg19_bn': 'https://download.pytorch.org/models/vgg19_bn-c79401a0.pth', # 'vgg16_caffe': 'https://s3-us-west-2.amazonaws.com/jcjohns-models/vgg16-00b39a1b.pth', # 'vgg19_caffe': 'https://s3-us-west-2.amazonaws.com/jcjohns-models/vgg19-d01eb7cb.pth' } input_sizes = {} means = {} stds = {} for model_name in 
__all__: input_sizes[model_name] = [3, 224, 224] means[model_name] = [0.485, 0.456, 0.406] stds[model_name] = [0.229, 0.224, 0.225] for model_name in ['inceptionv3']: input_sizes[model_name] = [3, 299, 299] means[model_name] = [0.5, 0.5, 0.5] stds[model_name] = [0.5, 0.5, 0.5] pretrained_settings = {} for model_name in __all__: pretrained_settings[model_name] = { 'imagenet': { 'url': model_urls[model_name], 'input_space': 'RGB', 'input_size': input_sizes[model_name], 'input_range': [0, 1], 'mean': means[model_name], 'std': stds[model_name], 'num_classes': 1000 } } # for model_name in ['vgg16', 'vgg19']: # pretrained_settings[model_name]['imagenet_caffe'] = { # 'url': model_urls[model_name + '_caffe'], # 'input_space': 'BGR', # 'input_size': input_sizes[model_name], # 'input_range': [0, 255], # 'mean': [103.939, 116.779, 123.68], # 'std': [1., 1., 1.], # 'num_classes': 1000 # } def load_pretrained(model, num_classes, settings): assert num_classes == settings['num_classes'], \ "num_classes should be {}, but is {}".format(settings['num_classes'], num_classes) model.load_state_dict(model_zoo.load_url(settings['url'])) model.input_space = settings['input_space'] model.input_size = settings['input_size'] model.input_range = settings['input_range'] model.mean = settings['mean'] model.std = settings['std'] return model def alexnet(num_classes=1000, pretrained='imagenet'): r"""AlexNet model architecture from the `"One weird trick..." `_ paper. 
""" model = models.alexnet(pretrained=False) if pretrained is not None: settings = pretrained_settings['alexnet'][pretrained] model = load_pretrained(model, num_classes, settings) return model def densenet121(num_classes=1000, pretrained='imagenet'): r"""Densenet-121 model from `"Densely Connected Convolutional Networks" ` """ model = models.densenet121(pretrained=False) if pretrained is not None: settings = pretrained_settings['densenet121'][pretrained] model = load_pretrained(model, num_classes, settings) return model def densenet169(num_classes=1000, pretrained='imagenet'): r"""Densenet-169 model from `"Densely Connected Convolutional Networks" ` """ model = models.densenet169(pretrained=False) if pretrained is not None: settings = pretrained_settings['densenet169'][pretrained] model = load_pretrained(model, num_classes, settings) return model def densenet201(num_classes=1000, pretrained='imagenet'): r"""Densenet-201 model from `"Densely Connected Convolutional Networks" ` """ model = models.densenet201(pretrained=False) if pretrained is not None: settings = pretrained_settings['densenet201'][pretrained] model = load_pretrained(model, num_classes, settings) return model def densenet161(num_classes=1000, pretrained='imagenet'): r"""Densenet-161 model from `"Densely Connected Convolutional Networks" ` """ model = models.densenet161(pretrained=False) if pretrained is not None: settings = pretrained_settings['densenet161'][pretrained] model = load_pretrained(model, num_classes, settings) return model def inceptionv3(num_classes=1000, pretrained='imagenet'): r"""Inception v3 model architecture from `"Rethinking the Inception Architecture for Computer Vision" `_. """ model = models.inception_v3(pretrained=False) if pretrained is not None: settings = pretrained_settings['inceptionv3'][pretrained] model = load_pretrained(model, num_classes, settings) return model def resnet18(num_classes=1000, pretrained='imagenet'): """Constructs a ResNet-18 model. 
""" model = models.resnet18(pretrained=False) if pretrained is not None: settings = pretrained_settings['resnet18'][pretrained] model = load_pretrained(model, num_classes, settings) return model def resnet34(num_classes=1000, pretrained='imagenet'): """Constructs a ResNet-34 model. """ model = models.resnet34(pretrained=False) if pretrained is not None: settings = pretrained_settings['resnet34'][pretrained] model = load_pretrained(model, num_classes, settings) return model def resnet50(num_classes=1000, pretrained='imagenet'): """Constructs a ResNet-50 model. """ model = models.resnet50(pretrained=False) if pretrained is not None: settings = pretrained_settings['resnet50'][pretrained] model = load_pretrained(model, num_classes, settings) return model def resnet101(num_classes=1000, pretrained='imagenet'): """Constructs a ResNet-101 model. """ model = models.resnet101(pretrained=False) if pretrained is not None: settings = pretrained_settings['resnet101'][pretrained] model = load_pretrained(model, num_classes, settings) return model def resnet152(num_classes=1000, pretrained='imagenet'): """Constructs a ResNet-152 model. """ model = models.resnet152(pretrained=False) if pretrained is not None: settings = pretrained_settings['resnet152'][pretrained] model = load_pretrained(model, num_classes, settings) return model def squeezenet1_0(num_classes=1000, pretrained='imagenet'): r"""SqueezeNet model architecture from the `"SqueezeNet: AlexNet-level accuracy with 50x fewer parameters and <0.5MB model size" `_ paper. """ model = models.squeezenet1_0(pretrained=False) if pretrained is not None: settings = pretrained_settings['squeezenet1_0'][pretrained] model = load_pretrained(model, num_classes, settings) return model def squeezenet1_1(num_classes=1000, pretrained='imagenet'): r"""SqueezeNet 1.1 model from the `official SqueezeNet repo `_. SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters than SqueezeNet 1.0, without sacrificing accuracy. 
""" model = models.squeezenet1_1(pretrained=False) if pretrained is not None: settings = pretrained_settings['squeezenet1_1'][pretrained] model = load_pretrained(model, num_classes, settings) return model def vgg11(num_classes=1000, pretrained='imagenet'): """VGG 11-layer model (configuration "A") """ model = models.vgg11(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg11'][pretrained] model = load_pretrained(model, num_classes, settings) return model def vgg11_bn(num_classes=1000, pretrained='imagenet'): """VGG 11-layer model (configuration "A") with batch normalization """ model = models.vgg11_bn(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg11_bn'][pretrained] model = load_pretrained(model, num_classes, settings) return model def vgg13(num_classes=1000, pretrained='imagenet'): """VGG 13-layer model (configuration "B") """ model = models.vgg13(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg13'][pretrained] model = load_pretrained(model, num_classes, settings) return model def vgg13_bn(num_classes=1000, pretrained='imagenet'): """VGG 13-layer model (configuration "B") with batch normalization """ model = models.vgg13_bn(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg13_bn'][pretrained] model = load_pretrained(model, num_classes, settings) return model def vgg16(num_classes=1000, pretrained='imagenet'): """VGG 16-layer model (configuration "D") """ model = models.vgg16(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg16'][pretrained] model = load_pretrained(model, num_classes, settings) return model def vgg16_bn(num_classes=1000, pretrained='imagenet'): """VGG 16-layer model (configuration "D") with batch normalization """ model = models.vgg16_bn(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg16_bn'][pretrained] model = load_pretrained(model, num_classes, settings) return 
model def vgg19(num_classes=1000, pretrained='imagenet'): """VGG 19-layer model (configuration "E") """ model = models.vgg19(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg19'][pretrained] model = load_pretrained(model, num_classes, settings) return model def vgg19_bn(num_classes=1000, pretrained='imagenet'): """VGG 19-layer model (configuration 'E') with batch normalization """ model = models.vgg19_bn(pretrained=False) if pretrained is not None: settings = pretrained_settings['vgg19_bn'][pretrained] model = load_pretrained(model, num_classes, settings) return model ================================================ FILE: pretrained_models_pytorch/pretrainedmodels/wideresnet.py ================================================ import os from os.path import expanduser import hickle as hkl import torch import torch.nn.functional as F from torch.autograd import Variable __all__ = ['wideresnet50'] model_urls = { 'wideresnet152': 'https://s3.amazonaws.com/pytorch/h5models/wide-resnet-50-2-export.hkl' } def define_model(params): def conv2d(input, params, base, stride=1, pad=0): return F.conv2d(input, params[base + '.weight'], params[base + '.bias'], stride, pad) def group(input, params, base, stride, n): o = input for i in range(0,n): b_base = ('%s.block%d.conv') % (base, i) x = o o = conv2d(x, params, b_base + '0') o = F.relu(o) o = conv2d(o, params, b_base + '1', stride=i==0 and stride or 1, pad=1) o = F.relu(o) o = conv2d(o, params, b_base + '2') if i == 0: o += conv2d(x, params, b_base + '_dim', stride=stride) else: o += x o = F.relu(o) return o # determine network size by parameters blocks = [sum([re.match('group%d.block\d+.conv0.weight'%j, k) is not None for k in params.keys()]) for j in range(4)] def f(input, params, pooling_classif=True): o = F.conv2d(input, params['conv0.weight'], params['conv0.bias'], 2, 3) o = F.relu(o) o = F.max_pool2d(o, 3, 2, 1) o_g0 = group(o, params, 'group0', 1, blocks[0]) o_g1 = group(o_g0, params, 
'group1', 2, blocks[1]) o_g2 = group(o_g1, params, 'group2', 2, blocks[2]) o_g3 = group(o_g2, params, 'group3', 2, blocks[3]) if pooling_classif: o = F.avg_pool2d(o_g3, 7, 1, 0) o = o.view(o.size(0), -1) o = F.linear(o, params['fc.weight'], params['fc.bias']) return o return f class WideResNet(nn.Module): def __init__(self, pooling): super(WideResNet, self).__init__() self.pooling = pooling self.params = params def forward(self, x): x = f(x, self.params, self.pooling) return x def wideresnet50(pooling): dir_models = os.path.join(expanduser("~"), '.torch/wideresnet') path_hkl = os.path.join(dir_models, 'wideresnet50.hkl') if os.path.isfile(path_hkl): params = hkl.load(path_hkl) # convert numpy arrays to torch Variables for k,v in sorted(params.items()): print k, v.shape params[k] = Variable(torch.from_numpy(v), requires_grad=True) else: os.system('mkdir -p ' + dir_models) os.system('wget {} -O {}'.format(model_urls['wideresnet50'], path_hkl)) f = define_model(params) model = WideResNet(pooling) return model ================================================ FILE: pretrained_models_pytorch/test/imagenet.py ================================================ import argparse import os import shutil import time import torch import torch.nn as nn import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import torch.utils.data import torchvision.transforms as transforms import torchvision.datasets as datasets #import torchvision.models as models import sys sys.path.append('.') import pretrainedmodels as models # models.__dict__['fbresnet152'] = pretrainedmodels.__dict__['fbresnet152'] # models.__dict__['resnext101_32x4d'] = pretrainedmodels.__dict__['resnext101_32x4d'] # models.__dict__['resnext101_64x4d'] = pretrainedmodels.__dict__['resnext101_64x4d'] model_names = sorted(name for name in models.__dict__ if not name.startswith("__") and callable(models.__dict__[name])) parser = argparse.ArgumentParser(description='PyTorch ImageNet Training') 
# Command-line interface for the ImageNet training/evaluation script.
parser.add_argument('data', metavar='DIR', help='path to dataset')
parser.add_argument('--arch', '-a', metavar='ARCH', default='fbresnet152',
                    choices=model_names,
                    help='model architecture: ' + ' | '.join(model_names) + ' (default: fbresnet152)')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
                    help='number of data loading workers (default: 4)')
parser.add_argument('--epochs', default=90, type=int, metavar='N',
                    help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
                    help='manual epoch number (useful on restarts)')
parser.add_argument('-b', '--batch-size', default=256, type=int, metavar='N',
                    help='mini-batch size (default: 256)')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float, metavar='LR',
                    help='initial learning rate')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
                    help='momentum')
parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float, metavar='W',
                    help='weight decay (default: 1e-4)')
parser.add_argument('--print-freq', '-p', default=10, type=int, metavar='N',
                    help='print frequency (default: 10)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
                    help='evaluate model on validation set')
parser.add_argument('--pretrained', default='imagenet',
                    help='use pre-trained model')

# Best top-1 precision seen so far; updated by main() (declared global there).
best_prec1 = 0


class ToSpaceBGR(object):
    """Transform: swap the R and B channels when the model expects BGR input."""

    def __init__(self, is_bgr):
        self.is_bgr = is_bgr

    def __call__(self, tensor):
        if self.is_bgr:
            # Clone first so the input tensor is left untouched.
            new_tensor = tensor.clone()
            new_tensor[0] = tensor[2]
            new_tensor[2] = tensor[0]
            tensor = new_tensor
        return tensor


class ToRange255(object):
    """Transform: rescale a [0, 1] tensor to [0, 255] in place when required."""

    def __init__(self, is_255):
        self.is_255 = is_255

    def __call__(self, tensor):
        if self.is_255:
            # NOTE: mul_ mutates the tensor in place.
            tensor.mul_(255)
        return tensor


def main():
    global args, best_prec1
    args = parser.parse_args()

    # create model
    print("=> creating model
'{}'".format(args.arch))
    # Build the network, optionally with pretrained weights.
    if args.pretrained.lower() not in ['false', 'none', 'not', 'no', '0']:
        print("=> using pre-trained parameters '{}'".format(args.pretrained))
        model = models.__dict__[args.arch](num_classes=1000,
                                           pretrained=args.pretrained)
    else:
        model = models.__dict__[args.arch]()

    # if args.arch.startswith('alexnet') or args.arch.startswith('vgg'):
    #     model.features = torch.nn.DataParallel(model.features)
    #     model.cuda()
    # else:

    # optionally resume from a checkpoint
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint['epoch']
            best_prec1 = checkpoint['best_prec1']
            model.load_state_dict(checkpoint['state_dict'])
            print("=> loaded checkpoint '{}' (epoch {})"
                  .format(args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    cudnn.benchmark = True

    # Data loading code
    traindir = os.path.join(args.data, 'train')
    valdir = os.path.join(args.data, 'val')
    # Normalization statistics come from the model's own metadata.
    normalize = transforms.Normalize(mean=model.mean, std=model.std)

    # NOTE(review): train_loader is never defined because this block is
    # commented out, yet train(train_loader, ...) is called below — running
    # without --evaluate raises NameError. Confirm intent before enabling.
    # train_loader = torch.utils.data.DataLoader(
    #     datasets.ImageFolder(traindir, transforms.Compose([
    #         transforms.RandomSizedCrop(max(model.input_size)),
    #         transforms.RandomHorizontalFlip(),
    #         transforms.ToTensor(),
    #         normalize,
    #     ])),
    #     batch_size=args.batch_size, shuffle=True,
    #     num_workers=args.workers, pin_memory=True)

    print('Images transformed from size {} to {}'.format(
        round(max(model.input_size)*1.050),
        model.input_size))

    # Resize slightly larger than the crop, then center-crop, then apply the
    # model-specific channel order / value range / normalization.
    # NOTE(review): transforms.Scale is the legacy name for transforms.Resize.
    val_loader = torch.utils.data.DataLoader(
        datasets.ImageFolder(valdir, transforms.Compose([
            transforms.Scale(round(max(model.input_size)*1.050)),
            transforms.CenterCrop(max(model.input_size)),
            transforms.ToTensor(),
            ToSpaceBGR(model.input_space=='BGR'),
            ToRange255(max(model.input_range)==255),
            normalize,
        ])),
        batch_size=args.batch_size, shuffle=False,
        num_workers=args.workers, pin_memory=True)

    # define loss function (criterion) and optimizer
    criterion = nn.CrossEntropyLoss().cuda()

    optimizer = torch.optim.SGD(model.parameters(), args.lr,
                                momentum=args.momentum,
                                weight_decay=args.weight_decay)

    model = torch.nn.DataParallel(model).cuda()

    if args.evaluate:
        validate(val_loader, model, criterion)
        return

    for epoch in range(args.start_epoch, args.epochs):
        adjust_learning_rate(optimizer, epoch)

        # train for one epoch
        train(train_loader, model, criterion, optimizer, epoch)

        # evaluate on validation set
        prec1 = validate(val_loader, model, criterion)

        # remember best prec@1 and save checkpoint
        is_best = prec1 > best_prec1
        best_prec1 = max(prec1, best_prec1)
        save_checkpoint({
            'epoch': epoch + 1,
            'arch': args.arch,
            'state_dict': model.state_dict(),
            'best_prec1': best_prec1,
        }, is_best)


def train(train_loader, model, criterion, optimizer, epoch):
    """Run one training epoch, logging timing/loss/accuracy every print_freq batches."""
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()

    # switch to train mode
    model.train()

    end = time.time()
    for i, (input, target) in enumerate(train_loader):
        # measure data loading time
        data_time.update(time.time() - end)

        # NOTE(review): `async` became a reserved word in Python 3.7; modern
        # PyTorch spells this `non_blocking=True` — this file targets the
        # PyTorch 0.x / Python 2 era.
        target = target.cuda(async=True)
        input_var = torch.autograd.Variable(input)
        target_var = torch.autograd.Variable(target)

        # compute output
        output = model(input_var)
        loss = criterion(output, target_var)

        # measure accuracy and record loss
        # NOTE(review): loss.data[0] is legacy indexing (loss.item() today).
        prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
        losses.update(loss.data[0], input.size(0))
        top1.update(prec1[0], input.size(0))
        top5.update(prec5[0], input.size(0))

        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        if i % args.print_freq == 0:
            print('Epoch: [{0}][{1}/{2}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                   epoch, i, len(train_loader),
batch_time=batch_time, data_time=data_time, loss=losses, top1=top1, top5=top5))


def validate(val_loader, model, criterion):
    """Evaluate on the validation loader; returns average top-1 precision."""
    batch_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()

    # switch to evaluate mode
    model.eval()

    end = time.time()
    for i, (input, target) in enumerate(val_loader):
        # NOTE(review): `async`/`volatile` are legacy PyTorch 0.x idioms
        # (today: non_blocking=True and torch.no_grad()).
        target = target.cuda(async=True)
        input_var = torch.autograd.Variable(input, volatile=True)
        target_var = torch.autograd.Variable(target, volatile=True)

        # compute output
        output = model(input_var)
        loss = criterion(output, target_var)

        # measure accuracy and record loss
        prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
        losses.update(loss.data[0], input.size(0))
        top1.update(prec1[0], input.size(0))
        top5.update(prec5[0], input.size(0))

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        if i % args.print_freq == 0:
            print('Test: [{0}/{1}]\t'
                  'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                  'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                  'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                  'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
                   i, len(val_loader), batch_time=batch_time, loss=losses,
                   top1=top1, top5=top5))

    print(' * Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'
          .format(top1=top1, top5=top5))

    return top1.avg


def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):
    """Persist *state*; additionally copy it to model_best.pth.tar when best."""
    torch.save(state, filename)
    if is_best:
        shutil.copyfile(filename, 'model_best.pth.tar')


class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        self.reset()

    def reset(self):
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        # n is the number of samples val was averaged over.
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count


def adjust_learning_rate(optimizer, epoch):
    """Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
    lr = args.lr * (0.1 ** (epoch // 30))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr


def accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k"""
    maxk = max(topk)
    batch_size = target.size(0)

    # pred: (maxk, batch) indices of the top-k predictions per sample.
    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        # A sample counts as correct@k if the target is anywhere in its top k.
        correct_k = correct[:k].view(-1).float().sum(0)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res


if __name__ == '__main__':
    main()

================================================ FILE: pretrained_models_pytorch/test/toy-example.py ================================================
import argparse
from PIL import Image

import torch
import torchvision.transforms as transforms

import sys
sys.path.append('../pretrained-models.pytorch')
import pretrainedmodels

model_names = sorted(name for name in pretrainedmodels.__dict__
                     if not name.startswith("__")
                     and callable(pretrainedmodels.__dict__[name]))

parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('--arch', '-a', metavar='ARCH', default='fbresnet152',
                    choices=model_names,
                    help='model architecture: ' + ' | '.join(model_names) + ' (default: fbresnet152)')
args = parser.parse_args()

# Load Model
model = pretrainedmodels.__dict__[args.arch](num_classes=1000, pretrained='imagenet')
model.eval()

# Load One Input Image
path_img = 'data/ILSVRC2012_val_00002147.JPEG'

with open(path_img, 'rb') as f:
    with Image.open(f) as img:
        input_data = img.convert(model.input_space)

# Preprocess with the model's own metadata: resize, crop, normalize.
# NOTE(review): transforms.Scale is the legacy name for transforms.Resize.
tf = transforms.Compose([
    transforms.Scale(round(max(model.input_size)*1.143)),
    transforms.CenterCrop(max(model.input_size)),
    transforms.ToTensor(),
    transforms.Normalize(mean=model.mean, std=model.std)
])

input_data = tf(input_data)           # 3x400x225 -> 3x299x299
input_data = input_data.unsqueeze(0)  # 3x299x299 -> 1x3x299x299
input = torch.autograd.Variable(input_data)
print(input)
# NOTE(review): this exit() makes everything below unreachable — debug
# leftover, presumably; confirm before relying on the prediction code.
exit()

# Load Imagenet Synsets
with open('data/imagenet_synsets.txt', 'r') as f:
    synsets = f.readlines()

# len(synsets)==1001
# sysnets[0] == background
synsets = [x.strip() for x in synsets]
splits =
[line.split(' ') for line in synsets]
# Map WordNet key (first token) to the human-readable class name.
key_to_classname = {spl[0]:' '.join(spl[1:]) for spl in splits}

with open('data/imagenet_classes.txt', 'r') as f:
    class_id_to_key = f.readlines()

class_id_to_key = [x.strip() for x in class_id_to_key]

# Make predictions
output = model(input)  # size(1, 1000)
# NOTE(review): `max` shadows the builtin here.
max, argmax = output.data.squeeze().max(0)
class_id = argmax[0]
class_key = class_id_to_key[class_id]
classname = key_to_classname[class_key]

print(path_img, 'is a', classname)

================================================ FILE: utils.py ================================================
import os
import sys
import time
import math

import numpy as np

import torch
import torch.nn as nn
import torch.nn.init as init
from torch.autograd import Variable

from scipy.ndimage.interpolation import rotate

# Terminal width, used to size the progress bar.
# NOTE(review): os.popen('stty size') fails when stdout is not a TTY
# (pipes, CI) — confirm before running headless.
_, term_width = os.popen('stty size', 'r').read().split()
term_width = int(term_width)

TOTAL_BAR_LENGTH = 35.
last_time = time.time()
begin_time = last_time


def progress_bar(current, total, msg=None):
    """Draw/update an in-place terminal progress bar for step `current` of `total`."""
    global last_time, begin_time
    if current == 0:
        begin_time = time.time()  # Reset for new bar.

    cur_len = int(TOTAL_BAR_LENGTH*current/total)
    rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1

    sys.stdout.write(' [')
    for i in range(cur_len):
        sys.stdout.write('=')
    sys.stdout.write('>')
    for i in range(rest_len):
        sys.stdout.write('.')
    sys.stdout.write(']')

    cur_time = time.time()
    step_time = cur_time - last_time
    last_time = cur_time
    tot_time = cur_time - begin_time

    L = []
    if msg:
        L.append(' ' + msg)
    L.append(' | Step: %s' % format_time(step_time))
    L.append(' | Tot: %s' % format_time(tot_time))

    msg = ''.join(L)
    sys.stdout.write(msg)
    # Pad with spaces to overwrite leftovers from a longer previous line.
    for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):
        sys.stdout.write(' ')

    # Go back to the center of the bar.
for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2): sys.stdout.write('\b') sys.stdout.write(' %d/%d ' % (current+1, total)) if current < total-1: sys.stdout.write('\r') else: sys.stdout.write('\n') sys.stdout.flush() def format_time(seconds): days = int(seconds / 3600/24) seconds = seconds - days*3600*24 hours = int(seconds / 3600) seconds = seconds - hours*3600 minutes = int(seconds / 60) seconds = seconds - minutes*60 secondsf = int(seconds) seconds = seconds - secondsf millis = int(seconds*1000) f = '' i = 1 if days > 0: f += str(days) + 'D' i += 1 if hours > 0 and i <= 2: f += str(hours) + 'h' i += 1 if minutes > 0 and i <= 2: f += str(minutes) + 'm' i += 1 if secondsf > 0 and i <= 2: f += str(secondsf) + 's' i += 1 if millis > 0 and i <= 2: f += str(millis) + 'ms' i += 1 if f == '': f = '0ms' return f def submatrix(arr): x, y = np.nonzero(arr) # Using the smallest and largest x and y indices of nonzero elements, # we can find the desired rectangular bounds. # And don't forget to add 1 to the top bound to avoid the fencepost problem. 
return arr[x.min():x.max()+1, y.min():y.max()+1] class ToSpaceBGR(object): def __init__(self, is_bgr): self.is_bgr = is_bgr def __call__(self, tensor): if self.is_bgr: new_tensor = tensor.clone() new_tensor[0] = tensor[2] new_tensor[2] = tensor[0] tensor = new_tensor return tensor class ToRange255(object): def __init__(self, is_255): self.is_255 = is_255 def __call__(self, tensor): if self.is_255: tensor.mul_(255) return tensor def init_patch_circle(image_size, patch_size): image_size = image_size**2 noise_size = int(image_size*patch_size) radius = int(math.sqrt(noise_size/math.pi)) patch = np.zeros((1, 3, radius*2, radius*2)) for i in range(3): a = np.zeros((radius*2, radius*2)) cx, cy = radius, radius # The center of circle y, x = np.ogrid[-radius: radius, -radius: radius] index = x**2 + y**2 <= radius**2 a[cy-radius:cy+radius, cx-radius:cx+radius][index] = np.random.rand() idx = np.flatnonzero((a == 0).all((1))) a = np.delete(a, idx, axis=0) patch[0][i] = np.delete(a, idx, axis=1) return patch, patch.shape def circle_transform(patch, data_shape, patch_shape, image_size): # get dummy image x = np.zeros(data_shape) # get shape m_size = patch_shape[-1] for i in range(x.shape[0]): # random rotation rot = np.random.choice(360) for j in range(patch[i].shape[0]): patch[i][j] = rotate(patch[i][j], angle=rot, reshape=False) # random location random_x = np.random.choice(image_size) if random_x + m_size > x.shape[-1]: while random_x + m_size > x.shape[-1]: random_x = np.random.choice(image_size) random_y = np.random.choice(image_size) if random_y + m_size > x.shape[-1]: while random_y + m_size > x.shape[-1]: random_y = np.random.choice(image_size) # apply patch to dummy image x[i][0][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][0] x[i][1][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][1] x[i][2][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][2] mask = np.copy(x) 
# tail of circle_transform(): binarise the overlay into a 0/1 mask and return.
    mask[mask != 0] = 1.0
    return x, mask, patch.shape


def init_patch_square(image_size, patch_size):
    """Create a square patch of uniform random noise covering roughly
    `patch_size` of the image area; returns (patch, patch.shape)."""
    # get mask
    image_size = image_size**2
    noise_size = image_size*patch_size
    noise_dim = int(noise_size**(0.5))
    patch = np.random.rand(1,3,noise_dim,noise_dim)
    return patch, patch.shape


def square_transform(patch, data_shape, patch_shape, image_size):
    """Rotate `patch` by a random multiple of 90 degrees (in place) and paste
    it at a random in-bounds location of a zero image of `data_shape`."""
    # get dummy image
    x = np.zeros(data_shape)

    # get shape
    m_size = patch_shape[-1]

    for i in range(x.shape[0]):
        # random rotation (multiples of 90 degrees only, hence rot90)
        rot = np.random.choice(4)
        for j in range(patch[i].shape[0]):
            patch[i][j] = np.rot90(patch[i][j], rot)

        # random location; resample until the patch fits inside the image
        random_x = np.random.choice(image_size)
        if random_x + m_size > x.shape[-1]:
            while random_x + m_size > x.shape[-1]:
                random_x = np.random.choice(image_size)
        random_y = np.random.choice(image_size)
        if random_y + m_size > x.shape[-1]:
            while random_y + m_size > x.shape[-1]:
                random_y = np.random.choice(image_size)

        # apply patch to dummy image
        x[i][0][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][0]
        x[i][1][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][1]
        x[i][2][random_x:random_x+patch_shape[-1], random_y:random_y+patch_shape[-1]] = patch[i][2]

    mask = np.copy(x)
    mask[mask != 0] = 1.0

    # NOTE(review): circle_transform returns (x, mask, patch.shape); this
    # two-value return may be a truncation of the dump — confirm against
    # the upstream file before relying on it.
    return x, mask