Full Code of daiquocnguyen/ConvKB for AI

master 0befa12fad55 cached
107 files
151.3 MB
8.0M tokens
144 symbols
1 request
Copy disabled (too large) Download .txt
Showing preview only (31,832K chars total). Download the full file to get everything.
Repository: daiquocnguyen/ConvKB
Branch: master
Commit: 0befa12fad55
Files: 107
Total size: 151.3 MB

Directory structure:
gitextract_2ysrl9k6/

├── ConvKB_pytorch/
│   ├── Config.py
│   ├── ConvKB.py
│   ├── ConvKB_1D.py
│   ├── Model.py
│   ├── base/
│   │   ├── Base.cpp
│   │   ├── Corrupt.h
│   │   ├── Random.h
│   │   ├── Reader.h
│   │   ├── Setting.h
│   │   ├── Test.h
│   │   ├── Triple.h
│   │   └── Valid.h
│   ├── benchmarks/
│   │   ├── FB15K/
│   │   │   ├── 1-1.txt
│   │   │   ├── 1-n.txt
│   │   │   ├── entity2id.txt
│   │   │   ├── n-1.txt
│   │   │   ├── n-n.py
│   │   │   ├── n-n.txt
│   │   │   ├── relation2id.txt
│   │   │   ├── test2id.txt
│   │   │   ├── test2id_all.txt
│   │   │   ├── train2id.txt
│   │   │   ├── type_constrain.txt
│   │   │   └── valid2id.txt
│   │   ├── FB15K237/
│   │   │   ├── 1-1.txt
│   │   │   ├── 1-n.txt
│   │   │   ├── entity2id.txt
│   │   │   ├── entity2id_100init.txt
│   │   │   ├── entity2vec100.init
│   │   │   ├── n-1.txt
│   │   │   ├── n-n.py
│   │   │   ├── n-n.txt
│   │   │   ├── relation2id.txt
│   │   │   ├── relation2id_100init.txt
│   │   │   ├── relation2vec100.init
│   │   │   ├── test2id.txt
│   │   │   ├── test2id_all.txt
│   │   │   ├── train2id.txt
│   │   │   ├── type_constrain.txt
│   │   │   └── valid2id.txt
│   │   ├── WN18/
│   │   │   ├── 1-1.txt
│   │   │   ├── 1-n.txt
│   │   │   ├── entity2id.txt
│   │   │   ├── n-1.txt
│   │   │   ├── n-n.py
│   │   │   ├── n-n.txt
│   │   │   ├── relation2id.txt
│   │   │   ├── test2id.txt
│   │   │   ├── test2id_all.txt
│   │   │   ├── train2id.txt
│   │   │   ├── type_constrain.txt
│   │   │   └── valid2id.txt
│   │   └── WN18RR/
│   │       ├── 1-1.txt
│   │       ├── 1-n.txt
│   │       ├── _also_see.txt
│   │       ├── _derivationally_related_form.txt
│   │       ├── _has_part.txt
│   │       ├── _hypernym.txt
│   │       ├── _instance_hypernym.txt
│   │       ├── _member_meronym.txt
│   │       ├── _member_of_domain_region.txt
│   │       ├── _member_of_domain_usage.txt
│   │       ├── _similar_to.txt
│   │       ├── _synset_domain_topic_of.txt
│   │       ├── _verb_group.txt
│   │       ├── entity2id.txt
│   │       ├── entity2id_50init.txt
│   │       ├── entity2vec50.init
│   │       ├── n-1.txt
│   │       ├── n-n.py
│   │       ├── n-n.txt
│   │       ├── relation2id.txt
│   │       ├── relation2id_50init.txt
│   │       ├── relation2vec50.init
│   │       ├── test2id.txt
│   │       ├── test2id_all.txt
│   │       ├── train2id.txt
│   │       ├── type_constrain.txt
│   │       └── valid2id.txt
│   ├── make.sh
│   └── train_ConvKB.py
├── ConvKB_tf/
│   ├── batching.py
│   ├── builddata.py
│   ├── data/
│   │   ├── FB15k-237/
│   │   │   ├── entity2id.txt
│   │   │   ├── entity2vec100.init
│   │   │   ├── relation2id.txt
│   │   │   ├── relation2vec100.init
│   │   │   ├── test.txt
│   │   │   ├── train.txt
│   │   │   └── valid.txt
│   │   └── WN18RR/
│   │       ├── entity2id.txt
│   │       ├── entity2vec100.init
│   │       ├── entity2vec50.init
│   │       ├── relation2id.txt
│   │       ├── relation2vec100.init
│   │       ├── relation2vec50.init
│   │       ├── test.txt
│   │       ├── train.txt
│   │       └── valid.txt
│   ├── eval.py
│   ├── evalFB15k-237.sh
│   ├── evalWN18RR.sh
│   ├── model.py
│   ├── train.py
│   └── training_commands.txt
├── LICENSE
└── README.md

================================================
FILE CONTENTS
================================================

================================================
FILE: ConvKB_pytorch/Config.py
================================================
# coding:utf-8
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import os
import time
import sys
import datetime
import ctypes
import json
import numpy as np
from tqdm import tqdm

# Pick the compute device once at import time; every model/tensor move
# below routes through this single `device` object.
use_gpu = torch.cuda.is_available()
device = torch.device("cuda" if use_gpu else "cpu")

class MyDataParallel(nn.DataParallel):
    """DataParallel wrapper that forwards attribute access to the wrapped module.

    Fixes two defects in the original:
    * the hook was misspelled ``_getattr__`` (single leading underscore), so
      Python never invoked it and wrapped-module attributes were unreachable;
    * delegating with ``getattr(self.module, name)`` alone recurses forever,
      because ``module`` itself is resolved through ``nn.Module.__getattr__``.
    """

    def __getattr__(self, name):
        try:
            # nn.Module.__getattr__ resolves parameters, buffers and
            # submodules — including the wrapped ``module`` itself.
            return super(MyDataParallel, self).__getattr__(name)
        except AttributeError:
            # Anything the wrapper doesn't own is looked up on the
            # wrapped module (e.g. batch_h/batch_t set by Config).
            return getattr(self.module, name)
            
def to_var(x):
    """Wrap a NumPy array as an autograd Variable placed on the active device."""
    tensor = torch.from_numpy(x).to(device)
    return Variable(tensor)


class Config(object):
    """Driver object for ConvKB training and evaluation.

    Wraps the native helper library (release/Base.so) through ctypes for
    negative sampling and ranking-based evaluation, stores every
    hyper-parameter (overridable through the ``set_*`` methods), and
    implements the training loops for link prediction and triple
    classification.
    """

    def __init__(self):
        # Load the compiled C++ helper (built by make.sh) that performs
        # corruption-based sampling and evaluation.
        base_file = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "release/Base.so")
        )
        self.lib = ctypes.cdll.LoadLibrary(base_file)

        # --- declare C function signatures (argtypes) ---
        # sampling
        self.lib.sampling.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_int64,
            ctypes.c_int64,
            ctypes.c_int64,
        ]
        # validation (link prediction on the valid split)
        self.lib.getValidHeadBatch.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        self.lib.getValidTailBatch.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        self.lib.validHead.argtypes = [ctypes.c_void_p]
        self.lib.validTail.argtypes = [ctypes.c_void_p]
        # test link prediction
        self.lib.getHeadBatch.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        self.lib.getTailBatch.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        self.lib.testHead.argtypes = [ctypes.c_void_p]
        self.lib.testTail.argtypes = [ctypes.c_void_p]
        # test triple classification
        self.lib.getValidBatch.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        self.lib.getTestBatch.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        self.lib.getBestThreshold.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        self.lib.test_triple_classification.argtypes = [
            ctypes.c_void_p,
            ctypes.c_void_p,
            ctypes.c_void_p,
        ]
        # --- declare C return types (restype) ---
        self.lib.getValidHit10.restype = ctypes.c_float
        # for triple classification
        self.lib.test_triple_classification.restype = ctypes.c_float

        # --- default hyper-parameters ---
        self.in_path = "./"
        self.batch_size = 100
        self.bern = 0  # negative-sampling mode flag passed to the C library
        self.work_threads = 8
        self.hidden_size = 100
        self.negative_ent = 1  # corrupted-entity samples per positive triple
        self.negative_rel = 0  # corrupted-relation samples per positive triple
        self.ent_size = self.hidden_size
        self.rel_size = self.hidden_size
        self.margin = 1.0
        self.valid_steps = 5  # validate every N epochs
        self.save_steps = 5   # checkpoint every N epochs
        self.opt_method = "SGD"
        self.optimizer = None
        self.lr_decay = 0
        self.weight_decay = 0
        self.lmbda = 0.0
        self.lmbda_two = 0.0
        # BUG FIX: this attribute was misspelled "alpah"; the optimizer
        # constructors read self.alpha, so set_train_model() raised
        # AttributeError unless set_alpha() had been called explicitly.
        self.alpha = 0.001
        self.early_stopping_patience = 10
        self.nbatches = 100
        self.p_norm = 1
        self.test_link = True
        self.test_triple = False
        self.model = None
        self.trainModel = None
        self.testModel = None
        self.pretrain_model = None
        self.ent_dropout = 0
        self.rel_dropout = 0
        self.use_init_embeddings = False
        self.test_file_path = None

    def init(self):
        """Load the datasets through the C library and allocate the numpy
        buffers whose raw addresses are handed to the native code.

        Must be called after set_in_path()/set_test_file_path() and before
        training or testing.
        """
        # The buffers are over-allocated (len * 2) — presumably headroom
        # for the C side; kept as-is. TODO confirm against Base.cpp.
        self.lib.setInPath(
            ctypes.create_string_buffer(self.in_path.encode(), len(self.in_path) * 2)
        )

        self.lib.setTestFilePath(
            ctypes.create_string_buffer(self.test_file_path.encode(), len(self.test_file_path) * 2)
        )

        self.lib.setBern(self.bern)
        self.lib.setWorkThreads(self.work_threads)
        self.lib.randReset()
        self.lib.importTrainFiles()
        self.lib.importTestFiles()
        self.lib.importTypeFiles()
        self.relTotal = self.lib.getRelationTotal()
        self.entTotal = self.lib.getEntityTotal()
        self.trainTotal = self.lib.getTrainTotal()
        self.testTotal = self.lib.getTestTotal()
        self.validTotal = self.lib.getValidTotal()

        # Each training batch holds one positive triple plus its negatives.
        self.batch_size = int(self.trainTotal / self.nbatches)
        self.batch_seq_size = self.batch_size * (
            1 + self.negative_ent + self.negative_rel
        )
        self.batch_h = np.zeros(self.batch_seq_size, dtype=np.int64)
        self.batch_t = np.zeros(self.batch_seq_size, dtype=np.int64)
        self.batch_r = np.zeros(self.batch_seq_size, dtype=np.int64)
        self.batch_y = np.zeros(self.batch_seq_size, dtype=np.float32)
        # Raw data pointers: the C library fills these arrays in place.
        self.batch_h_addr = self.batch_h.__array_interface__["data"][0]
        self.batch_t_addr = self.batch_t.__array_interface__["data"][0]
        self.batch_r_addr = self.batch_r.__array_interface__["data"][0]
        self.batch_y_addr = self.batch_y.__array_interface__["data"][0]

        # Ranking buffers: one candidate triple per entity.
        self.valid_h = np.zeros(self.entTotal, dtype=np.int64)
        self.valid_t = np.zeros(self.entTotal, dtype=np.int64)
        self.valid_r = np.zeros(self.entTotal, dtype=np.int64)
        self.valid_h_addr = self.valid_h.__array_interface__["data"][0]
        self.valid_t_addr = self.valid_t.__array_interface__["data"][0]
        self.valid_r_addr = self.valid_r.__array_interface__["data"][0]

        self.test_h = np.zeros(self.entTotal, dtype=np.int64)
        self.test_t = np.zeros(self.entTotal, dtype=np.int64)
        self.test_r = np.zeros(self.entTotal, dtype=np.int64)
        self.test_h_addr = self.test_h.__array_interface__["data"][0]
        self.test_t_addr = self.test_t.__array_interface__["data"][0]
        self.test_r_addr = self.test_r.__array_interface__["data"][0]

        # Triple-classification buffers: paired positive/negative triples.
        self.valid_pos_h = np.zeros(self.validTotal, dtype=np.int64)
        self.valid_pos_t = np.zeros(self.validTotal, dtype=np.int64)
        self.valid_pos_r = np.zeros(self.validTotal, dtype=np.int64)
        self.valid_pos_h_addr = self.valid_pos_h.__array_interface__["data"][0]
        self.valid_pos_t_addr = self.valid_pos_t.__array_interface__["data"][0]
        self.valid_pos_r_addr = self.valid_pos_r.__array_interface__["data"][0]
        self.valid_neg_h = np.zeros(self.validTotal, dtype=np.int64)
        self.valid_neg_t = np.zeros(self.validTotal, dtype=np.int64)
        self.valid_neg_r = np.zeros(self.validTotal, dtype=np.int64)
        self.valid_neg_h_addr = self.valid_neg_h.__array_interface__["data"][0]
        self.valid_neg_t_addr = self.valid_neg_t.__array_interface__["data"][0]
        self.valid_neg_r_addr = self.valid_neg_r.__array_interface__["data"][0]

        self.test_pos_h = np.zeros(self.testTotal, dtype=np.int64)
        self.test_pos_t = np.zeros(self.testTotal, dtype=np.int64)
        self.test_pos_r = np.zeros(self.testTotal, dtype=np.int64)
        self.test_pos_h_addr = self.test_pos_h.__array_interface__["data"][0]
        self.test_pos_t_addr = self.test_pos_t.__array_interface__["data"][0]
        self.test_pos_r_addr = self.test_pos_r.__array_interface__["data"][0]
        self.test_neg_h = np.zeros(self.testTotal, dtype=np.int64)
        self.test_neg_t = np.zeros(self.testTotal, dtype=np.int64)
        self.test_neg_r = np.zeros(self.testTotal, dtype=np.int64)
        self.test_neg_h_addr = self.test_neg_h.__array_interface__["data"][0]
        self.test_neg_t_addr = self.test_neg_t.__array_interface__["data"][0]
        self.test_neg_r_addr = self.test_neg_r.__array_interface__["data"][0]
        # Per-relation decision thresholds for triple classification.
        self.relThresh = np.zeros(self.relTotal, dtype=np.float32)
        self.relThresh_addr = self.relThresh.__array_interface__["data"][0]

    # --- simple hyper-parameter setters -----------------------------------

    def set_test_link(self, test_link):
        self.test_link = test_link

    def set_test_triple(self, test_triple):
        self.test_triple = test_triple

    def set_margin(self, margin):
        self.margin = margin

    def set_in_path(self, in_path):
        self.in_path = in_path

    def set_test_file_path(self, test_file_path):
        self.test_file_path = test_file_path

    def set_nbatches(self, nbatches):
        self.nbatches = nbatches

    def set_p_norm(self, p_norm):
        self.p_norm = p_norm

    def set_valid_steps(self, valid_steps):
        self.valid_steps = valid_steps

    def set_save_steps(self, save_steps):
        self.save_steps = save_steps

    def set_checkpoint_dir(self, checkpoint_dir):
        self.checkpoint_dir = checkpoint_dir

    def set_result_dir(self, result_dir):
        self.result_dir = result_dir

    def set_alpha(self, alpha):
        """Set the learning rate used when building the optimizer."""
        self.alpha = alpha

    def set_lmbda(self, lmbda):
        self.lmbda = lmbda

    def set_lmbda_two(self, lmbda_two):
        self.lmbda_two = lmbda_two

    def set_lr_decay(self, lr_decay):
        self.lr_decay = lr_decay

    def set_weight_decay(self, weight_decay):
        self.weight_decay = weight_decay

    def set_opt_method(self, opt_method):
        self.opt_method = opt_method

    def set_bern(self, bern):
        self.bern = bern

    def set_init_embeddings(self, entity_embs, rel_embs):
        """Provide pre-trained embedding matrices (numpy arrays); models
        read these instead of random initialization."""
        self.use_init_embeddings = True
        self.init_ent_embs = torch.from_numpy(entity_embs).to(device)
        self.init_rel_embs = torch.from_numpy(rel_embs).to(device)

    def set_config_CNN(self, num_of_filters, drop_prob, kernel_size=1):
        """Set the convolution hyper-parameters used by the ConvKB models."""
        self.out_channels = num_of_filters
        self.convkb_drop_prob = drop_prob
        self.kernel_size = kernel_size

    def set_dimension(self, dim):
        """Set one embedding dimension for both entities and relations."""
        self.hidden_size = dim
        self.ent_size = dim
        self.rel_size = dim

    def set_ent_dimension(self, dim):
        self.ent_size = dim

    def set_rel_dimension(self, dim):
        self.rel_size = dim

    def set_train_times(self, train_times):
        self.train_times = train_times

    def set_work_threads(self, work_threads):
        self.work_threads = work_threads

    def set_ent_neg_rate(self, rate):
        self.negative_ent = rate

    def set_rel_neg_rate(self, rate):
        self.negative_rel = rate

    def set_ent_dropout(self, ent_dropout):
        self.ent_dropout = ent_dropout

    def set_rel_dropout(self, rel_dropout):
        self.rel_dropout = rel_dropout

    def set_early_stopping_patience(self, early_stopping_patience):
        self.early_stopping_patience = early_stopping_patience

    def set_pretrain_model(self, pretrain_model):
        self.pretrain_model = pretrain_model

    # --- persistence helpers ----------------------------------------------

    def get_parameters(self, param_dict, mode="numpy"):
        """Convert a state-dict of tensors to CPU values.

        mode: "numpy" -> ndarrays, "list" -> nested lists (JSON-safe),
        anything else -> CPU tensors.
        """
        for param in param_dict:
            param_dict[param] = param_dict[param].cpu()
        res = {}
        for param in param_dict:
            if mode == "numpy":
                res[param] = param_dict[param].numpy()
            elif mode == "list":
                res[param] = param_dict[param].numpy().tolist()
            else:
                res[param] = param_dict[param]
        return res

    def save_embedding_matrix(self, best_model):
        """Serialize the best model's parameters as JSON into result_dir."""
        path = os.path.join(self.result_dir, self.model.__name__ + ".json")
        # `with` guarantees the file handle is closed even on error
        # (the original leaked it if json serialization raised).
        with open(path, "w") as f:
            json.dump(self.get_parameters(best_model, "list"), f)

    # --- model construction -----------------------------------------------

    def set_train_model(self, model):
        """Instantiate the model class for training and build its optimizer."""
        print("Initializing training model...")
        self.model = model
        self.trainModel = self.model(config=self)
        #self.trainModel = nn.DataParallel(self.trainModel, device_ids=[2,3,4])

        self.trainModel.to(device)
        if self.optimizer is not None:
            pass  # an optimizer was injected externally; keep it
        elif self.opt_method.lower() == "adagrad":
            self.optimizer = optim.Adagrad(
                self.trainModel.parameters(),
                lr=self.alpha,
                lr_decay=self.lr_decay,
                weight_decay=self.weight_decay,
            )
        elif self.opt_method.lower() == "adadelta":
            self.optimizer = optim.Adadelta(
                self.trainModel.parameters(),
                lr=self.alpha,
                weight_decay=self.weight_decay,
            )
        elif self.opt_method.lower() == "adam":
            self.optimizer = optim.Adam(
                self.trainModel.parameters(),
                lr=self.alpha,
                weight_decay=self.weight_decay,
            )
        else:
            # default: plain SGD
            self.optimizer = optim.SGD(
                self.trainModel.parameters(),
                lr=self.alpha,
                weight_decay=self.weight_decay,
            )
        print("Finish initializing")

    def set_test_model(self, model, path=None):
        """Instantiate the model class for evaluation and load a checkpoint.

        path: checkpoint file; defaults to result_dir/<ModelName>.ckpt.
        """
        print("Initializing test model...")
        self.model = model
        self.testModel = self.model(config=self)
        if path is None:
            path = os.path.join(self.result_dir, self.model.__name__ + ".ckpt")
        self.testModel.load_state_dict(torch.load(path))
        self.testModel.to(device)
        self.testModel.eval()
        print("Finish initializing")

    # --- training ---------------------------------------------------------

    def sampling(self):
        """Ask the C library to fill batch_h/t/r/y with one training batch
        (positives followed by corrupted negatives)."""
        self.lib.sampling(
            self.batch_h_addr,
            self.batch_t_addr,
            self.batch_r_addr,
            self.batch_y_addr,
            self.batch_size,
            self.negative_ent,
            self.negative_rel,
        )

    def save_checkpoint(self, model, epoch):
        """Save a per-epoch checkpoint (state dict) into checkpoint_dir."""
        path = os.path.join(
            self.checkpoint_dir, self.model.__name__ + "-" + str(epoch) + ".ckpt"
        )
        torch.save(model, path)

    def save_best_checkpoint(self, best_model):
        """Save the best state dict into result_dir under the model's name."""
        path = os.path.join(self.result_dir, self.model.__name__ + ".ckpt")
        torch.save(best_model, path)

    def train_one_step(self):
        """Run forward/backward/update on the current sampled batch and
        return the scalar loss."""
        self.trainModel.train()
        self.trainModel.batch_h = to_var(self.batch_h)
        self.trainModel.batch_t = to_var(self.batch_t)
        self.trainModel.batch_r = to_var(self.batch_r)
        self.trainModel.batch_y = to_var(self.batch_y)

        self.optimizer.zero_grad()
        loss = self.trainModel()
        loss.backward()
        # Gradient clipping stabilizes training.
        torch.nn.utils.clip_grad_norm_(self.trainModel.parameters(), 0.5)
        self.optimizer.step()

        return loss.item()

    def test_one_step(self, model, test_h, test_t, test_r):
        """Score a batch of candidate triples in eval mode; returns a
        numpy array of scores (via model.predict())."""
        model.eval()
        with torch.no_grad():
            model.batch_h = to_var(test_h)
            model.batch_t = to_var(test_t)
            model.batch_r = to_var(test_r)
        return model.predict()

    def valid(self, model):
        """Rank every valid triple against all head/tail corruptions and
        return hit@10 as computed by the C library."""
        self.lib.validInit()
        for i in range(self.validTotal):
            sys.stdout.write("%d\r" % (i))
            sys.stdout.flush()
            self.lib.getValidHeadBatch(
                self.valid_h_addr, self.valid_t_addr, self.valid_r_addr
            )
            res = self.test_one_step(model, self.valid_h, self.valid_t, self.valid_r)

            self.lib.validHead(res.__array_interface__["data"][0])

            self.lib.getValidTailBatch(
                self.valid_h_addr, self.valid_t_addr, self.valid_r_addr
            )
            res = self.test_one_step(model, self.valid_h, self.valid_t, self.valid_r)
            self.lib.validTail(res.__array_interface__["data"][0])
        return self.lib.getValidHit10()

    def training_model(self):
        """Full training loop for link prediction with periodic checkpoints,
        hit@10-based model selection and early stopping; finishes by testing
        the best model. Returns the best state dict."""
        if not os.path.exists(self.checkpoint_dir):
            os.mkdir(self.checkpoint_dir)
        best_epoch = 0
        best_hit10 = 0.0
        best_model = None
        bad_counts = 0
        training_range = tqdm(range(self.train_times))
        for epoch in training_range:
            res = 0.0
            for batch in range(self.nbatches):
                self.sampling()
                loss = self.train_one_step()
                res += loss
            training_range.set_description("Epoch %d | loss: %f" % (epoch, res))
            if (epoch + 1) % self.save_steps == 0:
                training_range.set_description("Epoch %d has finished, saving..." % (epoch))
                self.save_checkpoint(self.trainModel.state_dict(), epoch)
            if (epoch + 1) % self.valid_steps == 0:
                training_range.set_description("Epoch %d has finished | loss: %f, validating..." % (epoch, res))
                hit10 = self.valid(self.trainModel)
                if hit10 > best_hit10:
                    best_hit10 = hit10
                    best_epoch = epoch
                    best_model = self.trainModel.state_dict()
                    print("Best model | hit@10 of valid set is %f" % (best_hit10))
                    bad_counts = 0
                else:
                    print("Hit@10 of valid set is %f | bad count is %d" % (hit10, bad_counts))
                    bad_counts += 1
                if bad_counts == self.early_stopping_patience:
                    print("Early stopping at epoch %d" % (epoch))
                    break
        if best_model is None:
            # Training never hit a validation step; fall back to final state.
            best_model = self.trainModel.state_dict()
            best_epoch = self.train_times - 1
            best_hit10 = self.valid(self.trainModel)
        print("Best epoch is %d | hit@10 of valid set is %f" % (best_epoch, best_hit10))
        print("Store checkpoint of best result at epoch %d..." % (best_epoch))
        if not os.path.isdir(self.result_dir):
            os.mkdir(self.result_dir)
        self.save_best_checkpoint(best_model)
        self.save_embedding_matrix(best_model)
        print("Finish storing")
        print("Testing...")
        self.set_test_model(self.model)
        self.test()
        print("Finish test")
        return best_model

    def valid_triple_classification(self, model):
        """Tune per-relation thresholds on the valid split and return
        classification accuracy from the C library."""
        self.lib.getValidBatch(
            self.valid_pos_h_addr,
            self.valid_pos_t_addr,
            self.valid_pos_r_addr,
            self.valid_neg_h_addr,
            self.valid_neg_t_addr,
            self.valid_neg_r_addr,
        )
        res_pos = self.test_one_step(
            model, self.valid_pos_h, self.valid_pos_t, self.valid_pos_r
        )
        res_neg = self.test_one_step(
            model, self.valid_neg_h, self.valid_neg_t, self.valid_neg_r
        )
        self.lib.getBestThreshold(
            self.relThresh_addr,
            res_pos.__array_interface__["data"][0],
            res_neg.__array_interface__["data"][0],
        )

        return self.lib.test_triple_classification(
            self.relThresh_addr,
            res_pos.__array_interface__["data"][0],
            res_neg.__array_interface__["data"][0],
        )

    def training_triple_classification(self):
        """Full training loop for triple classification (accuracy-based model
        selection), mirroring training_model(). Returns the best state dict."""
        if not os.path.exists(self.checkpoint_dir):
            os.mkdir(self.checkpoint_dir)
        best_epoch = 0
        best_acc = 0.0
        best_model = None
        bad_counts = 0
        training_range = tqdm(range(self.train_times))
        for epoch in training_range:
            res = 0.0
            for batch in range(self.nbatches):
                self.sampling()
                loss = self.train_one_step()
                res += loss
            training_range.set_description("Epoch %d | loss: %f" % (epoch, res))
            if (epoch + 1) % self.save_steps == 0:
                training_range.set_description("Epoch %d has finished, saving..." % (epoch))
                self.save_checkpoint(self.trainModel.state_dict(), epoch)
            if (epoch + 1) % self.valid_steps == 0:
                training_range.set_description("Epoch %d has finished | loss: %f, validating..." % (epoch, res))
                acc = self.valid_triple_classification(self.trainModel)
                if acc > best_acc:
                    best_acc = acc
                    best_epoch = epoch
                    best_model = self.trainModel.state_dict()
                    print("Best model | Acc of valid set is %f" % (best_acc))
                    bad_counts = 0
                else:
                    print("Acc of valid set is %f | bad count is %d" % (acc, bad_counts))
                    bad_counts += 1
                if bad_counts == self.early_stopping_patience:
                    print("Early stopping at epoch %d" % (epoch))
                    break
        if best_model is None:
            best_model = self.trainModel.state_dict()
            best_epoch = self.train_times - 1
            best_acc = self.valid_triple_classification(self.trainModel)
        print("Best epoch is %d | Acc of valid set is %f" % (best_epoch, best_acc))
        print("Store checkpoint of best result at epoch %d..." % (best_epoch))
        if not os.path.isdir(self.result_dir):
            os.mkdir(self.result_dir)
        self.save_best_checkpoint(best_model)
        self.save_embedding_matrix(best_model)
        print("Finish storing")
        print("Testing...")
        self.set_test_model(self.model)
        self.test()
        print("Finish test")
        return best_model

    def link_prediction(self):
        """Evaluate the test model on the test split via head/tail ranking;
        the C library prints the final metrics."""
        print("The total of test triple is %d" % (self.testTotal))
        for i in range(self.testTotal):
            sys.stdout.write("%d\r" % (i))
            sys.stdout.flush()
            self.lib.getHeadBatch(self.test_h_addr, self.test_t_addr, self.test_r_addr)
            res = self.test_one_step(
                self.testModel, self.test_h, self.test_t, self.test_r
            )
            self.lib.testHead(res.__array_interface__["data"][0])

            self.lib.getTailBatch(self.test_h_addr, self.test_t_addr, self.test_r_addr)
            res = self.test_one_step(
                self.testModel, self.test_h, self.test_t, self.test_r
            )
            self.lib.testTail(res.__array_interface__["data"][0])
        self.lib.test_link_prediction()

    def triple_classification(self):
        """Tune thresholds on the valid split, then classify the test split;
        the C library reports the result."""
        self.lib.getValidBatch(
            self.valid_pos_h_addr,
            self.valid_pos_t_addr,
            self.valid_pos_r_addr,
            self.valid_neg_h_addr,
            self.valid_neg_t_addr,
            self.valid_neg_r_addr,
        )
        res_pos = self.test_one_step(
            self.testModel, self.valid_pos_h, self.valid_pos_t, self.valid_pos_r
        )
        res_neg = self.test_one_step(
            self.testModel, self.valid_neg_h, self.valid_neg_t, self.valid_neg_r
        )
        self.lib.getBestThreshold(
            self.relThresh_addr,
            res_pos.__array_interface__["data"][0],
            res_neg.__array_interface__["data"][0],
        )

        self.lib.getTestBatch(
            self.test_pos_h_addr,
            self.test_pos_t_addr,
            self.test_pos_r_addr,
            self.test_neg_h_addr,
            self.test_neg_t_addr,
            self.test_neg_r_addr,
        )
        res_pos = self.test_one_step(
            self.testModel, self.test_pos_h, self.test_pos_t, self.test_pos_r
        )
        res_neg = self.test_one_step(
            self.testModel, self.test_neg_h, self.test_neg_t, self.test_neg_r
        )
        self.lib.test_triple_classification(
            self.relThresh_addr,
            res_pos.__array_interface__["data"][0],
            res_neg.__array_interface__["data"][0],
        )

    def test(self):
        """Run whichever evaluations are enabled by the test_* flags."""
        if self.test_link:
            self.link_prediction()
        if self.test_triple:
            self.triple_classification()

================================================
FILE: ConvKB_pytorch/ConvKB.py
================================================
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
import numpy as np
from Model import Model
from numpy.random import RandomState

# Fix all RNG seeds so model initialization is reproducible across runs.
SEED = 123
torch.manual_seed(SEED)
if torch.cuda.is_available():
    torch.cuda.manual_seed_all(SEED)

class ConvKB(Model):
    """ConvKB scoring model (2D-convolution formulation).

    Each triple (h, r, t) is laid out as a hidden_size x 3 matrix, passed
    through a batch-normalized Conv2d with out_channels filters of shape
    (kernel_size, 3), and mapped to a scalar score by a bias-free linear
    layer. Training minimizes a softplus loss over signed scores plus an
    L2 regularizer.
    """

    def __init__(self, config):
        super(ConvKB, self).__init__(config)
        cfg = self.config

        # NOTE: layer creation order is kept identical to preserve the
        # seeded random-initialization sequence.
        self.ent_embeddings = nn.Embedding(cfg.entTotal, cfg.hidden_size)
        self.rel_embeddings = nn.Embedding(cfg.relTotal, cfg.hidden_size)

        self.conv1_bn = nn.BatchNorm2d(1)
        self.conv_layer = nn.Conv2d(1, cfg.out_channels, (cfg.kernel_size, 3))  # kernel size x 3
        self.conv2_bn = nn.BatchNorm2d(cfg.out_channels)
        self.dropout = nn.Dropout(cfg.convkb_drop_prob)
        self.non_linearity = nn.ReLU() # you should also tune with torch.tanh() or torch.nn.Tanh()
        flat_features = (cfg.hidden_size - cfg.kernel_size + 1) * cfg.out_channels
        self.fc_layer = nn.Linear(flat_features, 1, bias=False)

        self.criterion = nn.Softplus()
        self.init_parameters()

    def init_parameters(self):
        """Xavier-initialize weights; embeddings may instead come from
        pre-trained tensors supplied via the config."""
        if not self.config.use_init_embeddings:
            nn.init.xavier_uniform_(self.ent_embeddings.weight.data)
            nn.init.xavier_uniform_(self.rel_embeddings.weight.data)
        else:
            self.ent_embeddings.weight.data = self.config.init_ent_embs
            self.rel_embeddings.weight.data = self.config.init_rel_embs

        nn.init.xavier_uniform_(self.fc_layer.weight.data)
        nn.init.xavier_uniform_(self.conv_layer.weight.data)

    def _calc(self, h, r, t):
        """Return negated scores for a batch of (h, r, t) embedding triples."""
        # Stack h, r, t as the three columns of a dim x 3 matrix per triple,
        # then add a singleton channel dimension for Conv2d.
        stacked = torch.cat([h.unsqueeze(1), r.unsqueeze(1), t.unsqueeze(1)], 1)  # bs x 3 x dim
        stacked = stacked.transpose(1, 2).unsqueeze(1)  # bs x 1 x dim x 3
        features = self.non_linearity(
            self.conv2_bn(self.conv_layer(self.conv1_bn(stacked)))
        )
        flat = features.view(
            -1, (self.config.hidden_size - self.config.kernel_size + 1) * self.config.out_channels
        )
        raw = self.fc_layer(self.dropout(flat)).view(-1)
        return -raw

    def loss(self, score, regul):
        """Softplus loss over label-signed scores plus lmbda-weighted L2 term."""
        signed = score * self.batch_y
        return torch.mean(self.criterion(signed)) + self.config.lmbda * regul

    def forward(self):
        head = self.ent_embeddings(self.batch_h)
        rel = self.rel_embeddings(self.batch_r)
        tail = self.ent_embeddings(self.batch_t)
        score = self._calc(head, rel, tail)

        # L2 regularization: mean-squared batch embeddings plus the norms of
        # the convolution and fully-connected weights.
        l2_reg = torch.mean(head ** 2) + torch.mean(tail ** 2) + torch.mean(rel ** 2)
        for weight in self.conv_layer.parameters():
            l2_reg = l2_reg + weight.norm(2)
        for weight in self.fc_layer.parameters():
            l2_reg = l2_reg + weight.norm(2)

        return self.loss(score, l2_reg)

    def predict(self):
        """Score the current batch and return the result as a numpy array."""
        score = self._calc(
            self.ent_embeddings(self.batch_h),
            self.rel_embeddings(self.batch_r),
            self.ent_embeddings(self.batch_t),
        )
        return score.cpu().data.numpy()

================================================
FILE: ConvKB_pytorch/ConvKB_1D.py
================================================
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
import numpy as np
from Model import Model
from numpy.random import RandomState

# Fix all RNG seeds so model initialization is reproducible across runs.
SEED = 123
torch.manual_seed(SEED)
if torch.cuda.is_available():
    torch.cuda.manual_seed_all(SEED)

class ConvKB(Model):
    """ConvKB scoring model, 1D-convolution variant.

    Each triple (h, r, t) is stacked into a batch x 3 x hidden_size tensor
    (one channel per embedding row) and scored by a Conv1d over the embedding
    dimension followed by a bias-free linear layer. Training minimizes a
    softplus loss over signed scores (+1 positives, -1 negatives) with L2
    regularization (see `loss` / `forward`).
    """

    def __init__(self, config):
        super(ConvKB, self).__init__(config)

        self.ent_embeddings = nn.Embedding(self.config.entTotal, self.config.hidden_size)
        self.rel_embeddings = nn.Embedding(self.config.relTotal, self.config.hidden_size)

        self.conv1_bn = nn.BatchNorm1d(3)
        # 3 input channels = the (h, r, t) embedding rows; conv slides along the embedding dim.
        self.conv_layer = nn.Conv1d(3, self.config.out_channels, self.config.kernel_size)
        self.conv2_bn = nn.BatchNorm1d(self.config.out_channels)
        self.dropout = nn.Dropout(self.config.convkb_drop_prob)
        self.non_linearity = nn.ReLU()  # you should also tune with torch.tanh() or torch.nn.Tanh()
        # Conv output length is hidden_size - kernel_size + 1 per channel.
        self.fc_layer = nn.Linear((self.config.hidden_size - self.config.kernel_size + 1) * self.config.out_channels, 1, bias=False)

        self.criterion = nn.Softplus()
        self.init_parameters()

    def init_parameters(self):
        """Initialize embeddings (Xavier or pretrained) and conv/fc weights (Xavier)."""
        # Idiomatic truth test instead of comparing to False.
        if not self.config.use_init_embeddings:
            nn.init.xavier_uniform_(self.ent_embeddings.weight.data)
            nn.init.xavier_uniform_(self.rel_embeddings.weight.data)
        else:
            # Load pretrained embedding tensors supplied via the config.
            self.ent_embeddings.weight.data = self.config.init_ent_embs
            self.rel_embeddings.weight.data = self.config.init_rel_embs

        nn.init.xavier_uniform_(self.fc_layer.weight.data)
        nn.init.xavier_uniform_(self.conv_layer.weight.data)

    def _calc(self, h, r, t):
        """Return the negated ConvKB score for each (h, r, t) row in the batch.

        Negation makes lower values correspond to more plausible triples,
        matching the ranking convention used at evaluation time.
        """
        h = h.unsqueeze(1)  # bs x 1 x dim
        r = r.unsqueeze(1)
        t = t.unsqueeze(1)

        conv_input = torch.cat([h, r, t], 1)  # bs x 3 x dim
        conv_input = self.conv1_bn(conv_input)
        out_conv = self.conv_layer(conv_input)
        out_conv = self.conv2_bn(out_conv)
        out_conv = self.non_linearity(out_conv)
        out_conv = out_conv.view(-1, (self.config.hidden_size - self.config.kernel_size + 1) * self.config.out_channels)
        input_fc = self.dropout(out_conv)
        score = self.fc_layer(input_fc).view(-1)

        return -score

    def loss(self, score, regul):
        """Softplus loss over signed scores plus lmbda-weighted L2 regularization."""
        return torch.mean(self.criterion(score * self.batch_y)) + self.config.lmbda * regul

    def forward(self):
        """Embed the current batch, score it, and return the regularized training loss."""
        h = self.ent_embeddings(self.batch_h)
        r = self.rel_embeddings(self.batch_r)
        t = self.ent_embeddings(self.batch_t)
        score = self._calc(h, r, t)

        # Regularization: mean-squared embeddings plus conv/fc weight norms.
        l2_reg = torch.mean(h ** 2) + torch.mean(t ** 2) + torch.mean(r ** 2)
        for W in self.conv_layer.parameters():
            l2_reg = l2_reg + W.norm(2)
        for W in self.fc_layer.parameters():
            l2_reg = l2_reg + W.norm(2)

        return self.loss(score, l2_reg)

    def predict(self):
        """Score the current batch and return the scores as a NumPy array."""
        h = self.ent_embeddings(self.batch_h)
        r = self.rel_embeddings(self.batch_r)
        t = self.ent_embeddings(self.batch_t)
        score = self._calc(h, r, t)

        return score.cpu().data.numpy()


================================================
FILE: ConvKB_pytorch/Model.py
================================================
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable

class Model(nn.Module):
    """Abstract base class for KG embedding models.

    Holds the config and the current batch tensors (set externally by the
    training loop); subclasses implement `forward` and `predict`.
    """

    def __init__(self, config):
        super(Model, self).__init__()
        self.config = config
        # Current mini-batch (head / tail / relation ids and +1/-1 labels);
        # assigned by the driver before each forward pass.
        self.batch_h = None
        self.batch_t = None
        self.batch_r = None
        self.batch_y = None

    def get_positive_score(self, score):
        """Scores of the positive triples (first batch_size entries)."""
        return score[:self.config.batch_size]

    def get_negative_score(self, score):
        """Mean negative score per positive triple, averaged over negatives."""
        negatives = score[self.config.batch_size:self.config.batch_seq_size]
        return torch.mean(negatives.view(-1, self.config.batch_size), 0)

    def forward(self):
        raise NotImplementedError

    def predict(self):
        raise NotImplementedError


================================================
FILE: ConvKB_pytorch/base/Base.cpp
================================================
#include "Setting.h"
#include "Random.h"
#include "Reader.h"
#include "Corrupt.h"
#include "Test.h"
#include "Valid.h"
#include <cstdlib>
#include <pthread.h>

// Forward declarations of the C API exported to Python via ctypes.
// Definitions live in Setting.h, Random.h and Reader.h.
extern "C"
void setInPath(char *path);

extern "C"
void setTestFilePath(char *path);

extern "C"
void setOutPath(char *path);

extern "C"
void setWorkThreads(INT threads);

extern "C"
void setBern(INT con);

extern "C"
INT getWorkThreads();

extern "C"
INT getEntityTotal();

extern "C"
INT getRelationTotal();

extern "C"
INT getTripleTotal();

extern "C"
INT getTrainTotal();

extern "C"
INT getTestTotal();

extern "C"
INT getValidTotal();

extern "C"
void randReset();

extern "C"
void importTrainFiles();

// Per-thread arguments for getBatch(): pointers to the shared output arrays
// plus the sampling configuration; `id` selects the thread's slice of the batch.
struct Parameter {
	INT id;
	INT *batch_h;
	INT *batch_t;
	INT *batch_r;
	REAL *batch_y;
	INT batchSize;
	INT negRate;
	INT negRelRate;
};

// Worker body for sampling(): thread `id` fills its slice [lef, rig) of the
// batch with positive training triples and, for each, negRate entity-corrupted
// and negRelRate relation-corrupted negatives. Negatives are written at
// offsets that are multiples of batchSize, so positives occupy [0, batchSize)
// and each subsequent batchSize-sized segment holds one negative per positive.
void* getBatch(void* con) {
	Parameter *para = (Parameter *)(con);
	INT id = para -> id;
	INT *batch_h = para -> batch_h;
	INT *batch_t = para -> batch_t;
	INT *batch_r = para -> batch_r;
	REAL *batch_y = para -> batch_y;
	INT batchSize = para -> batchSize;
	INT negRate = para -> negRate;
	INT negRelRate = para -> negRelRate;
	INT lef, rig;
	// Split [0, batchSize) evenly across workThreads; the uneven case rounds
	// slice sizes up and clamps the last slice to batchSize.
	if (batchSize % workThreads == 0) {
		lef = id * (batchSize / workThreads);
		rig = (id + 1) * (batchSize / workThreads);
	} else {
		lef = id * (batchSize / workThreads + 1);
		rig = (id + 1) * (batchSize / workThreads + 1);
		if (rig > batchSize) rig = batchSize;
	}
	// Default head-vs-tail corruption probability: 500/1000 (uniform).
	REAL prob = 500;
	for (INT batch = lef; batch < rig; batch++) {
		// Draw a random positive triple.
		INT i = rand_max(id, trainTotal);
		batch_h[batch] = trainList[i].h;
		batch_t[batch] = trainList[i].t;
		batch_r[batch] = trainList[i].r;
		batch_y[batch] = 1;
		INT last = batchSize;
		for (INT times = 0; times < negRate; times ++) {
			// With bernFlag, bias head/tail corruption by the relation's
			// average tail-per-head / head-per-tail counts ("bern" sampling).
			if (bernFlag)
				prob = 1000 * right_mean[trainList[i].r] / (right_mean[trainList[i].r] + left_mean[trainList[i].r]);
			if (randd(id) % 1000 < prob) {
				// Corrupt the tail; corrupt_head samples a tail not seen with (h, r).
				batch_h[batch + last] = trainList[i].h;
				batch_t[batch + last] = corrupt_head(id, trainList[i].h, trainList[i].r);
				batch_r[batch + last] = trainList[i].r;
			} else {
				// Corrupt the head; corrupt_tail samples a head not seen with (t, r).
				batch_h[batch + last] = corrupt_tail(id, trainList[i].t, trainList[i].r);;
				batch_t[batch + last] = trainList[i].t;
				batch_r[batch + last] = trainList[i].r;
			}
			batch_y[batch + last] = -1;
			last += batchSize;
		}
		for (INT times = 0; times < negRelRate; times++) {
			// Relation-corrupted negative: same (h, t), random unseen relation.
			batch_h[batch + last] = trainList[i].h;
			batch_t[batch + last] = trainList[i].t;
			batch_r[batch + last] = corrupt_rel(id, trainList[i].h, trainList[i].t);
			batch_y[batch + last] = -1;
			last += batchSize;
		}
	}
	pthread_exit(NULL);
}

// Entry point called from Python: fill the batch arrays with positives and
// negatives by fanning the work out across workThreads getBatch() workers.
extern "C"
void sampling(INT *batch_h, INT *batch_t, INT *batch_r, REAL *batch_y, INT batchSize, INT negRate = 1, INT negRelRate = 0) {
	pthread_t *workers = (pthread_t *)malloc(workThreads * sizeof(pthread_t));
	Parameter *params = (Parameter *)malloc(workThreads * sizeof(Parameter));
	for (INT i = 0; i < workThreads; i++) {
		// Every worker shares the output arrays; `id` picks its slice.
		params[i].id = i;
		params[i].batch_h = batch_h;
		params[i].batch_t = batch_t;
		params[i].batch_r = batch_r;
		params[i].batch_y = batch_y;
		params[i].batchSize = batchSize;
		params[i].negRate = negRate;
		params[i].negRelRate = negRelRate;
		pthread_create(&workers[i], NULL, getBatch, (void *)&params[i]);
	}
	for (INT i = 0; i < workThreads; i++)
		pthread_join(workers[i], NULL);
	free(workers);
	free(params);
}

// Standalone smoke test: just load the training files and exit.
int main() {
	importTrainFiles();
	return 0;
}


================================================
FILE: ConvKB_pytorch/base/Corrupt.h
================================================
#ifndef CORRUPT_H
#define CORRUPT_H
#include "Random.h"
#include "Triple.h"
#include "Reader.h"

// Sample a tail entity t' uniformly from all entities NOT appearing as a tail
// of (h, r) in the training set. Two binary searches locate the contiguous
// range [ll, rr] of (h, r) triples in trainHead (sorted by h, then r, then t);
// a random index over the remaining entities is then mapped past the existing
// tails with offset arithmetic so the result is uniform over non-tails.
INT corrupt_head(INT id, INT h, INT r) {
	INT lef, rig, mid, ll, rr;
	lef = lefHead[h] - 1;
	rig = rigHead[h];
	// First triple of entity h with relation >= r.
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainHead[mid].r >= r) rig = mid; else
		lef = mid;
	}
	ll = rig;
	lef = lefHead[h];
	rig = rigHead[h] + 1;
	// Last triple of entity h with relation <= r.
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainHead[mid].r <= r) lef = mid; else
		rig = mid;
	}
	rr = lef;
	// Draw from the entities excluding the rr - ll + 1 known tails.
	INT tmp = rand_max(id, entityTotal - (rr - ll + 1));
	if (tmp < trainHead[ll].t) return tmp;
	if (tmp > trainHead[rr].t - rr + ll - 1) return tmp + rr - ll + 1;
	// Otherwise binary-search for how many known tails precede tmp and skip them.
	lef = ll, rig = rr + 1;
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainHead[mid].t - mid + ll - 1 < tmp)
			lef = mid;
		else 
			rig = mid;
	}
	return tmp + lef - ll + 1;
}

// Mirror of corrupt_head: sample a head entity h' uniformly from all entities
// NOT appearing as a head of (t, r), using trainTail (sorted by t, then r,
// then h) and the same range-plus-offset arithmetic.
INT corrupt_tail(INT id, INT t, INT r) {
	INT lef, rig, mid, ll, rr;
	lef = lefTail[t] - 1;
	rig = rigTail[t];
	// First triple of entity t with relation >= r.
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainTail[mid].r >= r) rig = mid; else
		lef = mid;
	}
	ll = rig;
	lef = lefTail[t];
	rig = rigTail[t] + 1;
	// Last triple of entity t with relation <= r.
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainTail[mid].r <= r) lef = mid; else
		rig = mid;
	}
	rr = lef;
	// Draw from the entities excluding the rr - ll + 1 known heads.
	INT tmp = rand_max(id, entityTotal - (rr - ll + 1));
	if (tmp < trainTail[ll].h) return tmp;
	if (tmp > trainTail[rr].h - rr + ll - 1) return tmp + rr - ll + 1;
	lef = ll, rig = rr + 1;
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainTail[mid].h - mid + ll - 1 < tmp)
			lef = mid;
		else 
			rig = mid;
	}
	return tmp + lef - ll + 1;
}


// Sample a relation r' uniformly from all relations NOT connecting (h, t) in
// the training set, using trainRel (sorted by h, then t, then r) and the same
// range-plus-offset arithmetic as corrupt_head / corrupt_tail.
INT corrupt_rel(INT id, INT h, INT t) {
	INT lef, rig, mid, ll, rr;
	lef = lefRel[h] - 1;
	rig = rigRel[h];
	// First triple of head h with tail >= t.
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainRel[mid].t >= t) rig = mid; else
		lef = mid;
	}
	ll = rig;
	lef = lefRel[h];
	rig = rigRel[h] + 1;
	// Last triple of head h with tail <= t.
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainRel[mid].t <= t) lef = mid; else
		rig = mid;
	}
	rr = lef;
	// Draw from the relations excluding the rr - ll + 1 known ones.
	INT tmp = rand_max(id, relationTotal - (rr - ll + 1));
	if (tmp < trainRel[ll].r) return tmp;
	if (tmp > trainRel[rr].r - rr + ll - 1) return tmp + rr - ll + 1;
	lef = ll, rig = rr + 1;
	while (lef + 1 < rig) {
		mid = (lef + rig) >> 1;
		if (trainRel[mid].r - mid + ll - 1 < tmp)
			lef = mid;
		else 
			rig = mid;
	}
	return tmp + lef - ll + 1;
}


// Binary-search the globally sorted tripleList (ordered by h, then r, then t)
// for the exact triple (h, r, t). Used to filter corrupted triples that
// accidentally exist in the KB.
// Fix: the original declared an outer `INT mid;` that was never used and was
// shadowed by the loop-local `mid` — the redundant declaration is removed.
bool _find(INT h, INT t, INT r) {
    INT lef = 0;
    INT rig = tripleTotal - 1;
    while (lef + 1 < rig) {
        INT mid = (lef + rig) >> 1;
        if ((tripleList[mid].h < h) || (tripleList[mid].h == h && tripleList[mid].r < r) || (tripleList[mid].h == h && tripleList[mid].r == r && tripleList[mid].t < t)) lef = mid; else rig = mid;
    }
    // The loop narrows to two candidates; check both.
    if (tripleList[lef].h == h && tripleList[lef].r == r && tripleList[lef].t == t) return true;
    if (tripleList[rig].h == h && tripleList[rig].r == r && tripleList[rig].t == t) return true;
    return false;
}

// Sample a replacement tail for (h, r) from the type-constrained candidate
// pool tail_type[tail_lef[r] .. tail_rig[r]) such that (h, t, r) is not a
// known triple. After 1000 failed draws, fall back to unconstrained
// corruption via corrupt_head.
INT corrupt(INT h, INT r){
	INT lo = tail_lef[r];
	INT hi = tail_rig[r];
	for (INT attempt = 0; ; attempt++) {
		INT t = tail_type[rand(lo, hi)];
		if (!_find(h, t, r))
			return t;
		if (attempt + 1 >= 1000)
			return corrupt_head(0, h, r);
	}
}
#endif


================================================
FILE: ConvKB_pytorch/base/Random.h
================================================
#ifndef RANDOM_H
#define RANDOM_H
#include "Setting.h"
#include <cstdlib>

// One RNG state word per worker thread (lock-free: each thread owns a slot).
unsigned long long *next_random;

// (Re)allocate and seed the per-thread RNG states from the C library rand().
extern "C"
void randReset() {
	next_random = (unsigned long long *)calloc(workThreads, sizeof(unsigned long long));
	for (INT i = 0; i < workThreads; i++)
		next_random[i] = rand();
}

// Advance thread `id`'s linear congruential generator and return the new state.
unsigned long long randd(INT id) {
	next_random[id] = next_random[id] * (unsigned long long)25214903917 + 11;
	return next_random[id];
}

// Uniform draw in [0, x) for thread `id`. The while-loop guards against a
// negative remainder after the unsigned-to-signed INT conversion.
INT rand_max(INT id, INT x) {
	INT res = randd(id) % x;
	while (res < 0)
		res += x;
	return res;
}

//[a,b)
// Uniform draw in [a, b) using the global C library rand() (not per-thread).
INT rand(INT a, INT b){
	return (rand() % (b-a))+ a;
}
#endif


================================================
FILE: ConvKB_pytorch/base/Reader.h
================================================
#ifndef READER_H
#define READER_H
#include "Setting.h"
#include "Triple.h"
#include <cstdlib>
#include <algorithm>

// Per-relation / per-entity triple counts.
INT *freqRel, *freqEnt;
// Index ranges into trainHead / trainTail / trainRel for each entity:
// e.g. triples with head entity e live at trainHead[lefHead[e] .. rigHead[e]].
INT *lefHead, *rigHead;
INT *lefTail, *rigTail;
INT *lefRel, *rigRel;
// Per-relation averages used for "bern" negative sampling.
REAL *left_mean, *right_mean;

// Deduplicated training triples under different sort orders.
Triple *trainList;
Triple *trainHead;
Triple *trainTail;
Triple *trainRel;

// Per-relation index ranges into testList / validList (sorted by relation).
INT *testLef, *testRig;
INT *validLef, *validRig;

// Load entity/relation/train id files from inPath, deduplicate the training
// triples, build the three sorted views (by head, tail, relation) with their
// per-entity index ranges, and compute the per-relation "bern" statistics.
extern "C"
void importTrainFiles() {

	printf("The toolkit is importing datasets.\n");
	FILE *fin;
	int tmp;

	// First line of each id file is the element count.
	fin = fopen((inPath + "relation2id.txt").c_str(), "r");
	tmp = fscanf(fin, "%ld", &relationTotal);
	printf("The total of relations is %ld.\n", relationTotal);
	fclose(fin);

	fin = fopen((inPath + "entity2id.txt").c_str(), "r");
	tmp = fscanf(fin, "%ld", &entityTotal);
	printf("The total of entities is %ld.\n", entityTotal);
	fclose(fin);

	fin = fopen((inPath + "train2id.txt").c_str(), "r");
	tmp = fscanf(fin, "%ld", &trainTotal);
	trainList = (Triple *)calloc(trainTotal, sizeof(Triple));
	trainHead = (Triple *)calloc(trainTotal, sizeof(Triple));
	trainTail = (Triple *)calloc(trainTotal, sizeof(Triple));
	trainRel = (Triple *)calloc(trainTotal, sizeof(Triple));
	freqRel = (INT *)calloc(relationTotal, sizeof(INT));
	freqEnt = (INT *)calloc(entityTotal, sizeof(INT));
	for (INT i = 0; i < trainTotal; i++) {
		tmp = fscanf(fin, "%ld", &trainList[i].h);
		tmp = fscanf(fin, "%ld", &trainList[i].t);
		tmp = fscanf(fin, "%ld", &trainList[i].r);
	}
	fclose(fin);
	// Sort, then deduplicate in place; trainTotal becomes the unique count.
	std::sort(trainList, trainList + trainTotal, Triple::cmp_head);
	tmp = trainTotal; trainTotal = 1;
	trainHead[0] = trainTail[0] = trainRel[0] = trainList[0];
	freqEnt[trainList[0].t] += 1;
	freqEnt[trainList[0].h] += 1;
	freqRel[trainList[0].r] += 1;
	for (INT i = 1; i < tmp; i++)
		if (trainList[i].h != trainList[i - 1].h || trainList[i].r != trainList[i - 1].r || trainList[i].t != trainList[i - 1].t) {
			trainHead[trainTotal] = trainTail[trainTotal] = trainRel[trainTotal] = trainList[trainTotal] = trainList[i];
			trainTotal++;
			freqEnt[trainList[i].t]++;
			freqEnt[trainList[i].h]++;
			freqRel[trainList[i].r]++;
		}

	std::sort(trainHead, trainHead + trainTotal, Triple::cmp_head);
	std::sort(trainTail, trainTail + trainTotal, Triple::cmp_tail);
	std::sort(trainRel, trainRel + trainTotal, Triple::cmp_rel);
	printf("The total of train triples is %ld.\n", trainTotal);

	// Build [lef*, rig*] index ranges per entity in each sorted view;
	// rig* is pre-filled with -1 so entities without triples have empty ranges.
	lefHead = (INT *)calloc(entityTotal, sizeof(INT));
	rigHead = (INT *)calloc(entityTotal, sizeof(INT));
	lefTail = (INT *)calloc(entityTotal, sizeof(INT));
	rigTail = (INT *)calloc(entityTotal, sizeof(INT));
	lefRel = (INT *)calloc(entityTotal, sizeof(INT));
	rigRel = (INT *)calloc(entityTotal, sizeof(INT));
	memset(rigHead, -1, sizeof(INT)*entityTotal);
	memset(rigTail, -1, sizeof(INT)*entityTotal);
	memset(rigRel, -1, sizeof(INT)*entityTotal);
	for (INT i = 1; i < trainTotal; i++) {
		if (trainTail[i].t != trainTail[i - 1].t) {
			rigTail[trainTail[i - 1].t] = i - 1;
			lefTail[trainTail[i].t] = i;
		}
		if (trainHead[i].h != trainHead[i - 1].h) {
			rigHead[trainHead[i - 1].h] = i - 1;
			lefHead[trainHead[i].h] = i;
		}
		if (trainRel[i].h != trainRel[i - 1].h) {
			rigRel[trainRel[i - 1].h] = i - 1;
			lefRel[trainRel[i].h] = i;
		}
	}
	lefHead[trainHead[0].h] = 0;
	rigHead[trainHead[trainTotal - 1].h] = trainTotal - 1;
	lefTail[trainTail[0].t] = 0;
	rigTail[trainTail[trainTotal - 1].t] = trainTotal - 1;
	lefRel[trainRel[0].h] = 0;
	rigRel[trainRel[trainTotal - 1].h] = trainTotal - 1;

	// "bern" statistics: average number of tails per (head, relation) and
	// heads per (tail, relation), used to bias negative sampling.
	left_mean = (REAL *)calloc(relationTotal,sizeof(REAL));
	right_mean = (REAL *)calloc(relationTotal,sizeof(REAL));
	for (INT i = 0; i < entityTotal; i++) {
		for (INT j = lefHead[i] + 1; j <= rigHead[i]; j++)
			if (trainHead[j].r != trainHead[j - 1].r)
				left_mean[trainHead[j].r] += 1.0;
		if (lefHead[i] <= rigHead[i])
			left_mean[trainHead[lefHead[i]].r] += 1.0;
		for (INT j = lefTail[i] + 1; j <= rigTail[i]; j++)
			if (trainTail[j].r != trainTail[j - 1].r)
				right_mean[trainTail[j].r] += 1.0;
		if (lefTail[i] <= rigTail[i])
			right_mean[trainTail[lefTail[i]].r] += 1.0;
	}
	for (INT i = 0; i < relationTotal; i++) {
		left_mean[i] = freqRel[i] / left_mean[i];
		right_mean[i] = freqRel[i] / right_mean[i];
	}
}

// Evaluation-time triple stores: test set, valid set, and the union of
// train + valid + test used for filtered ranking.
Triple *testList;
Triple *validList;
Triple *tripleList;

// Load test/train/valid id files, build the combined tripleList for filtered
// evaluation, sort test/valid by relation, and record per-relation index
// ranges (testLef/testRig, validLef/validRig).
extern "C"
void importTestFiles() {
    FILE *fin;
    INT tmp;
    
    fin = fopen((inPath + "relation2id.txt").c_str(), "r");
    tmp = fscanf(fin, "%ld", &relationTotal);
    fclose(fin);

    fin = fopen((inPath + "entity2id.txt").c_str(), "r");
    tmp = fscanf(fin, "%ld", &entityTotal);
    fclose(fin);
    // Default test file unless overridden via setTestFilePath().
    if (testFilePath == "")
        testFilePath = inPath + "test2id.txt";
    FILE* f_kb1 = fopen(testFilePath.c_str(), "r");
    FILE* f_kb2 = fopen((inPath + "train2id.txt").c_str(), "r");
    FILE* f_kb3 = fopen((inPath + "valid2id.txt").c_str(), "r");
    tmp = fscanf(f_kb1, "%ld", &testTotal);
    tmp = fscanf(f_kb2, "%ld", &trainTotal);
    tmp = fscanf(f_kb3, "%ld", &validTotal);
    tripleTotal = testTotal + trainTotal + validTotal;
    testList = (Triple *)calloc(testTotal, sizeof(Triple));
    validList = (Triple *)calloc(validTotal, sizeof(Triple));
    tripleList = (Triple *)calloc(tripleTotal, sizeof(Triple));
    // tripleList layout before sorting: [test | train | valid].
    for (INT i = 0; i < testTotal; i++) {
        tmp = fscanf(f_kb1, "%ld", &testList[i].h);
        tmp = fscanf(f_kb1, "%ld", &testList[i].t);
        tmp = fscanf(f_kb1, "%ld", &testList[i].r);
        tripleList[i] = testList[i];
    }
    for (INT i = 0; i < trainTotal; i++) {
        tmp = fscanf(f_kb2, "%ld", &tripleList[i + testTotal].h);
        tmp = fscanf(f_kb2, "%ld", &tripleList[i + testTotal].t);
        tmp = fscanf(f_kb2, "%ld", &tripleList[i + testTotal].r);
    }
    for (INT i = 0; i < validTotal; i++) {
        tmp = fscanf(f_kb3, "%ld", &tripleList[i + testTotal + trainTotal].h);
        tmp = fscanf(f_kb3, "%ld", &tripleList[i + testTotal + trainTotal].t);
        tmp = fscanf(f_kb3, "%ld", &tripleList[i + testTotal + trainTotal].r);
        validList[i] = tripleList[i + testTotal + trainTotal];
    }
    fclose(f_kb1);
    fclose(f_kb2);
    fclose(f_kb3);

    // tripleList sorted for _find(); test/valid sorted by relation for
    // per-relation iteration (triple classification thresholds).
    std::sort(tripleList, tripleList + tripleTotal, Triple::cmp_head);
    std::sort(testList, testList + testTotal, Triple::cmp_rel2);
    std::sort(validList, validList + validTotal, Triple::cmp_rel2);
    printf("The total of test triples is %ld.\n", testTotal);
    printf("The total of valid triples is %ld.\n", validTotal);

    // Per-relation ranges; -1 marks relations absent from the split.
    testLef = (INT *)calloc(relationTotal, sizeof(INT));
    testRig = (INT *)calloc(relationTotal, sizeof(INT));
    memset(testLef, -1, sizeof(INT) * relationTotal);
    memset(testRig, -1, sizeof(INT) * relationTotal);
    for (INT i = 1; i < testTotal; i++) {
	if (testList[i].r != testList[i-1].r) {
	    testRig[testList[i-1].r] = i - 1;
	    testLef[testList[i].r] = i;
	}
    }
    testLef[testList[0].r] = 0;
    testRig[testList[testTotal - 1].r] = testTotal - 1;

    validLef = (INT *)calloc(relationTotal, sizeof(INT));
    validRig = (INT *)calloc(relationTotal, sizeof(INT));
    memset(validLef, -1, sizeof(INT)*relationTotal);
    memset(validRig, -1, sizeof(INT)*relationTotal);
    for (INT i = 1; i < validTotal; i++) {
	if (validList[i].r != validList[i-1].r) {
	    validRig[validList[i-1].r] = i - 1;
	    validLef[validList[i].r] = i;
	}
    }
    validLef[validList[0].r] = 0;
    validRig[validList[validTotal - 1].r] = validTotal - 1;
}

// Type-constraint tables loaded from type_constrain.txt: for relation r,
// the legal head entities are head_type[head_lef[r] .. head_rig[r]) and the
// legal tail entities are tail_type[tail_lef[r] .. tail_rig[r]).
INT* head_lef;
INT* head_rig;
INT* tail_lef;
INT* tail_rig;
INT* head_type;
INT* tail_type;

// Load type_constrain.txt in two passes: the first pass only counts the total
// number of head/tail candidates (to size head_type/tail_type), the second
// pass fills the tables and sorts each relation's candidate range.
extern "C"
void importTypeFiles() {

    head_lef = (INT *)calloc(relationTotal, sizeof(INT));
    head_rig = (INT *)calloc(relationTotal, sizeof(INT));
    tail_lef = (INT *)calloc(relationTotal, sizeof(INT));
    tail_rig = (INT *)calloc(relationTotal, sizeof(INT));
    INT total_lef = 0;
    INT total_rig = 0;
    FILE* f_type = fopen((inPath + "type_constrain.txt").c_str(),"r");
    INT tmp;
    tmp = fscanf(f_type, "%ld", &tmp);
    for (INT i = 0; i < relationTotal; i++) {
        INT rel, tot;
        tmp = fscanf(f_type, "%ld %ld", &rel, &tot);
        for (INT j = 0; j < tot; j++) {
            tmp = fscanf(f_type, "%ld", &tmp);
            total_lef++;
        }
        tmp = fscanf(f_type, "%ld%ld", &rel, &tot);
        for (INT j = 0; j < tot; j++) {
            tmp = fscanf(f_type, "%ld", &tmp);
            total_rig++;
        }
    }
    fclose(f_type);
    head_type = (INT *)calloc(total_lef, sizeof(INT)); 
    tail_type = (INT *)calloc(total_rig, sizeof(INT));
    total_lef = 0;
    total_rig = 0;
    // Second pass: actually read the candidate entity ids.
    f_type = fopen((inPath + "type_constrain.txt").c_str(),"r");
    tmp = fscanf(f_type, "%ld", &tmp);
    for (INT i = 0; i < relationTotal; i++) {
        INT rel, tot;
        tmp = fscanf(f_type, "%ld%ld", &rel, &tot);
        head_lef[rel] = total_lef;
        for (INT j = 0; j < tot; j++) {
            tmp = fscanf(f_type, "%ld", &head_type[total_lef]);
            total_lef++;
        }
        head_rig[rel] = total_lef;
        // Sorted so evaluation can walk candidates with a merge-style scan.
        std::sort(head_type + head_lef[rel], head_type + head_rig[rel]);
        tmp = fscanf(f_type, "%ld%ld", &rel, &tot);
        tail_lef[rel] = total_rig;
        for (INT j = 0; j < tot; j++) {
            tmp = fscanf(f_type, "%ld", &tail_type[total_rig]);
            total_rig++;
        }
        tail_rig[rel] = total_rig;
        std::sort(tail_type + tail_lef[rel], tail_type + tail_rig[rel]);
    }
    fclose(f_type);
}


#endif


================================================
FILE: ConvKB_pytorch/base/Setting.h
================================================
#ifndef SETTING_H
#define SETTING_H
#define INT long
#define REAL float
#include <cstring>
#include <cstdio>
#include <string>

// Dataset locations; overridden at runtime via the set*Path() setters below.
std::string inPath = "../data/FB15K/";
std::string outPath = "../data/FB15K/";
std::string testFilePath = "";

// Set the dataset input directory (called from Python via ctypes).
// std::string assigns directly from a C string; the original appended the
// path one character at a time, which was equivalent but needlessly verbose.
extern "C"
void setInPath(char *path) {
	inPath = path;
	printf("Input Files Path : %s\n", inPath.c_str());
}

// Set an explicit test-triples file (called from Python via ctypes).
// Direct std::string assignment replaces the original char-by-char append loop.
extern "C"
void setTestFilePath(char *path) {
	testFilePath = path;
	printf("Test File Path : %s\n", testFilePath.c_str());
}

// Set the output directory (called from Python via ctypes).
// Direct std::string assignment replaces the original char-by-char append loop.
extern "C"
void setOutPath(char *path) {
	outPath = path;
	printf("Output Files Path : %s\n", outPath.c_str());
}

/*
============================================================
*/

// Number of sampling worker threads; set from Python before randReset().
INT workThreads = 1;

extern "C"
void setWorkThreads(INT threads) {
	workThreads = threads;
}

extern "C"
INT getWorkThreads() {
	return workThreads;
}

/*
============================================================
*/

// Dataset sizes, populated by importTrainFiles() / importTestFiles() and
// exposed to Python through the getters below.
INT relationTotal = 0;
INT entityTotal = 0;
INT tripleTotal = 0;
INT testTotal = 0;
INT trainTotal = 0;
INT validTotal = 0;

extern "C"
INT getEntityTotal() {
	return entityTotal;
}

extern "C"
INT getRelationTotal() {
	return relationTotal;
}

extern "C"
INT getTripleTotal() {
	return tripleTotal;
}

extern "C"
INT getTrainTotal() {
	return trainTotal;
}

extern "C"
INT getTestTotal() {
	return testTotal;
}

extern "C"
INT getValidTotal() {
	return validTotal;
}
/*
============================================================
*/

// Non-zero enables "bern" (relation-statistics-biased) negative sampling.
INT bernFlag = 0;

extern "C"
void setBern(INT con) {
	bernFlag = con;
}

#endif


================================================
FILE: ConvKB_pytorch/base/Test.h
================================================
#ifndef TEST_H
#define TEST_H
#include "Setting.h"
#include "Reader.h"
#include "Corrupt.h"

/*=====================================================================================
link prediction
======================================================================================*/
// Link-prediction state. lastHead/lastTail index the current test triple;
// the REAL accumulators collect MR / MRR / hits@1,3,10 for head (l_*) and
// tail (r_*) prediction, raw and filtered, with *_constrain variants
// restricted to type-constrained candidate entities.
INT lastHead = 0;
INT lastTail = 0;
REAL l1_filter_tot = 0, l1_tot = 0, r1_tot = 0, r1_filter_tot = 0, l_tot = 0, r_tot = 0, l_filter_rank = 0, l_rank = 0, l_filter_reci_rank = 0, l_reci_rank = 0;
REAL l3_filter_tot = 0, l3_tot = 0, r3_tot = 0, r3_filter_tot = 0, l_filter_tot = 0, r_filter_tot = 0, r_filter_rank = 0, r_rank = 0, r_filter_reci_rank = 0, r_reci_rank = 0;

REAL l1_filter_tot_constrain = 0, l1_tot_constrain = 0, r1_tot_constrain = 0, r1_filter_tot_constrain = 0, l_tot_constrain = 0, r_tot_constrain = 0, l_filter_rank_constrain = 0, l_rank_constrain = 0, l_filter_reci_rank_constrain = 0, l_reci_rank_constrain = 0;
REAL l3_filter_tot_constrain = 0, l3_tot_constrain = 0, r3_tot_constrain = 0, r3_filter_tot_constrain = 0, l_filter_tot_constrain = 0, r_filter_tot_constrain = 0, r_filter_rank_constrain = 0, r_rank_constrain = 0, r_filter_reci_rank_constrain = 0, r_reci_rank_constrain = 0;
// Reset all link-prediction accumulators and triple cursors before a new
// evaluation run.
extern "C"
void initTest() {
    lastHead = 0;
    lastTail = 0;
    l1_filter_tot = 0, l1_tot = 0, r1_tot = 0, r1_filter_tot = 0, l_tot = 0, r_tot = 0, l_filter_rank = 0, l_rank = 0, l_filter_reci_rank = 0, l_reci_rank = 0;
    l3_filter_tot = 0, l3_tot = 0, r3_tot = 0, r3_filter_tot = 0, l_filter_tot = 0, r_filter_tot = 0, r_filter_rank = 0, r_rank = 0, r_filter_reci_rank = 0, r_reci_rank = 0;

    l1_filter_tot_constrain = 0, l1_tot_constrain = 0, r1_tot_constrain = 0, r1_filter_tot_constrain = 0, l_tot_constrain = 0, r_tot_constrain = 0, l_filter_rank_constrain = 0, l_rank_constrain = 0, l_filter_reci_rank_constrain = 0, l_reci_rank_constrain = 0;
    l3_filter_tot_constrain = 0, l3_tot_constrain = 0, r3_tot_constrain = 0, r3_filter_tot_constrain = 0, l_filter_tot_constrain = 0, r_filter_tot_constrain = 0, r_filter_rank_constrain = 0, r_rank_constrain = 0, r_filter_reci_rank_constrain = 0, r_reci_rank_constrain = 0;
}
// Build the head-prediction candidate batch for the current test triple:
// every entity as candidate head, paired with the gold tail and relation.
extern "C"
void getHeadBatch(INT *ph, INT *pt, INT *pr) {
    INT goldTail = testList[lastHead].t;
    INT goldRel = testList[lastHead].r;
    for (INT e = 0; e < entityTotal; e++) {
        ph[e] = e;
        pt[e] = goldTail;
        pr[e] = goldRel;
    }
}

// Build the tail-prediction candidate batch for the current test triple:
// every entity as candidate tail, paired with the gold head and relation.
extern "C"
void getTailBatch(INT *ph, INT *pt, INT *pr) {
    INT goldHead = testList[lastTail].h;
    INT goldRel = testList[lastTail].r;
    for (INT e = 0; e < entityTotal; e++) {
        ph[e] = goldHead;
        pt[e] = e;
        pr[e] = goldRel;
    }
}

// Consume the model's scores `con` for one head-prediction batch (lower is
// better) and update the head-side ranking accumulators: raw, filtered
// (ignoring candidates that form known triples), and their type-constrained
// variants. Advances lastHead to the next test triple.
extern "C"
void testHead(REAL *con) {
    INT h = testList[lastHead].h;
    INT t = testList[lastHead].t;
    INT r = testList[lastHead].r;
    INT lef = head_lef[r], rig = head_rig[r];

    // Score of the gold head; rank = 1 + number of strictly better candidates.
    REAL minimal = con[h];
    INT l_s = 0;
    INT l_filter_s = 0;
    INT l_s_constrain = 0;
    INT l_filter_s_constrain = 0;

    for (INT j = 0; j < entityTotal; j++) {
        if (j != h) {
            REAL value = con[j];
            if (value < minimal) {
                l_s += 1;
                // Filtered: only count if (j, r, t) is not a known triple.
                if (not _find(j, t, r))
                    l_filter_s += 1;
            }
            // Merge-scan the sorted type-constrained head candidates.
            while (lef < rig && head_type[lef] < j) lef ++;
            if (lef < rig && j == head_type[lef]) {
                if (value < minimal) {
                    l_s_constrain += 1;
                    if (not _find(j, t, r)) {
                        l_filter_s_constrain += 1;
                    }
                }
            }
        }
    }

    // hits@10 / hits@3 / hits@1 (counts of strictly better candidates).
    if (l_filter_s < 10) l_filter_tot += 1;
    if (l_s < 10) l_tot += 1;
    if (l_filter_s < 3) l3_filter_tot += 1;
    if (l_s < 3) l3_tot += 1;
    if (l_filter_s < 1) l1_filter_tot += 1;
    if (l_s < 1) l1_tot += 1;

    if (l_filter_s_constrain < 10) l_filter_tot_constrain += 1;
    if (l_s_constrain < 10) l_tot_constrain += 1;
    if (l_filter_s_constrain < 3) l3_filter_tot_constrain += 1;
    if (l_s_constrain < 3) l3_tot_constrain += 1;
    if (l_filter_s_constrain < 1) l1_filter_tot_constrain += 1;
    if (l_s_constrain < 1) l1_tot_constrain += 1;

    // Mean rank and mean reciprocal rank accumulators.
    l_filter_rank += (l_filter_s+1);
    l_rank += (1+l_s);
    l_filter_reci_rank += 1.0/(l_filter_s+1);
    l_reci_rank += 1.0/(l_s+1);

    l_filter_rank_constrain += (l_filter_s_constrain+1);
    l_rank_constrain += (1+l_s_constrain);
    l_filter_reci_rank_constrain += 1.0/(l_filter_s_constrain+1);
    l_reci_rank_constrain += 1.0/(l_s_constrain+1);

    lastHead++;

    //printf("l_filter_s: %ld\n", l_filter_s);
    //printf("%f %f %f %f \n", l_tot / lastHead, l_filter_tot / lastHead, l_rank / lastHead, l_filter_rank / lastHead);
}

// Mirror of testHead for tail prediction: consume scores for one
// tail-prediction batch and update the tail-side (r_*) accumulators.
// Advances lastTail to the next test triple.
extern "C"
void testTail(REAL *con) {
    INT h = testList[lastTail].h;
    INT t = testList[lastTail].t;
    INT r = testList[lastTail].r;
    INT lef = tail_lef[r], rig = tail_rig[r];
    // Score of the gold tail; rank = 1 + number of strictly better candidates.
    REAL minimal = con[t];
    INT r_s = 0;
    INT r_filter_s = 0;
    INT r_s_constrain = 0;
    INT r_filter_s_constrain = 0;
    for (INT j = 0; j < entityTotal; j++) {
        if (j != t) {
            REAL value = con[j];
            if (value < minimal) {
                r_s += 1;
                // Filtered: only count if (h, r, j) is not a known triple.
                if (not _find(h, j, r))
                    r_filter_s += 1;
            }
            // Merge-scan the sorted type-constrained tail candidates.
            while (lef < rig && tail_type[lef] < j) lef ++;
            if (lef < rig && j == tail_type[lef]) {
                    if (value < minimal) {
                        r_s_constrain += 1;
                        if (not _find(h, j ,r)) {
                            r_filter_s_constrain += 1;
                        }
                    }
            }
        }

    }

    // hits@10 / hits@3 / hits@1.
    if (r_filter_s < 10) r_filter_tot += 1;
    if (r_s < 10) r_tot += 1;
    if (r_filter_s < 3) r3_filter_tot += 1;
    if (r_s < 3) r3_tot += 1;
    if (r_filter_s < 1) r1_filter_tot += 1;
    if (r_s < 1) r1_tot += 1;

    if (r_filter_s_constrain < 10) r_filter_tot_constrain += 1;
    if (r_s_constrain < 10) r_tot_constrain += 1;
    if (r_filter_s_constrain < 3) r3_filter_tot_constrain += 1;
    if (r_s_constrain < 3) r3_tot_constrain += 1;
    if (r_filter_s_constrain < 1) r1_filter_tot_constrain += 1;
    if (r_s_constrain < 1) r1_tot_constrain += 1;

    // Mean rank and mean reciprocal rank accumulators.
    r_filter_rank += (1+r_filter_s);
    r_rank += (1+r_s);
    r_filter_reci_rank += 1.0/(1+r_filter_s);
    r_reci_rank += 1.0/(1+r_s);

    r_filter_rank_constrain += (1+r_filter_s_constrain);
    r_rank_constrain += (1+r_s_constrain);
    r_filter_reci_rank_constrain += 1.0/(1+r_filter_s_constrain);
    r_reci_rank_constrain += 1.0/(1+r_s_constrain);

    lastTail++;
    //printf("r_filter_s: %ld\n", r_filter_s);
    //printf("%f %f %f %f\n", r_tot /lastTail, r_filter_tot /lastTail, r_rank /lastTail, r_filter_rank /lastTail);
}

// Finalize link prediction: convert the accumulated sums into averages over
// testTotal triples and print the filtered MRR / MR / hits@{10,3,1} table.
// NOTE: mutates the accumulators in place, so it must be called exactly once
// per evaluation run.
extern "C"
void test_link_prediction() {
    l_rank /= testTotal;
    r_rank /= testTotal;
    l_reci_rank /= testTotal;
    r_reci_rank /= testTotal;

    l_tot /= testTotal;
    l3_tot /= testTotal;
    l1_tot /= testTotal;

    r_tot /= testTotal;
    r3_tot /= testTotal;
    r1_tot /= testTotal;

    // with filter
    l_filter_rank /= testTotal;
    r_filter_rank /= testTotal;
    l_filter_reci_rank /= testTotal;
    r_filter_reci_rank /= testTotal;

    l_filter_tot /= testTotal;
    l3_filter_tot /= testTotal;
    l1_filter_tot /= testTotal;

    r_filter_tot /= testTotal;
    r3_filter_tot /= testTotal;
    r1_filter_tot /= testTotal;

    //printf("metric:\t\t\t MRR \t\t MR \t\t hit@10 \t hit@3  \t hit@1 \n");
    //printf("l(raw):\t\t\t %f \t %f \t %f \t %f \t %f \n", l_reci_rank, l_rank, l_tot, l3_tot, l1_tot);
    //printf("r(raw):\t\t\t %f \t %f \t %f \t %f \t %f \n", r_reci_rank, r_rank, r_tot, r3_tot, r1_tot);
    //printf("averaged(raw):\t\t %f \t %f \t %f \t %f \t %f \n",
    //        (l_reci_rank+r_reci_rank)/2, (l_rank+r_rank)/2, (l_tot+r_tot)/2, (l3_tot+r3_tot)/2, (l1_tot+r1_tot)/2);
    
    printf("Final (filter) results:\n");
    
    printf("\n");
    printf("l(filter):\t\t %f \t %f \t %f \t %f \t %f \n", l_filter_reci_rank, l_filter_rank, l_filter_tot, l3_filter_tot, l1_filter_tot);
    printf("r(filter):\t\t %f \t %f \t %f \t %f \t %f \n", r_filter_reci_rank, r_filter_rank, r_filter_tot, r3_filter_tot, r1_filter_tot);
    printf("averaged(filter):\t %f \t %f \t %f \t %f \t %f \n",
            (l_filter_reci_rank+r_filter_reci_rank)/2, (l_filter_rank+r_filter_rank)/2, (l_filter_tot+r_filter_tot)/2, (l3_filter_tot+r3_filter_tot)/2, (l1_filter_tot+r1_filter_tot)/2);

}

/*=====================================================================================
triple classification
======================================================================================*/
// One type-constrained negative per positive test triple (tail replaced).
Triple *negTestList;
// Build negTestList by corrupting each test triple's tail with corrupt().
extern "C"
void getNegTest() {
    negTestList = (Triple *)calloc(testTotal, sizeof(Triple));
    for (INT i = 0; i < testTotal; i++) {
        negTestList[i] = testList[i];
        negTestList[i].t = corrupt(testList[i].h, testList[i].r);
    }
    /*
    FILE* fout = fopen((inPath + "test_neg.txt").c_str(), "w");
    for (INT i = 0; i < testTotal; i++) {
        fprintf(fout, "%ld\t%ld\t%ld\t%ld\n", testList[i].h, testList[i].t, testList[i].r, INT(1));
        fprintf(fout, "%ld\t%ld\t%ld\t%ld\n", negTestList[i].h, negTestList[i].t, negTestList[i].r, INT(-1));
    }
    fclose(fout);
    */
}

// One type-constrained negative per positive valid triple (tail replaced).
Triple *negValidList;
// Build negValidList by corrupting each valid triple's tail with corrupt().
extern "C"
void getNegValid() {
    negValidList = (Triple *)calloc(validTotal, sizeof(Triple));
    for (INT i = 0; i < validTotal; i++) {
        negValidList[i] = validList[i];
        negValidList[i].t = corrupt(validList[i].h, validList[i].r);
    }
    /*
    FILE* fout = fopen((inPath + "valid_neg.txt").c_str(), "w");
    for (INT i = 0; i < validTotal; i++) {
        fprintf(fout, "%ld\t%ld\t%ld\t%ld\n", validList[i].h, validList[i].t, validList[i].r, INT(1));
        fprintf(fout, "%ld\t%ld\t%ld\t%ld\n", negValidList[i].h, negValidList[i].t, negValidList[i].r, INT(-1));
    }
    fclose(fout);
    */
}

extern "C"
void getTestBatch(INT *ph, INT *pt, INT *pr, INT *nh, INT *nt, INT *nr) {
    // Regenerate the corrupted counterparts, then export both the positive
    // and negative test triples into the caller-provided arrays.
    getNegTest();
    for (INT idx = 0; idx < testTotal; ++idx) {
        const Triple &pos = testList[idx];
        const Triple &neg = negTestList[idx];
        ph[idx] = pos.h;
        pt[idx] = pos.t;
        pr[idx] = pos.r;
        nh[idx] = neg.h;
        nt[idx] = neg.t;
        nr[idx] = neg.r;
    }
}

extern "C"
void getValidBatch(INT *ph, INT *pt, INT *pr, INT *nh, INT *nt, INT *nr) {
    // Regenerate the corrupted counterparts, then export both the positive
    // and negative validation triples into the caller-provided arrays.
    getNegValid();
    for (INT idx = 0; idx < validTotal; ++idx) {
        const Triple &pos = validList[idx];
        const Triple &neg = negValidList[idx];
        ph[idx] = pos.h;
        pt[idx] = pos.t;
        pr[idx] = pos.r;
        nh[idx] = neg.h;
        nt[idx] = neg.t;
        nr[idx] = neg.r;
    }
}
REAL threshEntire;

extern "C"
void getBestThreshold(REAL *relThresh, REAL *score_pos, REAL *score_neg) {
    // Per-relation grid search for the classification threshold: a triple is
    // labelled positive when its score is <= the threshold. For each relation
    // we scan candidate thresholds from the minimum to the maximum observed
    // score in 0.01 steps and keep the one with the best validation accuracy.
    const REAL step = 0.01;
    for (INT r = 0; r < relationTotal; r++) {
        if (validLef[r] == -1) continue;   // relation absent from validation set
        INT lef = validLef[r];
        INT rig = validRig[r];
        INT total = (rig - lef + 1) * 2;   // positives + negatives
        // Scan range covers both positive and negative scores.
        REAL lo = score_pos[lef];
        REAL hi = score_pos[lef];
        for (INT i = lef; i <= rig; i++) {
            if (score_pos[i] < lo) lo = score_pos[i];
            if (score_pos[i] > hi) hi = score_pos[i];
            if (score_neg[i] < lo) lo = score_neg[i];
            if (score_neg[i] > hi) hi = score_neg[i];
        }
        INT steps = INT((hi - lo) / step);
        REAL bestThresh = lo;
        REAL bestAcc = -1.0;   // any real accuracy (>= 0) beats this
        for (INT k = 0; k <= steps; k++) {
            REAL cand = lo + k * step;
            INT correct = 0;
            for (INT j = lef; j <= rig; j++) {
                if (score_pos[j] <= cand) correct++;   // positive classified positive
                if (score_neg[j] > cand) correct++;    // negative classified negative
            }
            REAL acc = 1.0 * correct / total;
            // Strict '>' keeps the earliest (smallest) threshold on ties,
            // matching the original first-wins behavior.
            if (acc > bestAcc) {
                bestAcc = acc;
                bestThresh = cand;
            }
        }
        relThresh[r] = bestThresh;
    }
}

// Per-relation test accuracies, filled by test_triple_classification.
REAL *testAcc = NULL;
// Micro-averaged accuracy over all scored test triples; updated on every
// call so it can also be read through the global.
REAL aveAcc;

extern "C"
REAL test_triple_classification(REAL *relThresh, REAL *score_pos, REAL *score_neg) {
    // Free the buffer from any previous call to avoid leaking on repeated
    // evaluation rounds.
    if (testAcc != NULL)
        free(testAcc);
    testAcc = (REAL *)calloc(relationTotal, sizeof(REAL));
    INT aveCorrect = 0, aveTotal = 0;
    for (INT r = 0; r < relationTotal; r++) {
        // Skip relations with no learned threshold or no test triples.
        if (validLef[r] == -1 || testLef[r] == -1) continue;
        INT correct = 0, total = 0;
        for (INT i = testLef[r]; i <= testRig[r]; i++) {
            // A positive triple is correct when its score is at or below the
            // relation threshold; a negative triple when it is above.
            if (score_pos[i] <= relThresh[r]) correct++;
            if (score_neg[i] > relThresh[r]) correct++;
            total += 2;
        }
        testAcc[r] = 1.0 * correct / total;
        aveCorrect += correct;
        aveTotal += total;
    }
    // Assign the file-scope aveAcc. The original declared a local
    // 'REAL aveAcc;' here, shadowing the global so it was never updated.
    // Guard against division by zero when no relation qualified.
    aveAcc = (aveTotal > 0) ? 1.0 * aveCorrect / aveTotal : 0.0;
    printf("triple classification accuracy is %lf\n", aveAcc);
    return aveAcc;
}

#endif


================================================
FILE: ConvKB_pytorch/base/Triple.h
================================================
#ifndef TRIPLE_H
#define TRIPLE_H
#include "Setting.h"

struct Triple {

	INT h, r, t;

	static INT minimal(INT a,INT b) {
		if (a > b) return b;
		return a;
	}
	
	static bool cmp_list(const Triple &a, const Triple &b) {
		return (minimal(a.h, a.t) > minimal(b.h, b.t));
	}

	static bool cmp_head(const Triple &a, const Triple &b) {
		return (a.h < b.h)||(a.h == b.h && a.r < b.r)||(a.h == b.h && a.r == b.r && a.t < b.t);
	}

	static bool cmp_tail(const Triple &a, const Triple &b) {
		return (a.t < b.t)||(a.t == b.t && a.r < b.r)||(a.t == b.t && a.r == b.r && a.h < b.h);
	}

	static bool cmp_rel(const Triple &a, const Triple &b) {
		return (a.h < b.h)||(a.h == b.h && a.t < b.t)||(a.h == b.h && a.t == b.t && a.r < b.r);
	}

	static bool cmp_rel2(const Triple &a, const Triple &b) {
		return (a.r < b.r)||(a.r == b.r && a.h < b.h)||(a.r == b.r && a.h == b.h && a.t < b.t);
	}

};

#endif


================================================
FILE: ConvKB_pytorch/base/Valid.h
================================================
#ifndef VALID_H
#define VALID_H
#include "Setting.h"
#include "Reader.h"
#include "Corrupt.h"

// Cursors into validList for the head-ranking and tail-ranking passes.
INT lastValidHead = 0;
INT lastValidTail = 0;

// Filtered hits@10 accumulators for head and tail prediction.
REAL l_valid_filter_tot = 0;
REAL r_valid_filter_tot = 0;

extern "C"
void validInit() {
    // Reset cursors and accumulators before a new validation round.
    lastValidHead = 0;
    lastValidTail = 0;
    l_valid_filter_tot = 0;
    r_valid_filter_tot = 0;
}

extern "C"
void getValidHeadBatch(INT *ph, INT *pt, INT *pr) {
    // Emit every entity as a candidate head, paired with the tail and
    // relation of the current validation triple.
    const Triple &cur = validList[lastValidHead];
    for (INT e = 0; e < entityTotal; e++) {
        ph[e] = e;
        pt[e] = cur.t;
        pr[e] = cur.r;
    }
}

extern "C"
void getValidTailBatch(INT *ph, INT *pt, INT *pr) {
    // Emit every entity as a candidate tail, paired with the head and
    // relation of the current validation triple.
    const Triple &cur = validList[lastValidTail];
    for (INT e = 0; e < entityTotal; e++) {
        ph[e] = cur.h;
        pt[e] = e;
        pr[e] = cur.r;
    }
}

extern "C"
void validHead(REAL *con) {
    // Rank the true head among all candidates (lower score = better) in the
    // filtered setting, then advance to the next validation triple.
    INT h = validList[lastValidHead].h;
    INT t = validList[lastValidHead].t;
    INT r = validList[lastValidHead].r;
    REAL target = con[h];
    INT better = 0;   // candidates strictly better than the true head
    for (INT j = 0; j < entityTotal; j++) {
        if (j == h) continue;
        // Filtered metric: ignore candidates that form a known true triple.
        if (con[j] < target && !_find(j, t, r))
            better++;
    }
    if (better < 10)
        l_valid_filter_tot += 1;   // true head ranks within the top 10
    lastValidHead++;
}

extern "C"
void validTail(REAL *con) {
    // Rank the true tail among all candidates (lower score = better) in the
    // filtered setting, then advance to the next validation triple.
    INT h = validList[lastValidTail].h;
    INT t = validList[lastValidTail].t;
    INT r = validList[lastValidTail].r;
    REAL target = con[t];
    INT better = 0;   // candidates strictly better than the true tail
    for (INT j = 0; j < entityTotal; j++) {
        if (j == t) continue;
        // Filtered metric: ignore candidates that form a known true triple.
        if (con[j] < target && !_find(h, j, r))
            better++;
    }
    if (better < 10)
        r_valid_filter_tot += 1;   // true tail ranks within the top 10
    lastValidTail++;
}

REAL validHit10 = 0;

extern "C"
REAL  getValidHit10() {
    // Average filtered hits@10 over head and tail prediction.
    // NOTE(review): this destructively normalizes the accumulators, so it
    // must be called exactly once per validation round (validInit resets them).
    l_valid_filter_tot /= validTotal;
    r_valid_filter_tot /= validTotal;
    validHit10 = (l_valid_filter_tot + r_valid_filter_tot) / 2;
    return validHit10;
}

#endif


================================================
FILE: ConvKB_pytorch/benchmarks/FB15K/1-1.txt
================================================
832
2326 6886 637
4843 4843 149
9671 7174 190
8055 8055 305
9549 11909 855
13652 13585 446
1100 1100 305
1321 14529 176
11074 1002 444
4668 9007 1172
7266 11750 444
12271 12271 101
4766 6205 470
2129 13806 176
1624 10851 190
3106 3107 275
11815 8842 226
376 376 305
4142 4142 149
7929 7929 305
3939 4387 190
4753 10689 423
1785 4433 190
197 5800 423
1675 8660 190
11216 11216 149
487 6941 698
14413 3636 95
3296 8269 190
2188 5216 358
4881 13935 446
2999 4598 966
7333 6560 226
6349 1227 444
13468 11918 446
13763 13763 305
2187 2187 101
13177 3513 470
3017 270 637
9844 9844 149
13518 13518 101
11751 14295 242
4544 4544 149
9703 9703 101
0 10598 95
11863 14219 446
14396 14396 101
5438 5255 444
11017 5390 242
8703 8703 305
7023 10286 396
714 4627 190
11670 11670 101
7363 2764 964
5476 10060 667
12102 14703 1213
5333 2095 667
1391 5311 966
5288 5288 149
1432 7674 190
1016 3095 470
8103 6135 439
13611 5360 159
7951 9751 444
3117 3117 149
8053 6619 446
7655 2280 712
6023 9808 470
8020 1068 1260
162 2042 292
3245 1710 358
13397 11620 856
10834 9891 190
5216 1568 358
608 11005 444
13242 5068 1241
167 424 112
183 11914 176
8381 8381 149
10771 7953 242
2818 8616 242
3242 14324 95
9517 247 176
7624 7624 305
12311 12311 305
4729 4729 101
14211 13538 176
7098 9383 444
5891 2995 242
5788 5788 149
7071 4197 1251
8928 2226 1154
3112 397 691
410 410 101
3792 314 176
12158 2953 439
8948 8948 305
11670 9300 446
10964 10964 305
12881 546 176
12666 11941 292
10770 9227 190
1275 12025 190
11923 1966 95
1886 1885 446
2514 8204 93
8684 8684 149
2671 2671 149
12903 6011 928
13736 10192 1002
11964 11964 101
4699 5258 93
865 13006 1238
13952 13952 101
2511 1565 783
10713 5332 292
1441 2711 456
3930 12369 1222
3655 13003 295
4356 7330 292
4944 4943 93
10528 2876 112
454 4645 444
11330 11330 305
13805 13805 149
5118 3629 358
8920 1513 637
13815 10036 446
11400 7886 446
1402 712 444
8332 6242 358
6193 8728 292
5375 7952 1102
657 8481 303
9745 11402 242
2514 2439 93
12559 3772 292
4036 4036 305
9883 11004 190
524 524 149
6551 8482 396
9946 12404 242
7957 2720 673
11145 10975 176
5483 1114 190
2674 2674 305
5425 7989 873
9510 11800 396
12565 5454 242
9947 11125 242
12379 12379 101
1330 632 444
1505 4371 423
2744 3116 175
2358 4026 820
9973 7255 176
1927 1927 609
12471 12471 101
751 751 305
13291 13291 101
13195 13195 149
2407 3724 190
12693 12693 305
6338 2616 190
839 321 226
4089 5380 444
4463 12665 242
5260 1994 275
8524 1983 456
3243 6903 93
703 1123 637
9043 6506 176
9695 9695 101
7477 10458 456
13786 1700 190
11417 14174 95
6485 11218 275
3480 8180 1166
6445 8360 456
3636 7829 446
8939 8939 305
6799 12231 446
1704 3245 444
2348 9132 95
7546 756 360
12932 1711 439
8831 1360 444
6581 9609 190
2692 5922 359
1633 2504 637
7363 2765 964
9794 9794 149
14849 9912 609
2818 8616 439
5817 2353 176
1259 3074 226
5834 2280 66
3242 14629 190
7219 7218 446
13808 13808 1177
13272 13272 149
8037 8037 149
4014 10989 242
10286 11313 396
14137 14137 305
7135 7135 149
9572 4410 93
4772 5823 358
3384 3836 470
5054 5054 149
1725 2028 439
7954 7954 149
1225 2754 358
5105 2286 456
11969 880 637
3140 1527 66
1798 2063 190
2394 12623 446
2239 2469 176
4612 4388 358
7341 7341 149
1513 6527 226
13235 13235 691
12559 12907 444
4036 4036 149
2272 9838 456
8481 657 1036
8858 8858 149
9960 5553 516
6883 6883 439
758 7550 190
3674 956 93
6168 8475 295
10903 10903 101
424 12094 1211
8813 2585 444
6728 6728 966
13069 13714 176
11296 10528 516
4786 1838 444
6065 1401 358
8226 1329 698
8676 8709 360
7534 7459 190
1983 8524 456
3790 573 609
5248 5248 149
8104 11310 226
4637 10534 423
2328 10498 190
8488 8488 305
10107 5208 358
2547 8002 295
11463 11463 149
8555 8555 101
6339 5180 226
1059 576 1002
14898 8354 781
11988 11988 149
7052 2589 446
2769 11161 423
9717 2445 470
1358 10383 95
14340 14340 101
2754 1744 93
9310 677 358
5515 3292 396
5483 14035 190
2421 880 637
6013 6013 305
8028 2442 873
4456 2728 423
8478 11991 190
2042 162 423
3439 4141 444
1925 12757 95
1269 652 93
726 6761 226
3939 11300 446
11619 2928 667
5986 892 1052
2701 2701 305
4545 4545 439
6205 4766 275
3456 3456 305
3476 14455 190
13554 5025 470
11421 9357 275
2107 8695 95
6758 6758 149
9713 8883 446
4954 4954 609
10638 10638 149
13293 7530 446
5728 7606 226
12501 920 803
4349 5065 226
1279 10890 793
9102 12438 439
13791 13791 149
12005 1674 981
4917 7626 423
900 4429 190
11727 11727 305
13209 4680 1216
10861 10860 446
11800 9510 396
7078 2268 444
3951 3951 305
14452 12088 543
10805 13286 295
10172 10172 305
13427 13427 101
13430 2767 446
13793 2916 176
3203 4150 456
430 9843 396
13736 10192 659
7722 10043 275
8696 8696 101
1389 2632 697
9051 9051 305
4642 6498 93
12251 12251 305
10568 2430 396
605 8745 470
5378 5378 149
14662 14662 1081
2003 10909 470
8466 8465 242
4297 7130 176
12972 12972 101
7444 7956 637
5672 462 358
2506 2506 101
7801 1134 95
12576 12576 305
487 2842 698
677 7072 358
3756 3899 444
11854 13707 446
9817 5480 659
734 8180 618
1203 5562 456
3674 6509 358
13147 6806 295
1307 11484 446
7433 7433 305
11410 13510 295
11410 13510 242
4207 4207 149
5540 6610 791
13805 13805 305
14568 13946 444
12659 10703 439
11853 11510 176
5778 2034 190
11076 11076 101
2633 2194 536
3920 706 806
7664 4707 470
6841 6841 101
13004 8360 456
2234 10530 292
12596 163 609
1264 9197 190
13071 6821 360
1711 12932 446
5158 6934 93
3663 8957 609
4764 10425 456
12814 8563 242
5800 197 292
2608 4642 358
4587 4587 149
9173 9173 149
2665 7550 931
7283 7283 305
11618 11618 101
2765 7481 470
11892 7482 470
10816 10816 305
6512 466 242
6662 12922 470
8295 6288 190
3054 3054 305
7074 13787 446
2892 3226 444
10335 10335 101
9577 9577 305
4621 2793 444
7145 5079 444
3560 4197 1108
2018 8615 446
11384 4944 93
10910 10910 101
1624 6703 190
11110 11111 242
712 9222 444
4366 4707 470
10269 10269 609
7818 361 226
3195 5438 358
13067 1034 176
9740 11192 423
9187 8207 470
12240 12240 101
11631 11631 305
8480 9310 360
10687 3415 226
1509 1509 305
9042 11831 439
5763 7005 226
9063 13539 95
8051 8051 101
6650 6650 149
1836 53 609
2774 5333 618
9058 9058 305
5637 5637 101
7962 7961 446
2699 2407 176
11421 4706 275
890 4868 360
7736 7736 305
3341 4666 190
2718 2717 516
5341 2117 840
4397 637 444
1169 3813 190
4584 4584 1112
4680 10457 456
12045 12045 305
3612 3612 305
5900 5900 305
534 7638 93
13455 13455 305
6242 3287 93
4513 10797 470
2442 2442 305
278 13654 190
9540 7538 444
5552 1416 95
10081 11661 446
430 3915 444
908 3865 396
495 13915 909
10238 3270 836
12814 8563 295
12495 12495 149
5564 12343 456
10929 10929 305
14415 7682 961
5106 5106 149
3657 280 456
8318 8318 305
5586 1247 456
12318 12318 101
7023 11067 396
7272 14881 815
6338 4186 446
5860 10024 444
2906 13983 95
44 7966 93
1744 6903 358
2353 12296 190
3099 3099 149
1021 7723 423
14344 12792 869
11250 7952 909
2234 1023 516
12112 12112 305
12400 7511 781
11257 11257 305
1775 6388 358
8280 8280 101
7666 12035 637
10095 1774 176
10123 1463 516
4743 3381 175
6903 801 931
8540 8540 305
6431 6431 149
5337 7820 358
5438 3195 93
8476 2506 242
2764 9717 359
3702 10317 470
11062 66 637
6242 3287 444
12474 1203 667
7893 7893 101
12807 2010 446
14482 14140 446
4422 4422 149
2736 2883 470
10580 1864 869
8345 9695 242
9891 10834 176
11983 1064 396
430 3520 396
1746 10104 190
585 4748 176
1544 6402 609
8028 984 931
8002 8002 101
7984 3667 516
397 3224 303
397 3900 303
10140 10140 149
3837 3485 784
3520 430 396
13072 265 444
2368 6840 444
245 244 446
13314 4634 242
2752 2639 93
1416 5444 190
5421 7997 423
7926 7926 305
12805 13801 292
1155 1155 149
1389 1389 149
4094 4094 149
13535 12781 1282
8498 8497 242
8262 8262 305
5823 3542 358
5467 1507 637
3871 900 176
8693 6508 637
12872 1729 396
692 1704 444
4017 4017 610
2614 8107 992
11337 12340 446
2445 5691 444
3409 1310 360
13581 13581 149
5143 5143 149
5257 5257 149
1744 8956 358
5159 8479 292
2439 3372 444
9944 9571 190
4441 2630 226
5421 6096 396
10286 7023 396
10286 11067 396
842 13790 176
12160 80 275
8438 8438 149
11000 11000 149
10087 9276 175
13008 2146 176
9831 11050 712
8486 11649 697
13078 8340 439
1983 1707 456
7414 9548 966
2692 5796 964
3355 3355 149
7418 9494 446
1874 8043 226
2752 5790 358
7079 7080 295
566 9584 637
2750 3463 358
58 11911 190
7292 7292 305
12292 12292 439
12179 14513 242
7915 12949 275
3415 10687 637
9775 9775 101
7074 7074 101
9853 10449 637
2194 2633 923
1728 334 609
4197 4476 1238
4135 1481 470
11669 12645 242
2754 2755 360
4078 11706 444
2018 2018 101
8126 9737 456
9305 10441 242
5464 8857 226
10541 10541 305
4544 4544 305
9728 7493 226
5733 9647 95
397 2285 303
5118 6838 423
10883 10882 242
13281 9352 446
7931 4707 444
8811 11239 292
7543 9581 295
1284 3657 456
11491 10264 444
2720 2721 1195
75 10359 190
12989 13230 1184
6637 9842 190
10753 3106 423
3796 3797 295
8772 2107 176
2187 9084 190
7847 7847 149
12975 12975 671
12945 2764 275
11479 6022 1003
1051 4603 964
3155 9043 697
12622 13206 1140
13968 13968 101
4247 7501 637
6057 9454 444
4564 2389 95
6117 10555 456
5913 5682 423
176 4219 190
11313 11067 396
7969 7273 93
6574 4667 423
9335 6879 444
9205 3585 444
6513 8107 190
1410 11632 190
7670 7670 149
10464 10464 671
5276 8745 470
2769 1338 360
13917 13772 691
11966 11966 101
14196 14193 839
5430 627 444
2743 3516 66
1906 7953 176
12948 12948 101
3937 14432 176
12585 4459 242
2098 5619 423
2098 9400 444
5499 9395 712
8476 2506 439
10146 8656 439
8223 4527 637
6540 6540 101
942 4756 637
6193 11828 292
11351 11351 305
3293 2598 396
3450 7284 923
10481 13858 242
1481 11639 292
3746 11570 637
13409 3839 176
9629 9629 149
2543 2543 305
2188 2730 444
13231 13055 446
1674 12312 806
12372 2499 446
10276 4608 659
10548 10907 190
10731 10731 101
13722 11635 439
13829 2523 190
9740 9450 423
12207 12207 671
11830 11111 295
8223 553 637
5498 10451 712
10444 12967 470
5257 5257 305
3700 887 190
328 4280 112
6656 6656 149
489 489 149
9360 10444 966
14035 5483 176
7074 254 190
12660 12660 149
25 12536 95
239 239 439
9886 111 176
4881 4881 101
6443 11570 698
4598 2999 698
195 195 149
6237 11448 691
2114 10613 190
5065 4349 637
3390 9003 966
11889 8216 439
4854 4854 305
3482 3296 176
12622 13013 1288
11160 1160 95
2580 14608 981
5143 5143 305
5770 5770 609
6970 6970 101
9604 9604 305
11947 4483 864
12903 12903 609
8055 8055 149
3476 13647 95
7957 3296 176
1674 10757 806
12946 12978 176
10790 10790 101
11602 1325 423
8326 3262 176
3674 3294 93
1565 6981 856
5068 13242 66
175 1502 536
5877 10012 226
2300 7207 242
1650 12715 242
1624 10385 190
3117 3117 305
14089 11288 95
4207 9576 697
12434 12434 149
9476 3021 275
11570 6443 966
7818 1009 637
5531 2370 275
681 2089 1239
7916 8332 93
8122 8122 305
8318 8318 149
13374 13374 101
49 5674 295
4925 4926 242
13198 2353 242
657 10242 1291
657 11299 864
5530 5529 93
8009 6785 292
4437 1779 226
12275 2603 175
13664 3241 176
2254 2085 95
10594 4346 603
12127 12127 609
4707 2287 275
6186 6186 149
8152 521 637
6135 6135 439
11929 11928 446
10327 3414 637
9877 9878 446
6121 6121 149
8458 9326 444
1484 12355 782
7899 8515 275
9126 4165 176
3420 11849 456
3869 6418 93
8504 1416 95
11336 11336 149
8676 6192 444
11086 13108 618
9611 9611 149
4827 709 667
5280 5280 149
13532 12967 470
13588 3116 175
4967 3878 1281
4687 4687 149
10805 13286 242
2469 914 190
14645 6885 95
12665 4463 446
883 12739 439
9879 1732 175
1930 12949 964
7722 11636 275
10002 5251 744
9653 9653 149
2570 4817 855
10192 9896 275
3772 3356 444
3675 7100 618
3105 3105 700
1300 7246 242
8868 6709 470
6000 11167 190
10065 5835 444
12369 534 274
11914 1372 190
11088 1034 926
9762 5558 446
8998 7447 444
2630 3459 637
13309 13309 101
9019 1818 952
3826 5906 444
5231 12768 444
8656 144 446
678 4965 1192
11842 13302 439
983 8334 242
4468 6647 360
7728 5574 275
5090 3216 360
7997 6096 292
5674 49 446
8953 10709 609
7354 1237 456
2848 2848 149
9000 12520 439
10395 8661 226
4868 969 444
111 4226 295
10906 5645 242
2082 11406 95
12300 8101 444
11510 11853 190
13592 13592 101
8664 8664 149
3920 635 906
7094 800 175
6193 11208 444
9540 971 444
8670 13057 295
4923 4923 101


================================================
FILE: ConvKB_pytorch/benchmarks/FB15K/1-n.txt
================================================
5259
147 11738 592
3572 8632 143
12761 4179 88
3244 201 129
3244 11230 129
9085 11970 1
1963 6035 203
833 10624 263
4654 6310 40
4154 1917 114
586 7752 266
736 8733 155
920 5647 114
1180 6107 438
811 12322 850
5997 3181 114
1477 4206 203
1621 12084 129
1290 6739 231
36 7006 18
36 4767 18
36 4204 18
36 11194 18
36 8551 18
36 6579 18
36 3213 18
12381 7633 114
695 2098 1
6524 7766 203
5572 1813 40
7471 12559 129
6213 2125 391
2156 4281 314
798 9960 663
10076 2020 148
4537 6159 98
7082 943 660
403 3121 91
403 6206 91
403 10168 91
403 8289 91
403 6946 91
403 518 91
403 11831 91
403 8038 91
403 2839 91
403 7409 91
403 1115 91
403 12509 91
403 8995 91
403 2282 91
938 5114 203
13075 2784 129
2187 11377 114
214 10033 756
859 9699 113
859 11995 113
859 10540 113
715 4844 297
12886 4956 155
101 9105 91
101 10167 91
101 14270 91
101 13470 91
101 3470 91
101 2773 91
101 2570 91
101 14529 91
101 9077 91
101 14457 91
101 7362 91
101 8486 91
101 10738 91
101 5900 91
101 7719 91
101 4978 91
101 12778 91
101 6459 91
2706 10419 88
7335 4254 106
12717 4723 934
103 6741 106
1054 4098 208
1054 3726 208
6084 4945 203
1521 7228 98
3504 3528 148
2209 1337 324
375 6220 113
375 8512 113
375 9498 113
1278 14905 342
6279 3456 137
6344 9317 98
12397 8256 1155
3381 12075 102
5506 9286 253
8067 9999 268
13998 12610 1074
8704 1242 1
7161 3008 560
11653 9265 114
4086 2127 203
3839 10322 114
2241 4882 199
2241 7176 199
2241 12176 199
825 9955 1
1444 13059 821
7533 9788 340
2849 12767 137
635 11448 102
344 20 106
766 767 167
1104 7137 494
1818 3012 301
2068 8741 40
1905 6214 167
5900 5081 40
8341 1652 329
9280 187 252
9280 9804 252
9280 13173 252
11206 7343 518
11150 4154 786
6102 9437 1
8264 12929 633
2293 3963 560
7806 3247 129
7806 9499 129
2076 11401 129
3939 2210 114
412 7015 155
3905 168 179
1230 5464 208
1230 8542 208
1230 6339 208
7791 4205 203
4513 11750 1
10277 6931 1
5693 5573 114
10969 13628 256
10969 166 256
10969 8061 256
13700 315 114
3672 6109 221
2379 11539 199
657 6332 114
58 10653 170
3890 13543 199
3890 10320 199
2591 5790 1
10778 7093 129
5331 6681 40
552 378 277
2074 4388 578
6696 8836 98
10770 12982 102
3116 6619 297
5442 1135 1
4512 843 143
2280 7655 705
706 13520 102
706 6444 170
3487 7688 114
7108 1670 559
147 147 771
1008 8272 417
8560 6574 541
1794 7809 315
11250 5976 329
3191 9724 252
2879 3538 253
5064 9496 505
297 11950 1
3400 7486 137
833 11277 263
8414 10194 541
13012 13202 559
1953 1094 559
3623 9791 114
1016 286 1
12743 2829 129
4467 2762 155
4467 6109 155
8911 1782 167
888 4223 253
1846 2217 288
1621 1963 129
1621 5847 129
3108 1534 996
7436 2174 148
36 4003 350
36 1169 350
36 12248 350
36 13700 350
36 677 18
36 7239 18
36 6983 18
36 1075 18
36 9954 18
36 6262 18
36 6057 18
36 85 18
36 2430 18
36 519 18
5305 6826 129
1500 7055 167
3288 4238 96
2845 12624 199
7471 5343 129
1577 5981 167
913 619 764
8246 14726 527
874 6530 114
1474 456 129
1037 5825 203
9551 10602 114
374 4812 155
10076 10077 167
7676 1954 481
3133 6085 88
8556 9363 692
7197 15 40
403 8456 91
403 7026 91
403 7236 91
403 9852 91
403 8237 91
403 3745 91
403 35 91
403 9759 91
403 1356 91
1300 7246 297
8874 3437 155
7661 7745 203
4954 11421 40
4645 659 268
859 9613 113
8582 10226 129
8884 6683 98
7673 5369 155
101 3975 91
101 9945 91
101 11605 91
101 7463 91
101 3926 91
101 2222 91
101 9198 91
101 11488 91
101 12531 91
101 12448 91
8435 10060 40
2706 1880 88
4547 1594 735
11173 8308 106
4652 3115 129
10528 1693 266
9559 1606 314
3141 348 441
13268 11332 377
3539 8753 148
4166 3647 199
4166 13887 199
375 1755 113
1278 3234 342
1278 1294 342
1278 8217 342
1278 654 342
1278 13959 342
1278 2876 342
1278 5103 342
569 9886 185
7822 459 612
1915 6542 40
2210 649 221
10828 5449 129
13933 6363 179
654 9657 158
6993 10431 102
5954 14118 114
3839 308 114
3021 5609 1
3948 8863 98
4346 11408 114
2559 1402 1
731 13580 155
1464 5900 137
342 623 268
7747 8607 520
1277 12231 297
635 12625 102
635 5748 102
344 12432 106
6026 9553 114
753 2467 40
1104 411 248
4854 11799 374
1818 2850 162
1818 9852 407
2731 7394 18
2731 7306 18
1747 7438 902
3388 6201 1
7892 7760 114
1534 2537 256
1534 4088 256
7065 4262 277
1080 5679 1
14313 318 114
8749 6447 179
922 2529 315
5105 3659 221
10773 9022 167
4017 7502 660
9900 2332 40
10131 4981 106
12154 3828 388
2407 9648 114
7806 4967 129
8957 4179 129
1574 6724 114
89 4267 40
5285 7144 155
13567 8674 441
11926 4073 129
657 9856 114
58 75 102
58 13038 170
58 12091 170
58 1129 850
58 2972 894
1659 1658 481
3890 5516 199
1732 9874 297
1903 3585 88
6022 5824 289
4946 9645 40
4946 10015 374
3194 3805 297
4503 7355 137
75 8618 114
9308 7616 98
2044 3962 297
6931 754 203
4280 6489 106
3191 351 252
12754 5549 114
1836 8126 438
1836 10679 438
833 1211 263
954 592 268
385 7049 129
11106 11990 114
504 8833 297
10935 9275 277
920 8452 114
13408 4624 40
3004 3283 626
4852 1065 106
2987 1260 40
1290 12827 231
36 11772 350
36 2094 350
36 9008 350
36 2547 350
36 1665 18
36 9437 18
36 6661 18
36 5366 18
36 3870 18
36 9220 18
36 13665 18
36 589 18
36 418 18
36 884 18
36 4948 18
36 3691 18
36 5585 18
36 5860 18
6530 9435 167
7471 10994 129
2156 2781 314
4585 3504 129
10076 3627 167
4537 3158 98
5418 3345 155
4218 4217 466
403 6389 91
403 177 91
403 9657 91
403 10782 91
403 14415 91
403 8358 91
403 4083 91
403 1083 91
403 11694 91
10371 6392 33
12134 8402 155
229 1218 114
4961 10621 489
4645 6098 203
13495 1707 462
10032 3988 401
5278 1122 221
11663 9431 221
301 14159 162
301 12686 286
301 125 825
10491 286 462
101 7858 91
101 6481 91
101 224 91
101 1511 91
101 13407 91
101 14237 91
101 6981 91
101 4129 91
101 1303 91
101 11428 91
101 10270 91
101 7799 91
101 4860 91
101 12240 91
101 13928 91
101 12373 91
10276 8650 40
2706 6248 88
4824 3115 286
7022 11348 167
10069 5070 129
7027 4486 106
103 3051 106
1054 11264 208
11370 4642 129
3845 7504 434
3845 1473 434
9921 9921 82
1282 6484 155
2882 1559 234
4166 11394 199
375 6548 113
1278 4965 342
1278 4668 342
1278 5951 342
1278 472 342
569 12908 185
5841 8387 155
443 12340 894
10837 4581 98
1359 10667 98
7161 3551 560
9008 6840 114
2241 2953 199
2241 10653 199
2241 4168 199
2241 13347 199
2241 12932 199
3021 8846 1
3449 8715 155
3948 8448 203
5441 6855 40
4155 11982 716
9198 876 114
1524 5880 185
1524 12102 254
13790 5914 114
14119 10444 340
397 728 114
7747 226 520
5600 1897 518
344 3306 106
12944 3813 1001
8567 6151 1
12017 3876 266
1104 11161 941
1104 2583 248
1818 6816 286
1818 7185 949
2731 5757 18
1534 4139 256
2631 6434 40
4929 8158 155
4860 9984 114
11108 847 954
2427 5963 40
9647 9511 114
12661 4561 82
7490 6677 40
14107 2678 106
7142 6494 900
11197 571 114
4778 13186 297
2076 1326 129
1230 113 208
7664 11602 1
8776 7287 167
13181 3474 40
10969 3511 256
1509 4067 40
1498 9155 252
1498 1576 252
514 928 155
514 2917 155
17 11226 665
278 14072 170
13534 14640 185
2359 2528 253
10890 6934 40
2379 12871 199
2379 193 199
2379 13255 199
58 2672 792
58 4414 297
58 14580 790
58 1587 894
3890 2819 199
11880 2532 373
1702 5907 253
8926 9384 129
3242 4651 114
5447 2545 148
3116 12747 102
3557 4044 288
9533 4967 324
4964 1998 40
1221 11940 971
4503 4658 137
4503 9413 137
11281 11577 98
6182 7666 518
3066 7567 1
14768 11722 114
13230 6396 1063
13053 1455 656
11988 2475 40
3216 7155 167
6697 5867 203
1836 6285 438
1792 11611 541
833 1117 263
833 6617 263
5270 10732 88
4154 6621 114
4154 1841 114
385 6664 129
385 4500 129
7320 7967 1
3895 9805 88
3701 3107 340
2998 3327 340
5597 13122 374
888 1272 253
11784 9175 155
36 11578 350
36 4398 350
36 8180 18
36 9593 18
36 1111 18
36 10739 18
36 6503 18
36 5307 18
36 2894 18
36 3163 18
36 5486 18
36 5494 18
36 12417 18
36 6306 18
36 5438 18
9601 12535 82
2028 4141 114
2956 12726 581
2773 633 361
3288 10048 96
12502 5869 520
7592 805 167
1474 7686 129
4585 12817 717
2670 2027 289
1339 8661 148
3262 3592 114
2018 156 114
9018 1361 40
1914 1959 137
403 3099 91
403 13664 91
403 3630 91
403 5720 91
403 9379 91
403 12455 91
403 2335 91
403 14166 91
403 7598 91
10488 7056 114
229 5978 114
4039 6528 234
12512 6111 113
4645 2208 268
2144 9950 560
991 1558 40
859 3668 113
859 6690 113
859 436 113
6911 11517 520
101 10389 91
101 4882 91
101 6703 91
101 3117 91
101 10750 91
101 11077 91
101 8753 91
101 6114 91
101 14844 91
101 9296 91
101 14445 91
101 13482 91
51 10123 129
4841 2165 148
3423 3772 129
4652 6551 129
7419 10923 266
12148 7055 98
5135 8597 289
4136 11161 40
377 9903 114
9 8938 665
185 1791 732
6280 81 40
5032 1503 289
1278 6857 342
569 1421 185
569 8933 185
569 3877 254
569 1207 842
569 6746 578
569 8423 578
6344 6343 98
3381 13115 102
6789 12563 203
544 13726 842
9545 11025 129
9545 3298 129
1952 10141 1
8045 7444 106
8045 7088 106
4875 8969 98
3396 6276 155
1524 2592 254
1128 9919 98
11917 2143 88
635 9735 102
1820 5332 941
12944 3813 496
5847 6281 520
1104 2642 494
12423 10832 391
1818 13774 162
7186 7777 155
10738 10024 1
10738 7309 1
2731 2166 18
540 8681 114
8839 782 221
5706 344 921
334 815 732
13586 12449 114
8076 3351 167
4980 13309 82
7971 9968 114
1236 3217 221
10406 2136 114
6571 6647 1
1381 1966 102
4227 14678 199
1611 8232 424
4312 11359 329
4026 4622 465
11407 11374 315
10969 8034 256
11381 8908 129
819 3866 179
4656 2808 315
13639 3113 297
1956 4582 520
1956 11785 520
2196 11995 252
657 10310 114
657 7719 114
657 3317 114
58 5701 114
3890 8133 199
8926 5588 129
6696 4283 167
1732 11918 299
8398 4712 167
6433 9012 40
10642 6501 106
4946 1184 40
4946 9898 374
4473 930 1
13987 2117 463
4503 11244 137
4503 12490 137
4503 3155 137
4503 6770 137
75 8908 114
3164 3575 98
706 12980 170
7250 13781 629
3572 11363 143
3066 6350 1
4280 1705 106
3244 3230 129
4360 4355 315
5064 4300 505
13033 8547 1
1836 6347 438
3400 12741 137
833 97 263
833 3327 318
8951 9533 167
5360 5371 114
11871 11619 114
1113 3007 888
9629 11070 40
3550 10077 167
1111 1110 98
239 5670 114
1858 904 148
7436 2174 98
36 1587 350
36 9041 350
36 5503 350
36 11881 18
36 4350 18
36 14052 18
36 10519 18
36 7692 18
36 5800 18
36 5418 18
36 5686 18
36 10732 18
36 5596 18
36 8490 18
36 3208 18
36 11082 18
36 668 18
36 315 18
2940 4556 167
2028 9669 114
10280 5361 315
3288 7276 96
9055 9054 40
2845 7561 199
1474 9668 129
1474 6292 129
1474 1905 129
10709 12184 129
10865 10123 40
2211 845 98
7082 9772 660
8556 8458 692
6261 9869 518
403 3956 91
403 2013 91
403 7355 91
403 5515 91
403 11686 91
403 6343 91
403 4300 91
403 6244 91
403 9010 91
403 5311 91
403 416 91
4587 10644 40
4961 7055 106
1239 7804 329
991 4419 40
5281 5603 143
3841 2663 148
859 1316 113
859 2041 113
859 2117 113
859 3601 402
715 2254 297
1076 4358 324
2030 130 203
2820 563 155
6810 1584 373
301 10562 407
101 8029 91
101 1432 91
101 1440 91
101 12965 91
101 9129 91
101 2405 91
101 9265 91
101 9030 91
101 1225 91
101 7809 91
101 8786 91
101 8473 91
101 13690 91
101 540 91
101 11100 91
101 3702 91
101 13710 91
101 8263 91
101 5091 91
101 9423 91
6190 647 253
2706 3703 88
2706 11931 88
4824 9690 96
4824 462 286
6365 2611 692
1585 5388 391
3423 4361 129
5071 11042 512
7419 6487 376
9559 6617 88
1054 2386 208
3141 4020 441
9622 9623 82
9622 583 82
7617 5222 155
2637 7438 487
11370 11005 129
8465 2369 114
5982 12614 82
9069 6306 277
377 9774 114
9 9445 500
9 2996 500
11623 8277 98
4166 4634 199
375 13159 113
14075 3618 315
13846 9025 340
569 12961 185
3828 7302 814
1915 8812 40
11888 6082 732
6032 648 1
1098 1097 98
4574 6053 622
264 14113 88
12126 12343 40
7151 2754 252
2241 10775 199
7577 5824 1
7533 8024 340
732 5489 315
1820 6032 944
344 6218 106
3513 3039 167
3513 7388 167
753 1318 40
1818 5403 494
2731 11879 18
2731 6586 18
12303 4751 40
10213 4292 179
3485 1097 203
9402 2854 155
3255 3058 221
8514 11182 775
12124 10436 221
10748 7221 40
4879 7340 155
8264 14203 633
2689 9374 810
9184 1384 106
5498 2613 773
1230 11792 208
4528 4527 155
4422 6530 40
5786 2577 113
1498 12183 252
5375 5207 944
17 8356 665
17 6270 500
7952 1507 377
2196 2092 252
2379 13475 199
657 8193 114
6567 5297 203
3890 10798 199
3890 12140 199
500 499 253
2074 9510 578
4990 10362 465
4990 1890 465
6027 339 106
12238 2455 148
3488 291 413
3546 12797 1
6490 10458 114
4067 2893 167
12152 9171 1
6535 6281 98
135 1992 277
3572 1231 143
4280 9754 106
10003 2626 203
7800 13569 40
6881 4085 329
4417 8372 155
983 12381 894
5571 6301 40
3216 3165 148
3400 10943 137
833 11724 318
9783 1998 129
5246 2969 203
586 11559 266
5220 9492 203
5007 5827 623
5007 10649 623
247 11060 297
13035 1732 199
504 2308 297
504 13723 102
3701 268 340
7160 2867 33
7805 2886 208
9135 2595 155
1846 3358 288
6278 2015 1
13089 10023 488
917 12491 1
36 7452 350
36 3342 350
36 213 18
36 7198 18
36 6677 18
36 9272 18
36 5487 18
36 910 18
36 7665 18
36 11268 18
36 8406 18
36 5192 18
36 11361 18
36 5210 18
36 4070 18
36 9130 18
36 3732 18
2370 910 1
673 13303 1
1474 10068 129
9938 1147 1
11597 11087 129
4585 13483 129
3094 847 329
5409 7271 88
2018 8796 114
7532 13942 1
403 2571 91
403 7016 91
403 9914 91
403 5144 91
403 12408 91
403 243 91
403 12312 91
403 4603 91
403 11031 91
403 10406 91
403 4723 91
403 9835 91
403 4406 91
403 10194 91
403 8295 91
403 13232 91
399 1007 167
1847 2758 40
4954 6561 40
2187 8840 114
5472 200 33
4986 6112 934
7071 2703 421
2300 7207 82
7603 11022 148
859 10192 693
859 5606 113
715 2216 297
7159 10644 40
10445 7838 40
101 7624 91
101 12267 91
101 7613 91
101 10431 91
101 2357 91
101 5568 91
101 4544 91
101 14002 91
101 10494 91
101 8104 91
101 13366 91
101 12824 91
101 2877 91
101 13810 91
101 4935 91
5295 9316 40
5295 456 40
4407 1203 129
12856 12503 155
4824 8376 286
131 1884 148
131 6575 167
4652 3172 129
1054 7649 208
2637 4665 487
258 257 664
53 4443 438
9 9445 665
9 8423 500
12209 14809 851
4166 12485 199
2416 8524 40
3256 1549 33
375 10530 693
375 10865 113
375 11844 113
375 3117 113
375 6760 113
375 13913 113
1278 2812 342
6279 11726 137
13960 2711 846
569 12029 185
569 8292 185
569 13912 185
6502 649 98
3381 14291 102
7546 3937 466
2241 58 199
5113 7597 518
3021 3888 1
931 7983 505
1043 1061 297
11939 2833 324
5951 13014 299
6313 6775 819
3329 4582 155
8489 7832 98
3513 6538 167
9895 3666 253
1274 8028 402
1818 1594 494
10027 12595 475
2731 4944 18
2731 4460 18
18 2382 167
11128 6077 289
3485 5974 203
10139 1734 750
1508 3996 549
11694 6733 1
9258 2854 155
11762 945 106
1994 2883 1
7442 4439 114
2425 4672 98
11423 9219 1
1236 12927 221
1236 8224 221
7583 6026 199
2689 6931 649
4485 10395 518
11586 3039 106
89 374 40
2066 5482 304
9156 4938 161
9447 9208 155
11053 6104 985
10969 3513 256
1509 7627 40
11729 10202 161
12679 6071 413
7227 3293 462
2146 12711 102
10006 8625 520
13534 13712 185
2379 13231 199
2379 12271 199
6951 9450 277
7602 12418 129
657 13175 114
657 6095 114
657 7515 114
58 2588 277
58 1300 299
58 7308 541
5852 791 629
4532 4981 155
4990 2650 562
4463 11670 82
3880 1387 98
5163 14030 297
8995 2758 277
1059 8828 481
8317 7124 148
4503 12051 137
4503 6907 137
1767 5068 238
7250 9479 277
4419 452 155
3244 4049 129
11396 3144 797
3191 4964 252
3698 6753 114
833 7236 318
4154 12557 114
2372 8040 40
7856 4244 314
7318 736 315
3895 11881 88
11177 6854 88
12290 9875 340
2095 4162 309
5637 10393 114
1477 3127 148
1858 1152 167
6774 6677 40
769 2862 167
36 2085 350
36 13224 350
36 2815 18
36 7661 18
36 11383 18
36 9478 18
36 11002 18
36 6432 18
36 5773 18
36 8249 18
36 9016 18
36 439 18
36 2062 18
36 10317 18
36 6884 18
36 1996 18
638 4242 756
3224 12832 374
2185 11673 40
9414 4895 129
2845 11883 199
6443 4101 1
292 5184 148
1474 8890 129
1474 6847 129
1474 12829 129
2156 1396 314
11185 4262 1
4265 10527 221
3262 10170 114
5292 1870 155
2018 8659 114
2018 8480 114
2018 2981 114
7594 2754 1
403 5998 91
403 3926 91
403 1551 91
403 5082 91
9231 7345 297
13953 14534 266
223 931 289
3416 8344 203
5128 5279 40
9733 6445 40
11469 6931 315
2810 4206 253
859 721 113
715 2586 297
5779 6757 268
101 12130 91
101 3662 91
101 12910 91
101 8037 91
101 12912 91
101 6452 91
101 5527 91
101 3537 91
101 55 91
101 4611 91
101 6430 91
101 3185 91
101 2386 91
101 7486 91
101 13441 91
101 7557 91
6681 3236 148
2706 5853 88
4824 4300 286
4824 13301 286
4547 456 735
8049 1103 114
7027 5941 106
1054 6337 208
7357 10832 391
6084 93 203
11314 5040 402
5361 5694 155
3845 9879 434
12094 2544 1124
3232 9208 391
1130 5913 443
3539 8693 98
3539 3538 98
4166 1044 199
375 9051 113
375 4303 113
375 13455 113
375 12545 113
375 9148 113
6130 537 1
1278 12363 342
13143 4150 88
569 11 185
569 7827 254
6502 6501 203
1033 3735 129
4574 12634 622
26 4138 167
13043 13016 870
8588 5910 518
7151 7166 252
2241 13932 199
11358 14436 248
2358 621 629
8764 2243 518
10425 5259 221
5439 6428 155
344 6212 625
13387 12037 1134
1818 8452 286
1818 7053 301
1818 4619 301
3208 10643 155
2731 12466 18
2731 2608 18
2731 8501 18
14080 5278 40
7065 6045 696
1508 13743 549
2805 3878 361
12279 1958 148
9647 12123 114
4178 8761 40
1994 10797 1
5778 2745 114
11839 2588 1075
10131 2109 106
10561 6207 277
6882 1323 277
4066 5771 203
5098 10282 129
6371 4963 33
998 12958 40
9542 12329 927
7113 7112 762
2076 1822 129
412 7501 155
3319 13182 114
2384 231 612
10615 1507 98
9156 9059 40
6876 6946 251
11572 9089 114
8903 7343 148
7873 1572 1
5375 2496 494
3231 7524 40
2196 3994 252
10685 12916 40
11926 3434 129
657 6692 114
58 143 792
58 10047 170
58 12773 170
58 10906 170
58 6512 170
58 9013 170
58 13742 170
58 7466 894
3890 11889 199
3890 14096 199
6395 7872 148
5892 4181 40
5840 5839 1
191 5603 635
4946 3380 40
11281 10572 203
706 4971 581
7566 810 489
13328 862 167
3244 11173 129
3244 1310 129
3244 10927 129
10481 12985 297
3191 11844 252
7074 2774 114
5571 7033 40
1288 7921 106
3400 9577 137
833 7265 263
4154 2268 114
4154 3226 114
7856 9081 314
854 274 221
8037 846 40
11177 12666 88
381 3546 340
7472 13594 581
239 9120 114
4126 9286 253
769 5774 203
36 13628 18
36 12611 18
36 6165 18
36 6808 18
36 10121 18
36 5499 18
36 8190 18
36 1851 18
10021 3585 129
2773 5517 361
3224 12161 374
6530 1195 167
6187 6012 106
2503 945 148
1474 5412 129
1474 10574 129
1474 9283 129
9337 698 167
9316 10546 167
1598 2392 732
8756 1829 253
7594 5963 1
403 13520 91
403 511 91
403 7956 91
403 680 91
403 6112 91
403 10954 91
403 2745 91
9807 14194 88
6107 7457 221
991 7947 40
859 12573 113
715 8639 297
6957 7375 88
301 12808 162
101 4729 91
101 13958 91
101 9910 91
101 4551 91
101 4010 91
101 1310 91
101 8266 91
101 8021 91
101 3048 91
101 4713 91
101 2037 91
101 10080 91
101 10432 91
101 8748 91
101 7426 91
101 4064 91
101 13255 91
4407 4415 129
7839 7567 114
2706 12967 88
2706 4891 88
51 1613 129
2212 884 1
4652 14297 129
103 793 106
5341 5178 148
4771 2638 155
3441 10386 203
2882 890 234
1612 7026 88
8838 2042 663
4166 4159 199
375 10045 113
1278 4093 342
8443 11785 98
5772 4415 1
6279 1209 137
7576 3833 40
545 504 143
6032 11590 1
2766 12638 527
11948 3377 155
26 6309 167
9893 2716 179
9545 8905 129
8395 4739 203
7934 6214 167
332 6059 113
2849 2848 137
2559 8876 1
9198 5480 114
11786 1913 114
6313 6608 819
1524 3059 254
1464 12136 137
13863 9784 1032
7747 7597 520
635 14366 527
635 14732 102
8917 9175 148
1820 9374 494
1820 12301 944
8719 5658 518
7210 11319 40
1104 2484 248
1818 5980 649
1818 8434 301
2731 1246 18
2731 712 18
2731 8317 18
10139 6552 750
7300 3856 520
2434 2630 167
9280 8601 252
2328 2246 102
7299 3333 88
6296 10048 203
486 14635 248
486 5411 248
3702 10317 329
2108 6552 311
8264 10725 633
7901 8155 40
8692 3217 148
4873 2879 40
1574 6763 114
1230 2023 208
1230 3459 208
4528 4250 155
1509 10886 40
2442 10150 40
39 10787 155
17 4298 238
2379 1412 199
9357 4769 1
657 10994 114
6851 2218 167
58 2927 102
58 9910 170
58 6360 170
58 4827 850
5892 10857 40
6779 468 106
14460 6396 256
4946 4655 40
4503 13845 137
3143 5517 40
75 4825 114
2280 6748 743
706 3241 170
6202 9800 374
1337 11058 203
2399 973 167
8364 6509 40
833 4763 263
10807 7243 1
7643 2851 106
7728 9166 148
5007 12476 623
13814 8860 98
1207 11946 266
3550 1472 167
1111 6098 268
10342 11322 114
3503 9340 465
6044 6530 40
1336 11442 98
4899 12213 40
1621 5686 129
36 9601 350
36 14324 350
36 12729 350
36 6739 350
36 1533 18
36 13284 18
36 6077 18
36 3153 18
36 9903 18
36 3052 18
36 3673 18
36 6174 18
36 3789 18
36 6994 18
5305 2901 129
12911 12419 1
6875 2699 185
5572 2769 40
8830 90 40
325 324 377
12502 2435 520
5685 5590 155
132 6893 221
6332 4981 167
10709 4485 129
13167 14325 627
11356 9378 347
2018 6429 114
5483 4171 114
403 4197 91
403 4645 91
403 6118 91
403 1127 91
403 9735 91
403 8164 91
4677 10980 82
229 3434 933
4986 2321 277
991 9633 277
4783 1357 203
301 10142 825
101 3532 1013
101 12051 91
101 14209 91
101 6320 91
101 10955 91
101 11391 91
101 5454 91
2706 5804 88
13701 11021 1
8510 6389 252
2010 12807 297
4224 1486 40
4224 6863 40
265 4349 203
1130 9252 443
14520 14924 851
3107 10062 1
8673 3144 98
807 8127 203
375 12349 113
375 7308 402
6279 8435 137
569 12681 185
569 4382 578
1683 2881 438
5616 6748 526
3424 11387 289
613 5392 560
13995 13356 361
4086 6815 148
2241 3883 199
2241 8902 199
2241 13305 199
1260 7343 520
332 5950 113
332 4740 113
332 9076 113
3883 3883 82
11743 5076 114
6603 12942 88
4295 8162 401
340 12972 82
635 4114 458
635 11323 102
635 13890 102
1820 11438 494
1820 9060 494
344 6045 625
766 8469 167
1415 11457 243
8226 6593 722
1818 4122 286
2731 6084 18
1534 11840 256
1905 8940 167
3009 6784 33
6965 2761 106
4874 908 40
9280 8211 252
4659 10720 329
6432 11924 155
4017 1775 660
4093 6326 289
5977 9685 40
4227 1303 199
576 3202 167
4161 7622 167
2076 7663 129
4873 1664 40
412 6225 155
1230 1229 208
1230 3228 208
8445 8416 253
3672 3468 98
3672 6109 98
17 3196 238
17 2743 238
1698 6554 1
8799 11803 199
10132 4411 148
6951 9450 361
559 9412 113
657 3093 114
657 8443 114
58 12932 170
58 3891 170
58 8615 170
58 6306 541
3890 1465 199
13566 3678 106
4990 1395 465
328 7905 161
1410 10034 102
3546 298 1
8024 9493 1
1622 4757 268
4512 278 143
4503 13800 137
4503 7044 137
3356 2806 98
3572 9277 143
9352 13950 297
3244 5639 129
2201 3371 248
3191 5571 252
1836 12967 438
3400 7847 137
3400 13032 137
833 5740 263
12064 13318 82
3182 7291 161
385 3000 129
5276 8745 329
1111 659 203
12523 4240 1317
2998 1643 481
5637 2769 114
1846 5381 288
1477 10374 203
6103 3867 203
36 14633 350
36 14184 350
36 5984 18
36 4985 18
36 11322 18
36 6093 18
36 11070 18
36 7896 18
36 6153 18
36 3844 18
36 5531 18
36 3687 18
36 9612 18
36 6561 18
36 5876 18
36 1380 18
36 7314 18
36 3330 18
36 1841 18
36 2340 18
36 2938 18
5305 3402 129
2773 7383 361
2960 6520 315
5904 6856 33
2185 2184 40
6818 1331 179
2148 9561 340
8886 10544 324
1474 894 129
1474 13462 129
1474 9478 129
1474 6296 129
1037 5077 203
7128 10255 581
4585 4133 129
2211 6362 167
8556 3245 692
6261 3679 518
403 4158 91
403 5865 91
403 4817 91
403 8457 91
403 12003 91
403 1771 91
403 7076 91
403 1043 91
403 2631 91
403 2103 91
5373 8945 167
991 1458 40
991 9103 40
12623 4983 297
10032 4403 401
2820 1350 155
301 12156 286
8895 782 148
1067 11492 167
101 12459 91
101 2538 91
101 7646 91
101 426 91
101 14859 91
101 5371 91
101 5587 91
101 3178 91
101 10140 91
101 4176 91
101 310 91
101 5528 91
101 8880 91
101 4156 91
101 4281 91
101 610 91
14101 9849 1127
4824 8406 286
9265 8980 155
10050 853 1
10069 8482 129
4652 13266 129
14248 4057 129
1054 9901 208
2637 5148 487
12084 7353 33
9 1830 665
1612 10107 88
48 12967 277
4768 6266 167
375 12921 113
375 9203 113
1278 1300 342
2797 7284 148
6279 2787 137
569 3985 185
569 12370 185
569 7707 254
4584 9166 377
7939 1238 1
7822 4177 612
5615 1844 221
8588 7656 518
7151 4281 252
4357 4479 462
10082 3734 221
1043 7389 299
12476 3513 161
11296 254 900
397 2267 114
7077 1402 417
10906 5645 82
4237 354 88
2479 10479 155
6618 5280 137
1415 1966 243
5424 7192 155
8260 7987 1
14438 14929 559
6830 4731 40
1818 6554 162
1818 2683 649
2731 4244 18
2731 3333 18
14080 9863 40
8189 973 148
5207 14738 619
12609 10869 40
1534 433 256
11072 12559 88
803 8554 102
10811 9658 167
9280 10823 252
7078 4208 203
4178 2377 40
5812 13482 297
7442 6934 114
4017 4543 660
5376 6006 277
1236 4250 221
8264 3846 633
11773 7996 40
9747 6033 82
7806 1351 129
6043 10637 434
1574 2752 114
1230 9803 208
89 6334 40
7286 3585 114
12656 9407 179
2640 4820 129
2640 11102 129
2640 774 129
1509 11808 40
1514 6986 520
819 421 179
657 7075 114
657 9258 114
657 5368 114
657 8389 114
58 13249 170
58 2453 170
58 9532 170
58 10789 170
11272 4158 315
2074 10450 578
5852 11913 161
5852 369 629
1732 12323 170
3116 11056 297
5840 1673 1
5404 3136 462
6433 7555 40
6433 1253 40
6769 6489 167
6769 7759 167
3557 12349 288
3557 7817 288
5729 8365 98
4503 13791 137
9983 2218 520
2412 1783 114
789 5834 487
6389 11289 277
7586 8101 40
5571 6696 40
3400 376 137
4154 13188 114
5360 11021 114
62 6084 40
9855 7437 40
920 9852 114
10168 1567 179
2095 1017 309
303 8879 607
1477 10318 98
6103 3366 167
13917 9007 622
13917 6574 622
6774 5168 40
769 773 167
12965 3248 40
36 13810 350
36 3999 18
36 10783 18
36 12592 18
36 399 18
36 4772 18
36 4353 18
36 5300 18
36 3329 18
36 3317 18
36 2606 18
10021 8956 129
8019 210 1
650 11498 268
2661 3666 167
2845 4291 199
1474 10677 129
1037 5077 167
14749 2695 351
3094 5210 329
9338 1633 167
3262 12658 114
3262 594 114
2018 7306 114
7594 11706 1
403 3277 91
403 10219 91
403 6997 91
403 2787 91
403 11886 91
403 11788 91
859 3769 693
859 4240 113
859 2014 113
715 2107 297
715 4919 297
9548 13828 1
9910 9910 82
101 2009 91
101 7207 91
101 2883 91
101 5413 91
101 8563 91
101 2574 91
101 7492 91
101 11909 91
101 1467 91
101 9342 91
101 13066 91
101 14037 91
101 6438 91
101 3319 91
101 6173 91
101 13353 91
4407 4695 129
3072 6792 203
6889 7653 854
7478 7874 601
9559 6380 314
1054 2154 208
3610 5721 40
2882 8758 796
8838 4070 735
4166 4748 199
375 6045 402
11479 90 289
260 6006 40
569 10740 185
569 7752 185
569 4132 185
4584 1127 401
11948 2677 155
7139 13933 582
3332 3628 324
2241 1848 199
2241 4424 199
2241 6793 199
1043 12718 346
3948 8493 98
7629 1323 424
3474 5700 221
11743 10533 114
6603 8284 88
1524 4409 106
1464 2372 137
10424 8631 167
635 1641 102
344 12530 625
5424 4963 155
1818 6386 96
1818 11821 286
1818 8180 301
1818 5438 301
1818 2730 301
1818 13354 301
2731 5573 18
5207 7508 381
8959 10899 129
11072 14280 88
7065 354 696
7049 7143 203
1266 3071 253
8749 295 179
14146 13601 114
7442 6180 114
4017 5757 660
4017 4195 660
176 5583 297
1140 119 167
3190 8831 40
14107 6336 462
7816 1184 40
2553 3714 106
8306 3480 253
2285 11619 40
3974 7234 40
13423 8876 129
2076 337 129
89 358 40
10312 3498 114
8046 7371 417
7422 10759 98
5786 9794 113
9732 5706 203
7227 9504 462
4656 1319 315
5820 5475 520
5820 3462 520
13534 14665 185
7952 5647 401
2196 13173 252
414 11817 559
58 1586 792
58 7246 170
58 3655 894
3890 4270 199
3890 1270 199
3890 2818 199
5725 2189 221
10778 3172 129
5852 273 629
6940 7115 520
9214 6755 155
7219 1204 114
8024 10039 1
6433 5341 40
4964 6344 40
1221 11525 971
7670 6069 40
4503 8889 137
2352 3913 98
14687 6723 405
12252 13415 481
3572 295 143
5448 4099 148
789 11087 487
12538 8914 114
3244 3229 129
5064 9984 505
3400 9094 137
8927 7774 1164
3784 7573 243
1207 4974 376
3895 12406 88
1111 3598 203
13974 2269 203
8071 2680 40
14342 5308 114
3503 10349 465
4126 5665 253
3004 8933 266
5399 13059 957
6774 4321 40
2987 2375 40
36 8002 350
36 12549 350
36 2971 18
36 7472 18
36 8482 18
36 9554 18
36 774 18
36 12439 18
36 7677 18
36 4481 18
36 12907 18
7392 10527 520
7238 3008 40
11641 6193 129
1474 8687 129
1060 3924 340
4585 3498 129
10869 1120 98
2670 7833 525
5266 2606 381
3560 12613 340
10407 10544 40
798 2833 663
10865 8885 40
10017 10228 221
396 6144 98
1800 3601 40
403 2569 91
403 10156 91
403 7684 91
403 11044 91
1250 213 1
11718 6247 106
12487 7137 314
859 9010 113
715 314 297
1542 5951 441
301 11970 286
8895 5474 98
101 9301 91
101 10481 91
101 12284 91
101 12660 91
101 8152 91
101 13717 91
101 13291 91
101 4217 91
101 8644 91
101 9322 91
51 517 129
4547 6850 735
4547 9398 735
4547 4070 735
4652 11604 129
7419 10939 266
7027 11982 106
1054 942 208
3477 13326 102
6534 5917 484
3845 6912 434
4224 9012 40
53 8291 438
11752 6306 462
8838 9638 663
4166 5546 199
2797 2798 167
569 585 185
14357 10282 114
264 8592 106
2136 2135 155
11215 2443 347
4741 8846 114
10551 3734 268
2241 14026 199
2241 13414 199
2241 12414 199
3872 5834 1116
5606 11317 481
5614 9252 592
4295 6931 649
7437 9885 167
11202 4374 148
635 14748 527
344 8187 106
344 5526 625
1209 7576 374
1209 5497 374
5641 14515 297
9895 3537 253
2082 12938 297
2191 2027 438
8596 2407 1037
7895 10779 203
10263 10559 622
8936 9072 167
10015 3603 40
4929 1192 155
7049 1216 167
154 2514 764
1266 2116 253
1813 11696 167
7299 8254 88
11839 7629 1075
11186 13774 1
176 9947 297
176 13118 299
1140 3575 148
2172 4099 98
2407 9633 114
4227 12711 199
12525 12239 520
1230 7570 208
9726 9321 40
608 5771 148
13152 11779 340
646 647 203
14478 8231 179
559 13805 113
657 5017 114
657 4248 114
657 12159 114
1702 4534 98
6111 6808 40
4990 5957 562
5085 8842 106
8656 7009 114
3116 1537 102
8716 11099 297
12788 1913 256
11130 9164 167
4503 6002 137
706 11781 170
7589 6794 114
6202 10960 40
6202 217 40
60 1679 148
4280 3982 289
8766 9692 148
8806 11630 40
14428 1228 234
833 10996 263
833 2029 263
149 3584 520
2035 1337 114
3063 9701 381
385 3201 129
385 1236 129
1432 7789 114
2432 12635 297
11177 8001 88
381 11764 277
2998 11238 340
347 9252 462
2216 6885 297
7436 4800 203
36 12514 350
36 3299 18
36 4229 18
36 10282 18
36 727 18
36 3365 18
36 4010 18
36 7158 18
36 6015 18
36 5540 18
36 6526 18
36 3971 18
13554 3293 1
2028 10164 114
650 8253 268
3224 9553 40
1500 1943 167
7471 712 129
8830 123 40
12115 11704 405
673 9526 1
8438 7408 40
2357 4053 324
2503 2504 98
1474 9278 129
3560 6287 277
3262 2541 114
11356 10701 347
4537 6797 167
403 1741 91
403 2166 91
403 10853 91
403 8458 91
403 1989 91
403 10620 91
403 7138 91
403 4893 91
403 7875 91
403 9799 91
403 4089 91
223 12363 525
4645 1110 98
12982 7100 114
6237 9479 622
779 9870 560
3930 539 388
7381 7381 465
101 6780 91
101 2147 91
101 8123 91
101 11701 91
101 7493 91
101 9562 91
101 9696 91
101 5687 91
10276 9680 40
2706 7709 88
2706 10421 88
4824 3834 96
4824 5724 96
4824 4895 286
51 100 129
9265 3366 155
5341 3407 148
5135 5961 289
11078 8175 40
6593 787 559
3107 8118 1
8838 11367 663
4166 14483 199
1278 6187 342
4204 3321 347
11011 3056 253
5912 8728 129
3424 8272 289
5992 2342 391
1033 10609 129
1033 7849 129
443 2234 541
10269 10781 114
7506 8894 40
9545 11183 129
1725 6702 82
4086 8287 203
4086 5369 203
2667 3172 289
2241 11291 199
2241 12936 199
2241 9544 199
332 8449 113
2860 1441 560
13310 6064 137
2358 9101 629
941 4757 167
7678 5712 129
397 7681 114
9550 928 520
8719 7579 518
1104 10100 941
1818 5149 401
2731 2639 18
2731 1718 18
6747 1921 185
7065 10831 696
380 3087 88
5900 5443 40
9280 5399 252
2805 868 361
7889 10817 114
11762 343 106
12240 1128 114
8010 11061 40
7299 6107 88
6102 11810 1
176 12977 299
12101 7437 40
10543 756 314
4730 5296 98
5107 9457 329
4814 4096 329
7418 7122 114
5453 8878 179
3916 5804 462
2076 722 129
412 9360 155
4528 553 155
6867 10764 82
89 10220 40
2384 194 612
5344 8771 1
3945 2558 1
2506 6107 622
2379 9399 199
8202 4945 221
58 3829 792
58 7655 458
58 657 102
58 14003 170
58 4310 170
58 1355 170
58 11731 170
3890 6068 199
10 8938 238
2052 1256 203
5133 2571 167
5133 10048 167
5133 4351 167
1221 3900 830
4512 5312 143
3773 3772 40
1001 9188 732
4503 5811 137
4503 13594 137
8077 3040 1315
12187 14444 875
4280 9164 106
584 5449 40
687 2921 155
3100 11800 902
1836 12058 438
833 12911 318
4154 12329 114
3358 6900 40
385 3865 129
11106 7053 114
11294 5044 696
5276 606 329
1846 2674 288
7209 3522 155
6222 6221 155
6222 6501 155
36 4226 350
36 14300 350
36 699 18
36 2892 18
36 5430 18
36 6713 18
36 5602 18
36 3349 18
36 11215 18
36 7934 18
36 11838 18
36 2757 18
12841 987 40
6858 5932 40
2845 9590 199
1577 1576 167
12177 11279 129
9247 12455 88
7601 4619 40
6332 6258 167
5326 3737 161
1775 9463 251
5180 2346 619
403 2960 91
403 7620 91
403 2411 91
403 3619 91
403 6238 91
403 2193 91
403 1169 91
403 6104 91
403 7559 91
403 10554 91
403 6491 91
403 6764 91
403 5040 91
2674 7128 161
1250 8274 1
6144 8551 324
4645 6098 98
991 11011 40
7278 12827 714
859 6112 113
13148 8561 277
301 8914 162
101 12233 91
101 2272 91
101 4921 91
101 11000 91
101 6185 91
101 7673 91
101 12808 91
101 390 91
101 10777 91
101 457 91
101 12862 91
101 4977 91
101 4025 91
101 4483 91
101 14661 91
101 10547 91
2706 2198 88
4547 10310 735
8570 607 1
4771 8850 155
6366 4754 40
2637 9052 631
12084 297 33
7409 12175 340
2882 4772 234
1057 11398 391
4166 12523 199
375 9336 113
569 4221 185
569 3177 254
5616 2665 1116
545 3123 143
613 10358 560
264 12146 88
264 9059 88
9987 8004 98
11467 7465 82
676 6901 407
2358 2963 629
1524 1693 185
2505 4159 875
1464 13698 137
1277 9880 297
635 12202 170
344 6845 625
1818 5742 407
1818 5486 301
7186 5532 155
2731 2743 18
979 8777 199
11540 2237 179
6943 1185 520
11108 14888 1196
1618 13358 560
3521 3923 289
10737 7568 114
4017 8579 660
8076 6098 148
176 12713 102
14932 8112 1071
9948 13380 137
4026 6628 465
11176 8189 40
5786 389 113
2442 1330 40
1073 5506 560
11731 12615 82
278 2916 102
1785 12764 527
2379 9494 199
559 6044 113
657 3356 114
657 6166 114
657 7537 114
58 6950 792
58 6631 792
58 10186 170
58 11827 170
3890 6081 199
3890 14006 199
3890 278 199
12092 8334 82
4990 968 465
4990 134 465
3242 13765 297
5447 8484 148
13907 13907 465
9214 11115 155
3116 2628 114
3557 753 288
1903 1031 88
4702 5979 692
3567 7979 155
5993 9792 373
4503 4549 137
4503 6121 137
4503 2229 137
6518 1110 155
6182 12035 518
7858 1654 203
2297 3500 253
1008 7309 933
7566 3144 106
76 11445 866
9192 6537 148
3191 5875 252
7744 1461 1
3100 6270 902
3400 4335 137
833 4082 263
833 2831 318
4950 1359 301
4950 8726 301
5246 3272 98
1411 3712 1
7728 7727 148
385 8812 129
11558 13019 40
3503 3731 465
5997 9854 114
6044 5913 40
6472 6768 203
1621 3727 129
2987 2405 40
2055 8884 329
36 4933 350
36 10891 350
36 10663 18
36 6289 18
36 5316 18
36 9637 18
36 319 18
36 921 18
36 2329 18
36 3486 18
36 4086 18
36 7201 18
36 2853 18
36 633 18
2956 4614 989
4957 1329 98
6875 13142 185
7392 10074 520
6443 4868 1
913 1032 764
7592 6136 167
1474 9894 129
1394 5582 167
4585 6591 129
5266 208 381
798 1319 663
5409 5691 88
5166 1400 98
8556 6924 692
403 6212 91
403 1125 91
403 1593 91
403 14517 91
403 3347 91
403 5804 91
403 12724 91
403 2805 91
403 1946 91
403 1236 91
403 9118 91
10628 10355 401
5373 601 167
10525 6448 114
10929 9637 40
715 8161 297
7580 11706 114
6116 14147 1
1959 4067 40
2684 417 277
301 10833 96
301 13243 754
101 9097 91
101 4654 91
101 2456 91
101 11180 91
101 1775 91
101 11786 91
101 10578 91
101 5280 91
101 1574 91
101 8331 91
101 10134 91
10155 3357 203
2706 10526 88
4824 2596 96
6365 11941 692
4547 5631 735
1585 8348 391
6165 4208 98
4652 10099 129
4652 6720 129
6624 6623 40
103 11387 289
1054 6461 208
1054 1837 208
3281 9899 373
185 10381 732
8838 8811 663
3539 1743 98
4166 8041 199
3688 839 203
375 1245 113
375 8486 113
375 7954 113
375 4373 113
375 8703 113
1278 10595 342
2797 5941 167
6279 2427 137
569 12467 185
569 9758 185
5912 6021 129
11948 3029 155
7260 4053 324
26 6864 167
4357 3001 462
9545 467 129
12695 11102 114
11852 9657 525
2241 13379 199
8053 2497 114
4338 1374 129
4932 9692 155
931 8470 505
2904 7379 1101
12781 2966 924
2849 11558 137
4263 4177 129
774 8308 167
5723 2259 88
5199 6527 155
397 5988 114
342 1752 268
13863 2566 1032
8719 5805 518
9895 3257 253
1104 4941 941
3050 2621 505
1818 4624 162
1818 457 286
1818 7057 286
1818 9032 286
3208 11882 155
2731 2396 18
13717 6219 40
6747 6424 185
9059 7192 98
118 11015 102
1140 6375 167
7490 4642 40
3452 7248 33
6371 7192 33
7806 1838 129
11744 9725 98
2076 3210 129
3939 7215 114
3905 9901 179
10339 2715 520
4903 3459 106
1509 8812 40
6145 13964 114
7952 5467 377
7952 1981 377
3231 884 40
10132 3659 167
657 6900 114
58 13088 170
58 11451 170
58 8219 170
3890 14292 199
12334 638 546
6940 1340 520
10635 678 758
9042 11831 82
13521 3332 106
1794 2646 315
10003 2626 167
3244 8036 129
2879 5296 253
1801 7230 289
11883 1461 114
1836 8907 438
3400 48 137
3400 13478 137
833 12160 263
833 4917 263
11010 4268 518
4154 7145 114
13272 4359 40
10935 3969 277
4044 10945 40
3569 2952 167
3303 11984 106
3303 3856 106
546 12709 170
36 13821 350
36 4342 18
36 2244 18
36 10609 18
36 2288 18
36 5223 18
36 1002 18
36 8206 18
36 9502 18
36 8597 18
2028 11507 114
14604 9488 622
9230 6934 40
1458 9415 167
1389 13199 40
8438 8610 40
1474 519 129
4585 1247 129
3422 3622 221
2670 7833 607
4218 2239 466
403 748 91
403 2432 91
403 10993 91
403 9677 91
403 6469 91
403 7330 91
403 1682 91
403 5497 91
403 8169 91
403 7078 91
403 1198 91
403 13322 91
403 6775 91
403 1718 91
403 4791 91
3058 92 1
7661 7068 98
229 228 114
1802 3778 438
1601 3349 315
2590 7408 40
715 6208 297
715 8629 297
8582 13512 129
2574 3001 114
2574 11466 114
13806 3421 114
9628 5913 694
5278 150 221
9416 3562 148
2684 4818 277
1817 2969 518
3494 9380 114
101 12758 91
101 4482 91
101 9074 91
101 6220 91
101 389 91
101 13330 91
101 1501 91
101 269 91
101 11495 91
101 14432 91
101 11862 91
101 1400 91
101 12299 91
101 9132 91
101 5093 91
10276 1218 40
1967 3294 114
12561 10524 88
51 6231 129
6748 2624 347
5073 3777 155
377 7110 114
9 318 665
3504 6157 148
1278 1592 342
1278 14459 342
1278 5682 342
569 3427 185
569 11559 185
569 9907 254
5912 9975 129
7151 1000 252
9545 2521 129
5954 13119 114
2241 6471 199
2241 1631 199
2241 8656 199
8998 4268 167
409 758 82
332 9372 113
4346 5127 114
13021 4150 40
2849 13264 137
4263 417 129
1524 6521 254
1464 4620 137
3983 10029 114
635 14357 102
9550 4344 520
7387 7659 98
1209 11328 374
2509 10918 289
189 5161 155
1818 11326 494
1818 4637 286
1818 3691 286
1818 9491 301
2731 7617 18
2731 10504 18
10554 6117 752
1813 9434 167
4202 5 253
7140 6495 179
5964 6152 827
714 7383 850
4017 3245 660
2863 9698 484
1925 12849 297
5977 12089 40
3355 4415 40
11044 6320 315
3568 6165 324
1230 2165 208
1230 8631 208
89 3767 40
4026 3959 465
13070 10339 129
10969 2040 256
1509 5308 40
1509 2457 40
2876 14460 699
278 13292 102
13534 6017 254
646 10687 221
2379 7052 199
9303 3914 40
58 6459 297
58 14282 170
58 1413 299
3890 13780 199
211 6477 732
8200 9457 329
10778 10777 129
6696 5947 203
7796 4308 148
10358 11939 253
13247 7463 114
6433 11384 40
6433 3382 40
6022 6640 664
4264 8810 88
10642 8378 106
4503 9795 137
6518 2825 155
2297 2737 253
789 4388 487
5571 6789 40
3400 14160 137
3383 3382 40
3063 3736 381
504 7889 297
10043 3635 381
9945 2429 40
8179 1128 324
13089 10058 488
12763 5555 155
12763 7810 155
36 607 18
36 5352 18
36 9337 18
36 9932 18
36 4139 18
36 4680 18
36 2948 18
36 1974 18
36 11616 18
36 1011 18
36 5649 18
2028 9396 114
14604 5220 622
1992 3311 148
3571 5052 520
1458 8416 167
8270 10181 155
1474 8204 129
1474 12383 129
11142 10723 562
3560 12724 340
3560 4473 340
2211 3394 167
3133 9266 88
3133 5284 88
8556 3409 692
403 5501 91
403 4810 91
403 12486 91
403 11952 91
403 8341 91
403 1573 91
403 2407 91
403 4434 91
6409 5495 167
11469 6930 315
13006 5667 857
715 247 297
715 580 297
715 6653 297
9628 5682 694
301 11902 162
101 3392 91
101 3449 91
101 2367 91
101 4907 91
101 9417 91
4547 2465 735
4547 4420 735
103 2798 106
10528 2721 266
12949 9515 1
2637 8212 487
1612 7209 88
1612 8673 88
3504 8158 148
8838 5975 663
8838 4033 663
10949 9138 1167
375 6830 113
6279 5787 137
4584 7727 868
3810 6673 98
544 6216 185
544 9249 185
8045 6258 106
2241 10838 199
2241 13168 199
2241 10771 199
2241 2036 199
5192 1289 155
332 14082 113
676 12830 248
774 773 203
1524 3085 185
1524 914 185
1524 8416 106
7437 2821 203
397 8735 581
7048 10731 82
1818 1758 301
2731 3330 18
10729 4405 696
14593 12383 114
6953 1080 340
6747 6170 254
9275 8971 167
13692 4770 114
2434 1340 167
12240 3922 114
11017 5390 82
3521 7629 289
12726 7131 137
172 5753 167
2026 3620 114
11839 4827 1075
2507 1561 289
2507 4262 289
8514 4133 775
7401 6046 203
5098 10776 129
4934 7377 309
12768 9661 167
2689 6521 649
576 840 148
8086 14139 297
7775 11650 1
1291 13306 114
6338 12142 114
1574 12065 114
1230 6792 208
6867 6000 82
13757 9889 114
4552 8205 82
4026 2620 465
2640 2232 129
2442 7098 40
14711 9907 114
17 6039 238
278 14859 935
10132 5640 167
9540 343 148
1955 9869 148
58 5254 792
58 606 114
58 3788 102
58 8103 170
14745 10364 1055
11880 11115 373
2489 154 331
2074 9122 254
3116 11928 297
10223 4830 329
5133 6315 148
3804 13636 297
5442 1297 1
4702 10026 692
11143 3296 82
4946 8579 40
4503 10890 137
1675 4075 114
706 11448 170
3572 5215 143
3066 6349 1
4419 106 155
3244 1113 129
1638 8581 129
3100 11296 902
9961 11885 297
833 11645 263
4950 3761 301
4154 3218 114
9635 3666 221
2017 7078 40
4444 4097 329
4664 7623 289
10414 6367 179
13362 2651 391
2998 80 340
8576 5161 155
36 5662 18
36 6161 18
36 5816 18
36 4218 18
36 3965 18
36 1081 18
36 7987 18
36 1128 18
36 2802 18
36 7596 18
36 9278 18
1582 6585 315
12078 6465 114
1474 368 129
1474 2797 129
1474 5731 129
11589 629 203
4243 11128 607
8208 9643 114
14753 1613 443
2785 9744 33
403 5789 91
403 2405 91
403 2416 91
403 7984 91
403 5118 91
403 7780 91
403 1480 91
403 6724 91
403 13370 91
403 6739 91
403 10504 91
403 7219 91
1802 8032 438
1399 12061 559
8867 8539 314
11469 10520 304
493 5279 1
6756 5111 315
859 4865 113
859 9672 113
6116 5353 1
2574 3132 114
3384 6545 329
5020 4577 847
101 12212 91
101 10380 91
101 476 91
101 2034 91
101 3275 91
101 4531 91
101 10372 91
101 13155 91
101 7633 91
101 1316 91
101 906 91
101 13763 91
101 7041 91
101 3202 91
101 10964 91
101 14764 91
101 6805 91
101 8024 91
12561 13101 88
4824 8564 286
4824 9464 286
5075 1617 98
3423 9402 129
4652 13904 129
7419 7498 376
11918 13629 297
1054 2150 208
2882 11619 234
53 2554 438
2416 6077 277
13996 9650 861
5106 2973 40
1278 5197 342
2797 3849 203
11460 9284 167
569 6947 185
5172 458 98
5506 2993 253
613 4126 560
613 6190 560
8704 5686 1
7161 4428 560
12065 7644 167
2241 5358 199
2241 7961 199
8411 10409 1
332 14182 113
2849 4327 137
5614 7905 592
774 11982 203
2358 1575 629
2927 6285 814
2053 2054 732
7678 4660 129
1464 13018 137
3612 4875 40
6170 875 773
342 2081 253
8489 10515 148
5381 11094 40
344 5980 625
1415 7080 243
5499 875 776
1818 1980 162
1818 541 301
5023 7884 40
10213 6537 179
11862 11861 481
1966 12603 114
2162 561 137
9275 3993 167
7049 2905 167
13653 4070 114
9280 3668 252
9697 11122 297
1670 9222 289
4535 9206 664
7299 2928 88
5778 12422 114
14902 9230 137
1140 5728 148
4879 2113 155
8775 4088 114
2689 541 649
988 10927 886
12966 13182 1
412 6522 155
1230 3102 208
13542 4599 114
4026 6179 465
1605 9978 40
2196 3519 252
2379 3898 199
58 8128 792
58 2985 792
58 2774 814
58 10197 170
58 13389 170
58 10857 850
3890 4968 199
7796 9465 148
3116 4359 458
3116 11056 102
3557 2791 288
3557 2573 288
11023 3878 1075
4946 3247 40
13259 5100 114
706 8716 170
4555 7007 340
8552 4399 114
497 5498 902
3244 1566 129
7020 11102 129
2284 8586 248
2688 9863 114
833 5075 263
833 947 263
8927 4140 1164
2017 1996 40
149 1705 520
6021 2400 148
3503 5894 465
1336 1335 98
4126 5173 253
888 7979 253
3004 3005 266
1621 5447 129
11784 7668 155
36 13500 350
36 12276 18
36 637 18
36 10993 18
36 3416 18
36 4439 18
36 10781 18
36 2338 18
36 8736 18
36 8310 18
36 10533 18
36 7051 18
36 6887 18
36 9417 18
4960 7473 114
3570 2890 167
12911 3498 1
1992 5180 148
1992 3454 148
2845 11947 199
10020 10887 88
1474 10769 129
1474 781 129
1474 6326 129
1474 7765 129
6923 6101 40
3560 13033 340
11048 13132 1167
798 1926 663
798 5515 663
3014 4928 315
6434 8415 167
7082 3078 629
403 2354 91
403 13683 91
403 1594 91
403 4467 91
403 2769 91
403 874 91
403 8333 91
403 2755 91
403 5025 91
403 12666 91
403 4923 91
1847 10017 277
1847 12268 40
5472 2236 33
7071 4197 481
2665 4037 776
5281 25 143
5281 617 143
715 2906 297
5779 7872 268
1466 90 438
101 5448 91
101 14580 91
101 9972 91
101 1364 91
101 13648 91
101 9180 91
101 8702 91
101 5466 91
101 11282 91
101 4182 91
101 11859 91
101 10834 91
101 6721 91
101 7199 91
101 14610 91
4824 3720 286
4824 10560 286
4547 4816 735
4652 9753 129
7027 4901 106
3141 1929 441
11370 3530 129
9518 2774 438
12409 14269 40
8035 5834 626
9 318 238
1612 11974 88
8838 1496 663
375 4544 113
375 4964 113
2797 3071 98
2797 2424 148
2241 11964 199
2241 12091 199
13719 14422 581
13836 6992 185
5199 3500 155
397 13828 114
4303 5967 40
3934 6523 203
3934 6523 98
2031 3202 167
344 11427 106
2082 8212 595
1104 10614 248
359 9024 1
1818 6980 301
2731 9668 18
2731 9103 18
9254 6005 315
13717 3248 277
5023 8155 40
1852 3205 148
7065 10060 696
6943 8837 520
1905 1106 148
12855 2692 340
8003 4484 114
4445 5857 40
13935 4881 82
922 1826 315
7078 4208 167
5964 10796 827
2915 6128 114
11843 5969 1
2507 4468 289
10131 121 106
6694 5495 520
3702 813 329
11423 12812 1
1236 8862 221
8812 3366 203
13322 10807 340
4455 8279 40
9029 14129 297
89 6711 40
2442 10763 40
1880 7228 203
9743 13660 82
8821 10168 562
4656 8001 315
646 6817 98
2196 11686 252
2196 12088 252
2379 14254 199
7279 7278 1127
11926 2244 129
657 10521 114
58 5558 297
58 6660 297
58 12241 170
58 13164 170
58 6675 299
6517 4150 324
4990 7405 562
10635 14899 758
5898 8376 114
10333 7562 98
1410 8460 102
6769 4674 167
4964 5023 374
11362 4176 40
9996 5733 917
6022 6015 289
11107 12293 481
4503 3580 137
4503 5870 137
4503 11726 137
14221 1953 1069
2280 7655 743
13834 12979 340
3244 6669 129
833 9777 263
833 3702 318
5220 946 203
7502 7240 106
6589 1993 463
842 1845 764
5637 9306 114
13089 4631 488
2055 7053 1
4184 3735 114
36 11751 350
36 3891 350
36 14812 350
36 1257 18
36 8993 18
36 7394 18
36 11906 18
36 6200 18
36 9233 18
36 3287 18
36 6692 18
36 4667 18
36 6643 18
36 13774 18
36 5549 18
2773 10831 361
11521 2680 40
4569 10080 40
6875 11296 254
6524 611 203
14928 617 405
12583 7454 40
4764 7431 221
3560 5319 340
10383 8547 114
8756 11956 253
2018 1285 114
12951 1858 277
403 2092 91
403 12508 91
403 14079 91
403 10231 91
403 11845 91
403 3225 91
403 13950 91
403 14048 91
403 7363 91
403 6122 91
403 2340 91
9439 6699 114
6409 1573 148
7007 1147 1
7325 11306 462
4645 1898 268
2810 10318 253
859 2092 113
859 10206 402
14230 4560 114
3740 1432 656
4082 4083 98
1817 227 518
101 14128 91
101 8579 91
101 6955 91
101 687 91
101 11494 91
101 2343 91
101 5459 91
101 14858 91
101 12526 91
101 2858 91
101 9611 91
101 8400 91
101 14295 91
101 5231 91
101 9411 91
4547 10979 735
7478 280 601
3423 90 129
4652 13119 129
5073 5388 155
2296 3345 148
5341 6439 167
5135 10159 289
6084 93 98
2045 12439 114
53 8524 438
3504 1306 148
4166 8612 199
375 10772 113
6379 637 1
1278 1649 342
1278 4038 342
5520 11715 481
569 13391 185
569 5316 254
9054 928 221
4584 4765 377
5616 4940 725
6156 8592 167
545 176 143
4366 4446 1
13878 7300 114
7976 5383 518
1703 10932 277
4357 3136 462
648 6483 167
2241 1363 199
2241 12954 199
332 4249 113
4263 1242 129
12135 9504 40
6235 2852 1
10173 10527 155
5286 7948 622
344 5901 106
1209 5100 40
1818 11821 301
1818 6244 301
1818 4648 301
1818 484 301
3208 3734 155
3208 10795 155
979 13803 199
1631 1632 82
1534 5998 256
1545 10159 40
12582 8918 462
3282 3419 373
12192 6315 155
2787 5959 374
13543 6980 114
11839 10857 1075
13532 7422 1
8076 2215 148
7490 12365 40
11753 8448 167
14040 14170 971
4879 1201 155
8264 6410 633
7876 10663 88
561 3435 40
2553 2313 106
8306 1454 253
2076 9832 129
2076 6498 129
2076 5923 129
1736 6941 221
1230 7690 208
1230 13580 208
89 11040 40
12050 5477 106
4026 916 465
5476 6177 167
11566 5679 286
10075 1258 1129
1498 10074 252
17 5316 665
2196 7077 252
2379 11454 199
2379 9287 199
2379 8275 199
11926 6681 129
1955 953 98
657 2519 114
58 11134 102
3890 13174 199
7407 9331 98
6075 7597 167
1702 7777 98
8926 7137 129
8932 8932 1124
9259 3012 40
6844 11453 1073
9755 12482 129
3880 1387 203
7379 1805 88
4964 13775 40
1833 14484 297
4473 930 381
4503 10445 137
8359 5780 167
9555 5985 268
8817 8831 1
12761 10584 88
13465 2253 167
2879 3079 253
3100 10450 902
833 10419 263
833 194 263
833 8062 318
4154 837 114
5246 8223 98
854 5803 98
62 8874 40
9246 13321 88
3358 6919 40
385 6942 129
5007 14660 623
3569 3568 203
3303 1685 106
348 12121 102
6278 921 1
6407 538 40
769 768 203
36 6975 350
36 12857 350
36 8595 18
36 2793 18
36 2332 18
36 1109 18
36 2454 18
36 11835 18
36 1442 18
36 2686 18
36 5245 18
36 5139 18
36 11230 18
36 4651 18
36 8533 18
8019 9731 1
650 1097 167
1992 4314 148
4957 4063 167
2148 5689 340
7135 1481 40
7135 5329 40
1474 7271 129
10102 10036 484
781 3053 167
7008 3941 129
403 5794 91
403 3481 91
403 473 91
403 3865 91
403 7672 91
403 1493 91
403 12755 91
403 4612 91
13278 6370 155
13308 9408 622
1087 10546 155
4645 7749 268
9237 7308 689
991 6731 277
991 12886 40
859 5260 693
859 11471 113
12647 1225 234
10998 3211 1
471 13068 494
2684 1905 277
101 12426 91
101 11106 91
101 6461 91
101 4597 91
101 2424 91
101 4746 91
101 5930 91
101 1319 91
101 14181 91
101 13777 91
101 11022 91
101 8501 91
101 4590 91
101 1066 91
101 2376 91
10276 10281 40
5160 7578 98
4824 10576 286
4824 6930 286
51 8232 129
4547 12166 735
10363 10655 40
5887 2755 129
5071 1299 512
11328 5371 40
1054 499 208
9852 8988 203
12094 3415 1124
7324 7323 88
3610 8384 40
3256 5883 33
375 4389 113
1278 11509 342
1278 1508 342
1278 2547 342
2797 5941 203
569 6387 254
4584 7307 401
6032 8482 1
10204 2790 607
264 9214 88
1725 11680 82
11852 2189 106
2241 10565 199
2958 11473 40
1464 4573 137
3983 1988 114
1063 1133 297
397 11028 114
635 1896 850
344 9059 625
753 6871 40
1818 8118 286
1818 2500 377
2731 4456 18
6747 4940 578
1534 9322 256
4871 12304 623
14325 9861 88
14325 5980 88
11108 5672 954
6965 4914 106
11762 3394 106
4659 2481 329
8188 2771 1
12200 10560 254
4814 6504 329
2689 646 810
2076 921 129
12354 4921 234
6574 635 1097
1574 14134 114
1230 7260 208
8903 6864 167
12656 1126 179
2701 12265 40
17 2743 500
13137 2305 129
6805 6006 381
58 5834 595
58 6748 458
58 9199 170
58 6649 170
58 13727 170
3890 12521 199
3890 12695 199
8995 2758 40
3517 8282 88
12532 1827 114
4503 4327 137
4503 8858 137
8340 3686 114
2478 11969 167
3244 2252 129
1129 6849 581
9779 2865 114
1288 4210 106
1836 2519 438
3400 13380 137
833 5804 263
833 2436 263
8769 726 179
4154 11435 114
677 6803 148
586 10498 266
3296 3532 114
653 1171 732
5997 894 114
11801 8940 253
2998 3173 340
2375 5382 148
3004 1421 376
3004 4400 376
5330 10527 98
2216 6885 102
761 4665 458
769 1881 167
36 3307 350
36 12736 18
36 12634 18
36 6404 18
36 7705 18
36 9488 18
36 5308 18
36 12256 18
36 2011 18
36 7119 18
650 11498 518
2185 3474 40
325 12035 377
10020 12408 88
1474 8742 129
10353 1481 846
6135 1613 114
13611 5963 256
798 8332 663
7482 7959 98
13329 3863 114
8556 4342 692
403 4441 91
403 2883 91
403 3570 91
403 2357 91
403 6686 91
403 2459 91
403 2586 91
403 966 91
403 6848 91
8874 3202 155
13494 6906 114
399 8852 221
1294 5575 289
3152 6857 525
991 2810 40
8698 8698 82
5776 3717 1010
14484 3087 114
2684 7934 277
13637 12662 525
101 719 91
101 13340 91
101 8026 91
101 10002 91
101 4078 91
101 13659 91
101 3656 91
101 12137 91
101 12215 91
101 1774 91
101 3323 91
101 11600 91
101 14081 91
101 499 91
101 7440 91
101 998 91
101 6529 91
10276 1127 40
2706 12217 88
4547 3975 735
1704 2724 167
103 4800 106
9559 13641 88
3141 1492 441
9305 10441 82
2637 4388 487
1521 9582 203
6534 2767 484
3845 8265 434
9185 6852 40
375 1847 113
375 12341 113
1278 4243 342
1278 2107 342
2797 5780 203
2797 2449 167
6279 8748 137
102 1081 1
7822 10898 612
2459 7703 1
13874 11016 82
14405 9902 129
7161 2354 560
7611 10557 234
2241 10240 199
2241 1724 199
2241 13874 199
2241 540 199
2241 10743 199
8540 6085 40
2025 13077 161
4263 1627 129
774 5877 167
12879 12879 465
4295 861 377
12389 5333 361
10565 2518 114
10173 4726 155
635 1406 170
10518 3409 40
1415 11802 243
1415 11037 243
9006 3628 256
1818 7330 401
1818 9217 162
1818 4575 286
1818 293 286
1818 2844 301
8640 4125 1
979 10312 199
11540 4842 179
8959 10917 129
3510 1561 277
2162 8858 137
3282 3964 373
9258 9582 155
10497 171 619
49 10275 297
3605 11776 40
14675 5804 462
9563 8048 623
13926 10932 311
3104 5 106
8142 8717 40
9412 3878 40
11839 472 1075
13928 3997 340
10850 6490 971
9922 5196 268
12386 6509 1
5098 2837 129
7876 9191 88
3148 1623 315
4485 5589 518
2076 6939 129
971 744 203
1574 10615 114
2931 5352 88
10555 10541 581
11053 2186 985
10969 6006 256
1498 5196 252
1498 3964 252
12414 11146 82
3924 1240 1
12963 9638 40
17 3196 665
1698 5217 1
13193 7127 927
7602 1707 129
657 13019 114
58 11772 170
58 2857 170
58 11228 170
58 10948 170
58 354 346
3890 9305 199
549 7445 167
14650 3163 161
6940 4486 520
3557 5193 288
3557 3337 288
6518 3757 155
14128 5419 329
11396 12503 797
3191 3337 252
4417 11310 155
5571 3181 40
1836 2608 438
3400 8111 137
833 1336 263
833 5531 263
11967 5013 518
12631 4624 40
954 119 268
385 5824 129
3701 1930 340
3701 4513 340
7805 437 208
5637 1140 114
7436 2174 203
1290 1416 231
36 11106 350
36 6338 350
36 8382 350
36 3372 18
36 2680 18
36 14145 18
36 2188 18
36 3976 18
36 4615 18
36 4816 18
36 8547 18
36 12958 18
36 12182 18
36 12142 18
36 11059 18
6875 2476 185
7432 4812 520
7471 2340 129
325 1122 377
673 11033 1
1474 12566 129
2018 8399 114
7594 8576 1
3133 5220 88
7082 5515 660
403 9623 91
403 12526 91
403 5841 91
403 11870 91
403 3755 91
403 5489 91
403 10666 91
403 13242 91
403 5543 91
1300 2724 106
4 5775 559
11718 2872 106
3340 12320 40
991 6539 40
715 5546 297
715 9002 297
9548 9241 1
6840 1066 167
301 13925 286
101 6067 1013
101 8806 91
101 14082 91
101 2169 91
101 9785 91
101 11322 91
101 5807 91
101 11587 91
101 14216 91
101 5821 91
101 13518 91
101 5863 91
101 554 91
101 711 91
101 5883 91
101 943 91
101 13893 91
101 2486 91
101 552 91
2706 2175 88
3049 5758 481
4824 8489 286
4824 12083 286
4824 10511 286
2152 9329 33
7419 3534 266
5310 2339 40
2637 9510 631
10300 9454 129
4224 7667 40
2882 3463 234
9 10233 500
1612 5559 88
8838 4613 663
2416 5945 40
375 991 113
375 8048 113
1278 14912 342
1278 13623 342
1278 7833 342
1278 14695 342
2797 5877 167
569 12913 185
569 9571 185
569 9925 185
11011 3415 253
5912 1613 129
5912 4938 129
5536 2262 520
1493 2695 161
5574 12628 329
264 11436 88
554 119 155
12065 3304 167
10551 2737 268
7611 4868 234
8045 121 106
2241 13849 199
2241 6705 199
2241 6922 199
3449 945 155
1260 8088 520
14927 8226 252
774 7125 203
5951 14133 299
9405 11205 1
6313 1693 819
6094 3304 98
635 3921 541
67 3035 167
1209 13377 40
1818 11764 649
2731 10662 18
10015 7905 40
5924 1884 155
12192 1229 155
9280 11318 252
6398 1705 167
6552 4123 155
13979 2234 311
6458 272 40
7299 863 88
11839 8232 1075
11670 12952 114
10543 2042 314
2393 9938 340
2076 3749 129
2076 9463 129
9440 2727 716
13056 3844 1
89 6871 40
5100 5271 98
13413 14652 248
2640 11910 129
10075 12798 1129
10686 10683 167
2379 11546 199
2379 14177 199
58 14254 170
4545 7495 114
10 1352 665
9214 5862 155
10358 10652 253
5682 1323 424
4964 6591 40
9621 4938 88
8441 3630 167
10642 23 106
4946 3645 40
3164 2933 167
1675 2935 114
706 9147 170
3878 11456 251
3066 5397 1
13465 3666 167
3244 2683 129
3244 2769 129
7006 3568 520
13053 9839 1
2399 130 167
3400 11878 137
833 2147 318
833 6987 318
7010 1076 253
586 10180 266
1217 4634 714
2998 3058 340
1156 5575 438
6062 10123 88
1477 8418 221
36 2552 18
36 849 18
36 5332 18
36 7729 18
36 8412 18
36 8620 18
36 1041 18
36 8714 18
36 8370 18
36 1057 18
36 3825 18
36 11763 18
36 1691 18
36 4390 18
36 12488 18
36 1228 18
36 7485 18
2773 5333 161
1992 3913 148
4957 7832 167
11158 11158 465
6875 3154 185
1458 2737 167
1474 396 129
7269 10497 98
6149 4875 114
4537 7386 148
5418 764 155
403 10376 91
403 2085 91
403 6093 91
403 2699 91
403 6412 91
403 7577 91
403 13547 91
403 2540 91
403 9896 91
403 240 91
403 14291 91
7663 8990 148
3859 3858 98
14560 11923 405
13293 12196 114
1677 6134 98
859 2566 402
101 5997 91
101 7585 91
101 11633 91
101 8278 91
101 10088 91
101 8827 91
101 14561 91
101 8122 91
101 5443 91
101 1748 91
101 10185 91
101 11407 91
101 12227 91
101 8789 91
389 7049 40
10156 3296 714
4824 4304 286
131 1617 148
5887 430 129
5071 5254 512
7419 10923 376
7254 13847 82
1054 11504 208
4771 1604 155
4224 4277 40
9607 4967 402
4300 4299 98
10361 7940 438
375 11521 113
8112 2269 377
4584 521 377
5912 1323 129
545 635 143
5506 2346 253
1033 3542 129
443 11760 894
4574 2908 622
1359 5652 98
264 9559 627
9669 5056 391
5688 5687 137
9893 2978 179
2241 13329 199
2241 1742 199
9987 767 98
7229 9648 40
2790 4664 525
11939 4921 324
676 13392 162
4155 1832 716
4263 6429 129
13836 9141 185
7678 10781 129
1464 11329 137
5497 6574 40
7360 6082 732
6260 5056 268
635 5865 102
635 13806 102
3934 6575 203
1820 9460 944
7771 8432 155
1818 2853 494
1818 1165 286
1818 2520 286
1818 5670 301
1818 8812 301
2731 100 18
2731 9553 18
8936 5643 167
4871 3290 463
1038 2283 631
13690 6445 40
8341 5878 1
11368 7887 289
2805 9184 251
13571 12040 114
472 9762 581
486 1078 248
6345 5353 286
176 12977 297
8812 4947 203
7251 2761 155
9889 10187 581
7806 9984 129
3905 3866 179
11566 5984 407
1498 6049 252
1498 3070 252
5273 4095 155
5285 1259 155
2379 12461 199
2379 14465 199
2379 11432 199
58 1277 102
58 1848 170
58 12158 170
58 3796 894
8155 367 221
6765 840 155
174 7632 500
7292 291 40
1221 11940 771
4512 9929 143
2469 10110 114
4503 10148 137
999 9252 289
706 8076 114
9745 11402 82
6202 2098 40
9085 9171 1
3191 442 252
11704 2234 541
1836 535 438
3400 8703 137
4154 8578 114
7010 8768 253
7318 7869 315
5720 2527 155
7530 11516 82
2998 5507 340
2375 1076 203
2095 8170 309
36 5529 18
36 4560 18
36 4957 18
36 368 18
36 12249 18
36 3396 18
36 4786 18
36 10848 18
36 2869 18
36 6067 18
5305 11390 129
10899 3414 373
2028 13862 114
3571 2969 520
6818 6086 179
11641 9975 129
8246 2234 541
1474 10645 129
14448 9146 315
4218 4217 900
403 1258 91
403 5631 91
403 906 91
403 7896 91
403 930 91
403 11496 91
403 4088 91
403 12850 91
13944 5955 148
1847 1803 40
11718 7348 106
223 14756 607
5128 4575 40
1677 1126 203
4645 2215 98
5472 3364 33
1929 11751 297
991 4879 40
10929 10055 40
715 2695 297
12926 14571 808
301 7088 96
101 12689 91
101 3797 91
101 6009 91
101 8647 91
101 6032 91
101 4346 91
101 9394 91
101 1018 91
101 7765 91
101 12509 91
101 7070 91
2706 1947 88
4506 1093 268
131 485 167
6273 2921 98
3423 11306 129
4652 1334 129
10528 6608 266
3123 11087 458
6877 10780 88
7324 8527 88
13939 9311 114
375 4036 113
375 1155 113
375 7229 113
375 11677 113
800 14872 527
569 10484 185
569 9891 185
569 8938 578
7822 10520 612
3381 12836 102
6997 3018 114
5536 6601 520
5615 1844 203
9545 7809 129
8045 9360 106
2241 8465 199
2241 2547 199
3021 6638 1
4263 5810 129
2358 696 629
9951 4271 489
6863 526 155
7387 6557 203
2082 9052 595
6743 3782 518
951 2027 238
1818 2678 96
1818 1980 286
1818 8480 286
1818 2723 301
1818 4916 301
2731 7804 18
380 12146 88
380 4998 88
11108 5156 954
11931 4208 155
49 353 297
9563 1316 623
11125 4528 114
2425 34 203
8076 10589 167
13055 13231 82
7251 2256 155
13768 13795 88
8545 11711 114
4312 8929 1
2076 8613 129
971 2058 167
1574 2940 114
4481 3568 221
1230 8223 208
9029 13821 297
642 7501 167
1509 8884 40
2876 9006 699
514 7222 155
9222 6316 203
8558 6001 167
278 11742 527
278 2766 299
1053 5068 487
646 5774 221
58 1150 170
3890 6972 199
3890 13401 199
9020 6898 40
4545 4158 114
489 4480 40
7379 11648 88
6639 11005 40
4946 8578 40
4946 11440 40
4946 5248 374
355 3878 592
75 1147 114
2478 481 167
3244 9736 129
8766 2189 148
3191 1209 252
5571 4468 40
1288 4721 106
3400 2169 137
3400 13383 137
833 8884 263
833 3635 263
833 476 318
2493 4940 277
2017 5300 40
6915 11181 660
586 4908 266
12816 8224 106
385 8384 129
385 4481 129
5007 3182 623
1432 4514 114
2432 2433 297
2555 6175 340
504 14807 170
1111 450 203
4467 10254 155
11801 8308 253
239 1971 114
1708 3217 155
5399 931 933
5399 13058 957
5592 9960 161
36 5527 350
36 1774 350
36 2927 350
36 2881 18
36 2478 18
36 9495 18
36 4575 18
36 11384 18
36 5526 18
36 12896 18
36 8034 18
36 6873 18
36 6248 18
36 13818 18
36 6320 18
36 4281 18
2028 7660 114
2028 631 114
6975 3111 954
12122 11086 311
9230 10983 40
6530 4807 167
7471 1707 129
1389 6903 251
1474 457 129
1474 9012 129
2934 5413 315
7594 2446 1
6261 641 518
403 6202 91
403 7245 91
403 8120 91
403 6645 91
403 10065 91
403 7529 91
403 3145 91
403 11548 91
403 9037 91
4954 9417 40
13308 3755 622
12524 4255 315
4645 450 268
6594 8051 82
991 6693 40
715 11803 297
715 190 297
715 7505 297
3581 2890 98
6957 11273 88
301 8914 407
10479 6903 324
8895 5593 148
101 1676 91
101 8055 91
101 7131 91
101 4350 91
101 6607 91
101 4599 91
101 2848 91
101 14310 91
101 1465 91
101 6953 91
101 9900 91
101 11197 91
101 3044 91
101 14727 91
101 6539 91
101 4859 91
4824 2089 286
4824 3542 286
4547 366 735
2889 6941 203
5341 6899 148
5341 7815 148
5135 12038 289
6534 658 484
3120 9804 623
1612 11473 88
375 2877 113
1278 1129 342
1278 9827 342
2797 6394 203
163 7358 289
569 6525 185
225 6576 560
264 6107 88
1112 91 155
8704 8705 1
8704 2288 1
3789 873 221
1725 342 114
7611 2757 234
2667 11500 664
2241 7225 199
2241 8103 199
2241 14532 199
5113 4606 518
8053 11136 114
4875 4199 98
1260 9943 520
332 6186 113
4263 1535 129
1464 3337 137
5199 1513 155
9967 13303 40
5286 4998 622
635 11269 102
2082 10821 595
1818 1433 286
10109 11115 391
2731 2517 18
8778 8862 268
7065 7308 696
14792 11070 556
3510 10107 277
13374 1437 114
10206 1276 148
1034 13147 102
5924 7034 155
2805 2802 361
6458 12146 40
6102 6045 1
11839 1611 1075
8076 6315 148
8594 5783 40
6694 5191 148
4879 5728 155
7700 10026 324
2448 10189 129
13768 11136 88
2553 5682 277
5977 9120 40
4227 14037 199
2689 538 649
1875 3505 203
3939 6895 114
4873 11144 40
89 5155 40
608 5706 203
1498 5782 252
5585 40 203
2482 6287 886
646 3056 98
2196 1571 252
657 1664 114
657 2496 114
657 6592 106
58 2926 792
58 13995 527
58 981 297
58 10733 170
4545 7881 114
4990 3097 465
4690 9960 277
1732 14068 170
7289 3086 106
10333 4302 203
10715 10821 357
8089 8090 365
1902 6255 373
5442 3019 1
6769 4196 148
4964 8032 40
4702 2641 692
6022 5721 289
4946 8539 40
4503 7046 137
5702 945 167
6204 10560 114
3356 10397 155
2044 6705 297
11298 12344 794
13340 5410 114
5571 1692 40
5064 8810 505
1836 10519 438
1836 2426 438
1836 931 438
4950 7268 301
149 904 520
1113 4387 888
1207 7487 376
13974 10600 167
10588 4579 559
8911 3305 203
4197 269 421
4197 13458 421
13089 2018 488
36 14073 350
36 13260 350
36 9950 18
36 9081 18
36 2520 18
36 12491 18
36 10544 18
36 8397 18
36 10644 18
36 5956 18
36 8890 18
36 12515 18
36 1690 18
36 13526 18
36 4546 18
36 12854 18
36 27 18
36 14595 18
36 2981 18
36 6696 18
5305 10814 129
2956 76 989
2773 4082 361
1992 6159 148
6818 664 179
8830 639 40
8830 95 40
6187 2662 106
1389 288 40
6548 13775 40
2503 3538 167
1474 3299 129
1474 8034 129
1474 8876 129
1474 1351 129
1474 3226 129
1037 5077 98
6412 2797 161
4585 3255 129
11142 4577 562
13990 9454 40
9093 9092 790
403 2597 91
403 6021 91
403 9074 91
403 7333 91
403 11587 91
403 8499 91
403 260 91
403 3778 91
403 8188 91
403 12373 91
403 943 91
403 14454 91
403 4885 91
403 436 91
403 13517 91
6037 4575 1
7650 8360 114
8874 3433 155
9483 211 732
2249 6839 167
12512 489 113
1677 6134 203
1919 2854 98
8867 10837 314
10227 58 917
8262 1647 40
3384 1285 1
301 449 286
101 1153 91
101 9352 91
101 12616 91
101 2207 91
101 8468 91
101 9804 91
101 9647 91
101 11744 91
101 3033 91
101 1506 91
101 8656 91
10276 7396 40
4407 8701 129
12067 12439 40
4547 10594 735
7478 2519 601
4652 14280 129
5071 9935 512
11914 7581 114
7254 6164 82
6749 1992 114
15 1832 203
3120 263 623
12209 797 851
4166 12446 199
7529 7660 315
11706 14571 808
375 2464 113
375 8648 113
375 404 113
375 561 113
1278 3270 342
1278 5603 342
1278 6878 342
1278 1275 342
6279 4597 137
569 11333 185
6502 6370 98
2766 13 792
443 5864 894
6847 1844 221
1359 966 203
2241 10617 199
11667 3948 556
12476 3513 277
6603 3282 88
6356 12239 203
73 5087 33
731 935 155
5723 1611 88
397 12909 114
732 7323 315
6618 12212 137
1818 3923 494
1818 1296 286
1818 6931 649
2731 10888 18
2731 9411 18
80 12160 329
1534 7502 256
1534 1582 256
4929 7115 155
49 5540 114
5105 2218 221
4659 3543 329
5416 9783 717
10543 6871 314
2393 1721 340
4873 2222 40
1230 6667 208
89 11784 40
10379 269 481
514 3071 155
11300 8133 82
1228 5123 98
1955 3347 203
7602 6212 129
657 10915 114
657 704 114
58 13576 170
58 137 170
58 14338 170
58 1798 299
3890 14255 199
3890 5461 199
3890 7968 199
3890 10463 199
11551 2715 203
6633 3267 114
10358 973 253
3531 8030 1
4964 2928 40
5649 3783 716
2450 6395 161
3572 10742 143
3572 7512 143
76 2907 796
11673 7786 221
3244 7524 129
3191 5892 252
1930 9493 1
5571 418 40
11883 8887 114
833 11218 318
4154 2621 114
6915 4942 660
1411 13270 1
5787 11481 277
13814 5035 98
2705 8928 88
5749 13824 299
7320 11443 1
1111 3598 167
7805 94 208
8911 1428 203
4197 2022 481
36 5863 350
36 3380 18
36 13358 18
36 12398 18
36 10446 18
36 6084 18
36 5403 18
36 6734 18
36 4929 18
36 2434 18
36 4570 18
36 6131 18
36 10202 18
36 5148 18
36 4147 18
4135 5939 329
2028 2503 114
1992 1306 148
12078 9822 114
6186 3833 277
1474 3902 129
1474 7471 717
10405 1050 203
6559 14751 808
5418 7565 155
403 6879 91
403 11409 91
403 9479 91
403 5061 91
403 3775 91
403 7999 91
403 2302 91
403 9699 91
403 4636 91
403 9320 91
403 3364 91
403 14263 91
403 9123 91
403 13255 91
8626 313 221
2840 10379 481
715 1746 297
3656 235 315
9767 1884 167
101 2822 91
101 4240 91
101 6609 91
101 12703 91
101 3728 91
101 13769 91
101 2802 91
101 12855 91
101 7816 91
101 7338 91
101 11008 91
101 2022 91
101 1811 91
101 4437 91
4407 6063 129
10237 1247 40
8510 2139 252
3072 1408 98
9157 3761 324
3120 4476 623
9 8423 665
3256 6370 33
4931 9839 329
375 3612 113
1278 6208 342
11460 9937 167
4701 1298 520
7769 7770 82
4366 3181 1
7161 13433 560
4086 8739 167
1043 7301 170
4777 5657 114
332 3401 113
4155 2001 716
4263 3449 129
1531 3562 155
635 14593 102
8354 6585 315
344 8628 489
344 5451 106
12944 14154 496
9895 11492 253
3405 7576 113
2731 4311 18
2731 7347 18
1966 1965 114
13692 8117 114
8341 1437 1
9280 5144 252
11762 10583 106
4659 10955 329
11839 4358 1075
6571 8820 1
4047 4048 373
2076 4816 129
10615 4527 221
4723 7083 481
608 2468 98
1498 5941 252
8153 10419 324
2379 13481 199
12256 11446 98
14478 2716 179
657 7776 114
657 4636 114
58 1562 170
58 2561 299
58 472 541
9507 3758 82
5892 1111 40
4335 2244 40
12646 1950 167
3727 641 268
9621 550 88
12788 1136 256
6639 8282 40
7070 3550 114
355 1896 592
4503 9967 137
3244 5679 129
3244 6565 129
5571 10655 40
4036 5426 40
833 4814 318
833 3531 318
5246 8945 98
7087 517 1
385 275 129
13814 2365 221
11294 6442 277
8037 5062 40
5006 12285 1
4044 3033 40
12290 3945 340
2219 2218 203
1477 4437 98
36 10835 350
36 12985 350
36 13232 350
36 3619 18
36 12591 18
36 4082 18
36 8322 18
36 881 18
36 6246 18
36 2723 18
36 12435 18
2028 4265 114
1992 2500 148
2370 271 1
7432 316 520
913 12464 114
2148 1643 481
1474 2956 129
798 6085 663
3094 4708 1
4537 1097 98
7532 3760 1
4218 3482 888
403 9323 91
403 10600 91
403 2681 91
403 11108 91
403 14406 91
403 5953 91
403 1581 91
403 5327 91
403 11829 91
1300 1000 541
6409 1897 167
11718 6709 88
5472 6308 33
5281 3381 143
859 1534 113
1542 3476 441
11620 13825 481
6957 7110 88
8975 601 155
6115 6661 40
101 5879 91
101 8946 91
101 9761 91
101 11430 91
101 1979 91
101 15 91
101 5783 91
101 8007 91
101 10981 91
101 5100 91
101 9859 91
10276 8595 40
8512 7302 40
4824 3911 286
4547 5217 735
4547 2157 735
4547 7527 735
7478 5669 601
4652 11057 129
7419 2679 376
1054 8098 208
5504 7713 248
6534 2561 484
6826 3228 155
9 3177 500
3504 793 148
4166 11647 199
375 11362 113
1278 4632 342
2797 3071 167
545 5546 143
4574 10333 622
12739 4160 114
648 8843 167
2387 2798 253
2241 13978 199
1043 12263 102
332 331 113
4263 4300 129
1464 8122 137
1063 8358 106
342 2571 268
1277 10906 297
1181 7010 560
635 11068 102
1209 2874 374
753 8122 374
3050 10479 167
1818 67 494
1818 455 286
1818 1606 301
1818 8395 301
1818 2126 301
10109 2890 391
2731 4753 18
9280 9782 252
9280 6690 252
49 8913 297
12093 5040 40
3521 9346 289
7299 5063 88
5964 10723 827
11125 14034 114
8076 8004 167
5698 12317 114
10748 5711 40
7142 11526 900
2689 9591 248
5453 6657 179
10891 10162 82
2076 2855 129
2076 5185 129
2076 8190 129
971 8631 221
1230 6124 208
2931 5826 314
10969 10908 256
1509 11207 40
11381 10357 129
1498 7571 252
1498 8229 252
1514 3209 520
9523 8332 796
17 9445 238
278 1649 102
3231 4738 40
646 5774 148
1785 2432 297
58 1137 792
58 4496 102
5817 5818 847
5892 7150 277
5447 10290 148
2720 12208 1306
8656 11971 114
3558 2310 1
382 2831 340
3557 11216 288
3880 10862 203
7379 2678 106
4024 13066 289
5687 8322 40
4946 8250 481
8057 5438 417
2352 2737 148
2092 7941 481
2450 6395 277
789 10450 487
6202 6119 40
60 6397 148
3244 5515 129
3244 3267 129
4036 8392 40
5064 3628 505
1836 6326 438
3400 11755 137
833 4105 263
833 2475 263
833 8200 318
1411 12731 1
504 11692 170
5749 13824 297
14342 7725 114
1621 2894 129
3472 6748 40
10262 2237 98
12763 9057 155
36 6780 350
36 10975 350
36 9983 18
36 2826 18
36 4078 18
36 8613 18
36 7215 18
36 11109 18
36 13085 18
36 7375 18
36 11276 18
36 7373 18
36 11306 18
36 8102 18
36 10524 18
2940 4867 167
10899 10687 373
2028 9337 114
650 6797 203
2773 3463 361
3288 5636 96
11129 675 114
6187 7691 625
12115 7863 405
1474 6113 129
10709 1956 129
2107 13316 102
3960 7440 1
2018 1318 114
2775 6600 481
6778 472 314
10772 6847 40
403 7225 91
403 1504 91
403 3675 91
403 3384 91
403 4125 91
403 3972 91
403 6969 91
403 9751 91
13944 7475 167
13308 13345 622
2684 1905 161
101 7052 91
101 856 91
101 13514 91
101 6758 91
101 479 91
101 6404 91
101 2791 91
101 1691 91
101 11344 91
101 11475 91
101 13726 91
101 997 91
101 14549 91
101 4028 91
101 11755 91
101 12077 91
101 1862 91
2706 10689 88
4824 6375 96
8723 3825 114
5887 11401 129
10153 4328 401
103 7437 289
1054 1731 208
15 7340 167
12409 13904 40
2882 3565 234
8035 11215 626
8838 1496 735
1278 251 342
8256 2449 98
569 8006 185
4701 6070 520
14346 13715 114
5574 10563 329
4357 3921 462
8053 8479 114
332 7601 113
2003 2143 1
5606 1993 481
10118 11675 441
10173 5694 155
9550 38 520
1820 9363 494
6863 6803 155
1415 14214 243
1818 8783 286
1818 6542 286
1818 3550 301
2731 10633 18
2731 1147 18
8596 75 1037
7895 7590 203
979 5454 199
8918 3653 98
1534 6093 256
1534 3346 256
1534 6259 256
333 1485 525
1508 8016 106
6057 4782 203
8749 4757 179
9280 10156 252
2805 7128 161
583 5134 114
8893 6314 976
14456 2328 405
4017 7985 660
4017 4019 660
176 11647 299
8881 2881 443
8264 14198 633
7883 8036 1
11365 11359 114
13331 8459 114
2812 13362 114
11942 9317 167
882 6442 88
4483 4484 329
2640 8061 129
690 4018 167
10969 12408 256
5820 4453 520
3280 7374 289
17 9458 500
2379 658 199
8709 9348 203
657 5530 764
58 7643 277
58 6932 170
58 11023 790
3890 6204 199
2489 11960 331
500 499 148
2074 9510 254
6633 8036 114
3700 5953 114
8 8938 357
5892 10208 277
12374 2929 167
3531 2517 1
416 3788 484
191 4003 635
10642 5170 106
4946 1875 40
4503 13805 137


================================================
FILE: ConvKB_pytorch/benchmarks/FB15K/entity2id.txt
================================================
14951
/m/027rn	0
/m/06cx9	1
/m/017dcd	2
/m/06v8s0	3
/m/07s9rl0	4
/m/0170z3	5
/m/01sl1q	6
/m/044mz_	7
/m/0cnk2q	8
/m/02nzb8	9
/m/02_j1w	10
/m/01cwm1	11
/m/059ts	12
/m/03h_f4	13
/m/011yn5	14
/m/01pjr7	15
/m/04nrcg	16
/m/02sdk9v	17
/m/07nznf	18
/m/014lc_	19
/m/05cvgl	20
/m/04kxsb	21
/m/02qyp19	22
/m/02d413	23
/m/02vk52z	24
/m/01crd5	25
/m/0q9kd	26
/m/0184jc	27
/m/09w1n	28
/m/0sx8l	29
/m/03q5t	30
/m/07y_7	31
/m/0gqng	32
/m/073hkh	33
/m/0b76d_m	34
/m/029j_	35
/m/04ztj	36
/m/012v9y	37
/m/014_x2	38
/m/012ljv	39
/m/0ds35l9	40
/m/05r4w	41
/m/015qsq	42
/m/02bjrlw	43
/m/04bdxl	44
/m/02s2ft	45
/m/0677ng	46
/m/03ckfl9	47
/m/03zj9	48
/m/059rby	49
/m/0157m	50
/m/063k3h	51
/m/01ky7c	52
/m/019v9k	53
/m/02681vq	54
/m/01w61th	55
/m/0gx_st	56
/m/09qvf4	57
/m/09c7w0	58
/m/0rs6x	59
/m/079vf	60
/m/0d90m	61
/m/015zyd	62
/m/05vsxz	63
/m/06qgvf	64
/m/04ljl_l	65
/m/03qcfvw	66
/m/0grwj	67
/m/05sxg2	68
/m/02ynfr	69
/m/0bmch_x	70
/m/018gz8	71
/m/071wvh	72
/m/02cqbx	73
/m/01jr4j	74
/m/0rh6k	75
/m/01lp8	76
/m/03mqtr	77
/m/04j4tx	78
/m/01z4y	79
/m/07zhjj	80
/m/05d7rk	81
/m/027dtxw	82
/m/05hs4r	83
/m/01pbxb	84
/m/03qcq	85
/m/084w8	86
/m/01lxd4	87
/m/0f0y8	88
/m/08815	89
/m/05bnp0	90
/m/08nhfc1	91
/m/015c4g	92
/m/0g56t9t	93
/m/062zm5h	94
/m/0c9xjl	95
/m/0160w	96
/m/0dbpyd	97
/m/053y0s	98
/m/0dz3r	99
/m/016qtt	100
/m/08mbj5d	101
/m/0b005	102
/m/07c52	103
/m/023wyl	104
/m/02wvfxl	105
/m/09sh8k	106
/m/09zzb8	107
/m/05pd94v	108
/m/01vvydl	109
/m/0gtsx8c	110
/m/0jgd	111
/m/02r0csl	112
/m/0m313	113
/m/02y_lrp	114
/m/012d40	115
/m/034qmv	116
/m/07rlg	117
/m/0b90_r	118
/m/0g22z	119
/m/05b4l5x	120
/m/018js4	121
/m/07fq1y	122
/m/02qgqt	123
/m/081jbk	124
/m/02v5xg	125
/m/028q6	126
/m/02581q	127
/m/0sxg4	128
/m/04xvlr	129
/m/02gpkt	130
/m/0fvf9q	131
/m/0l6qt	132
/m/07jnt	133
/m/0gq9h	134
/m/0jz9f	135
/m/083shs	136
/m/0njvn	137
/m/09nqf	138
/m/0d1tm	139
/m/0154j	140
/m/01br2w	141
/m/0kq0q	142
/m/03rl1g	143
/m/05kkh	144
/m/0fbq2n	145
/m/01r3hr	146
/m/060bp	147
/m/027nb	148
/m/04qvl7	149
/m/02vxq9m	150
/m/04yywz	151
/m/0140g4	152
/m/01k7d9	153
/m/0k049	154
/m/02p65p	155
/m/0337vz	156
/m/06688p	157
/m/0b2v79	158
/m/01c0cc	159
/m/01jc6q	160
/m/040njc	161
/m/01wmjkb	162
/m/0l14j_	163
/m/0520r2x	164
/m/0cb77r	165
/m/06j0md	166
/m/05mgj0	167
/m/0ds6bmk	168
/m/05b3ts	169
/m/0g2hw4	170
/m/0ktpx	171
/m/0hqcy	172
/m/01453	173
/m/0dgrmp	174
/m/028_yv	175
/m/03rjj	176
/m/01914	177
/m/0bpbhm	178
/m/0219x_	179
/m/05_6_y	180
/m/02b15h	181
/m/0487c3	182
/m/01j_jh	183
/m/03c7ln	184
/m/026t6	185
/m/01qvcr	186
/m/04sv4	187
/m/02pprs	188
/m/07s3vqk	189
/m/0d05q4	190
/m/0784z	191
/m/0kwgs	192
/m/0gyh	193
/m/01gbbz	194
/m/035ktt	195
/m/05zjtn4	196
/m/0197tq	197
/m/02_fm2	198
/m/02x73k6	199
/m/02lxrv	200
/m/0411q	201
/m/016z4k	202
/m/06151l	203
/m/06gp3f	204
/m/01fq7	205
/m/06w99h3	206
/m/02kdv5l	207
/m/05cljf	208
/m/08r4x3	209
/m/03_6y	210
/m/0342h	211
/m/0l14qv	212
/m/02bfmn	213
/m/05zppz	214
/m/01rk91	215
/m/0kc6x	216
/m/01xdf5	217
/m/04t2l2	218
/m/02rc4d	219
/m/0g0vx	220
/m/027qgy	221
/m/06cvj	222
/m/01mkq	223
/m/011xy1	224
/m/0dxtw	225
/m/0cq86w	226
/m/02v8kmz	227
/m/01j5ts	228
/m/06_kh	229
/m/02hrh1q	230
/m/01pcz9	231
/m/0dwl2	232
/m/01mw1	233
/m/02wh75	234
/m/0lbj1	235
/m/0c3ybss	236
/m/03cfjg	237
/m/0jzphpx	238
/m/06wxw	239
/m/0jbk9	240
/m/0np9r	241
/m/05bp8g	242
/m/01l1b90	243
/m/0p07l	244
/m/01vsl	245
/m/02vp1f_	246
/m/03_3d	247
/m/060__y	248
/m/03rtz1	249
/m/09m6kg	250
/m/02h40lc	251
/m/047q2k1	252
/m/05p553	253
/m/01ypc	254
/m/02dwn9	255
/m/01k_0fp	256
/m/0m77m	257
/m/02_h0	258
/m/06dv3	259
/m/01rtm4	260
/m/02rchht	261
/m/0dq3c	262
/m/02zs4	263
/m/0qcr0	264
/m/0byfz	265
/m/06wzvr	266
/m/01q03	267
/m/01qn7n	268
/m/016tw3	269
/m/03tps5	270
/m/0cnl80	271
/m/083chw	272
/m/0gq_v	273
/m/0k2cb	274
/m/0qf43	275
/m/0f_nbyh	276
/m/0c0yh4	277
/m/0d060g	278
/m/05qt0	279
/m/0nk72	280
/m/01s3kv	281
/m/03g90h	282
/m/02_n3z	283
/m/06cqb	284
/m/0827d	285
/m/02tkzn	286
/m/014zcr	287
/m/05m63c	288
/m/0yyg4	289
/m/027c924	290
/m/03f0324	291
/m/040rjq	292
/m/01tv3x2	293
/m/06rqw	294
/m/0h3y	295
/m/0rtv	296
/m/090s_0	297
/m/0h0jz	298
/m/05znxx	299
/m/02hsq3m	300
/m/01kyvx	301
/m/0ckr7s	302
/m/02ky346	303
/m/0p5wz	304
/m/05ztjjw	305
/m/01gc7	306
/m/04j53	307
/m/04h68j	308
/m/01g3gq	309
/m/01vw87c	310
/m/01gbcf	311
/m/016clz	312
/m/011yrp	313
/m/0d0vqn	314
/m/049tjg	315
/m/05jf85	316
/m/046f25	317
/m/0d1swh	318
/m/0b_fw	319
/m/04t36	320
/m/011yxg	321
/m/05v38p	322
/m/03hkv_r	323
/m/025twgf	324
/m/0clpml	325
/m/0t015	326
/m/0j1z8	327
/m/07t65	328
/m/0162b	329
/m/0fnb4	330
/m/0hd7j	331
/m/07xl34	332
/m/02sgy	333
/m/0l14md	334
/m/05r5c	335
/m/0c9d9	336
/m/02g8h	337
/m/01d_h8	338
/m/0c_j9x	339
/m/0n5j_	340
/m/0fm9_	341
/m/04cy8rb	342
/m/07gp9	343
/m/081pw	344
/m/0gzy02	345
/m/05g8ky	346
/m/026wp	347
/m/049nq	348
/m/03t5kl	349
/m/01f2q5	350
/m/01jssp	351
/m/01l849	352
/m/0f4y_	353
/m/08959	354
/m/0789n	355
/m/01fkr_	356
/m/0mdxd	357
/m/03x3qv	358
/m/0g60z	359
/m/0d_84	360
/m/01hr1	361
/m/042l3v	362
/m/026mg3	363
/m/01s695	364
/m/0gtt5fb	365
/m/0tj9	366
/m/0hx4y	367
/m/01y_px	368
/m/0k611	369
/m/0hsmh	370
/m/0gx1bnj	371
/m/04gzd	372
/m/04njml	373
/m/07hgkd	374
/m/060c4	375
/m/04wlz2	376
/m/01531	377
/m/0b1f49	378
/m/01gxqf	379
/m/01_qc_	380
/m/0g5lhl7	381
/m/0c55fj	382
/m/09xbpt	383
/m/0yxl	384
/m/02w7gg	385
/m/05f4m9q	386
/m/08lr6s	387
/m/09y20	388
/m/01nrnm	389
/m/0hl3d	390
/m/0cbd2	391
/m/0495ys	392
/m/05l2z4	393
/m/018wrk	394
/m/02bkg	395
/m/0h5f5n	396
/m/04jpl	397
/m/047gn4y	398
/m/05ty4m	399
/m/0bvn25	400
/m/026p4q7	401
/m/044lyq	402
/m/08mbj32	403
/m/01vs5c	404
/m/09jvl	405
/m/01g888	406
/m/01vrx3g	407
/m/01lmj3q	408
/m/0m27n	409
/m/0qpjt	410
/m/01c8w0	411
/m/023361	412
/m/01qbg5	413
/m/017fp	414
/m/01gsvb	415
/m/0b3wk	416
/m/01wbg84	417
/m/01r42_g	418
/m/0gr51	419
/m/04gcd1	420
/m/0ddfwj1	421
/m/0sv6n	422
/m/02wkmx	423
/m/02vxn	424
/m/0f5hyg	425
/m/0126y2	426
/m/01flzq	427
/m/05cj4r	428
/m/0bfvw2	429
/m/023tp8	430
/m/0dq626	431
/m/07sbbz2	432
/m/089tm	433
/m/0m2wm	434
/m/02zq43	435
/m/086k8	436
/m/0czyxs	437
/m/014635	438
/m/02kz_	439
/m/0ds3t5x	440
/m/02896	441
/m/05krk	442
/m/0chghy	443
/m/0l6vl	444
/m/02tcgh	445
/m/0kvgnq	446
/m/02x4wr9	447
/m/017z49	448
/m/08p1gp	449
/m/0dnvn3	450
/m/07zrf	451
/m/016fyc	452
/m/01qscs	453
/m/01p7yb	454
/m/0p_pd	455
/m/0bl2g	456
/m/0prfz	457
/m/02pw_n	458
/m/09l3p	459
/m/0k9ctht	460
/m/0fr63l	461
/m/01q_ph	462
/m/05zr6wv	463
/m/0cjk9	464
/m/047lj	465
/m/0s3y5	466
/m/09fb5	467
/m/07xtqq	468
/m/05hcy	469
/m/0jg1q	470
/m/0c5lg	471
/m/0rlz	472
/m/02ktt7	473
/m/03w5xm	474
/m/026v437	475
/m/080dwhx	476
/m/0150jk	477
/m/05bt6j	478
/m/01k1k4	479
/m/09gq0x5	480
/m/0gtv7pk	481
/m/01ls2	482
/m/02rbdlq	483
/m/0l8v5	484
/m/095zlp	485
/m/06q2q	486
/m/0hnws	487
/m/09wj5	488
/m/0m4yg	489
/m/09c7b	490
/m/0hhbr	491
/m/03ckxdg	492
/m/02_1q9	493
/m/09n48	494
/m/020qr4	495
/m/03_r3	496
/m/02lq67	497
/m/0fvwg	498
/m/0dtw1x	499
/m/01yznp	500
/m/01tvz5j	501
/m/03rs8y	502
/m/0h1cdwq	503
/m/03rt9	504
/m/0m2kd	505
/m/0jdk_	506
/m/064vjs	507
/m/042v_gx	508
/m/025cbm	509
/m/03qbh5	510
/m/09hnb	511
/m/052q4j	512
/m/01fxg8	513
/m/0146pg	514
/m/0bzjvm	515
/m/0cj_v7	516
/m/0jt90f5	517
/m/067xw	518
/m/025h4z	519
/m/0bp_b2	520
/m/02fqrf	521
/m/057xs89	522
/m/05y7hc	523
/m/01g0p5	524
/m/032xhg	525
/m/034qrh	526
/m/050023	527
/m/026dcvf	528
/m/0195fx	529
/m/0m_sb	530
/m/023rwm	531
/m/01t_xp_	532
/m/021gt5	533
/m/01dw4q	534
/m/02qjj7	535
/m/0436f4	536
/m/02lfcm	537
/m/024jwt	538
/m/03v1s	539
/m/0plyy	540
/m/0z4s	541
/m/099jhq	542
/m/0sx7r	543
/m/03tmr	544
/m/02j9z	545
/m/05qhw	546
/m/0h1_w	547
/m/0789_m	548
/m/054_mz	549
/m/039n1	550
/m/099bk	551
/m/05qd_	552
/m/0jsf6	553
/m/0fp_v1x	554
/m/09bjv	555
/m/05zkcn5	556
/m/03mh94	557
/m/022r38	558
/m/05k17c	559
/m/05vsb7	560
/m/01pl14	561
/m/01vx2h	562
/m/06x43v	563
/m/01wv9xn	564
/m/02snj9	565
/m/0dckvs	566
/m/02r96rf	567
/m/0223bl	568
/m/02vx4	569
/m/0clfdj	570
/m/09fqtq	571
/m/0m0jc	572
/m/01pfr3	573
/m/07w21	574
/m/041h0	575
/m/027z0pl	576
/m/0drnwh	577
/m/058kh7	578
/m/0mfj2	579
/m/06nnj	580
/m/0bynt	581
/m/01rrwf6	582
/m/02dtg	583
/m/052nd	584
/m/03ys48	585
/m/07y9k	586
/m/03gqb0k	587
/m/015pdg	588
/m/0m2l9	589
/m/04gxf	590
/m/0b_6q5	591
/m/0ds11z	592
/m/02rjjll	593
/m/032nwy	594
/m/0159h6	595
/m/0h5g_	596
/m/04rs03	597
/m/0flw86	598
/m/04f2zj	599
/m/0285c	600
/m/04tz52	601
/m/03w1v2	602
/m/025_64l	603
/m/01_9c1	604
/m/05f4vxd	605
/m/09v6gc9	606
/m/0c4f4	607
/m/0bxtg	608
/m/0fd3y	609
/m/0m19t	610
/m/04v8x9	611
/m/0p9sw	612
/m/02vs3x5	613
/m/05t54s	614
/m/087c7	615
/m/01tlmw	616
/m/04w8f	617
/m/06z6r	618
/m/02wb6yq	619
/m/0ggx5q	620
/m/0bdwqv	621
/m/02ldv0	622
/m/0g5qs2k	623
/m/03ds3	624
/m/0j_t1	625
/m/03_d0	626
/m/026ps1	627
/m/02g3v6	628
/m/0ds33	629
/m/02x4w6g	630
/m/04zn7g	631
/m/04wqr	632
/m/0chsq	633
/m/0dscrwf	634
/m/07ssc	635
/m/01vrkdt	636
/m/01rr9f	637
/m/02zsn	638
/m/058ncz	639
/m/03zqc1	640
/m/016z5x	641
/m/07f8wg	642
/m/018d6l	643
/m/0xhtw	644
/m/01dys	645
/m/0343h	646
/m/0dtfn	647
/m/0147dk	648
/m/01h7bb	649
/m/06cv1	650
/m/02jknp	651
/m/01lz4tf	652
/m/05148p4	653
/m/0wzm	654
/m/07gvx	655
/m/019tzd	656
/m/02_286	657
/m/07b_l	658
/m/0b6tzs	659
/m/017149	660
/m/05hj0n	661
/m/04lgymt	662
/m/02g3gj	663
/m/0gydcp7	664
/m/02_j7t	665
/m/0lhp1	666
/m/0dxtg	667
/m/04j_gs	668
/m/06dr9	669
/m/05wh0sh	670
/m/09v82c0	671
/m/03cv_gy	672
/m/06cs95	673
/m/02nb2s	674
/m/02lhm2	675
/m/014ktf	676
/m/0mdqp	677
/m/09bcm	678
/m/05qc_	679
/m/09h4b5	680
/m/09l65	681
/m/09sb52	682
/m/0gq6s3	683
/m/04nl83	684
/m/0309lm	685
/m/0wq3z	686
/m/02rgz4	687
/m/018jk2	688
/m/0gkvb7	689
/m/03f2_rc	690
/m/07lmxq	691
/m/06jzh	692
/m/01k_r5b	693
/m/0131kb	694
/m/02q5bx2	695
/m/07z2lx	696
/m/017s11	697
/m/0djb3vw	698
/m/01ty7ll	699
/m/033hqf	700
/m/0gqy2	701
/m/02vm9nd	702
/m/02qrv7	703
/m/03kpvp	704
/m/094vy	705
/m/02jx1	706
/m/04bs3j	707
/m/032l1	708
/m/019z7q	709
/m/01j_9c	710
/m/02mslq	711
/m/014x77	712
/m/01ln5z	713
/m/05v8c	714
/m/02j71	715
/m/0dm5l	716
/m/011k1h	717
/m/0n4m5	718
/m/0ydpd	719
/m/02211by	720
/m/01c6k4	721
/m/0c1pj	722
/m/03h_yy	723
/m/0151ns	724
/m/02gdjb	725
/m/05p1tzf	726
/m/0f830f	727
/m/027dtv3	728
/m/02mhfy	729
/m/0dcqh	730
/m/01nqfh_	731
/m/0bk1p	732
/m/01cf93	733
/m/03_2y	734
/m/0mb8c	735
/m/01wl38s	736
/m/01d38g	737
/m/06cc_1	738
/m/02cg41	739
/m/03t5n3	740
/m/07c6l	741
/m/060v34	742
/m/03npn	743
/m/0bth54	744
/m/016tt2	745
/m/02bxjp	746
/m/05dxl_	747
/m/040vk98	748
/m/01zkxv	749
/m/0cb4j	750
/m/01fpvz	751
/m/05m_8	752
/m/06pwq	753
/m/01sxly	754
/m/05zvj3m	755
/m/0bksh	756
/m/08vq2y	757
/m/0d35y	758
/m/0fz20l	759
/m/0gqwc	760
/m/015fr	761
/m/0l98s	762
/m/018dnt	763
/m/050r1z	764
/m/07g2b	765
/m/0184jw	766
/m/011yqc	767
/m/0f4_2k	768
/m/0g2lq	769
/m/02p_7cr	770
/m/02_1rq	771
/m/02_1sj	772
/m/0g54xkt	773
/m/0bwh6	774
/m/0f4x7	775
/m/0n0bp	776
/m/0lzb8	777
/m/02grdc	778
/m/0ch6mp2	779
/m/01tl50z	780
/m/02lf0c	781
/m/0b60sq	782
/m/071h5c	783
/m/0ggq0m	784
/m/01vvy	785
/m/04ddm4	786
/m/02n4kr	787
/m/02825kb	788
/m/0kbws	789
/m/03hr1p	790
/m/0gr4k	791
/m/0bzk8w	792
/m/03s6l2	793
/m/0lsxr	794
/m/077g7n	795
/m/03ww_x	796
/m/07j87	797
/m/07_lq	798
/m/02002f	799
/m/06m_5	800
/m/0cqt41	801
/m/02x3lt7	802
/m/02k54	803
/m/03xq0f	804
/m/08nvyr	805
/m/019f4v	806
/m/02r6c_	807
/m/0njlp	808
/m/0p4wb	809
/m/01fc7p	810
/m/0bq0p9	811
/m/0xkq4	812
/m/03fykz	813
/m/0mkg	814
/m/0dwsp	815
/m/011yph	816
/m/025jfl	817
/m/09qwmm	818
/m/0fpkxfd	819
/m/09v42sf	820
/m/0br1x_	821
/m/07s6fsf	822
/m/02w2bc	823
/m/0fq27fp	824
/m/03kq98	825
/m/01gvr1	826
/m/0cpllql	827
/m/02nxhr	828
/m/01kwld	829
/m/02pt27	830
/m/0dl5d	831
/m/072kp	832
/m/0ckd1	833
/m/0c40vxk	834
/m/0gx9rvq	835
/m/02pp_q_	836
/m/0d4fqn	837
/m/02r22gf	838
/m/01cssf	839
/m/0298n7	840
/m/054g1r	841
/m/023fb	842
/m/06ryl	843
/m/014b4h	844
/m/08720	845
/m/032t2z	846
/m/0bqs56	847
/m/02q690_	848
/m/044rvb	849
/m/028tv0	850
/m/04rcr	851
/m/0127ps	852
/m/0jbp0	853
/m/04r7jc	854
/m/080nwsb	855
/m/013x0b	856
/m/0c7ct	857
/m/04htfd	858
/m/0dq_5	859
/m/020xn5	860
/m/04n52p6	861
/m/026p_bs	862
/m/06dl_	863
/m/02ndf1	864
/m/02p10m	865
/m/02qvgy	866
/m/0j5m6	867
/m/021sv1	868
/m/070m6c	869
/m/0kzy0	870
/m/0gbbt	871
/m/025p38	872
/m/04jwjq	873
/m/02cl1	874
/m/0jmdb	875
/m/0p3sf	876
/m/013y1f	877
/m/03lpp_	878
/m/083jv	879
/m/0401sg	880
/m/012gbb	881
/m/01psyx	882
/m/0n22z	883
/m/02r_d4	884
/m/0f4vx	885
/m/01pv51	886
/m/0jmj7	887
/m/0284n42	888
/m/0pc62	889
/m/01pgzn_	890
/m/0r0m6	891
/m/07gyv	892
/m/03lygq	893
/m/013tjc	894
/m/02p21g	895
/m/0dqytn	896
/m/064t9	897
/m/0168cl	898
/m/0jcgs	899
/m/0f2r6	900
/m/02jqjm	901
/m/018vs	902
/m/01pcdn	903
/m/0170_p	904
/m/033nzk	905
/m/06y9c2	906
/m/02pjc1h	907
/m/0170qf	908
/m/0cqhk0	909
/m/08wq0g	910
/m/01cv3n	911
/m/025vry	912
/m/0f2wj	913
/m/01ct6	914
/m/0g3zpp	915
/m/0bdw1g	916
/m/039fgy	917
/m/05b1610	918
/m/0dj0m5	919
/m/01cx_	920
/m/030xr_	921
/m/07mvp	922
/m/08c6k9	923
/m/0_b3d	924
/m/026g4l_	925
/m/01l29r	926
/m/0n5j7	927
/m/03176f	928
/m/06n7h7	929
/m/03ldxq	930
/m/01n5309	931
/m/09qvc0	932
/m/0fgpvf	933
/m/0209xj	934
/m/0gffmn8	935
/m/015lhm	936
/m/0209hj	937
/m/0kr5_	938
/m/06x68	939
/m/05kcgsf	940
/m/01f7v_	941
/m/01f8gz	942
/m/01vvycq	943
/m/024n3z	944
/m/08phg9	945
/m/03rg2b	946
/m/0187y5	947
/m/01z9_x	948
/m/0248jb	949
/m/07k53y	950
/m/02vkdwz	951
/m/09q23x	952
/m/020bv3	953
/m/02qggqc	954
/m/0fg04	955
/m/05gml8	956
/m/03m8lq	957
/m/05tg3	958
/m/02qpbqj	959
/m/07kb5	960
/m/02jcc	961
/m/01b30l	962
/m/0b_j2	963
/m/0hn10	964
/m/02py4c8	965
/m/0hmr4	966
/m/01csvq	967
/m/018wng	968
/m/01vlj1g	969
/m/04fzfj	970
/m/03_gd	971
/m/0gs9p	972
/m/03cp4cn	973
/m/01bgqh	974
/m/01bx35	975
/m/07s846j	976
/m/0d2b38	977
/m/03h4fq7	978
/m/02hczc	979
/m/0p07_	980
/m/05kj_	981
/m/0mx4_	982
/m/059f4	983
/m/05g3b	984
/m/09byk	985
/m/04306rv	986
/m/063g7l	987
/m/02g_6j	988
/m/05jx2d	989
/m/04zx3q1	990
/m/065y4w7	991
/m/03m73lj	992
/m/061681	993
/m/06w87	994
/m/041ly3	995
/m/0c_j5d	996
/m/02jyhv	997
/m/01w3v	998
/m/05qjt	999
/m/08f3b1	1000
/m/01vj9c	1001
/m/05q9g1	1002
/m/04dn09n	1003
/m/09q_6t	1004
/m/07lt7b	1005
/m/094qd5	1006
/m/026mfbr	1007
/m/07_dn	1008
/m/01hp5	1009
/m/0338lq	1010
/m/042rnl	1011
/m/02z3r8t	1012
/m/05gg4	1013
/m/06npd	1014
/m/09qj50	1015
/m/0kfpm	1016
/m/043djx	1017
/m/05fkf	1018
/m/03gwpw2	1019
/m/017gm7	1020
/m/0svqs	1021
/m/0274ck	1022
/m/0j7v_	1023
/m/06s6l	1024
/m/01hp22	1025
/m/0jf1b	1026
/m/0dzfdw	1027
/m/01r97z	1028
/m/02w0dc0	1029
/m/012cj0	1030
/m/012c6x	1031
/m/04fzk	1032
/m/01qhm_	1033
/m/06mzp	1034
/m/0l998	1035
/m/0p_sc	1036
/m/05kfs	1037
/m/0kbvb	1038
/m/096f8	1039
/m/02f6ym	1040
/m/01t110	1041
/m/037njl	1042
/m/0f8l9c	1043
/m/0glb5	1044
/m/01k2yr	1045
/m/099t8j	1046
/m/0lpjn	1047
/m/0bzm__	1048
/m/01qzyz	1049
/m/0gkz15s	1050
/m/02k_4g	1051
/m/0fbvqf	1052
/m/018ctl	1053
/m/0cj16	1054
/m/02ryz24	1055
/m/03hjv97	1056
/m/076lxv	1057
/m/08gsvw	1058
/m/0g1rw	1059
/m/01f2w0	1060
/m/0lpp8	1061
/m/087wc7n	1062
/m/0hzlz	1063
/m/018db8	1064
/m/035xwd	1065
/m/09p35z	1066
/m/0415svh	1067
/m/01n7q	1068
/m/024tcq	1069
/m/0jm3v	1070
/m/04s0m	1071
/m/03j43	1072
/m/02vxfw_	1073
/m/09zf_q	1074
/m/082_p	1075
/m/03hkch7	1076
/m/03hl6lc	1077
/m/07s467s	1078
/m/09jp3	1079
/m/0cwrr	1080
/m/01mvth	1081
/m/04yj5z	1082
/m/03ckwzc	1083
/m/03qd_	1084
/m/07z31v	1085
/m/0bdw6t	1086
/m/02fgpf	1087
/m/07hpv3	1088
/m/0vgkd	1089
/m/0vrmb	1090
/m/0cbv4g	1091
/m/03hj5vf	1092
/m/0jzw	1093
/m/03k9fj	1094
/m/01jfsb	1095
/m/0dsvzh	1096
/m/084qpk	1097
/m/058kqy	1098
/m/01mvjl0	1099
/m/0288zy	1100
/m/0785v8	1101
/m/05ml_s	1102
/m/04bd8y	1103
/m/09jwl	1104
/m/01qkqwg	1105
/m/02hxhz	1106
/m/05b__vr	1107
/m/064nh4k	1108
/m/07rhpg	1109
/m/0b73_1d	1110
/m/02kxbwx	1111
/m/0m_v0	1112
/m/0hcs3	1113
/m/06wpc	1114
/m/02l840	1115
/m/01wmxfs	1116
/m/02773nt	1117
/m/02773m2	1118
/m/02778pf	1119
/m/0pv2t	1120
/m/027986c	1121
/m/0164qt	1122
/m/02sg5v	1123
/m/016jhr	1124
/m/025xt8y	1125
/m/0bs5k8r	1126
/m/01yfm8	1127
/m/0p_47	1128
/m/042d1	1129
/m/060d2	1130
/m/02lfl4	1131
/m/0521rl1	1132
/m/01bh3l	1133
/m/01_vrh	1134
/m/01pcq3	1135
/m/03h_9lg	1136
/m/07p__7	1137
/m/01km6_	1138
/m/013n2h	1139
/m/02q_cc	1140
/m/0kx4m	1141
/m/034x61	1142
/m/066m4g	1143
/m/071t0	1144
/m/09x3r	1145
/m/02z13jg	1146
/m/016khd	1147
/m/05kjc6	1148
/m/0_3cs	1149
/m/0mwl2	1150
/m/02ndbd	1151
/m/06_wqk4	1152
/m/013kcv	1153
/m/019lwb	1154
/m/01r3y2	1155
/m/02h4rq6	1156
/m/0dhrqx	1157
/m/05jzt3	1158
/m/0hnlx	1159
/m/0fhp9	1160
/m/0ccd3x	1161
/m/03r8tl	1162
/m/05wdgq	1163
/m/088tb	1164
/m/01l7qw	1165
/m/018j2	1166
/m/01x15dc	1167
/m/01_gx_	1168
/m/0f04v	1169
/m/0l6ny	1170
/m/0dwvl	1171
/m/05x_5	1172
/m/02z9hqn	1173
/m/06mfvc	1174
/m/0ck27z	1175
/m/09qv3c	1176
/m/02nf2c	1177
/m/0152cw	1178
/m/06fq2	1179
/m/03bwzr4	1180
/m/09vw2b7	1181
/m/0hr3c8y	1182
/m/08w7vj	1183
/m/032_jg	1184
/m/06z8s_	1185
/m/0dky9n	1186
/m/0147sh	1187
/m/01yk13	1188
/m/0cqh46	1189
/m/02lk1s	1190
/m/047byns	1191
/m/0287477	1192
/m/0f4vbz	1193
/m/0dwtp	1194
/m/0kv2hv	1195
/m/04tc1g	1196
/m/0b13yt	1197
/m/01c_d	1198
/m/0bg539	1199
/m/0488g9	1200
/m/043h78	1201
/m/05lls	1202
/m/0pcc0	1203
/m/069ld1	1204
/m/01j5x6	1205
/m/04b4yg	1206
/m/0356lc	1207
/m/03tc8d	1208
/m/07tgn	1209
/m/0ymbl	1210
/m/048wrb	1211
/m/03m5k	1212
/m/01jfr3y	1213
/m/02vjzr	1214
/m/016vh2	1215
/m/03wbqc4	1216
/m/018mxj	1217
/m/0gs1_	1218
/m/0344gc	1219
/m/02x1dht	1220
/m/0fkvn	1221
/m/07cfx	1222
/m/04969y	1223
/m/02f72_	1224
/m/015f7	1225
/m/02h659	1226
/m/01v3s2_	1227
/m/0pz7h	1228
/m/01vksx	1229
/m/07fb8_	1230
/m/019rg5	1231
/m/0265v21	1232
/m/026dg51	1233
/m/02qrbbx	1234
/m/01srq2	1235
/m/081k8	1236
/m/073v6	1237
/m/031296	1238
/m/05zr0xl	1239
/m/03ym1	1240
/m/065b6q	1241
/m/0htlr	1242
/m/03cvwkr	1243
/m/01hxs4	1244
/m/02cttt	1245
/m/04sx9_	1246
/m/04411	1247
/m/0ddd9	1248
/m/01yb09	1249
/m/039c26	1250
/m/050rj	1251
/m/05p1dby	1252
/m/03fg0r	1253
/m/03t97y	1254
/m/02rcdc2	1255
/m/04gknr	1256
/m/07q1v4	1257
/m/018fq	1258
/m/0sxmx	1259
/m/06r_by	1260
/m/01sp81	1261
/m/0bn9sc	1262
/m/02279c	1263
/m/0f2rq	1264
/m/0b_6rk	1265
/m/027rwmr	1266
/m/03h26tm	1267
/m/02qflgv	1268
/m/01lbp	1269
/m/0r5y9	1270
/m/01hmnh	1271
/m/05fm6m	1272
/m/01jv_6	1273
/m/06b1q	1274
/m/0jgx	1275
/m/01qb5d	1276
/m/0vmt	1277
/m/04n6k	1278
/m/026ldz7	1279
/m/05zm34	1280
/m/01vdm0	1281
/m/01ycfv	1282
/m/02hv44_	1283
/m/017r2	1284
/m/03d_w3h	1285
/m/0fq9zdn	1286
/m/092vkg	1287
/m/04jjy	1288
/m/0963mq	1289
/m/03jn4	1290
/m/0k3p	1291
/m/02wzl1d	1292
/m/01hhvg	1293
/m/036hv	1294
/m/01hrqc	1295
/m/05zbm4	1296
/m/03_vx9	1297
/m/0cwy47	1298
/m/06f0dc	1299
/m/0hjy	1300
/m/01kwlwp	1301
/m/0n1h	1302
/m/0c75w	1303
/m/02vzc	1304
/m/02z1nbg	1305
/m/09z2b7	1306
/m/0r62v	1307
/m/0kcd5	1308
/m/05ztrmj	1309
/m/03f5spx	1310
/m/01tnxc	1311
/m/04q7r	1312
/m/03s0w	1313
/m/05sfs	1314
/m/0gg5qcw	1315
/m/04f525m	1316
/m/0fh694	1317
/m/03gm48	1318
/m/01vrnsk	1319
/m/01w40h	1320
/m/03j755	1321
/m/01s0ps	1322
/m/0d0vj4	1323
/m/012t_z	1324
/m/015grj	1325
/m/03pmty	1326
/m/01q7cb_	1327
/m/03knl	1328
/m/017gl1	1329
/m/0yfp	1330
/m/0gh65c5	1331
/m/02qzmz6	1332
/m/0dgd_	1333
/m/0738y5	1334
/m/04kkz8	1335
/m/04l3_z	1336
/m/06pk8	1337
/m/02r3cn	1338
/m/0glyyw	1339
/m/0dlngsd	1340
/m/03y_f8	1341
/m/075q_	1342
/m/0134w7	1343
/m/01bdxz	1344
/m/0537y_	1345
/m/03lfd_	1346
/m/02x8n1n	1347
/m/01d3n8	1348
/m/0hgxh	1349
/m/02_sr1	1350
/m/01vsn38	1351
/m/08jbxf	1352
/m/0d1pc	1353
/m/044mjy	1354
/m/0l2lk	1355
/m/05w3y	1356
/m/0m_mm	1357
/m/0k6nt	1358
/m/081lh	1359
/m/01tspc6	1360
/m/0151w_	1361
/m/0mw89	1362
/m/0mw93	1363
/m/01cr28	1364
/m/03mqj_	1365
/m/0p9lw	1366
/m/099tbz	1367
/m/0dn16	1368
/m/07qnf	1369
/m/067jsf	1370
/m/01jrz5j	1371
/m/01dtl	1372
/m/023vrq	1373
/m/01wyz92	1374
/m/015ynm	1375
/m/01795t	1376
/m/01m1dzc	1377
/m/013b2h	1378
/m/0yfvf	1379
/m/01gf5h	1380
/m/03gj2	1381
/m/01vv7sc	1382
/m/0f94t	1383
/m/06krf3	1384
/m/03c7tr1	1385
/m/05zvq6g	1386
/m/01m13b	1387
/m/01d5z	1388
/m/07w0v	1389
/m/01vn35l	1390
/m/0gvvf4j	1391
/m/03qbnj	1392
/m/0133x7	1393
/m/06mn7	1394
/m/0gr42	1395
/m/01bpnd	1396
/m/0bwfwpj	1397
/m/06n90	1398
/m/0jtdp	1399
/m/04dsnp	1400
/m/0456xp	1401
/m/04shbh	1402
/m/01fkv0	1403
/m/019_1h	1404
/m/0f0p0	1405
/m/0124jj	1406
/m/0g133	1407
/m/0dgq80b	1408
/m/0crfwmx	1409
/m/0ctw_b	1410
/m/0cskb	1411
/m/05fhy	1412
/m/04ych	1413
/m/012x4t	1414
/m/0c7g7	1415
/m/06c62	1416
/m/02__x	1417
/m/040j2_	1418
/m/0dsfnd	1419
/m/0czmk1	1420
/m/01x4wq	1421
/m/056xx8	1422
/m/02bn_p	1423
/m/032ft5	1424
/m/071pf2	1425
/m/0k_l4	1426
/m/02l7c8	1427
/m/011ykb	1428
/m/05z96	1429
/m/058w5	1430
/m/01wxyx1	1431
/m/0f2v0	1432
/m/08433	1433
/m/012cph	1434
/m/0jkvj	1435
/m/03vfr_	1436
/m/01x0sy	1437
/m/01p5_g	1438
/m/07_fj54	1439
/m/010v8k	1440
/m/0250f	1441
/m/04hpck	1442
/m/04pm6	1443
/m/05khh	1444
/m/0sqc8	1445
/m/027b9j5	1446
/m/011yl_	1447
/m/0466k4	1448
/m/01zq91	1449
/m/02t__l	1450
/m/08cn_n	1451
/m/05pcn59	1452
/m/04w391	1453
/m/08hmch	1454
/m/080h2	1455
/m/09_bl	1456
/m/05q96q6	1457
/m/04wvhz	1458
/m/06196	1459
/m/03hpr	1460
/m/0bz5v2	1461
/m/03gjzk	1462
/m/0kz2w	1463
/m/05pcjw	1464
/m/0f102	1465
/m/02_xgp2	1466
/m/0517bc	1467
/m/0bx_q	1468
/m/07bzz7	1469
/m/02qm_f	1470
/m/012t1	1471
/m/048scx	1472
/m/02hxc3j	1473
/m/041rx	1474
/m/0jdhp	1475
/m/0pv3x	1476
/m/05_k56	1477
/m/0gqzz	1478
/m/049yf	1479
/m/05gqf	1480
/m/057d89	1481
/m/02q1tc5	1482
/m/05qsxy	1483
/m/01vswwx	1484
/m/06by7	1485
/m/01sxq9	1486
/m/086vfb	1487
/m/021_rm	1488
/m/02r3zy	1489
/m/0264v8r	1490
/m/02clgg	1491
/m/04v3q	1492
/m/01rz1	1493
/m/0mq17	1494
/m/02rdxsh	1495
/m/049dyj	1496
/m/0bshwmp	1497
/m/05glt	1498
/m/018f8	1499
/m/0272kv	1500
/m/02vjp3	1501
/m/01wtlq	1502
/m/01x66d	1503
/m/0jqp3	1504
/m/0170vn	1505
/m/02d44q	1506
/m/01_mdl	1507
/m/04rjg	1508
/m/07tg4	1509
/m/01pr_j6	1510
/m/01j_06	1511
/m/03_hd	1512
/m/0dr3sl	1513
/m/0f3zf_	1514
/m/0k2sk	1515
/m/09q5w2	1516
/m/089fss	1517
/m/0l8z1	1518
/m/01c22t	1519
/m/0l6m5	1520
/m/0171lb	1521
/m/0ktx_	1522
/m/03td5v	1523
/m/0jm_	1524
/m/02cllz	1525
/m/02yw5r	1526
/m/02gys2	1527
/m/0dy68h	1528
/m/01b1mj	1529
/m/03x3wf	1530
/m/01vrncs	1531
/m/01fjz9	1532
/m/06sy4c	1533
/m/0g2c8	1534
/m/02whj	1535
/m/0lk90	1536
/m/086g2	1537
/m/035gjq	1538
/m/07_53	1539
/m/01mc11	1540
/m/0gkkf	1541
/m/01nv4h	1542
/m/03qx63	1543
/m/06fvc	1544
/m/01k2wn	1545
/m/03lsq	1546
/m/07bdd_	1547
/m/0wh3	1548
/m/0jjy0	1549
/m/02f716	1550
/m/0dw4g	1551
/m/01g4zr	1552
/m/0yc84	1553
/m/0cqh6z	1554
/m/09qvms	1555
/m/047g8h	1556
/m/01y3c	1557
/m/06b0d2	1558
/m/03lt8g	1559
/m/0_92w	1560
/m/039bp	1561
/m/0mwx6	1562
/m/09pmkv	1563
/m/05nmg_	1564
/m/06wcbk7	1565
/m/016kjs	1566
/m/02q8ms8	1567
/m/0h1mt	1568
/m/02x8m	1569
/m/01wbgdv	1570
/m/01yx7f	1571
/m/01nvmd_	1572
/m/0jyx6	1573
/m/0cc56	1574
/m/0bdwft	1575
/m/0c5dd	1576
/m/030pr	1577
/m/0t_07	1578
/m/0jnwx	1579
/m/018wdw	1580
/m/04vr_f	1581
/m/07c0j	1582
/m/016r9z	1583
/m/0bl06	1584
/m/057bc6m	1585
/m/02bqn1	1586
/m/059_c	1587
/m/01gjd0	1588
/m/0js9s	1589
/m/02w_6xj	1590
/m/01yfj	1591
/m/0j3b	1592
/m/012x1l	1593
/m/014zfs	1594
/m/03bxz7	1595
/m/02r8hh_	1596
/m/07kc_	1597
/m/02qjv	1598
/m/0h1m9	1599
/m/02qyntr	1600
/m/0249kn	1601
/m/04rzd	1602
/m/0p_th	1603
/m/0h3xztt	1604
/m/02g839	1605
/m/0lgsq	1606
/m/01vc3y	1607
/m/0ym8f	1608
/m/02lp1	1609
/m/0dhdp	1610
/m/083p7	1611
/m/02y0js	1612
/m/083q7	1613
/m/049n7	1614
/m/02x2khw	1615
/m/099c8n	1616
/m/0c0nhgv	1617
/m/01pvkk	1618
/m/05pxnmb	1619
/m/02w9895	1620
/m/0d7wh	1621
/m/03cp7b3	1622
/m/018y2s	1623
/m/06gmr	1624
/m/0473m9	1625
/m/0swff	1626
/m/02lfns	1627
/m/01qdmh	1628
/m/09gdm7q	1629
/m/0207wx	1630
/m/0k3gw	1631
/m/0tyww	1632
/m/0872p_c	1633
/m/047yc	1634
/m/02_5h	1635
/m/0jnnx	1636
/m/02qvzf	1637
/m/0g48m4	1638
/m/019y64	1639
/m/0k_6t	1640
/m/02ly_	1641
/m/053rxgm	1642
/m/030_1m	1643
/m/02scbv	1644
/m/0cp0t91	1645
/m/01ps2h8	1646
/m/05m883	1647
/m/02n9nmz	1648
/m/059t8	1649
/m/036k0s	1650
/m/0b68vs	1651
/m/04n7njg	1652
/m/06rv5t	1653
/m/02v63m	1654
/m/0jt3tjf	1655
/m/02dwj	1656
/m/043tz8m	1657
/m/06jntd	1658
/m/03rwz3	1659
/m/04nfpk	1660
/m/033g4d	1661
/m/05h4t7	1662
/m/0y54	1663
/m/02r34n	1664
/m/06gb2q	1665
/m/028fjr	1666
/m/02pkpfs	1667
/m/02f9wb	1668
/m/07sc6nw	1669
/m/0hcr	1670
/m/0283ph	1671
/m/092c5f	1672
/m/02gvwz	1673
/m/0pqc5	1674
/m/05ksh	1675
/m/0dzf_	1676
/m/02r5w9	1677
/m/02pqp12	1678
/m/048yqf	1679
/m/03n93	1680
/m/03pmfw	1681
/m/0478__m	1682
/m/016t_3	1683
/m/01wdl3	1684
/m/0n6ds	1685
/m/01mqh5	1686
/m/0gj8t_b	1687
/m/07ylj	1688
/m/02sjf5	1689
/m/04cf09	1690
/m/01vrt_c	1691
/m/02lnhv	1692
/m/0ws7	1693
/m/0glqh5_	1694
/m/03fbb6	1695
/m/03fnyk	1696
/m/0988cp	1697
/m/01s81	1698
/m/0d3f83	1699
/m/0lmm3	1700
/m/07gbf	1701
/m/026dx	1702
/m/018sg9	1703
/m/0sz28	1704
/m/0gjk1d	1705
/m/01dyk8	1706
/m/0jcx	1707
/m/0k4gf	1708
/m/07xzm	1709
/m/01ztgm	1710
/m/0fvxz	1711
/m/025m98	1712
/m/0fm2_	1713
/m/02prw4h	1714
/m/03bx2lk	1715
/m/0277jc	1716
/m/04jzj	1717
/m/03f1zdw	1718
/m/016gr2	1719
/m/07gkgp	1720
/m/02rhwjr	1721
/m/020fcn	1722
/m/01qbl	1723
/m/0t_gg	1724
/m/0kpys	1725
/m/0r111	1726
/m/011yg9	1727
/m/026g73	1728
/m/030dx5	1729
/m/0r1yc	1730
/m/05z_kps	1731
/m/059j2	1732
/m/026390q	1733
/m/01vrz41	1734
/m/0c4z8	1735
/m/04zd4m	1736
/m/01sn04	1737
/m/045bg	1738
/m/0lbbj	1739
/m/02g8mp	1740
/m/05pdbs	1741
/m/0n5kc	1742
/m/0435vm	1743
/m/02114t	1744
/m/01xvb	1745
/m/05cgv	1746
/m/02lq5w	1747
/m/01gfq4	1748
/m/0gr0m	1749
/m/027gs1_	1750
/m/0h1nt	1751
/m/02pxmgz	1752
/m/01k5t_3	1753
/m/0fvvz	1754
/m/02sjgpq	1755
/m/0tgcy	1756
/m/07_kq	1757
/m/01tcf7	1758
/m/02c6d	1759
/m/0bs5f0b	1760
/m/0372j5	1761
/m/0gjvqm	1762
/m/02z0dfh	1763
/m/0hv1t	1764
/m/037jz	1765
/m/01wd02c	1766
/m/02qvl7	1767
/m/0j6tr	1768
/m/09728	1769
/m/0h1wg	1770
/m/01vvlyt	1771
/m/01f1jf	1772
/m/07b2lv	1773
/m/05jbn	1774
/m/0bdxs5	1775
/m/03b12	1776
/m/020mfr	1777
/m/0xwj	1778
/m/04hwbq	1779
/m/07_3qd	1780
/m/0g5pv3	1781
/m/027r9t	1782
/m/034np8	1783
/m/0dgst_d	1784
/m/0345h	1785
/m/05c1t6z	1786
/m/0277470	1787
/m/016fly	1788
/m/01tdnyh	1789
/m/07brj	1790
/m/0bm02	1791
/m/0d04z6	1792
/m/0137n0	1793
/m/01v0sx2	1794
/m/04wgh	1795
/m/01bb9r	1796
/m/06m6z6	1797
/m/04ykg	1798
/m/0pmq2	1799
/m/01bzw5	1800
/m/06ms6	1801
/m/0bkj86	1802
/m/030znt	1803
/m/0swbd	1804
/m/016hvl	1805
/m/028p0	1806
/m/0cj2nl	1807
/m/0cp0ph6	1808
/m/0358x_	1809
/m/04wtx1	1810
/m/01wdqrx	1811
/m/0cnztc4	1812
/m/01t6b4	1813
/m/09sdmz	1814
/m/026rm_y	1815
/m/07vc_9	1816
/m/0cdf37	1817
/m/0147fv	1818
/m/06mmb	1819
/m/043q4d	1820
/m/09lxv9	1821
/m/09dt7	1822
/m/02662b	1823
/m/01v42g	1824
/m/0blbxk	1825
/m/01kx_81	1826
/m/03kwtb	1827
/m/01718w	1828
/m/07g_0c	1829
/m/02vl_pz	1830
/m/04991x	1831
/m/03s5lz	1832
/m/022_6	1833
/m/03x746	1834
/m/01f2xy	1835
/m/014mlp	1836
/m/032_wv	1837
/m/0n6f8	1838
/m/0g5879y	1839
/m/09td7p	1840
/m/0d1mp3	1841
/m/01j67j	1842
/m/01b9ck	1843
/m/011ywj	1844
/m/03v3xp	1845
/m/0325pb	1846
/m/0gl5_	1847
/m/0mnrb	1848
/m/0b7xl8	1849
/m/07t_l23	1850
/m/015nhn	1851
/m/05k2s_	1852
/m/017vb_	1853
/m/07ddz9	1854
/m/0m7fm	1855
/m/0fj52s	1856
/m/08lb68	1857
/m/059x0w	1858
/m/02mjs7	1859
/m/02583l	1860
/m/054ks3	1861
/m/01s1zk	1862
/m/04mzf8	1863
/m/05fg2	1864
/m/0_2v	1865
/m/03hbzj	1866
/m/035qlx	1867
/m/02qdzd	1868
/m/05m0h	1869
/m/069q4f	1870
/m/07cbcy	1871
/m/0bkbm	1872
/m/0d1qmz	1873
/m/033srr	1874
/m/05drq5	1875
/m/04pbhw	1876
/m/07b1gq	1877
/m/05z01	1878
/m/03bdkd	1879
/m/0flddp	1880
/m/053tj7	1881
/m/07ww5	1882
/m/01l8t8	1883
/m/0416y94	1884
/m/0drsm	1885
/m/0xy28	1886
/m/0hvvf	1887
/m/01vs14j	1888
/m/01qvgl	1889
/m/0gqz2	1890
/m/04t7ts	1891
/m/05q7cj	1892
/m/0n5fl	1893
/m/02qsjt	1894
/m/01lhy	1895
/m/0kn4c	1896
/m/03m4mj	1897
/m/01vfqh	1898
/m/03qlv7	1899
/m/01l4zqz	1900
/m/07cyl	1901
/m/098sv2	1902
/m/02knxx	1903
/m/026q3s3	1904
/m/0pz91	1905
/m/0ftf0f	1906
/m/09qr6	1907
/m/017cjb	1908
/m/03cs_z7	1909
/m/07s6tbm	1910
/m/05lb87	1911
/m/02lyr4	1912
/m/0h0wc	1913
/m/02p0qmm	1914
/m/0c_zj	1915
/m/049bmk	1916
/m/01jz6x	1917
/m/025syph	1918
/m/06n9lt	1919
/m/0pgjm	1920
/m/0jmfv	1921
/m/01ngz1	1922
/m/06mz5	1923
/m/0w0d	1924
/m/035qy	1925
/m/086qd	1926
/m/0kyk	1927
/m/01fkxr	1928
/m/0h7x	1929
/m/0vhm	1930
/m/0215n	1931
/m/01lfvj	1932
/m/02knnd	1933
/m/0284jb	1934
/m/09lbv	1935
/m/089pg7	1936
/m/0g768	1937
/m/01xqqp	1938
/m/026mml	1939
/m/08pgl8	1940
/m/01c9f2	1941
/m/019bk0	1942
/m/01f69m	1943
/m/0gqyl	1944
/m/058x5	1945
/m/047msdk	1946
/m/01pny5	1947
/m/04qftx	1948
/m/012vd6	1949
/m/06cm5	1950
/m/01swck	1951
/m/07ng9k	1952
/m/02jjt	1953
/m/073tm9	1954
/m/0136g9	1955
/m/03_fk9	1956
/m/02b5_l	1957
/m/0bwhdbl	1958
/m/07tl0	1959
/m/0sjqm	1960
/m/01fmz6	1961
/m/0gmcwlb	1962
/m/0prjs	1963
/m/01kff7	1964
/m/015rmq	1965
/m/095w_	1966
/m/02hrh0_	1967
/m/07wj1	1968
/m/01cpjx	1969
/m/0262zm	1970
/m/01963w	1971
/m/0gpjbt	1972
/m/0249fn	1973
/m/05cv94	1974
/m/01wcp_g	1975
/m/07_f2	1976
/m/0gcdzz	1977
/m/04dqdk	1978
/m/01yh3y	1979
/m/048lv	1980
/m/044g_k	1981
/m/017ztv	1982
/m/059y0	1983
/m/05mt_q	1984
/m/01c427	1985
/m/0bjrnt	1986
/m/05f7s1	1987
/m/02mxw0	1988
/m/04hhv	1989
/m/073q1	1990
/m/01lj_c	1991
/m/06q8hf	1992
/m/0f721s	1993
/m/019nnl	1994
/m/0fr59	1995
/m/01k8rb	1996
/m/01j4ls	1997
/m/011zf2	1998
/m/03xgm3	1999
/m/013cr	2000
/m/0sxfd	2001
/m/0c4xc	2002
/m/099pks	2003
/m/01by1l	2004
/m/04mz10g	2005
/m/04y79_n	2006
/m/0r7fy	2007
/m/05fly	2008
/m/0ftps	2009
/m/04q42	2010
/m/031zkw	2011
/m/0zc6f	2012
/m/01y0y6	2013
/m/061v5m	2014
/m/01yhvv	2015
/m/01r32	2016
/m/0bjqh	2017
/m/01_d4	2018
/m/067mj	2019
/m/02b6n9	2020
/m/01p9hgt	2021
/m/024rbz	2022
/m/09p0ct	2023
/m/01797x	2024
/m/0n85g	2025
/m/0_24q	2026
/m/012gx2	2027
/m/030qb3t	2028
/m/015pxr	2029
/m/0162c8	2030
/m/043q6n_	2031
/m/03fghg	2032
/m/02vw1w2	2033
/m/01yhm	2034
/m/0bxbb	2035
/m/0bx9y	2036
/m/02rqwhl	2037
/m/07nqn	2038
/m/07qg8v	2039
/m/0p_2r	2040
/m/084l5	2041
/m/01rh0w	2042
/m/04m1bm	2043
/m/07z1m	2044
/m/0mnzd	2045
/m/01t_vv	2046
/m/0j90s	2047
/m/02nhxf	2048
/m/03bxwtd	2049
/m/02g3ft	2050
/m/01f7gh	2051
/m/022_lg	2052
/m/0395lw	2053
/m/0239kh	2054
/m/0ddd0gc	2055
/m/0clvcx	2056
/m/0gs96	2057
/m/0dr_4	2058
/m/0xzly	2059
/m/02ht1k	2060
/m/0l4h_	2061
/m/037w7r	2062
/m/0512p	2063
/m/019n8z	2064
/m/014y6	2065
/m/02rlzj	2066
/m/0284gcb	2067
/m/017zq0	2068
/m/099cng	2069
/m/0hr6lkl	2070
/m/054krc	2071
/m/0bscw	2072
/m/018ljb	2073
/m/07bs0	2074
/m/0292l3	2075
/m/033tf_	2076
/m/01wjrn	2077
/m/059x66	2078
/m/0pb33	2079
/m/04zyhx	2080
/m/0jqn5	2081
/m/06qd3	2082
/m/0qm98	2083
/m/0cz8mkh	2084
/m/05ywg	2085
/m/04w_7	2086
/m/02g87m	2087
/m/03n08b	2088
/m/06w2sn5	2089
/m/0xmlp	2090
/m/01vt9p3	2091
/m/0l8sx	2092
/m/0gtvrv3	2093
/m/0fq8f	2094
/m/07t31	2095
/m/01grpq	2096
/m/03mg5f	2097
/m/03rl84	2098
/m/098s2w	2099
/m/0cymp	2100
/m/02vklm3	2101
/m/03bx0bm	2102
/m/01vsxdm	2103
/m/0cd25	2104
/m/05sq84	2105
/m/03twd6	2106
/m/07t21	2107
/m/01f2y9	2108
/m/0286hyp	2109
/m/09sr0	2110
/m/0319l	2111
/m/0g2dz	2112
/m/03fts	2113
/m/05qx1	2114
/m/06jk5_	2115
/m/07y9w5	2116
/m/054lpb6	2117
/m/01ckbq	2118
/m/01c6qp	2119
/m/02r79_h	2120
/m/02g9p4	2121
/m/05strv	2122
/m/06f5j	2123
/m/020d5	2124
/m/03mr85	2125
/m/014kg4	2126
/m/02q0k7v	2127
/m/0d06vc	2128
/m/02qhlm	2129
/m/0d_q40	2130
/m/02lgj6	2131
/m/0bwgc_	2132
/m/045c66	2133
/m/05sxzwc	2134
/m/01719t	2135
/m/0244r8	2136
/m/0fpkhkz	2137
/m/0krdk	2138
/m/0cv9b	2139
/m/0gwf191	2140
/m/05j82v	2141
/m/0_7z2	2142
/m/0168dy	2143
/m/0263ycg	2144
/m/028kj0	2145
/m/015qh	2146
/m/0124k9	2147
/m/03mdt	2148
/m/01xn6mc	2149
/m/04jkpgv	2150
/m/01znc_	2151
/m/03y1mlp	2152
/m/0f6_4	2153
/m/0cvkv5	2154
/m/01njxvw	2155
/m/029sk	2156
/m/01pw2f1	2157
/m/0byq0v	2158
/m/02r4qs	2159
/m/0wp9b	2160
/m/01t8sr	2161
/m/01_9fk	2162
/m/01vsksr	2163
/m/03rzvv	2164
/m/032zq6	2165
/m/018z_c	2166
/m/0340hj	2167
/m/044ntk	2168
/m/017d77	2169
/m/033smt	2170
/m/07f_t4	2171
/m/02s5v5	2172
/m/027cyf7	2173
/m/0ckt6	2174
/m/01h4rj	2175
/m/0g9wdmc	2176
/m/044mm6	2177
/m/07f3xb	2178
/m/0ft5vs	2179
/m/01_1pv	2180
/m/0161h5	2181
/m/0jg77	2182
/m/01271h	2183
/m/059j1m	2184
/m/0187nd	2185
/m/0km5c	2186
/m/04f_d	2187
/m/01tj34	2188
/m/04w7rn	2189
/m/0qmhk	2190
/m/013zdg	2191
/m/07lx1s	2192
/m/016pns	2193
/m/025sc50	2194
/m/01wp8w7	2195
/m/04k4rt	2196
/m/0py9b	2197
/m/02lkcc	2198
/m/07z542	2199
/m/03qnvdl	2200
/m/04_tv	2201
/m/0fdjb	2202
/m/07s3m4g	2203
/m/01_4z	2204
/m/01c6nk	2205
/m/049g_xj	2206
/m/02rv_dz	2207
/m/011yhm	2208
/m/09rx7tx	2209
/m/05183k	2210
/m/05prs8	2211
/m/01cvtf	2212
/m/07qcbw	2213
/m/01p45_v	2214
/m/02r1c18	2215
/m/06c1y	2216
/m/0bx8pn	2217
/m/0dp7wt	2218
/m/045cq	2219
/m/05sw5b	2220
/m/02sb1w	2221
/m/03jldb	2222
/m/02wcx8c	2223
/m/07y2s	2224
/m/0blg2	2225
/m/01c72t	2226
/m/0crx5w	2227
/m/01tbp	2228
/m/04hgpt	2229
/m/01jv1z	2230
/m/01r9fv	2231
/m/04nw9	2232
/m/04syw	2233
/m/0d1_f	2234
/m/0ggbhy7	2235
/m/0pvms	2236
/m/0d6b7	2237
/m/028c_8	2238
/m/01d6g	2239
/m/0mbwf	2240
/m/02hcv8	2241
/m/0n1v8	2242
/m/0168ls	2243
/m/0j582	2244
/m/05tk7y	2245
/m/020lpx	2246
/m/0gj9qxr	2247
/m/0g5b0q5	2248
/m/06cgy	2249
/m/061fhg	2250
/m/03qmj9	2251
/m/02xwq9	2252
/m/05sy_5	2253
/m/01mjq	2254
/m/027f7dj	2255
/m/0mcl0	2256
/m/05hqv	2257
/m/026n4h6	2258
/m/07_m2	2259
/m/0lbd9	2260
/m/0fpjd_g	2261
/m/01hqhm	2262
/m/024rgt	2263
/m/01g257	2264
/m/01c4_6	2265
/m/0ggl02	2266
/m/0f2df	2267
/m/01pl9g	2268
/m/02wgk1	2269
/m/0_ytw	2270
/m/071x0k	2271
/m/03g5jw	2272
/m/02r1tx7	2273
/m/01sb5r	2274
/m/0l15bq	2275
/m/01d650	2276
/m/03ttfc	2277
/m/05tfm	2278
/m/0169t	2279
/m/0jm2v	2280
/m/0ctt4z	2281
/m/01vb403	2282
/m/03cvfg	2283
/m/01445t	2284
/m/0hsb3	2285
/m/03cdg	2286
/m/030p35	2287
/m/0f6_x	2288
/m/01713c	2289
/m/02x4wb	2290
/m/03h502k	2291
/m/01kvqc	2292
/m/089g0h	2293
/m/047vp1n	2294
/m/02gx2k	2295
/m/02633g	2296
/m/021yc7p	2297
/m/0qm8b	2298
/m/02rdyk7	2299
/m/0mx7f	2300
/m/0dw6b	2301
/m/0tyql	2302
/m/05p09zm	2303
/m/013knm	2304
/m/016k6x	2305
/m/0160nk	2306
/m/02c4s	2307
/m/01xd9	2308
/m/05v10	2309
/m/01nczg	2310
/m/0m4mb	2311
/m/0d5fb	2312
/m/05pbl56	2313
/m/0lk8j	2314
/m/0c34mt	2315
/m/0dlglj	2316
/m/02_n7	2317
/m/0ntpv	2318
/m/03yl2t	2319
/m/025txtg	2320
/m/0203v	2321
/m/01nqj	2322
/m/073hmq	2323
/m/0fb7c	2324
/m/05zh9c	2325
/m/01l_pn	2326
/m/0gxtknx	2327
/m/06bnz	2328
/m/0154d7	2329
/m/0199wf	2330
/m/021r7r	2331
/m/01ypsj	2332
/m/05z_p6	2333
/m/038bht	2334
/m/0gztl	2335
/m/0vg8	2336
/m/06rzwx	2337
/m/019l68	2338
/m/01xcqc	2339
/m/01mqz0	2340
/m/02m501	2341
/m/04vq33	2342
/m/01304j	2343
/m/05w3f	2344
/m/02kth6	2345
/m/02tktw	2346
/m/09blyk	2347
/m/025ndl	2348
/m/0g78xc	2349
/m/01sbf2	2350
/m/033q4k	2351
/m/07s93v	2352
/m/0fvyg	2353
/m/0fx0j2	2354
/m/05hmp6	2355
/m/01f1jy	2356
/m/02847m9	2357
/m/04l8xw	2358
/m/09rp4r_	2359
/m/02lxj_	2360
/m/051kv	2361
/m/03q4nz	2362
/m/01fwj8	2363
/m/03fn8k	2364
/m/0436yk	2365
/m/05z7c	2366
/m/013jz2	2367
/m/0dvld	2368
/m/027cxsm	2369
/m/08jgk1	2370
/m/02rl201	2371
/m/02607j	2372
/m/01l2fn	2373
/m/03xb2w	2374
/m/01g1lp	2375
/m/024y8p	2376
/m/0j5q3	2377
/m/0pmpl	2378
/m/02fqwt	2379
/m/02rlj20	2380
/m/07l24	2381
/m/0cd2vh9	2382
/m/07s8r0	2383
/m/0d63kt	2384
/m/04h41v	2385
/m/0bq8tmw	2386
/m/09pjnd	2387
/m/09txzv	2388
/m/0156q	2389
/m/02b1gz	2390
/m/073h1t	2391
/m/05ljv7	2392
/m/01nzs7	2393
/m/01f62	2394
/m/06vkl	2395
/m/01fh9	2396
/m/02lg9w	2397
/m/01l1sq	2398
/m/03h304l	2399
/m/03wjm2	2400
/m/0464pz	2401
/m/01bpc9	2402
/m/04kngf	2403
/m/01hww_	2404
/m/0l12d	2405
/m/07csf4	2406
/m/0h7h6	2407
/m/09ykwk	2408
/m/015zxh	2409
/m/034h1h	2410
/m/0dzst	2411
/m/0r2l7	2412
/m/02bxd	2413
/m/01w4dy	2414
/m/02jgm0	2415
/m/04rwx	2416
/m/028lc8	2417
/m/06s_2	2418
/m/03v6t	2419
/m/02tr7d	2420
/m/06_sc3	2421
/m/0gp9mp	2422
/m/02_hj4	2423
/m/031t2d	2424
/m/032v0v	2425
/m/01v_pj6	2426
/m/01j_cy	2427
/m/05fnl9	2428
/m/021vwt	2429
/m/02zyy4	2430
/m/05q54f5	2431
/m/017v_	2432
/m/05x30m	2433
/m/03ktjq	2434
/m/0946bb	2435
/m/026_dcw	2436
/m/0hn821n	2437
/m/06_x996	2438
/m/019pm_	2439
/m/06lpmt	2440
/m/02ck7w	2441
/m/07szy	2442
/m/0bwjj	2443
/m/02sf_r	2444
/m/03ft8	2445
/m/03mp9s	2446
/m/0djtky	2447
/m/07mqps	2448
/m/072x7s	2449
/m/030_1_	2450
/m/024l2y	2451
/m/035s95	2452
/m/0nm9y	2453
/m/0gcs9	2454
/m/03lrht	2455
/m/0gh4g0	2456
/m/01w923	2457
/m/02ny8t	2458
/m/01ft14	2459
/m/0gls4q_	2460
/m/080dyk	2461
/m/0182r9	2462
/m/010dft	2463
/m/01nkcn	2464
/m/02jt1k	2465
/m/0bh8yn3	2466
/m/04x4s2	2467
/m/0c00zd0	2468
/m/094jv	2469
/m/024d8w	2470
/m/05cj_j	2471
/m/06hhrs	2472
/m/05p3738	2473
/m/06v_gh	2474
/m/09gffmz	2475
/m/01v3x8	2476
/m/06h4y9	2477
/m/0ksf29	2478
/m/04bpm6	2479
/m/02y_rq5	2480
/m/0q9zc	2481
/m/02g_6x	2482
/m/0fgg8c	2483
/m/0nbcg	2484
/m/03q2t9	2485
/m/0f2w0	2486
/m/075wx7_	2487
/m/0fsm8c	2488
/m/0kpw3	2489
/m/013gwb	2490
/m/09tqxt	2491
/m/02_ssl	2492
/m/0jmk7	2493
/m/01n4f8	2494
/m/02pb53	2495
/m/05sj55	2496
/m/06kb_	2497
/m/0c3kw	2498
/m/0p2rj	2499
/m/07h9gp	2500
/m/01hwc6	2501
/m/02p68d	2502
/m/09pl3f	2503
/m/047csmy	2504
/m/04swx	2505
/m/03v_5	2506
/m/041y2	2507
/m/01n4w_	2508
/m/0w7c	2509
/m/01mxqyk	2510
/m/016ckq	2511
/m/0fxgg9	2512
/m/03t9sp	2513
/m/015pkc	2514
/m/029zqn	2515
/m/01r2lw	2516
/m/01l9p	2517
/m/032w8h	2518
/m/01d494	2519
/m/0170pk	2520
/m/015rkw	2521
/m/0bjy7	2522
/m/03y9p40	2523
/m/0b_6zk	2524
/m/0b_c7	2525
/m/0c8tkt	2526
/m/035yn8	2527
/m/0sxkh	2528
/m/01p95y0	2529
/m/01w56k	2530
/m/0bcndz	2531
/m/0k4kk	2532
/m/023l9y	2533
/m/0735l	2534
/m/0bzm81	2535
/m/01756d	2536
/m/01fl3	2537
/m/0b44shh	2538
/m/02hnl	2539
/m/05crg7	2540
/m/01dzz7	2541
/m/07s5fz	2542
/m/041pnt	2543
/m/0fdv3	2544
/m/02q5g1z	2545
/m/01grnp	2546
/m/01x73	2547
/m/02khs	2548
/m/0crjn65	2549
/m/0dbdy	2550
/m/01kkg5	2551
/m/06w33f8	2552
/m/0d6qjf	2553
/m/03gkn5	2554
/m/0gsg7	2555
/m/02bg8v	2556
/m/0lp_cd3	2557
/m/02wrhj	2558
/m/02zv4b	2559
/m/0m2cb	2560
/m/04rrd	2561
/m/09kvv	2562
/m/04rtpt	2563
/m/015pkt	2564
/m/06rmdr	2565
/m/02vptk_	2566
/m/09fqdt	2567
/m/0fhpv4	2568
/m/03hmt9b	2569
/m/03sb38	2570
/m/0b7l4x	2571
/m/09cm54	2572
/m/0bthb	2573
/m/06pr6	2574
/m/049k07	2575
/m/01gzm2	2576
/m/02_jjm	2577
/m/01c59k	2578
/m/0845v	2579
/m/03gk2	2580
/m/013t9y	2581
/m/011ydl	2582
/m/039v1	2583
/m/0137g1	2584
/m/0ft7sr	2585
/m/01z88t	2586
/m/01z215	2587
/m/034rd	2588
/m/0488g	2589
/m/049dk	2590
/m/0bbm7r	2591
/m/039g82	2592
/m/0fpzzp	2593
/m/07scx	2594
/m/0gj9tn5	2595
/m/013q07	2596
/m/091z_p	2597
/m/0gbwp	2598
/m/02jm0n	2599
/m/0gs5q	2600
/m/0hkq4	2601
/m/0m_xy	2602
/m/01pj7	2603
/m/0fhzy	2604
/m/0dth6b	2605
/m/058s57	2606
/m/03gr7w	2607
/m/070yzk	2608
/m/07sgfvl	2609
/m/050xxm	2610
/m/01pq5j7	2611
/m/0f1vrl	2612
/m/02ptzz0	2613
/m/05g_nr	2614
/m/08hp53	2615
/m/02pyyld	2616
/m/0f9rw9	2617
/m/076xkps	2618
/m/03_2td	2619
/m/0cjyzs	2620
/m/015_30	2621
/m/01c92g	2622
/m/030h95	2623
/m/0jmbv	2624
/m/049sb	2625
/m/02q52q	2626
/m/01g6gs	2627
/m/064p92m	2628
/m/018nnz	2629
/m/01dyvs	2630
/m/0lfgr	2631
/m/027yf83	2632
/m/02f6g5	2633
/m/01pgp6	2634
/m/0n5hh	2635
/m/0k5p1	2636
/m/06sks6	2637
/m/04k9y6	2638
/m/030hcs	2639
/m/09vc4s	2640
/m/015882	2641
/m/0770cd	2642
/m/016s_5	2643
/m/016zgj	2644
/m/02dgq2	2645
/m/01jgkj2	2646
/m/02b71x	2647
/m/02yv_b	2648
/m/09cr8	2649
/m/0ftlkg	2650
/m/0283_zv	2651
/m/06mnps	2652
/m/01kc4s	2653
/m/04vn5	2654
/m/03h42s4	2655
/m/0835q	2656
/m/05ztm4r	2657
/m/0806vbn	2658
/m/09mq4m	2659
/m/090q32	2660
/m/01515w	2661
/m/0333t	2662
/m/0bs8s1p	2663
/m/02qgyv	2664
/m/04g9sq	2665
/m/0jm9w	2666
/m/03g3w	2667
/m/02wbm	2668
/m/01vc5m	2669
/m/062z7	2670
/m/01y888	2671
/m/02bp37	2672
/m/03z5xd	2673
/m/0g8rj	2674
/m/03t22m	2675
/m/01ky2h	2676
/m/0h1x5f	2677
/m/02yvct	2678
/m/07r78j	2679
/m/0b7t3p	2680
/m/049dzz	2681
/m/0bhtzw	2682
/m/016_mj	2683
/m/06rq1k	2684
/m/0kq9l	2685
/m/012zng	2686
/m/01c40n	2687
/m/05l5n	2688
/m/015cjr	2689
/m/047jhq	2690
/m/015x74	2691
/m/09kn9	2692
/m/01htzx	2693
/m/02jyr8	2694
/m/04g61	2695
/m/0ynfz	2696
/m/0241jw	2697
/m/02rb84n	2698
/m/07l4z	2699
/m/03wnh	2700
/m/01k8q5	2701
/m/0kfv9	2702
/m/017jv5	2703
/m/01kf3_9	2704
/m/01l2m3	2705
/m/0gk4g	2706
/m/02lz1s	2707
/m/052gzr	2708
/m/025m8y	2709
/m/016dj8	2710
/m/01c58j	2711
/m/0177s6	2712
/m/026lj	2713
/m/05r79	2714
/m/0m491	2715
/m/0b76t12	2716
/m/05rrtf	2717
/m/03h40_7	2718
/m/04qw17	2719
/m/0cv72h	2720
/m/051q5	2721
/m/01t2h2	2722
/m/03k7bd	2723
/m/02rx2m5	2724
/m/025s7j4	2725
/m/016ywr	2726
/m/0cc7hmk	2727
/m/0hvb2	2728
/m/02bkdn	2729
/m/01t07j	2730
/m/01g63y	2731
/m/01vh3r	2732
/m/02qsqmq	2733
/m/044crp	2734
/m/03mz9r	2735
/m/0584r4	2736
/m/0cz_ym	2737
/m/05rgl	2738
/m/06x58	2739
/m/0261g5l	2740
/m/0217m9	2741
/m/02bh_v	2742
/m/06qjgc	2743
/m/03k50	2744
/m/016s0m	2745
/m/02qw1zx	2746
/m/016v46	2747
/m/02k84w	2748
/m/0p5mw	2749
/m/03xmy1	2750
/m/028cg00	2751
/m/01gq0b	2752
/m/0d2psv	2753
/m/0j1yf	2754
/m/0320jz	2755
/m/01y9xg	2756
/m/02d9k	2757
/m/073bb	2758
/m/0c_n9	2759
/m/07s6prs	2760
/m/026gyn_	2761
/m/09146g	2762
/m/02q3n9c	2763
/m/06r4f	2764
/m/06qw_	2765
/m/05kr_	2766
/m/04rrx	2767
/m/02lmk	2768
/m/022q32	2769
/m/01yc02	2770
/m/0mj1l	2771
/m/011yth	2772
/m/07wbk	2773
/m/083pr	2774
/m/03xsby	2775
/m/0fq7dv_	2776
/m/0ch26b_	2777
/m/01p1v	2778
/m/08m4c8	2779
/m/0_7w6	2780
/m/0gd_s	2781
/m/02664f	2782
/m/021p26	2783
/m/0479b	2784
/m/0c6g29	2785
/m/0cq7tx	2786
/m/02zd460	2787
/m/03mkk4	2788
/m/04n2r9h	2789
/m/0g26h	2790
/m/01rgdw	2791
/m/0bs0bh	2792
/m/03mg35	2793
/m/02x258x	2794
/m/018wl5	2795
/m/04mn81	2796
/m/06pj8	2797
/m/0h21v2	2798
/m/0bs5vty	2799
/m/0by1wkq	2800
/m/060ny2	2801
/m/06bss	2802
/m/0mhfr	2803
/m/04r1t	2804
/m/0d075m	2805
/m/02stbw	2806
/m/02qx69	2807
/m/0gt_k	2808
/m/0btyf5z	2809
/m/0c94fn	2810
/m/0tn9j	2811
/m/04ly1	2812
/m/0b82vw	2813
/m/0784v1	2814
/m/040wdl	2815
/m/01s5q	2816
/m/0lz8d	2817
/m/0mxcf	2818
/m/0mx6c	2819
/m/04zwjd	2820
/m/03sxd2	2821
/m/07ymr5	2822
/m/02krf9	2823
/m/03hj3b3	2824
/m/03cffvv	2825
/m/033071	2826
/m/02bqm0	2827
/m/07ym0	2828
/m/05d1y	2829
/m/02_5x9	2830
/m/02906	2831
/m/014dq7	2832
/m/03bnv	2833
/m/0fy34l	2834
/m/04myfb7	2835
/m/06t61y	2836
/m/065jlv	2837
/m/021_z5	2838
/m/019g40	2839
/m/0fb0v	2840
/m/03n0cd	2841
/m/05qbckf	2842
/m/0162v	2843
/m/02wgln	2844
/m/03bdv	2845
/m/0f25w9	2846
/m/07hnp	2847
/m/07w5rq	2848
/m/01_srz	2849
/m/01lly5	2850
/m/016z9n	2851
/m/042z_g	2852
/m/0jvtp	2853
/m/09d38d	2854
/m/06s6hs	2855
/m/07kh6f3	2856
/m/0nvrd	2857
/m/0s69k	2858
/m/050f0s	2859
/m/015h31	2860
/m/01cgz	2861
/m/050gkf	2862
/m/0l_j_	2863
/m/01gvxh	2864
/m/0zjpz	2865
/m/01nd2c	2866
/m/070fnm	2867
/m/0136p1	2868
/m/04y9dk	2869
/m/0jcg8	2870
/m/02qfv5d	2871
/m/01jmyj	2872
/m/02hft3	2873
/m/0yjf0	2874
/m/01b3l	2875
/m/06ntj	2876
/m/037s9x	2877
/m/03z19	2878
/m/04ktcgn	2879
/m/034b6k	2880
/m/063vn	2881
/m/0c8wxp	2882
/m/0gz5hs	2883
/m/01c57n	2884
/m/09k56b7	2885
/m/0gjc4d3	2886
/m/0cjsxp	2887
/m/0fvxg	2888
/m/01twdk	2889
/m/0k0rf	2890
/m/0r540	2891
/m/02lf70	2892
/m/0dt8xq	2893
/m/0140t7	2894
/m/07yk1xz	2895
/m/01wz3cx	2896
/m/0fxwx	2897
/m/0b76kw1	2898
/m/0509bl	2899
/m/0g51l1	2900
/m/01wsl7c	2901
/m/0l_tn	2902
/m/016pjk	2903
/m/051_y	2904
/m/0gd0c7x	2905
/m/0d0kn	2906
/m/0w6w	2907
/m/03fvqg	2908
/m/0sg6b	2909
/m/04kj2v	2910
/m/0mtdx	2911
/m/0pzpz	2912
/m/01z0lb	2913
/m/05k79	2914
/m/052p7	2915
/m/0694j	2916
/m/031778	2917
/m/0dq6p	2918
/m/03d34x8	2919
/m/04913k	2920
/m/0j6b5	2921
/m/0jmfb	2922
/m/07vk2	2923
/m/06brp0	2924
/m/01sh2	2925
/m/01gtbb	2926
/m/05k7sb	2927
/m/0bvzp	2928
/m/03f7xg	2929
/m/0hpt3	2930
/m/0g02vk	2931
/m/04t53l	2932
/m/02vqhv0	2933
/m/047cx	2934
/m/045bs6	2935
/m/04kzqz	2936
/m/0279c15	2937
/m/01vyp_	2938
/m/0278rq7	2939
/m/01n9d9	2940
/m/028knk	2941
/m/02hzz	2942
/m/026mmy	2943
/m/02c8d7	2944
/m/01wbl_r	2945
/m/0h_cssd	2946
/m/01ycbq	2947
/m/0f7h2g	2948
/m/0cq8qq	2949
/m/022xml	2950
/m/0bj9k	2951
/m/047qxs	2952
/m/0tzt_	2953
/m/0_9wr	2954
/m/0gvrws1	2955
/m/045m1_	2956
/m/0mrs1	2957
/m/031n8c	2958
/m/02zn1b	2959
/m/0dtd6	2960
/m/066l3y	2961
/m/02z5x7l	2962
/m/0bdx29	2963
/m/01xr2s	2964
/m/0lv1x	2965
/m/06f41	2966
/m/02k6rq	2967
/m/0cwt70	2968
/m/0hmm7	2969
/m/03t5b6	2970
/m/01vvyd8	2971
/m/050l8	2972
/m/02y7sr	2973
/m/045c7b	2974
/m/040p3y	2975
/m/01fmys	2976
/m/035dk	2977
/m/09tqkv2	2978
/m/0g9zjp	2979
/m/02b2np	2980
/m/012s5j	2981
/m/0l2q3	2982
/m/0r3tq	2983
/m/05fjf	2984
/m/070mff	2985
/m/05d8vw	2986
/m/02s62q	2987
/m/0jym0	2988
/m/0clz1b	2989
/m/0dwr4	2990
/m/054ky1	2991
/m/0b1zz	2992
/m/0292qb	2993
/m/09f0bj	2994
/m/0r80l	2995
/m/0f1pyf	2996
/m/02ryyk	2997
/m/09d5h	2998
/m/02xhpl	2999
/m/043s3	3000
/m/0h25	3001
/m/01cyd5	3002
/m/015npr	3003
/m/0355pl	3004
/m/01fwqn	3005
/m/02s7tr	3006
/m/05g76	3007
/m/0gl88b	3008
/m/05x2t7	3009
/m/02fb1n	3010
/m/0x44q	3011
/m/02l4rh	3012
/m/018lg0	3013
/m/01czx	3014
/m/0j2pg	3015
/m/085q5	3016
/m/09g8vhw	3017
/m/06lgq8	3018
/m/01541z	3019
/m/05cwl_	3020
/m/03ln8b	3021
/m/01bvw5	3022
/m/0dnkmq	3023
/m/0jlv5	3024
/m/01j8wk	3025
/m/0gmdkyy	3026
/m/02zmh5	3027
/m/048qrd	3028
/m/0kvgxk	3029
/m/0h1p	3030
/m/099ty	3031
/m/025hwq	3032
/m/06lj1m	3033
/m/02rh1dz	3034
/m/08l0x2	3035
/m/01z77k	3036
/m/0yvjx	3037
/m/0371rb	3038
/m/023p33	3039
/m/0653m	3040
/m/05b7q	3041
/m/02hwhyv	3042
/m/02mp0g	3043
/m/01m15br	3044
/m/01lyv	3045
/m/064n1pz	3046
/m/022769	3047
/m/016fmf	3048
/m/01dtcb	3049
/m/018grr	3050
/m/034qzw	3051
/m/036c_0	3052
/m/0j_tw	3053
/m/02dq8f	3054
/m/0pf2	3055
/m/0ddt_	3056
/m/02jxk	3057
/m/06zsk51	3058
/m/01kwsg	3059
/m/02w9sd7	3060
/m/016z68	3061
/m/0260bz	3062
/m/06hwzy	3063
/m/0164nb	3064
/m/0fbtbt	3065
/m/02rzdcp	3066
/m/01yzl2	3067
/m/0dck27	3068
/m/016z7s	3069
/m/0bm2g	3070
/m/011xg5	3071
/m/04hw4b	3072
/m/05dbyt	3073
/m/02p86pb	3074
/m/073hgx	3075
/m/01y49	3076
/m/02psgvg	3077
/m/05zrvfd	3078
/m/02c638	3079
/m/02661h	3080
/m/09p30_	3081
/m/09ntbc	3082
/m/011k11	3083
/m/05qw5	3084
/m/05g49	3085
/m/047n8xt	3086
/m/018swb	3087
/m/0f6_dy	3088
/m/01xcfy	3089
/m/027b9k6	3090
/m/01kv4mb	3091
/m/0126rp	3092
/m/01j7rd	3093
/m/07c72	3094
/m/03xp8d5	3095
/m/02pgky2	3096
/m/0gr07	3097
/m/0443y3	3098
/m/03qdm	3099
/m/02lpp7	3100
/m/02__34	3101
/m/04y5j64	3102
/m/04gtdnh	3103
/m/0d1w9	3104
/m/02c_4	3105
/m/01pcmd	3106
/m/0n2bh	3107
/m/04rlf	3108
/m/042y1c	3109
/m/040fb	3110
/m/03_wj_	3111
/m/0dplh	3112
/m/0l9rg	3113
/m/0mwht	3114
/m/02vmzp	3115
/m/03rk0	3116
/m/03x23q	3117
/m/0fht9f	3118
/m/027lf1	3119
/m/023k2	3120
/m/07w3r	3121
/m/0yzvw	3122
/m/06mkj	3123
/m/02qhqz4	3124
/m/04b7xr	3125
/m/0196bp	3126
/m/0407yfx	3127
/m/011_6p	3128
/m/01dnws	3129
/m/01njml	3130
/m/07024	3131
/m/02ck1	3132
/m/04p3w	3133
/m/01y67v	3134
/m/014kq6	3135
/m/040db	3136
/m/04xjp	3137
/m/03_9r	3138
/m/06ybb1	3139
/m/09r1j5	3140
/m/02l6h	3141
/m/03jqw5	3142
/m/01jtp7	3143
/m/0bx0l	3144
/m/0k4bc	3145
/m/04zpv	3146
/m/02f77l	3147
/m/013w8y	3148
/m/01w60_p	3149
/m/0144l1	3150
/m/01jq34	3151
/m/01jzxy	3152
/m/015gw6	3153
/m/051vz	3154
/m/01ptt7	3155
/m/02_2v2	3156
/m/02_06s	3157
/m/0f4_l	3158
/m/0325dj	3159
/m/0c3ns	3160
/m/071ywj	3161
/m/02v1m7	3162
/m/0l56b	3163
/m/01f7j9	3164
/m/0fvr1	3165
/m/04jlgp	3166
/m/0cpz4k	3167
/m/056878	3168
/m/01vsnff	3169
/m/0163v	3170
/m/0c8tk	3171
/m/046rfv	3172
/m/03czz87	3173
/m/02pbp9	3174
/m/0fphgb	3175
/m/05f5sr9	3176
/m/02y9ln	3177
/m/01vwbts	3178
/m/02tj96	3179
/m/012x03	3180
/m/011zd3	3181
/m/01cszh	3182
/m/01yndb	3183
/m/02pl5bx	3184
/m/0gfzgl	3185
/m/0bfvd4	3186
/m/02jjdr	3187
/m/0pyg6	3188
/m/0c3z0	3189
/m/07w4j	3190
/m/09g7thr	3191
/m/0mwh1	3192
/m/05g3v	3193
/m/0121c1	3194
/m/01tfck	3195
/m/0c11mj	3196
/m/0266sb_	3197
/m/0g284	3198
/m/0dq630k	3199
/m/07gql	3200
/m/015wnl	3201
/m/07p62k	3202
/m/041mt	3203
/m/0bvfqq	3204
/m/07nt8p	3205
/m/02nbqh	3206
/m/0gvstc3	3207
/m/02cyfz	3208
/m/011yd2	3209
/m/08664q	3210
/m/02lq10	3211
/m/0161c	3212
/m/01364q	3213
/m/01vvyfh	3214
/m/05zksls	3215
/m/026c1	3216
/m/0h1v19	3217
/m/0m0nq	3218
/m/0h98b3k	3219
/m/0274v0r	3220
/m/0457w0	3221
/m/03zbg0	3222
/m/05cc1	3223
/m/0gjv_	3224
/m/0345gh	3225
/m/0klh7	3226
/m/02t_zq	3227
/m/05c9zr	3228
/m/09k2t1	3229
/m/07ss8_	3230
/m/01s0_f	3231
/m/05b4rcb	3232
/m/03kxj2	3233
/m/09bg4l	3234
/m/016jll	3235
/m/02_fz3	3236
/m/02f5qb	3237
/m/0d193h	3238
/m/0l6mp	3239
/m/0ymc8	3240
/m/0jt5zcn	3241
/m/05sb1	3242
/m/04xrx	3243
/m/0x67	3244
/m/01vs_v8	3245
/m/025m8l	3246
/m/0453t	3247
/m/0j3v	3248
/m/02p11jq	3249
/m/09prnq	3250
/m/01y9pk	3251
/m/0ptk_	3252
/m/0fqpc7d	3253
/m/03jvmp	3254
/m/01q415	3255
/m/0bytfv	3256
/m/0gvs1kt	3257
/m/03lty	3258
/m/0167_s	3259
/m/01qncf	3260
/m/01679d	3261
/m/0dclg	3262
/m/040fv	3263
/m/03y_46	3264
/m/0568qz	3265
/m/07srw	3266
/m/01w724	3267
/m/06j6l	3268
/m/01wyzyl	3269
/m/02rxj	3270
/m/0z1cr	3271
/m/01b195	3272
/m/07f1x	3273
/m/06wrt	3274
/m/04jbyg	3275
/m/0lgxj	3276
/m/025tdwc	3277
/m/01lb14	3278
/m/03rj0	3279
/m/01lj9	3280
/m/071jv5	3281
/m/0dh73w	3282
/m/085v7	3283
/m/054kmq	3284
/m/047g6m	3285
/m/04pbsq	3286
/m/02jg92	3287
/m/09_gdc	3288
/m/01xllf	3289
/m/0300cp	3290
/m/03qh03g	3291
/m/01zmpg	3292
/m/013v5j	3293
/m/05dbf	3294
/m/04qhdf	3295
/m/013yq	3296
/m/06xj93	3297
/m/04smkr	3298
/m/02vy5j	3299
/m/01wj9y9	3300
/m/07hbxm	3301
/m/0721cy	3302
/m/018h2	3303
/m/02s4l6	3304
/m/0f40w	3305
/m/02725hs	3306
/m/02wt0	3307
/m/07z5n	3308
/m/0cg39k	3309
/m/01q_y0	3310
/m/0gyv0b4	3311
/m/050yyb	3312
/m/0311wg	3313
/m/051hrr	3314
/m/02x7vq	3315
/m/0mp3l	3316
/m/0161sp	3317
/m/01gjw	3318
/m/01qh7	3319
/m/025tn92	3320
/m/0jm3b	3321
/m/033hn8	3322
/m/01ww2fs	3323
/m/0b_77q	3324
/m/02plv57	3325
/m/05218gr	3326
/m/01bv8b	3327
/m/026zlh9	3328
/m/06k02	3329
/m/01vhb0	3330
/m/01fdc0	3331
/m/064q5v	3332
/m/0gthm	3333
/m/018qb4	3334
/m/06ncr	3335
/m/0gghm	3336
/m/07wrz	3337
/m/0h5k	3338
/m/02_7t	3339
/m/01jzyx	3340
/m/03spz	3341
/m/06t2t	3342
/m/0537b	3343
/m/04fhxp	3344
/m/06v9_x	3345
/m/0lccn	3346
/m/03ydlnj	3347
/m/01r216	3348
/m/0136pk	3349
/m/01ck6h	3350
/m/05k2xy	3351
/m/02581c	3352
/m/01nm8w	3353
/m/020y73	3354
/m/07wjk	3355
/m/021bk	3356
/m/0pdp8	3357
/m/0lyjf	3358
/m/024qqx	3359
/m/024tsn	3360
/m/0gxkm	3361
/m/07nvmx	3362
/m/0dc3_	3363
/m/0g3zrd	3364
/m/04y8r	3365
/m/08sfxj	3366
/m/01kckd	3367
/m/047sxrj	3368
/m/0288fyj	3369
/m/03dn9v	3370
/m/05smlt	3371
/m/01p4vl	3372
/m/02g1px	3373
/m/0j06n	3374
/m/01dcqj	3375
/m/0d_2fb	3376
/m/0fpv_3_	3377
/m/02x17s4	3378
/m/0ct5zc	3379
/m/01_vfy	3380
/m/0d05w3	3381
/m/01w9ph_	3382
/m/01jq0j	3383
/m/07g9f	3384
/m/09qftb	3385
/m/0cg9y	3386
/m/025vl4m	3387
/m/02_1ky	3388
/m/01nhkxp	3389
/m/01hvjx	3390
/m/029bkp	3391
/m/01wx756	3392
/m/02sp_v	3393
/m/0661m4p	3394
/m/0q19t	3395
/m/0kvrb	3396
/m/02b1mc	3397
/m/0bymv	3398
/m/01gkgk	3399
/m/01rs41	3400
/m/01hnb	3401
/m/0783m_	3402
/m/080knyg	3403
/m/03ln9	3404
/m/05c0jwl	3405
/m/0nzw2	3406
/m/05p1qyh	3407
/m/03yvf2	3408
/m/0kszw	3409
/m/014v6f	3410
/m/01hw6wq	3411
/m/065zr	3412
/m/075mb	3413
/m/0f4yh	3414
/m/0ddjy	3415
/m/02fcs2	3416
/m/03g52k	3417
/m/02n9bh	3418
/m/075cph	3419
/m/0hky	3420
/m/01hb6v	3421
/m/0427y	3422
/m/048z7l	3423
/m/0fdys	3424
/m/07tlg	3425
/m/0dqcs3	3426
/m/03z2rz	3427
/m/0yyts	3428
/m/02xb2bt	3429
/m/0l6px	3430
/m/0cc97st	3431
/m/02mx98	3432
/m/01jft4	3433
/m/016z2j	3434
/m/02d4ct	3435
/m/06z68	3436
/m/026f__m	3437
/m/06y0xx	3438
/m/0
Download .txt
gitextract_2ysrl9k6/

├── ConvKB_pytorch/
│   ├── Config.py
│   ├── ConvKB.py
│   ├── ConvKB_1D.py
│   ├── Model.py
│   ├── base/
│   │   ├── Base.cpp
│   │   ├── Corrupt.h
│   │   ├── Random.h
│   │   ├── Reader.h
│   │   ├── Setting.h
│   │   ├── Test.h
│   │   ├── Triple.h
│   │   └── Valid.h
│   ├── benchmarks/
│   │   ├── FB15K/
│   │   │   ├── 1-1.txt
│   │   │   ├── 1-n.txt
│   │   │   ├── entity2id.txt
│   │   │   ├── n-1.txt
│   │   │   ├── n-n.py
│   │   │   ├── n-n.txt
│   │   │   ├── relation2id.txt
│   │   │   ├── test2id.txt
│   │   │   ├── test2id_all.txt
│   │   │   ├── train2id.txt
│   │   │   ├── type_constrain.txt
│   │   │   └── valid2id.txt
│   │   ├── FB15K237/
│   │   │   ├── 1-1.txt
│   │   │   ├── 1-n.txt
│   │   │   ├── entity2id.txt
│   │   │   ├── entity2id_100init.txt
│   │   │   ├── entity2vec100.init
│   │   │   ├── n-1.txt
│   │   │   ├── n-n.py
│   │   │   ├── n-n.txt
│   │   │   ├── relation2id.txt
│   │   │   ├── relation2id_100init.txt
│   │   │   ├── relation2vec100.init
│   │   │   ├── test2id.txt
│   │   │   ├── test2id_all.txt
│   │   │   ├── train2id.txt
│   │   │   ├── type_constrain.txt
│   │   │   └── valid2id.txt
│   │   ├── WN18/
│   │   │   ├── 1-1.txt
│   │   │   ├── 1-n.txt
│   │   │   ├── entity2id.txt
│   │   │   ├── n-1.txt
│   │   │   ├── n-n.py
│   │   │   ├── n-n.txt
│   │   │   ├── relation2id.txt
│   │   │   ├── test2id.txt
│   │   │   ├── test2id_all.txt
│   │   │   ├── train2id.txt
│   │   │   ├── type_constrain.txt
│   │   │   └── valid2id.txt
│   │   └── WN18RR/
│   │       ├── 1-1.txt
│   │       ├── 1-n.txt
│   │       ├── _also_see.txt
│   │       ├── _derivationally_related_form.txt
│   │       ├── _has_part.txt
│   │       ├── _hypernym.txt
│   │       ├── _instance_hypernym.txt
│   │       ├── _member_meronym.txt
│   │       ├── _member_of_domain_region.txt
│   │       ├── _member_of_domain_usage.txt
│   │       ├── _similar_to.txt
│   │       ├── _synset_domain_topic_of.txt
│   │       ├── _verb_group.txt
│   │       ├── entity2id.txt
│   │       ├── entity2id_50init.txt
│   │       ├── entity2vec50.init
│   │       ├── n-1.txt
│   │       ├── n-n.py
│   │       ├── n-n.txt
│   │       ├── relation2id.txt
│   │       ├── relation2id_50init.txt
│   │       ├── relation2vec50.init
│   │       ├── test2id.txt
│   │       ├── test2id_all.txt
│   │       ├── train2id.txt
│   │       ├── type_constrain.txt
│   │       └── valid2id.txt
│   ├── make.sh
│   └── train_ConvKB.py
├── ConvKB_tf/
│   ├── batching.py
│   ├── builddata.py
│   ├── data/
│   │   ├── FB15k-237/
│   │   │   ├── entity2id.txt
│   │   │   ├── entity2vec100.init
│   │   │   ├── relation2id.txt
│   │   │   ├── relation2vec100.init
│   │   │   ├── test.txt
│   │   │   ├── train.txt
│   │   │   └── valid.txt
│   │   └── WN18RR/
│   │       ├── entity2id.txt
│   │       ├── entity2vec100.init
│   │       ├── entity2vec50.init
│   │       ├── relation2id.txt
│   │       ├── relation2vec100.init
│   │       ├── relation2vec50.init
│   │       ├── test.txt
│   │       ├── train.txt
│   │       └── valid.txt
│   ├── eval.py
│   ├── evalFB15k-237.sh
│   ├── evalWN18RR.sh
│   ├── model.py
│   ├── train.py
│   └── training_commands.txt
├── LICENSE
└── README.md
Download .txt
SYMBOL INDEX (144 symbols across 18 files)

FILE: ConvKB_pytorch/Config.py
  class MyDataParallel (line 20) | class MyDataParallel(nn.DataParallel):
    method _getattr__ (line 21) | def _getattr__(self, name):
  function to_var (line 25) | def to_var(x):
  class Config (line 29) | class Config(object):
    method __init__ (line 30) | def __init__(self):
    method init (line 139) | def init(self):
    method set_test_link (line 215) | def set_test_link(self, test_link):
    method set_test_triple (line 218) | def set_test_triple(self, test_triple):
    method set_margin (line 221) | def set_margin(self, margin):
    method set_in_path (line 224) | def set_in_path(self, in_path):
    method set_test_file_path (line 227) | def set_test_file_path(self, test_file_path):
    method set_nbatches (line 230) | def set_nbatches(self, nbatches):
    method set_p_norm (line 233) | def set_p_norm(self, p_norm):
    method set_valid_steps (line 236) | def set_valid_steps(self, valid_steps):
    method set_save_steps (line 239) | def set_save_steps(self, save_steps):
    method set_checkpoint_dir (line 242) | def set_checkpoint_dir(self, checkpoint_dir):
    method set_result_dir (line 245) | def set_result_dir(self, result_dir):
    method set_alpha (line 248) | def set_alpha(self, alpha):
    method set_lmbda (line 251) | def set_lmbda(self, lmbda):
    method set_lmbda_two (line 254) | def set_lmbda_two(self, lmbda_two):
    method set_lr_decay (line 257) | def set_lr_decay(self, lr_decay):
    method set_weight_decay (line 260) | def set_weight_decay(self, weight_decay):
    method set_opt_method (line 263) | def set_opt_method(self, opt_method):
    method set_bern (line 266) | def set_bern(self, bern):
    method set_init_embeddings (line 269) | def set_init_embeddings(self, entity_embs, rel_embs):
    method set_config_CNN (line 274) | def set_config_CNN(self, num_of_filters, drop_prob, kernel_size=1):
    method set_dimension (line 279) | def set_dimension(self, dim):
    method set_ent_dimension (line 284) | def set_ent_dimension(self, dim):
    method set_rel_dimension (line 287) | def set_rel_dimension(self, dim):
    method set_train_times (line 290) | def set_train_times(self, train_times):
    method set_work_threads (line 293) | def set_work_threads(self, work_threads):
    method set_ent_neg_rate (line 296) | def set_ent_neg_rate(self, rate):
    method set_rel_neg_rate (line 299) | def set_rel_neg_rate(self, rate):
    method set_ent_dropout (line 302) | def set_ent_dropout(self, ent_dropout):
    method set_rel_dropout (line 305) | def set_rel_dropout(self, rel_dropout):
    method set_early_stopping_patience (line 308) | def set_early_stopping_patience(self, early_stopping_patience):
    method set_pretrain_model (line 311) | def set_pretrain_model(self, pretrain_model):
    method get_parameters (line 314) | def get_parameters(self, param_dict, mode="numpy"):
    method save_embedding_matrix (line 327) | def save_embedding_matrix(self, best_model):
    method set_train_model (line 333) | def set_train_model(self, model):
    method set_test_model (line 369) | def set_test_model(self, model, path=None):
    method sampling (line 380) | def sampling(self):
    method save_checkpoint (line 391) | def save_checkpoint(self, model, epoch):
    method save_best_checkpoint (line 397) | def save_best_checkpoint(self, best_model):
    method train_one_step (line 401) | def train_one_step(self):
    method test_one_step (line 416) | def test_one_step(self, model, test_h, test_t, test_r):
    method valid (line 424) | def valid(self, model):
    method training_model (line 444) | def training_model(self):
    method valid_triple_classification (line 495) | def valid_triple_classification(self, model):
    method training_triple_classification (line 522) | def training_triple_classification(self):
    method link_prediction (line 572) | def link_prediction(self):
    method triple_classification (line 590) | def triple_classification(self):
    method test (line 631) | def test(self):

FILE: ConvKB_pytorch/ConvKB.py
  class ConvKB (line 15) | class ConvKB(Model):
    method __init__ (line 17) | def __init__(self, config):
    method init_parameters (line 33) | def init_parameters(self):
    method _calc (line 45) | def _calc(self, h, r, t):
    method loss (line 64) | def loss(self, score, regul):
    method forward (line 67) | def forward(self):
    method predict (line 82) | def predict(self):

FILE: ConvKB_pytorch/ConvKB_1D.py
  class ConvKB (line 15) | class ConvKB(Model):
    method __init__ (line 17) | def __init__(self, config):
    method init_parameters (line 33) | def init_parameters(self):
    method _calc (line 45) | def _calc(self, h, r, t):
    method loss (line 61) | def loss(self, score, regul):
    method forward (line 64) | def forward(self):
    method predict (line 79) | def predict(self):

FILE: ConvKB_pytorch/Model.py
  class Model (line 8) | class Model(nn.Module):
    method __init__ (line 9) | def __init__(self, config):
    method get_positive_score (line 17) | def get_positive_score(self, score):
    method get_negative_score (line 20) | def get_negative_score(self, score):
    method forward (line 26) | def forward(self):
    method predict (line 29) | def predict(self):

FILE: ConvKB_pytorch/base/Base.cpp
  type Parameter (line 52) | struct Parameter {
  function sampling (line 117) | void sampling(INT *batch_h, INT *batch_t, INT *batch_r, REAL *batch_y, I...
  function main (line 137) | int main() {

FILE: ConvKB_pytorch/base/Corrupt.h
  function INT (line 7) | INT corrupt_head(INT id, INT h, INT r) {
  function INT (line 39) | INT corrupt_tail(INT id, INT t, INT r) {
  function INT (line 72) | INT corrupt_rel(INT id, INT h, INT t) {
  function _find (line 105) | bool _find(INT h, INT t, INT r) {
  function INT (line 118) | INT corrupt(INT h, INT r){

FILE: ConvKB_pytorch/base/Random.h
  function randReset (line 9) | void randReset() {
  function randd (line 15) | unsigned long long randd(INT id) {
  function INT (line 20) | INT rand_max(INT id, INT x) {
  function INT (line 28) | INT rand(INT a, INT b){

FILE: ConvKB_pytorch/base/Reader.h
  function importTrainFiles (line 23) | void importTrainFiles() {
  function importTestFiles (line 128) | void importTestFiles() {
  function importTypeFiles (line 213) | void importTypeFiles() {

FILE: ConvKB_pytorch/base/Setting.h
  function setInPath (line 14) | void setInPath(char *path) {
  function setTestFilePath (line 23) | void setTestFilePath(char *path) {
  function setOutPath (line 32) | void setOutPath(char *path) {
  function setWorkThreads (line 47) | void setWorkThreads(INT threads) {
  function INT (line 52) | INT getWorkThreads() {
  function INT (line 68) | INT getEntityTotal() {
  function INT (line 73) | INT getRelationTotal() {
  function INT (line 78) | INT getTripleTotal() {
  function INT (line 83) | INT getTrainTotal() {
  function INT (line 88) | INT getTestTotal() {
  function INT (line 93) | INT getValidTotal() {
  function setBern (line 103) | void setBern(INT con) {

FILE: ConvKB_pytorch/base/Test.h
  function initTest (line 18) | void initTest() {
  function getHeadBatch (line 28) | void getHeadBatch(INT *ph, INT *pt, INT *pr) {
  function getTailBatch (line 37) | void getTailBatch(INT *ph, INT *pt, INT *pr) {
  function testHead (line 46) | void testHead(REAL *con) {
  function testTail (line 109) | void testTail(REAL *con) {
  function test_link_prediction (line 170) | void test_link_prediction() {
  function getNegTest (line 219) | void getNegTest() {
  function getNegValid (line 237) | void getNegValid() {
  function getTestBatch (line 254) | void getTestBatch(INT *ph, INT *pt, INT *pr, INT *nh, INT *nt, INT *nr) {
  function getValidBatch (line 267) | void getValidBatch(INT *ph, INT *pt, INT *pr, INT *nh, INT *nt, INT *nr) {
  function getBestThreshold (line 280) | void getBestThreshold(REAL *relThresh, REAL *score_pos, REAL *score_neg) {
  function REAL (line 321) | REAL test_triple_classification(REAL *relThresh, REAL *score_pos, REAL *...

FILE: ConvKB_pytorch/base/Triple.h
  function cmp_list (line 5) | struct Triple {
  function cmp_head (line 18) | static bool cmp_head(const Triple &a, const Triple &b) {
  function cmp_tail (line 22) | static bool cmp_tail(const Triple &a, const Triple &b) {
  function cmp_rel (line 26) | static bool cmp_rel(const Triple &a, const Triple &b) {
  function cmp_rel2 (line 30) | static bool cmp_rel2(const Triple &a, const Triple &b) {

FILE: ConvKB_pytorch/base/Valid.h
  function validInit (line 14) | void validInit() {
  function getValidHeadBatch (line 22) | void getValidHeadBatch(INT *ph, INT *pt, INT *pr) {
  function getValidTailBatch (line 31) | void getValidTailBatch(INT *ph, INT *pt, INT *pr) {
  function validHead (line 40) | void validHead(REAL *con) {
  function validTail (line 60) | void validTail(REAL *con) {
  function REAL (line 81) | REAL  getValidHit10() {

FILE: ConvKB_pytorch/train_ConvKB.py
  function get_term_id (line 69) | def get_term_id(filename):
  function get_init_embeddings (line 80) | def get_init_embeddings(relinit, entinit):

FILE: ConvKB_tf/batching.py
  function randn (line 26) | def randn(*args): return np.random.randn(*args).astype('f')
  class Batch_Loader (line 28) | class Batch_Loader(object):
    method __init__ (line 29) | def __init__(self, train_triples, words_indexes, indexes_words, headTa...
    method __call__ (line 57) | def __call__(self):

FILE: ConvKB_tf/builddata.py
  function read_from_id (line 8) | def read_from_id(filename='../data/WN18RR/entity2id.txt'):
  function init_norm_Vector (line 20) | def init_norm_Vector(relinit, entinit, embedding_size):
  function getID (line 39) | def getID(folder='data/WN18RR/'):
  function parse_line (line 85) | def parse_line(line):
  function load_triples_from_txt (line 97) | def load_triples_from_txt(filename, words_indexes=None, parse_line=parse...
  function build_data (line 150) | def build_data(name='WN18', path='../data'):
  function dic_of_chars (line 198) | def dic_of_chars(words_indexes):
  function convert_to_seq_chars (line 208) | def convert_to_seq_chars(x_batch, lstChars, indexes_words):
  function _pad_sequences (line 217) | def _pad_sequences(sequences, pad_tok, max_length):
  function pad_sequences (line 228) | def pad_sequences(sequences, pad_tok):

FILE: ConvKB_tf/eval.py
  function predict (line 171) | def predict(x_batch, y_batch, writer=None):
  function test_prediction (line 181) | def test_prediction(x_batch, y_batch, head_or_tail='head'):

FILE: ConvKB_tf/model.py
  class ConvKB (line 4) | class ConvKB(object):
    method __init__ (line 6) | def __init__(self, sequence_length, num_classes, embedding_size, filte...

FILE: ConvKB_tf/train.py
  function train_step (line 128) | def train_step(x_batch, y_batch):
Copy disabled (too large) Download .json
Condensed preview — 107 files, each showing path, character count, and a content snippet. Download the .json file for the full structured content (35,101K chars).
[
  {
    "path": "ConvKB_pytorch/Config.py",
    "chars": 23656,
    "preview": "# coding:utf-8\nimport torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nimport torch.optim as optim\nimport"
  },
  {
    "path": "ConvKB_pytorch/ConvKB.py",
    "chars": 3368,
    "preview": "import torch\nimport torch.autograd as autograd\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim "
  },
  {
    "path": "ConvKB_pytorch/ConvKB_1D.py",
    "chars": 3195,
    "preview": "import torch\nimport torch.autograd as autograd\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim "
  },
  {
    "path": "ConvKB_pytorch/Model.py",
    "chars": 798,
    "preview": "import torch\nimport torch.autograd as autograd\nimport torch.nn as nn\nimport torch.nn.functional as F\nimport torch.optim "
  },
  {
    "path": "ConvKB_pytorch/base/Base.cpp",
    "chars": 3453,
    "preview": "#include \"Setting.h\"\n#include \"Random.h\"\n#include \"Reader.h\"\n#include \"Corrupt.h\"\n#include \"Test.h\"\n#include \"Valid.h\"\n#"
  },
  {
    "path": "ConvKB_pytorch/base/Corrupt.h",
    "chars": 3271,
    "preview": "#ifndef CORRUPT_H\n#define CORRUPT_H\n#include \"Random.h\"\n#include \"Triple.h\"\n#include \"Reader.h\"\n\nINT corrupt_head(INT id"
  },
  {
    "path": "ConvKB_pytorch/base/Random.h",
    "chars": 603,
    "preview": "#ifndef RANDOM_H\n#define RANDOM_H\n#include \"Setting.h\"\n#include <cstdlib>\n\nunsigned long long *next_random;\n\nextern \"C\"\n"
  },
  {
    "path": "ConvKB_pytorch/base/Reader.h",
    "chars": 9364,
    "preview": "#ifndef READER_H\n#define READER_H\n#include \"Setting.h\"\n#include \"Triple.h\"\n#include <cstdlib>\n#include <algorithm>\n\nINT "
  },
  {
    "path": "ConvKB_pytorch/base/Setting.h",
    "chars": 1761,
    "preview": "#ifndef SETTING_H\n#define SETTING_H\n#define INT long\n#define REAL float\n#include <cstring>\n#include <cstdio>\n#include <s"
  },
  {
    "path": "ConvKB_pytorch/base/Test.h",
    "chars": 13035,
    "preview": "#ifndef TEST_H\n#define TEST_H\n#include \"Setting.h\"\n#include \"Reader.h\"\n#include \"Corrupt.h\"\n\n/*========================="
  },
  {
    "path": "ConvKB_pytorch/base/Triple.h",
    "chars": 893,
    "preview": "#ifndef TRIPLE_H\n#define TRIPLE_H\n#include \"Setting.h\"\n\nstruct Triple {\n\n\tINT h, r, t;\n\n\tstatic INT minimal(INT a,INT b)"
  },
  {
    "path": "ConvKB_pytorch/base/Valid.h",
    "chars": 2182,
    "preview": "#ifndef VALID_H\n#define VALID_H\n#include \"Setting.h\"\n#include \"Reader.h\"\n#include \"Corrupt.h\"\n\nINT lastValidHead = 0;\nIN"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/1-1.txt",
    "chars": 11973,
    "preview": "832\n2326 6886 637\n4843 4843 149\n9671 7174 190\n8055 8055 305\n9549 11909 855\n13652 13585 446\n1100 1100 305\n1321 14529 176\n"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/1-n.txt",
    "chars": 70522,
    "preview": "5259\n147 11738 592\n3572 8632 143\n12761 4179 88\n3244 201 129\n3244 11230 129\n9085 11970 1\n1963 6035 203\n833 10624 263\n4654"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/entity2id.txt",
    "chars": 225015,
    "preview": "14951\n/m/027rn\t0\n/m/06cx9\t1\n/m/017dcd\t2\n/m/06v8s0\t3\n/m/07s9rl0\t4\n/m/0170z3\t5\n/m/01sl1q\t6\n/m/044mz_\t7\n/m/0cnk2q\t8\n/m/02nz"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/n-1.txt",
    "chars": 114333,
    "preview": "8637\n3136 4357 588\n6675 58 267\n719 2241 326\n5864 443 110\n13576 138 55\n76 1278 230\n9368 2018 42\n3956 6313 1056\n7654 12726"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/n-n.py",
    "chars": 3125,
    "preview": "lef = {}\nrig = {}\nrellef = {}\nrelrig = {}\n\ntriple = open(\"train2id.txt\", \"r\")\nvalid = open(\"valid2id.txt\", \"r\")\ntest = o"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/n-n.txt",
    "chars": 567778,
    "preview": "44343\n453 1347 37\n8663 4522 307\n2404 8386 186\n722 806 37\n1248 10937 26\n9182 1043 20\n706 14564 28\n706 14004 28\n213 9353 9"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/relation2id.txt",
    "chars": 89799,
    "preview": "1345\n/location/country/form_of_government\t0\n/tv/tv_program/regular_cast./tv/regular_tv_appearance/actor\t1\n/media_common/"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/test2id.txt",
    "chars": 764592,
    "preview": "59071\n453 1347 37\n3136 4357 588\n8663 4522 307\n2404 8386 186\n722 806 37\n1248 10937 26\n9182 1043 20\n706 14564 28\n706 14004"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/test2id_all.txt",
    "chars": 882734,
    "preview": "59071\n3\t453 1347 37\n2\t3136 4357 588\n3\t8663 4522 307\n3\t2404 8386 186\n3\t722 806 37\n3\t1248 10937 26\n3\t9182 1043 20\n3\t706 14"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/train2id.txt",
    "chars": 6251267,
    "preview": "483142\n0 1 0\n2 3 1\n4 5 2\n6 7 3\n8 9 4\n10 11 5\n12 13 6\n14 15 7\n16 17 8\n18 19 9\n20 21 10\n22 23 11\n24 25 12\n26 27 13\n28 29 1"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/type_constrain.txt",
    "chars": 1597866,
    "preview": "1345\n1343\t1\t11904\n1343\t1\t11904\n1200\t4\t3123\t1034\t58\t5733\n1200\t4\t12123\t4388\t11087\t11088\n1175\t2\t13219\t14400\n1175\t5\t487\t4584"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K/valid2id.txt",
    "chars": 647201,
    "preview": "50000\n5167 1427 52\n239 2379 326\n837 9339 3\n7674 4431 272\n4528 8708 155\n71 5412 32\n5041 5979 26\n11273 3632 390\n10994 36 1"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/1-1.txt",
    "chars": 2657,
    "preview": "192\n5335 6299 230\n5299 10806 226\n14151 12928 94\n5692 5692 146\n11618 11618 56\n986 13237 94\n6680 7830 94\n1080 10505 94\n101"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/1-n.txt",
    "chars": 17040,
    "preview": "1293\n13757 3763 50\n2279 130 71\n2279 9410 71\n6559 12201 1\n1180 4006 102\n2955 4214 22\n8313 11808 132\n4709 8227 189\n4770 12"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/entity2id.txt",
    "chars": 233099,
    "preview": "14541\r\n/m/027rn\t0\r\n/m/06cx9\t1\r\n/m/017dcd\t2\r\n/m/06v8s0\t3\r\n/m/07s9rl0\t4\r\n/m/0170z3\t5\r\n/m/01sl1q\t6\r\n/m/044mz_\t7\r\n/m/0cnk2q\t"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/entity2id_100init.txt",
    "chars": 233092,
    "preview": "/m/027rn\t0\r\n/m/06cx9\t1\r\n/m/017dcd\t2\r\n/m/06v8s0\t3\r\n/m/07s9rl0\t4\r\n/m/0170z3\t5\r\n/m/01sl1q\t6\r\n/m/044mz_\t7\r\n/m/0cnk2q\t8\r\n/m/0"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/n-1.txt",
    "chars": 52880,
    "preview": "4185\n422 2438 154\n12013 90 33\n44 774 118\n6850 1652 24\n5308 10758 124\n8254 494 110\n10469 718 186\n10778 911 80\n5900 90 34\n"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/n-n.py",
    "chars": 3125,
    "preview": "lef = {}\nrig = {}\nrellef = {}\nrelrig = {}\n\ntriple = open(\"train2id.txt\", \"r\")\nvalid = open(\"valid2id.txt\", \"r\")\ntest = o"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/n-n.txt",
    "chars": 186511,
    "preview": "14796\n6180 2861 148\n1454 5951 92\n6651 637 13\n1124 14037 15\n1124 13245 15\n140 6827 6\n1899 6401 7\n98 2794 37\n9317 3257 31\n"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/relation2id.txt",
    "chars": 15764,
    "preview": "237\r\n/location/country/form_of_government\t0\r\n/tv/tv_program/regular_cast./tv/regular_tv_appearance/actor\t1\r\n/media_commo"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/relation2id_100init.txt",
    "chars": 15759,
    "preview": "/location/country/form_of_government\t0\r\n/tv/tv_program/regular_cast./tv/regular_tv_appearance/actor\t1\r\n/media_common/net"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/relation2vec100.init",
    "chars": 225364,
    "preview": "-0.100144\t-0.109701\t-0.064256\t0.022187\t0.047555\t0.064904\t0.028120\t-0.049784\t0.100976\t-0.005863\t0.133728\t0.004563\t0.10196"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/test2id.txt",
    "chars": 259074,
    "preview": "20466\n6180 2861 148\n1454 5951 92\n6651 637 13\n1124 14037 15\n1124 13245 15\n140 6827 6\n1899 6401 7\n98 2794 37\n9317 3257 31\n"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/test2id_all.txt",
    "chars": 300006,
    "preview": "20466\n3\t6180 2861 148\n3\t1454 5951 92\n3\t6651 637 13\n3\t1124 14037 15\n3\t1124 13245 15\n3\t140 6827 6\n3\t1899 6401 7\n3\t98 2794 "
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/train2id.txt",
    "chars": 3416118,
    "preview": "272115\n0 1 0\n2 3 1\n4 5 2\n6 7 3\n8 9 4\n10 11 5\n12 13 6\n14 15 7\n16 17 8\n18 19 9\n20 21 10\n22 23 11\n24 25 12\n26 27 13\n28 29 1"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/type_constrain.txt",
    "chars": 828462,
    "preview": "237\n216\t23\t9229\t4633\t12816\t6727\t5526\t9228\t10434\t13204\t5983\t978\t13322\t3416\t9787\t9157\t13430\t11024\t10843\t5579\t6500\t13090\t10"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/FB15K237/valid2id.txt",
    "chars": 222093,
    "preview": "17535\n6404 1211 31\n4089 1437 154\n8549 434 85\n8379 3498 47\n2364 235 13\n5042 23 11\n8252 160 51\n14424 1694 127\n3929 12645 7"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/1-1.txt",
    "chars": 605,
    "preview": "42\n20391 18987 14\n39837 12647 14\n37344 24682 14\n961 30170 14\n14581 24254 14\n14264 11347 14\n23007 4630 14\n14674 7581 14\n4"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/1-n.txt",
    "chars": 26270,
    "preview": "1847\n7951 38768 8\n13820 25808 10\n16395 29310 3\n11866 20457 1\n12799 21410 13\n19036 2722 10\n566 37370 10\n12652 34179 13\n61"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/entity2id.txt",
    "chars": 603041,
    "preview": "40943\n05451384\t0\n04958634\t1\n00620424\t2\n09887034\t3\n12149144\t4\n03257343\t5\n02757462\t6\n03757925\t7\n02453611\t8\n14108324\t9\n0445"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/n-1.txt",
    "chars": 26818,
    "preview": "1981\n18323 197 15\n39889 11075 5\n24460 2195 15\n6067 31615 5\n27845 12500 5\n12730 6128 5\n11108 18068 5\n15590 19118 5\n32278 "
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/n-n.py",
    "chars": 3125,
    "preview": "lef = {}\nrig = {}\nrellef = {}\nrelrig = {}\n\ntriple = open(\"train2id.txt\", \"r\")\nvalid = open(\"valid2id.txt\", \"r\")\ntest = o"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/n-n.txt",
    "chars": 15230,
    "preview": "1130\n24257 24051 2\n24484 3510 2\n38166 8808 2\n14244 17173 2\n23148 1594 2\n1550 4544 2\n1091 39703 17\n22912 4228 2\n12735 356"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/relation2id.txt",
    "chars": 363,
    "preview": "18\n_member_of_domain_topic\t0\n_member_meronym\t1\n_derivationally_related_form\t2\n_member_of_domain_region\t3\n_similar_to\t4\n_"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/test2id.txt",
    "chars": 68910,
    "preview": "5000\n7951 38768 8\n20391 18987 14\n13820 25808 10\n16395 29310 3\n11866 20457 1\n18323 197 15\n39889 11075 5\n24460 2195 15\n606"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/test2id_all.txt",
    "chars": 78910,
    "preview": "5000\n1\t7951 38768 8\n0\t20391 18987 14\n1\t13820 25808 10\n1\t16395 29310 3\n1\t11866 20457 1\n2\t18323 197 15\n2\t39889 11075 5\n2\t2"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/train2id.txt",
    "chars": 1955191,
    "preview": "141442\n27536 33729 10\n25546 10838 5\n1213 38780 6\n4207 6437 2\n27541 34437 2\n8217 20390 7\n23993 10605 10\n40739 23109 2\n283"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/type_constrain.txt",
    "chars": 1035718,
    "preview": "18\n11\t419\t23997\t18885\t34975\t14916\t14179\t37693\t27281\t11545\t21764\t23690\t19010\t36665\t21645\t11265\t24713\t9939\t33034\t2914\t1792"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18/valid2id.txt",
    "chars": 69123,
    "preview": "5000\n18394 30466 5\n4484 32979 2\n9388 13671 9\n26213 3112 1\n21878 36397 5\n7802 35451 7\n19886 33640 5\n1247 16285 15\n18521 3"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/1-1.txt",
    "chars": 554,
    "preview": "42\n17935 35175 9\n65 439 9\n1122 23212 9\n25017 18392 9\n6882 10563 9\n34810 22728 9\n11632 24469 9\n18458 31641 9\n15601 15600 "
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/1-n.txt",
    "chars": 6306,
    "preview": "475\n1678 9202 7\n785 23537 8\n16176 33688 4\n16748 29001 6\n5945 10270 6\n1255 24915 6\n30663 40225 4\n31355 35983 4\n34114 1598"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_also_see.txt",
    "chars": 732,
    "preview": "56\n38200 8055 3\n25183 11235 3\n2507 4435 3\n18377 4166 3\n11585 21329 3\n12153 19756 3\n17011 17169 3\n11643 3908 3\n24578 2311"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_derivationally_related_form.txt",
    "chars": 14130,
    "preview": "1074\n26184 32091 1\n4194 6975 1\n24512 24859 1\n12274 9268 1\n12842 15886 1\n13394 3789 1\n18270 18269 1\n29179 10971 1\n5734 37"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_has_part.txt",
    "chars": 2286,
    "preview": "172\n16748 29001 6\n5945 10270 6\n1255 24915 6\n40287 21134 6\n271 40026 6\n258 35861 6\n31306 34770 6\n22455 27199 6\n637 40404 "
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_hypernym.txt",
    "chars": 16266,
    "preview": "1251\n18671 11241 0\n15582 4030 0\n19165 2969 0\n31740 1183 0\n15602 4367 0\n33182 5053 0\n5109 22368 0\n20774 8060 0\n36698 586 "
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_instance_hypernym.txt",
    "chars": 1544,
    "preview": "122\n38212 121 2\n8415 7033 2\n11449 6220 2\n8306 25327 2\n13604 251 2\n21703 1136 2\n27870 2023 2\n35882 2524 2\n11490 2023 2\n23"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_member_meronym.txt",
    "chars": 3396,
    "preview": "253\n16176 33688 4\n30663 40225 4\n31355 35983 4\n34114 1598 4\n15430 617 4\n10581 27666 4\n20809 32206 4\n35258 25396 4\n1038 40"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_member_of_domain_region.txt",
    "chars": 321,
    "preview": "26\n785 23537 8\n1559 14508 8\n840 19493 8\n785 6770 8\n3408 38801 8\n785 36761 8\n3408 28506 8\n840 36647 8\n785 1493 8\n785 3874"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_member_of_domain_usage.txt",
    "chars": 313,
    "preview": "24\n1678 9202 7\n1678 30089 7\n785 11761 7\n1678 18000 7\n1678 35669 7\n6633 12050 7\n1823 36030 7\n1046 18788 7\n1678 36781 7\n66"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_similar_to.txt",
    "chars": 44,
    "preview": "3\n15601 15600 10\n6093 24981 10\n849 13402 10\n"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_synset_domain_topic_of.txt",
    "chars": 1441,
    "preview": "114\n40728 914 5\n14576 2971 5\n25546 914 5\n25625 2176 5\n36561 129 5\n5335 9988 5\n15250 2644 5\n31958 914 5\n33711 19257 5\n118"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/_verb_group.txt",
    "chars": 512,
    "preview": "39\n17935 35175 9\n65 439 9\n1122 23212 9\n25017 18392 9\n6882 10563 9\n34810 22728 9\n11632 24469 9\n18458 31641 9\n17534 12584 "
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/entity2id.txt",
    "chars": 643985,
    "preview": "40943\r\n00260881\t0\r\n00260622\t1\r\n01332730\t2\r\n03122748\t3\r\n06066555\t4\r\n00645415\t5\r\n09322930\t6\r\n09360122\t7\r\n07193596\t8\r\n00784"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/entity2id_50init.txt",
    "chars": 643978,
    "preview": "00260881\t0\r\n00260622\t1\r\n01332730\t2\r\n03122748\t3\r\n06066555\t4\r\n00645415\t5\r\n09322930\t6\r\n09360122\t7\r\n07193596\t8\r\n00784342\t9\r\n"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/n-1.txt",
    "chars": 19243,
    "preview": "1487\n18671 11241 0\n15582 4030 0\n19165 2969 0\n31740 1183 0\n15602 4367 0\n33182 5053 0\n5109 22368 0\n20774 8060 0\n36698 586 "
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/n-n.py",
    "chars": 3125,
    "preview": "lef = {}\nrig = {}\nrellef = {}\nrelrig = {}\n\ntriple = open(\"train2id.txt\", \"r\")\nvalid = open(\"valid2id.txt\", \"r\")\ntest = o"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/n-n.txt",
    "chars": 14859,
    "preview": "1130\n26184 32091 1\n4194 6975 1\n24512 24859 1\n12274 9268 1\n12842 15886 1\n13394 3789 1\n38200 8055 3\n18270 18269 1\n29179 10"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/relation2id.txt",
    "chars": 229,
    "preview": "11\r\n_hypernym\t0\r\n_derivationally_related_form\t1\r\n_instance_hypernym\t2\r\n_also_see\t3\r\n_member_meronym\t4\r\n_synset_domain_to"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/relation2id_50init.txt",
    "chars": 225,
    "preview": "_hypernym\t0\r\n_derivationally_related_form\t1\r\n_instance_hypernym\t2\r\n_also_see\t3\r\n_member_meronym\t4\r\n_synset_domain_topic_"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/relation2vec50.init",
    "chars": 5232,
    "preview": "0.074439\t-0.078413\t-0.060850\t-0.074264\t0.093673\t-0.073760\t-0.028944\t0.062929\t0.045091\t-0.057606\t-0.101286\t-0.073069\t-0.0"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/test2id.txt",
    "chars": 40950,
    "preview": "3134\n1678 9202 7\n17935 35175 9\n785 23537 8\n16176 33688 4\n18671 11241 0\n15582 4030 0\n19165 2969 0\n16748 29001 6\n31740 118"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/test2id_all.txt",
    "chars": 47218,
    "preview": "3134\n1\t1678 9202 7\n0\t17935 35175 9\n1\t785 23537 8\n1\t16176 33688 4\n2\t18671 11241 0\n2\t15582 4030 0\n2\t19165 2969 0\n1\t16748 2"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/train2id.txt",
    "chars": 1124910,
    "preview": "86835\n0 1 0\n2 3 1\n4 5 1\n6 7 2\n8 9 1\n10 11 1\n12 13 0\n14 15 0\n16 17 0\n18 19 1\n20 21 0\n22 23 3\n24 25 0\n26 27 0\n28 29 0\n30 3"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/type_constrain.txt",
    "chars": 618450,
    "preview": "11\n10\t82\t15601\t15600\t4898\t16473\t16472\t961\t8156\t3441\t19682\t2437\t455\t7032\t13402\t9779\t3515\t12045\t24127\t25225\t26292\t4553\t662"
  },
  {
    "path": "ConvKB_pytorch/benchmarks/WN18RR/valid2id.txt",
    "chars": 39634,
    "preview": "3034\n33512 4410 0\n16031 32320 1\n40543 172 5\n15278 39692 4\n22936 14838 0\n31887 743 2\n29405 27360 0\n35494 15192 1\n18925 32"
  },
  {
    "path": "ConvKB_pytorch/make.sh",
    "chars": 81,
    "preview": "g++ ./base/Base.cpp -fPIC -shared -o ./release/Base.so -pthread -O3 -march=native"
  },
  {
    "path": "ConvKB_pytorch/train_ConvKB.py",
    "chars": 5554,
    "preview": "from Config import Config\nfrom ConvKB import ConvKB\nimport json\nimport os\nimport numpy as np\n\nfrom argparse import Argum"
  },
  {
    "path": "ConvKB_tf/batching.py",
    "chars": 4476,
    "preview": "import sys, os\nimport logging\nimport numpy as np\nimport colorsys\n\n\n# Current path\ncur_path = os.path.dirname(os.path.rea"
  },
  {
    "path": "ConvKB_tf/builddata.py",
    "chars": 8238,
    "preview": "import scipy\nimport scipy.io\nimport random\n\nfrom batching import *\n\n\ndef read_from_id(filename='../data/WN18RR/entity2id"
  },
  {
    "path": "ConvKB_tf/data/FB15k-237/entity2id.txt",
    "chars": 233092,
    "preview": "/m/027rn\t0\r\n/m/06cx9\t1\r\n/m/017dcd\t2\r\n/m/06v8s0\t3\r\n/m/07s9rl0\t4\r\n/m/0170z3\t5\r\n/m/01sl1q\t6\r\n/m/044mz_\t7\r\n/m/0cnk2q\t8\r\n/m/0"
  },
  {
    "path": "ConvKB_tf/data/FB15k-237/relation2id.txt",
    "chars": 15759,
    "preview": "/location/country/form_of_government\t0\r\n/tv/tv_program/regular_cast./tv/regular_tv_appearance/actor\t1\r\n/media_common/net"
  },
  {
    "path": "ConvKB_tf/data/FB15k-237/relation2vec100.init",
    "chars": 225364,
    "preview": "-0.100144\t-0.109701\t-0.064256\t0.022187\t0.047555\t0.064904\t0.028120\t-0.049784\t0.100976\t-0.005863\t0.133728\t0.004563\t0.10196"
  },
  {
    "path": "ConvKB_tf/data/FB15k-237/test.txt",
    "chars": 1519499,
    "preview": "/m/08966\t/travel/travel_destination/climate./travel/travel_destination_monthly_climate/month\t/m/05lf_\r\n/m/01hww_\t/music/"
  },
  {
    "path": "ConvKB_tf/data/FB15k-237/valid.txt",
    "chars": 1303101,
    "preview": "/m/07pd_j\t/film/film/genre\t/m/02l7c8\r\n/m/06wxw\t/location/location/time_zones\t/m/02fqwt\r\n/m/01t94_1\t/people/person/spouse"
  },
  {
    "path": "ConvKB_tf/data/WN18RR/entity2id.txt",
    "chars": 643978,
    "preview": "00260881\t0\r\n00260622\t1\r\n01332730\t2\r\n03122748\t3\r\n06066555\t4\r\n00645415\t5\r\n09322930\t6\r\n09360122\t7\r\n07193596\t8\r\n00784342\t9\r\n"
  },
  {
    "path": "ConvKB_tf/data/WN18RR/relation2id.txt",
    "chars": 225,
    "preview": "_hypernym\t0\r\n_derivationally_related_form\t1\r\n_instance_hypernym\t2\r\n_also_see\t3\r\n_member_meronym\t4\r\n_synset_domain_topic_"
  },
  {
    "path": "ConvKB_tf/data/WN18RR/relation2vec100.init",
    "chars": 10463,
    "preview": "0.006882\t0.075964\t0.036331\t0.034198\t0.016211\t-0.008821\t-0.020629\t-0.014837\t0.052503\t0.037201\t-0.029670\t-0.056403\t0.04463"
  },
  {
    "path": "ConvKB_tf/data/WN18RR/relation2vec50.init",
    "chars": 5232,
    "preview": "0.074439\t-0.078413\t-0.060850\t-0.074264\t0.093673\t-0.073760\t-0.028944\t0.062929\t0.045091\t-0.057606\t-0.101286\t-0.073069\t-0.0"
  },
  {
    "path": "ConvKB_tf/data/WN18RR/test.txt",
    "chars": 113180,
    "preview": "06845599\t_member_of_domain_usage\t03754979\n00789448\t_verb_group\t01062739\n08860123\t_member_of_domain_region\t05688486\n02233"
  },
  {
    "path": "ConvKB_tf/data/WN18RR/train.txt",
    "chars": 3135377,
    "preview": "00260881\t_hypernym\t00260622\n01332730\t_derivationally_related_form\t03122748\n06066555\t_derivationally_related_form\t0064541"
  },
  {
    "path": "ConvKB_tf/data/WN18RR/valid.txt",
    "chars": 110415,
    "preview": "02174461\t_hypernym\t02176268\n05074057\t_derivationally_related_form\t02310895\n08390511\t_synset_domain_topic_of\t08199025\n020"
  },
  {
    "path": "ConvKB_tf/eval.py",
    "chars": 12747,
    "preview": "import tensorflow as tf\nimport numpy as np\nfrom scipy.stats import rankdata\n\nnp.random.seed(1234)\nimport os\nimport time\n"
  },
  {
    "path": "ConvKB_tf/evalFB15k-237.sh",
    "chars": 1164,
    "preview": "nohup python eval.py --embedding_dim 100 --num_filters 50 --name FB15k-237 --useConstantInit --model_name fb15k237 --num"
  },
  {
    "path": "ConvKB_tf/evalWN18RR.sh",
    "chars": 976,
    "preview": "nohup python eval.py --embedding_dim 50 --num_filters 500 --name WN18RR --model_name wn18rr --num_splits 8 --testIdx 0 &"
  },
  {
    "path": "ConvKB_tf/model.py",
    "chars": 3673,
    "preview": "import tensorflow as tf\nimport numpy as np\nimport math\nclass ConvKB(object):\n\n    def __init__(self, sequence_length, nu"
  },
  {
    "path": "ConvKB_tf/train.py",
    "chars": 7006,
    "preview": "import tensorflow as tf\nimport numpy as np\n\nnp.random.seed(1234)\nimport os\nimport time\nimport datetime\nfrom argparse imp"
  },
  {
    "path": "ConvKB_tf/training_commands.txt",
    "chars": 260,
    "preview": "python train.py --embedding_dim 50 --num_filters 500 --learning_rate 0.0001 --name WN18RR --model_name wn18rr --saveStep"
  },
  {
    "path": "LICENSE",
    "chars": 11357,
    "preview": "                                 Apache License\n                           Version 2.0, January 2004\n                   "
  },
  {
    "path": "README.md",
    "chars": 4196,
    "preview": "<p align=\"center\">\n\t<img src=\"https://github.com/daiquocnguyen/ConvKB/blob/master/convkb_logo.png\">\n</p>\n\n# A Novel Embe"
  }
]

// ... and 6 more files (download for full content)

About this extraction

This page contains the full source code of the daiquocnguyen/ConvKB GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 107 files (151.3 MB, approximately 8.0M tokens) and a symbol index of 144 extracted functions, classes, methods, constants, and types. Use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!