Full Code of wziji/deep_ctr for AI

master 0fa3e043300e cached
67 files
3.0 MB
784.5k tokens
115 symbols
1 requests
Download .txt
Showing preview only (3,139K chars total). Download the full file or copy to clipboard to get everything.
Repository: wziji/deep_ctr
Branch: master
Commit: 0fa3e043300e
Files: 67
Total size: 3.0 MB

Directory structure:
gitextract_0m5oq6rv/

├── AFM/
│   ├── AFMLayer.py
│   ├── AFM_Model.py
│   ├── README.md
│   ├── data.py
│   ├── data_generator.py
│   ├── master.sh
│   ├── preprocess.py
│   └── train_AFM_model.py
├── BST/
│   ├── README.md
│   ├── bst_model.py
│   ├── din.py
│   └── transformer.py
├── BilinearFFM/
│   ├── BilinearFFM.py
│   ├── BilinearInteraction.py
│   ├── README.md
│   ├── data.py
│   ├── data_generator.py
│   ├── master.sh
│   ├── preprocess.py
│   └── train_BilinearFFM_model.py
├── DeepFM/
│   ├── DeepFM.ipynb
│   ├── README.md
│   └── train.csv
├── ESMM/
│   ├── README.md
│   ├── data/
│   │   └── esmm_raw_sample_data
│   ├── master.sh
│   ├── run_train_esmm_model.sh
│   ├── split_train_val.py
│   ├── split_train_val.sh
│   ├── tar_model.py
│   ├── train_esmm_finetune.py
│   └── write_tfrecord.py
├── MIND/
│   ├── CapsuleLayer.py
│   ├── README.md
│   ├── data.py
│   ├── data_generator.py
│   ├── master.sh
│   ├── mind.py
│   ├── predict.py
│   ├── preprocess.py
│   └── train_mind.py
├── MMoE/
│   └── README.md
├── NFM/
│   ├── NFM.ipynb
│   ├── README.md
│   └── train.csv
├── README.md
├── Wide&Deep/
│   ├── README.md
│   └── wide_and_deep.py
├── Word2vec/
│   ├── README.md
│   ├── data/
│   │   └── w2v_order_seq
│   ├── train_w2v_model.sh
│   └── word2vec
├── YouTubeNet/
│   ├── README.md
│   ├── SequencePoolingLayer.py
│   ├── YouTubeNet.py
│   ├── data.py
│   ├── data_generator.py
│   ├── load_YouTubeNet_model_to_predict.py
│   ├── master.sh
│   ├── preprocess.py
│   └── train_YouTubeNet_model.py
└── vgg16_figure_search_annoy/
    ├── README.md
    ├── build_figure_ann.py
    ├── download_jd_figures.py
    ├── extract_figure_feature.py
    ├── search_topN_figure.py
    └── threadings_download_txt.py

================================================
FILE CONTENTS
================================================

================================================
FILE: AFM/AFMLayer.py
================================================
import tensorflow as tf
from tensorflow.keras.layers import Layer
from tensorflow.keras import backend as K
import itertools
from tensorflow.keras.initializers import (Zeros, glorot_normal, glorot_uniform)
from tensorflow.keras.layers import Concatenate
from tensorflow.keras.regularizers import l2



class AFMLayer(Layer):
    """Attentional Factorization Machine: models pairwise (order-2) feature
    interactions without the linear term and bias.

      Input shape
        - A list of 3D tensors with shape: ``(batch_size, 1, embedding_size)``.

      Output shape
        - 2D tensor with shape: ``(batch_size, 1)``.

      Arguments
        - **attention_factor** : Positive integer, dimensionality of the
         attention network output space.

        - **l2_reg_w** : float between 0 and 1. L2 regularizer strength
         applied to the attention network.

        - **dropout_rate** : float in [0,1). Fraction of the attention net
         output units to drop out.

        - **seed** : A Python integer to use as random seed.

      References
        - [Attentional Factorization Machines : Learning the Weight of Feature
        Interactions via Attention Networks](https://arxiv.org/pdf/1708.04617.pdf)
    """

    def __init__(self, attention_factor=4, l2_reg_w=0, dropout_rate=0, seed=1024, **kwargs):
        self.attention_factor = attention_factor
        self.l2_reg_w = l2_reg_w
        self.dropout_rate = dropout_rate
        self.seed = seed
        super(AFMLayer, self).__init__(**kwargs)

    def build(self, input_shape):
        # The layer expects a *list* of embedding tensors, one per sparse field.
        if not isinstance(input_shape, list) or len(input_shape) < 2:
            raise ValueError('A `AttentionalFM` layer should be called '
                             'on a list of at least 2 inputs')

        # All field embeddings must share one shape so they can be
        # multiplied pairwise below.
        shape_set = set()
        reduced_input_shape = [shape.as_list() for shape in input_shape]
        for i in range(len(input_shape)):
            shape_set.add(tuple(reduced_input_shape[i]))

        if len(shape_set) > 1:
            raise ValueError('A `AttentionalFM` layer requires '
                             'inputs with same shapes '
                             'Got different shapes: %s' % (shape_set))

        if len(input_shape[0]) != 3 or input_shape[0][1] != 1:
            raise ValueError('A `AttentionalFM` layer requires '
                             'inputs of a list with same shape tensor like\
                             (None, 1, embedding_size)'
                             'Got different shapes: %s' % (input_shape[0]))

        embedding_size = int(input_shape[0][-1])

        # Attention net: pair interaction -> relu(x W + b) -> h -> softmax weight.
        self.attention_W = self.add_weight(shape=(embedding_size,
                                                  self.attention_factor), initializer=glorot_normal(seed=self.seed),
                                           regularizer=l2(self.l2_reg_w), name="attention_W")
        self.attention_b = self.add_weight(
            shape=(self.attention_factor,), initializer=Zeros(), name="attention_b")
        self.projection_h = self.add_weight(shape=(self.attention_factor, 1),
                                            initializer=glorot_normal(seed=self.seed), name="projection_h")
        # Final projection from embedding space to the scalar AFM output.
        self.projection_p = self.add_weight(shape=(
            embedding_size, 1), initializer=glorot_normal(seed=self.seed), name="projection_p")
        self.dropout = tf.keras.layers.Dropout(
            self.dropout_rate, seed=self.seed)

        self.tensordot = tf.keras.layers.Lambda(
            lambda x: tf.tensordot(x[0], x[1], axes=(-1, 0)))

        # Be sure to call this somewhere!
        super(AFMLayer, self).build(input_shape)

    def call(self, inputs, training=None, **kwargs):

        if K.ndim(inputs[0]) != 3:
            # Bug fix: report the offending tensor's rank. The previous
            # K.ndim(inputs) on the list itself raised AttributeError instead
            # of producing this message.
            raise ValueError(
                "Unexpected inputs dimensions %d, expect to be 3 dimensions" % (K.ndim(inputs[0])))

        embeds_vec_list = inputs
        row = []
        col = []

        # Enumerate all unordered field pairs (i, j), i < j.
        for r, c in itertools.combinations(embeds_vec_list, 2):
            row.append(r)
            col.append(c)

        p = tf.concat(row, axis=1)   # (batch, n_pairs, embedding_size)
        q = tf.concat(col, axis=1)
        inner_product = p * q        # element-wise interaction per pair

        bi_interaction = inner_product
        attention_temp = tf.nn.relu(tf.nn.bias_add(tf.tensordot(
            bi_interaction, self.attention_W, axes=(-1, 0)), self.attention_b))

        # Bug fix: the logits have shape (batch, n_pairs, 1); the softmax must
        # normalize over the *pairs* axis (axis=1). The previous default
        # axis=-1 ran over a size-1 dimension, yielding a constant weight of
        # 1.0 for every pair and disabling the attention mechanism entirely.
        self.normalized_att_score = tf.nn.softmax(tf.tensordot(
            attention_temp, self.projection_h, axes=(-1, 0)), axis=1)
        # Attention-weighted sum over pairs -> (batch, embedding_size).
        attention_output = tf.reduce_sum(
            self.normalized_att_score * bi_interaction, axis=1)

        # Bug fix: forward `training` so dropout is inactive at inference time.
        attention_output = self.dropout(attention_output, training=training)

        afm_out = self.tensordot([attention_output, self.projection_p])
        return afm_out

    def compute_output_shape(self, input_shape):

        if not isinstance(input_shape, list):
            raise ValueError('A `AFMLayer` layer should be called '
                             'on a list of inputs.')
        return (None, 1)

    def get_config(self, ):
        config = {'attention_factor': self.attention_factor,
                  'l2_reg_w': self.l2_reg_w, 'dropout_rate': self.dropout_rate, 'seed': self.seed}
        base_config = super(AFMLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))



================================================
FILE: AFM/AFM_Model.py
================================================
#-*- coding:utf-8 -*-


import tensorflow as tf
from tensorflow.keras.layers import Input, Embedding, concatenate, Dense, Dropout

from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping
from AFMLayer import AFMLayer


def AFM(
    sparse_input_length=1,
    embedding_dim = 64
    ):
    """Build an AFM ranking model over the MovieLens-1M sparse features.

    One Input + Embedding pair per sparse field (index 0 reserved as the
    mask/padding value, hence vocab_size + 1), an AFMLayer over all pairwise
    interactions, and a sigmoid on the summed output.
    """

    # (input name prefix, vocabulary size, embedding name prefix) per field.
    field_specs = [
        ("user_id",    6040, "user_id"),
        ("gender",     2,    "gender"),
        ("age",        7,    "age"),
        ("occupation", 21,   "occupation"),
        ("zip",        3439, "zip"),
        ("item",       3706, "item_id"),
    ]

    # 1. Input layer + 2. Embedding layer, built in lockstep.
    sparse_input_list = []
    sparse_embedding_list = []
    for input_prefix, vocab_size, embedding_prefix in field_specs:
        input_layer = Input(shape=(sparse_input_length, ),
                            name="%s_input_layer" % input_prefix)
        embedding_layer = Embedding(vocab_size + 1, embedding_dim,
                                    mask_zero=True,
                                    name="%s_embedding_layer" % embedding_prefix)(input_layer)
        sparse_input_list.append(input_layer)
        sparse_embedding_list.append(embedding_layer)

    # 3. AFM: one attention unit per unordered field pair.
    n_fields = len(sparse_embedding_list)
    attention_factor = n_fields * (n_fields - 1) // 2
    afm_out = AFMLayer(attention_factor=attention_factor, l2_reg_w=1e-5,
                       dropout_rate=0, seed=2020)(sparse_embedding_list)
    print("\n"*3)
    print("afm_out: ", afm_out)

    # Output: collapse the (batch, 1) AFM score and squash to (0, 1).
    output = tf.nn.sigmoid(tf.reduce_sum(afm_out, axis=-1))

    model = Model(inputs=sparse_input_list, outputs=output)

    return model

================================================
FILE: AFM/README.md
================================================
# tf.version == '2.1.0'

# Data format : 

```python
说明:

(1)第1列:user id;
(2)第2列:user gender id;
(3)第3列:user age id;
(4)第4列:user occupation id;
(5)第5列:user zip id;
(6)第6列:item id;
(7)第7列:label;

```


# run model
```shell
sh master.sh

```


# 模型 summary
```python
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to
==================================================================================================
user_id_input_layer (InputLayer [(None, 1)]          0
__________________________________________________________________________________________________
gender_input_layer (InputLayer) [(None, 1)]          0
__________________________________________________________________________________________________
age_input_layer (InputLayer)    [(None, 1)]          0
__________________________________________________________________________________________________
occupation_input_layer (InputLa [(None, 1)]          0
__________________________________________________________________________________________________
zip_input_layer (InputLayer)    [(None, 1)]          0
__________________________________________________________________________________________________
item_input_layer (InputLayer)   [(None, 1)]          0
__________________________________________________________________________________________________
user_id_embedding_layer (Embedd (None, 1, 64)        386624      user_id_input_layer[0][0]
__________________________________________________________________________________________________
gender_embedding_layer (Embeddi (None, 1, 64)        192         gender_input_layer[0][0]
__________________________________________________________________________________________________
age_embedding_layer (Embedding) (None, 1, 64)        512         age_input_layer[0][0]
__________________________________________________________________________________________________
occupation_embedding_layer (Emb (None, 1, 64)        1408        occupation_input_layer[0][0]
__________________________________________________________________________________________________
zip_embedding_layer (Embedding) (None, 1, 64)        220160      zip_input_layer[0][0]
__________________________________________________________________________________________________
item_id_embedding_layer (Embedd (None, 1, 64)        237248      item_input_layer[0][0]
__________________________________________________________________________________________________
afm_layer (AFMLayer)            (None, 1)            1054        user_id_embedding_layer[0][0]
                                                                 gender_embedding_layer[0][0]
                                                                 age_embedding_layer[0][0]
                                                                 occupation_embedding_layer[0][0]
                                                                 zip_embedding_layer[0][0]
                                                                 item_id_embedding_layer[0][0]
__________________________________________________________________________________________________
tf_op_layer_Sum (TensorFlowOpLa [(None,)]            0           afm_layer[0][0]
__________________________________________________________________________________________________
tf_op_layer_Sigmoid (TensorFlow [(None,)]            0           tf_op_layer_Sum[0][0]
==================================================================================================
Total params: 847,198
Trainable params: 847,198
Non-trainable params: 0
__________________________________________________________________________________________________
None
```


# 参考

```python

1. 《深度学习推荐系统》,王喆著

2. https://github.com/shenweichen/DeepCTR/blob/master/deepctr/layers/interaction.py
```

================================================
FILE: AFM/data.py
================================================
#-*- coding:utf-8 -*-

# https://github.com/shenweichen/DeepMatch/blob/master/examples/colab_MovieLen1M_YoutubeDNN.ipynb


#! wget http://files.grouplens.org/datasets/movielens/ml-1m.zip -O ./ml-1m.zip 
#! wget https://raw.githubusercontent.com/shenweichen/DeepMatch/master/examples/preprocess.py -O preprocess.py
#! unzip -o ml-1m.zip 


import random

import pandas as pd
import numpy as np
from tqdm import tqdm

from preprocess import gen_data_set, gen_model_input
from sklearn.preprocessing import LabelEncoder
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.models import Model



# 1. Load the three MovieLens-1M tables and join them on movie_id / user_id.
unames = ['user_id','gender','age','occupation','zip']
user = pd.read_csv('ml-1m/users.dat',sep='::',header=None,names=unames)

rnames = ['user_id','movie_id','rating','timestamp']
ratings = pd.read_csv('ml-1m/ratings.dat',sep='::',header=None,names=rnames)

mnames = ['movie_id','title','genres']
movies = pd.read_csv('ml-1m/movies.dat',sep='::',header=None,names=mnames)

data = pd.merge(pd.merge(ratings, movies), user)

print(data.shape)
# (1000209, 10)



# 2. Label Encoding for sparse features (encoded ids start at 1 so that 0
#    stays free as the padding/mask value), then build leave-one-out
#    train/test inputs with `gen_data_set` and `gen_model_input`.

sparse_features = ["movie_id", "user_id", "gender", "age", "occupation", "zip"]
SEQ_LEN = 50
negsample = 0

features = ['user_id', 'movie_id', 'gender', 'age', 'occupation', 'zip']
feature_max_idx = {}

for feature in features:
    lbe = LabelEncoder()
    data[feature] = lbe.fit_transform(data[feature]) + 1
    feature_max_idx[feature] = data[feature].max() + 1

user_profile = data[["user_id", "gender", "age", "occupation", "zip"]].drop_duplicates('user_id')
item_profile = data[["movie_id"]].drop_duplicates('movie_id')

user_profile.set_index("user_id", inplace=True)
user_item_list = data.groupby("user_id")['movie_id'].apply(list)

train_set, test_set = gen_data_set(data, negsample)
train_model_input, train_label = gen_model_input(train_set, user_profile, SEQ_LEN)
test_model_input, test_label = gen_model_input(test_set, user_profile, SEQ_LEN)



# 3. Create negative samples: for each example, draw movies the user has
#    neither in their click history nor as the current target.

def build_neg_samples(model_input, labels, all_movie_ids, n_neg):
    """Return one np.array of n_neg unseen movie ids per sample."""
    neg_sample_list = []
    for i in tqdm(range(len(labels))):
        seen = set(model_input['hist_movie_id'][i] + model_input['movie_id'][i])
        neg_ids = random.sample(list(all_movie_ids - seen), n_neg)
        neg_sample_list.append(np.array(neg_ids))
    return neg_sample_list


all_movie_list = set(data['movie_id'])
neg_sample_num = 10

train_neg_sample_list = build_neg_samples(train_model_input, train_label, all_movie_list, neg_sample_num)
test_neg_sample_list = build_neg_samples(test_model_input, test_label, all_movie_list, neg_sample_num)



# 4. Write tab-separated samples, one positive (label 1) plus neg_sample_num
#    negatives (label 0) per example:
#    user_id \t gender \t age \t occupation \t zip \t movie_id \t label

def write_samples(path, model_input, neg_sample_list, n_rows):
    """Write the positive and negative rows for n_rows samples to `path`."""
    with open(path, "w") as out:
        for i in range(n_rows):
            # Shared user-feature prefix for the positive and all its negatives.
            prefix = "%s\t%s\t%s\t%s\t%s" % (
                str(model_input["user_id"][i]),
                str(model_input["gender"][i]),
                str(model_input["age"][i]),
                str(model_input["occupation"][i]),
                str(model_input["zip"][i]),
            )
            out.write("%s\t%s\t1\n" % (prefix, str(model_input["movie_id"][i])))
            for neg_id in neg_sample_list[i]:
                out.write("%s\t%s\t0\n" % (prefix, str(neg_id)))


write_samples("train.txt", train_model_input, train_neg_sample_list, len(train_label))
write_samples("test.txt", test_model_input, test_neg_sample_list, len(test_label))


================================================
FILE: AFM/data_generator.py
================================================
#-*- coding:utf-8 -*-

import numpy as np


def init_output():
    """Create fresh, empty accumulators for one batch of samples.

    Returns:
        A 7-tuple of new empty lists, in column order:
        (user_id, gender, age, occupation, zip_code, movie_id, label).

    Note: the fifth column is the user's zip code; callers should avoid
    binding it to a name that shadows the builtin ``zip``.
    """
    return [], [], [], [], [], [], []


def file_generator(input_path, batch_size):
    """Infinite batch generator over a 7-column tab-separated sample file.

    Each line is: user_id, gender, age, occupation, zip, movie_id, label
    (all integers). Yields ``([user_id, gender, age, occupation, zip_code,
    movie_id], label)`` where every element is an int32 numpy array of up to
    ``batch_size`` rows. Cycles over the file forever (Keras ``fit`` style);
    the last batch of each pass may be smaller than ``batch_size``.
    """
    # Count lines once so the trailing partial batch can be flushed.
    with open(input_path) as f:
        num_lines = sum(1 for _ in f)

    while True:
        # Bug fix: the accumulator is reset at the start of every pass. The
        # previous version kept a global `cnt` running across passes, so the
        # end-of-file flush (`cnt == num_lines`) only fired on the first pass
        # and leftover rows bled into the next epoch's batches.
        rows = []
        with open(input_path, 'r') as f:
            # Iterate the file lazily instead of f.readlines() (whole file in RAM).
            for line_no, line in enumerate(f, 1):
                fields = line.strip().split('\t')
                rows.append(tuple(int(v) for v in fields[:7]))

                if len(rows) == batch_size or line_no == num_lines:
                    # Transpose rows -> columns and convert each to int32.
                    cols = [np.array(col, dtype='int32') for col in zip(*rows)]
                    yield cols[:6], cols[6]
                    rows = []



================================================
FILE: AFM/master.sh
================================================
# Step 1: build train.txt / test.txt samples from the MovieLens-1M data.
python data.py 

# Step 2: train the AFM model on the generated files.
python train_AFM_model.py


================================================
FILE: AFM/preprocess.py
================================================
import random
import numpy as np
from tqdm import tqdm
from tensorflow.python.keras.preprocessing.sequence import pad_sequences

def gen_data_set(data, negsample=0):
    """Build leave-one-out train/test sets from the merged ratings DataFrame.

    For each user (clicks ordered by timestamp), every history prefix is used
    to predict the next movie; the user's final click becomes the test sample.

    Args:
        data: DataFrame with at least user_id, movie_id, rating, timestamp
            columns. Sorted in place by timestamp as a side effect.
        negsample: number of random negative items drawn per positive
            training sample (0 disables negative sampling).

    Returns:
        (train_set, test_set): lists of tuples
        (user_id, reversed_hist_seq, target_movie_id, label, hist_len, rating),
        shuffled. NOTE(review): negative tuples omit the trailing rating, so
        rows have mixed lengths when negsample > 0 — downstream
        gen_model_input only reads indices 0-4, which is why this works.
    """
    data.sort_values("timestamp", inplace=True)  # chronological order per user
    item_ids = data['movie_id'].unique()

    train_set = []
    test_set = []
    for reviewerID, hist in tqdm(data.groupby('user_id')):
        pos_list = hist['movie_id'].tolist()
        rating_list = hist['rating'].tolist()

        if negsample > 0:
            # Sample negatives only from items this user never interacted with.
            candidate_set = list(set(item_ids) - set(pos_list))
            neg_list = np.random.choice(candidate_set,size=len(pos_list)*negsample,replace=True)
        for i in range(1, len(pos_list)):
            # NOTE: `hist` is rebound here from the groupby frame to the
            # click-history prefix list — intentional but easy to misread.
            hist = pos_list[:i]
            if i != len(pos_list) - 1:
                # Positive sample: predict click i from the reversed prefix.
                train_set.append((reviewerID, hist[::-1], pos_list[i], 1,len(hist[::-1]),rating_list[i]))
                for negi in range(negsample):
                    train_set.append((reviewerID, hist[::-1], neg_list[i*negsample+negi], 0,len(hist[::-1])))
            else:
                # The last interaction of each user is held out for testing.
                test_set.append((reviewerID, hist[::-1], pos_list[i],1,len(hist[::-1]),rating_list[i]))

    random.shuffle(train_set)
    random.shuffle(test_set)

    print(len(train_set[0]),len(test_set[0]))

    return train_set,test_set

def gen_model_input(train_set,user_profile,seq_max_len):
    """Convert gen_data_set tuples into a dict of aligned numpy feed arrays.

    Pads each reversed click history to ``seq_max_len`` (post-padding with 0)
    and joins the per-user profile columns (gender/age/occupation/zip) onto
    every sample by user_id. Returns (model_input_dict, label_array).
    """
    uid_arr = np.array([row[0] for row in train_set])
    hist_seqs = [row[1] for row in train_set]
    iid_arr = np.array([row[2] for row in train_set])
    label_arr = np.array([row[3] for row in train_set])
    hist_len_arr = np.array([row[4] for row in train_set])

    padded_hist = pad_sequences(hist_seqs, maxlen=seq_max_len, padding='post', value=0)

    model_input = {
        "user_id": uid_arr,
        "movie_id": iid_arr,
        "hist_movie_id": padded_hist,
        "hist_len": hist_len_arr,
    }

    # Attach the user-profile features, row-aligned via the user_id index.
    for key in ("gender", "age", "occupation", "zip"):
        model_input[key] = user_profile.loc[model_input['user_id']][key].values

    return model_input, label_arr


================================================
FILE: AFM/train_AFM_model.py
================================================
#-*- coding:utf-8 -*-

import math

import tensorflow as tf
from tensorflow.keras.optimizers import Adam

from data_generator import file_generator
from AFM_Model import AFM




# 1. Load data

train_path = "train.txt"
val_path = "test.txt"
batch_size = 1000

# Count samples with context managers (the previous bare open() calls
# leaked file handles).
with open(train_path) as f:
    n_train = sum(1 for _ in f)
with open(val_path) as f:
    n_val = sum(1 for _ in f)

# ceil(n / batch_size): one extra partial step when the sample count is not
# an exact multiple of batch_size (replaces the float/int comparison dance).
steps_per_epoch = math.ceil(n_train / batch_size)
validation_steps = math.ceil(n_val / batch_size)


train_generator = file_generator(train_path, batch_size)
val_generator = file_generator(val_path, batch_size)

print("n_train: ", n_train)
print("n_val: ", n_val)

print("steps_per_epoch: ", steps_per_epoch)
print("validation_steps: ", validation_steps)




# 2. Train model

# Stop when val_loss hasn't improved for 10 epochs; keep the best weights.
early_stopping_cb = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
callbacks = [early_stopping_cb]


model = AFM()
print(model.summary())
tf.keras.utils.plot_model(model, to_file='AFM_model.png', show_shapes=True)


model.compile(loss='binary_crossentropy',
              optimizer=Adam(lr=1e-3),
              metrics=['accuracy'])


# NOTE(review): `shuffle` is ignored for generator input — any shuffling must
# happen when train.txt is written.
history = model.fit(train_generator,
                    epochs=1,
                    steps_per_epoch=steps_per_epoch,
                    callbacks=callbacks,
                    validation_data=val_generator,
                    validation_steps=validation_steps,
                    shuffle=True)



model.save_weights('AFM_model.h5')


================================================
FILE: BST/README.md
================================================
# tf.version == '2.1.0'

# 运行

+ python bst_model.py 


# 模型 summary

```python
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
user_click_item_seq_input_layer [(None, 50)]         0                                            
__________________________________________________________________________________________________
item_input_layer (InputLayer)   [(None, 1)]          0                                            
__________________________________________________________________________________________________
cate_input_layer (InputLayer)   [(None, 1)]          0                                            
__________________________________________________________________________________________________
user_click_cate_seq_input_layer [(None, 50)]         0                                            
__________________________________________________________________________________________________
tf_op_layer_Equal (TensorFlowOp [(None, 50)]         0           user_click_item_seq_input_layer[0
__________________________________________________________________________________________________
item_id_embedding (Embedding)   multiple             2560512     user_click_item_seq_input_layer[0
                                                                 item_input_layer[0][0]           
__________________________________________________________________________________________________
cate_id_embedding (Embedding)   multiple             109568      user_click_cate_seq_input_layer[0
                                                                 cate_input_layer[0][0]           
__________________________________________________________________________________________________
tf_op_layer_Cast (TensorFlowOpL [(None, 50)]         0           tf_op_layer_Equal[0][0]          
__________________________________________________________________________________________________
tf_op_layer_Equal_1 (TensorFlow [(None, 50)]         0           user_click_item_seq_input_layer[0
__________________________________________________________________________________________________
user_id_input_layer (InputLayer [(None, 1)]          0                                            
__________________________________________________________________________________________________
gender_input_layer (InputLayer) [(None, 1)]          0                                            
__________________________________________________________________________________________________
age_input_layer (InputLayer)    [(None, 1)]          0                                            
__________________________________________________________________________________________________
concatenate_1 (Concatenate)     (None, 50, 1024)     0           item_id_embedding[0][0]          
                                                                 cate_id_embedding[0][0]          
__________________________________________________________________________________________________
tf_op_layer_strided_slice (Tens [(None, 1, 1, 50)]   0           tf_op_layer_Cast[0][0]           
__________________________________________________________________________________________________
tf_op_layer_Cast_1 (TensorFlowO [(None, 50)]         0           tf_op_layer_Equal_1[0][0]        
__________________________________________________________________________________________________
user_id_embedding_layer (Embedd (None, 1, 512)       154112      user_id_input_layer[0][0]        
__________________________________________________________________________________________________
gender_embedding_layer (Embeddi (None, 1, 512)       1536        gender_input_layer[0][0]         
__________________________________________________________________________________________________
age_embedding_layer (Embedding) (None, 1, 512)       5632        age_input_layer[0][0]            
__________________________________________________________________________________________________
concatenate_2 (Concatenate)     (None, 1, 1024)      0           item_id_embedding[1][0]          
                                                                 cate_id_embedding[1][0]          
__________________________________________________________________________________________________
encoder (Encoder)               (None, 50, 1024)     12603392    concatenate_1[0][0]              
                                                                 tf_op_layer_strided_slice[0][0]  
__________________________________________________________________________________________________
tf_op_layer_strided_slice_1 (Te [(None, 1, 50)]      0           tf_op_layer_Cast_1[0][0]         
__________________________________________________________________________________________________
concatenate (Concatenate)       (None, 1, 1536)      0           user_id_embedding_layer[0][0]    
                                                                 gender_embedding_layer[0][0]     
                                                                 age_embedding_layer[0][0]        
__________________________________________________________________________________________________
din_attention_layer (DinAttenti (None, 1, 1024)      5244928     concatenate_2[0][0]              
                                                                 encoder[0][0]                    
                                                                 encoder[0][0]                    
                                                                 tf_op_layer_strided_slice_1[0][0]
__________________________________________________________________________________________________
concatenate_3 (Concatenate)     (None, 1, 2560)      0           concatenate[0][0]                
                                                                 din_attention_layer[0][0]        
__________________________________________________________________________________________________
tf_op_layer_Squeeze (TensorFlow [(None, 2560)]       0           concatenate_3[0][0]              
__________________________________________________________________________________________________
FC_1 (Dense)                    (None, 512)          1311232     tf_op_layer_Squeeze[0][0]        
__________________________________________________________________________________________________
dropout_1 (Dropout)             (None, 512)          0           FC_1[0][0]                       
__________________________________________________________________________________________________
FC_2 (Dense)                    (None, 128)          65664       dropout_1[0][0]                  
__________________________________________________________________________________________________
dropout_2 (Dropout)             (None, 128)          0           FC_2[0][0]                       
__________________________________________________________________________________________________
FC_3 (Dense)                    (None, 32)           4128        dropout_2[0][0]                  
__________________________________________________________________________________________________
dropout_3 (Dropout)             (None, 32)           0           FC_3[0][0]                       
__________________________________________________________________________________________________
Sigmoid_output_layer (Dense)    (None, 1)            33          dropout_3[0][0]                  
==================================================================================================
Total params: 22,060,737
Trainable params: 22,060,737
Non-trainable params: 0


```


# 参考

```python

1. Behavior sequence transformer for e-commerce recommendation in Alibaba

2. https://zhuanlan.zhihu.com/p/161311198

3. https://github.com/czy36mengfei/tensorflow2_tutorials_chinese/tree/master/026-Transformer

4. https://github.com/shenweichen/DeepCTR/blob/master/deepctr/layers/sequence.py

```

================================================
FILE: BST/bst_model.py
================================================
import tensorflow as tf
from tensorflow.keras.layers import Input, Embedding, concatenate, Flatten, Dense, Dropout

from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.utils import plot_model

from transformer import Encoder, padding_mask
from din import DinAttentionLayer, din_padding_mask



def bst_model(sparse_input_length=1,
              max_seq_length=50,
              vocab_size_dict=None,
              embedding_dim=512,
              dnn_unit_list=None,
              activation='relu',
              dropout_rate=0.2,
              n_layers=2,
              num_heads=8,
              middle_units=1024,
              training=False
              ):
    """Build the Behavior Sequence Transformer (BST) CTR model.

    Encodes the user's click sequence with a transformer encoder, pools it
    against the target item with DIN attention, concatenates the user
    profile embeddings and predicts a click probability with an MLP.

    Args:
        sparse_input_length: length of each single-value sparse input.
        max_seq_length: length of the padded click sequences.
        vocab_size_dict: vocab sizes keyed by "user_id", "gender", "age",
            "item_id" and "cate_id".
        embedding_dim: embedding size shared by all sparse features.
        dnn_unit_list: hidden sizes of the final MLP (default [512, 128, 32]).
        activation: activation of the final MLP layers.
        dropout_rate: dropout used by the DIN attention unit and the MLP.
        n_layers / num_heads / middle_units: transformer hyper-parameters.
        training: whether dropout inside the transformer encoder is active.

    Returns:
        An uncompiled tf.keras.Model with seven inputs and a sigmoid output.
    """
    # Avoid the mutable-default-argument pitfall; effective default unchanged.
    if dnn_unit_list is None:
        dnn_unit_list = [512, 128, 32]

    # 1. Input layer

    # 1.1 user profile
    user_id_input_layer = Input(shape=(sparse_input_length, ), name="user_id_input_layer")
    gender_input_layer = Input(shape=(sparse_input_length, ), name="gender_input_layer")
    age_input_layer = Input(shape=(sparse_input_length, ), name="age_input_layer")

    # 1.2 user behavior sequences (item ids and category ids)
    user_click_item_seq_input_layer = Input(shape=(max_seq_length, ), name="user_click_item_seq_input_layer")
    user_click_cate_seq_input_layer = Input(shape=(max_seq_length, ), name="user_click_cate_seq_input_layer")

    # 1.3 target item
    item_input_layer = Input(shape=(sparse_input_length, ), name="item_input_layer")
    cate_input_layer = Input(shape=(sparse_input_length, ), name="cate_input_layer")

    # 2. Embedding layer

    # 2.1 user profile embeddings
    user_id_embedding_layer = Embedding(vocab_size_dict["user_id"]+1, embedding_dim, \
                                        mask_zero=True, name='user_id_embedding_layer')(user_id_input_layer)
    gender_embedding_layer = Embedding(vocab_size_dict["gender"]+1, embedding_dim, \
                                       mask_zero=True, name='gender_embedding_layer')(gender_input_layer)
    age_embedding_layer = Embedding(vocab_size_dict["age"]+1, embedding_dim, \
                                    mask_zero=True, name='age_embedding_layer')(age_input_layer)

    # Item / category tables are shared between the behavior sequence and
    # the target item so both live in the same embedding space.
    item_id_embedding = Embedding(vocab_size_dict["item_id"]+1, embedding_dim, \
                                mask_zero=True, name='item_id_embedding')
    cate_id_embedding = Embedding(vocab_size_dict["cate_id"]+1, embedding_dim, \
                                mask_zero=True, name='cate_id_embedding')

    user_click_item_seq_embedding_layer = item_id_embedding(user_click_item_seq_input_layer)
    user_click_cate_seq_embedding_layer = cate_id_embedding(user_click_cate_seq_input_layer)

    # 2.2 target item embeddings
    target_item_embedding_layer = item_id_embedding(item_input_layer)
    target_cate_embedding_layer = cate_id_embedding(cate_input_layer)

    # 3. Concat layer

    # 3.1 user profile features
    other_features_concat_layer = concatenate([user_id_embedding_layer, gender_embedding_layer, \
                                               age_embedding_layer], axis=-1)

    # 3.2 behavior sequence features fed to the transformer
    input_transformer_layer = concatenate([user_click_item_seq_embedding_layer, \
                                           user_click_cate_seq_embedding_layer], axis=-1)

    # 3.3 target item, used as the DIN attention query
    input_din_layer_query = concatenate([target_item_embedding_layer, \
                                         target_cate_embedding_layer], axis=-1)

    # 4. Transformer layer
    d_model = input_transformer_layer.shape[-1]
    padding_mask_list = padding_mask(user_click_item_seq_input_layer)

    # Bug fix: `training` used to be passed positionally and landed in the
    # `epsilon` parameter of Encoder.__init__ (epsilon, dropout_rate precede
    # training there); pass it by keyword so dropout mode is actually set.
    output_tranformer_layer = Encoder(n_layers, d_model, num_heads,
                                      middle_units, max_seq_length,
                                      training=training)([input_transformer_layer, padding_mask_list])

    # 5. DIN attention layer: pool the encoded sequence w.r.t. the target item.
    query = input_din_layer_query
    keys = output_tranformer_layer
    vecs = output_tranformer_layer

    din_padding_mask_list = din_padding_mask(user_click_item_seq_input_layer)
    output_din_layer = DinAttentionLayer(d_model, middle_units, dropout_rate)([query, keys, vecs, din_padding_mask_list])

    # 6. DNN layer
    input_dnn_layer = concatenate([other_features_concat_layer, output_din_layer], \
                                 axis=-1)

    # Drop the length-1 sequence axis: (batch, 1, d) -> (batch, d).
    input_dnn_layer = tf.squeeze(input=input_dnn_layer, axis=[1])

    for inx in range(len(dnn_unit_list)):
        input_dnn_layer = Dense(dnn_unit_list[inx], activation=activation, \
                                name="FC_{0}".format(inx+1))(input_dnn_layer)

        input_dnn_layer = Dropout(dropout_rate, name="dropout_{0}".format(inx+1))(input_dnn_layer)

    output = Dense(1, activation='sigmoid', \
                   name='Sigmoid_output_layer')(input_dnn_layer)

    # Output model
    inputs_list = [user_id_input_layer, gender_input_layer, age_input_layer, \
                   user_click_item_seq_input_layer, user_click_cate_seq_input_layer, \
                   item_input_layer, cate_input_layer]

    model = Model(inputs = inputs_list, outputs = output)

    return model
    
    
    
if __name__ == "__main__":
    # Small vocabularies for a structural smoke test of the model graph.
    vocab_size_dict = {
        "user_id": 300,
        "gender": 2,
        "age": 10,
        "item_id": 5000,
        "cate_id": 213}

    # Bug fix: the result used to be bound to the name `bst_model`,
    # shadowing the factory function defined above.
    model = bst_model(vocab_size_dict=vocab_size_dict)

    print(model.summary())

    plot_model(model, to_file='bst_model.png')



================================================
FILE: BST/din.py
================================================
import numpy as np
import tensorflow as tf
from tensorflow.keras import backend as K



def din_padding_mask(seq):
    """Mark zero-padded positions of `seq` for DIN attention.

    Returns a float mask of shape (batch_size, 1, seq_len) where padded
    (id == 0) positions are 1.0 and real positions are 0.0.
    """
    pad_positions = tf.cast(tf.math.equal(seq, 0), tf.float32)

    # Insert a broadcast axis so the mask aligns with (batch, 1, T) logits.
    return tf.expand_dims(pad_positions, axis=1)  # (batch_size, 1, seq_len)



class LocalActivationUnit(tf.keras.layers.Layer):
    """DIN local activation unit: scores each behavior key against the query.

    Call inputs: [query (batch, 1, d), keys (batch, T, d)].
    Returns attention logits of shape (batch, T, 1).
    """

    def __init__(self, d_model, middle_units, dropout_rate, **kwargs):
        self.d_model = d_model
        self.middle_units = middle_units
        self.dropout_rate = dropout_rate

        super(LocalActivationUnit, self).__init__(**kwargs)


    def build(self, input_shape):

        self.dnn = tf.keras.Sequential([
            tf.keras.layers.Dense(self.middle_units, activation='relu'),
            tf.keras.layers.Dense(self.d_model, activation='relu')
            ])

        # Bug fix: the scoring Dense(1) used to be instantiated inside
        # call(), which created a fresh, untracked (and therefore never
        # trained) layer on every forward pass. Create it once here so its
        # weights are registered with this layer and learned.
        self.score_layer = tf.keras.layers.Dense(1)

        super(LocalActivationUnit, self).build(input_shape)


    def call(self, inputs, training=None, **kwargs):

        query, keys = inputs
        keys_len = keys.get_shape()[1]
        # Tile the single query vector across the key sequence length.
        queries = K.repeat_elements(query, keys_len, 1)

        # Classic DIN interaction features: raw, difference and product.
        att_input = tf.concat([queries, keys, queries - keys, queries * keys], axis=-1)
        att_out = self.dnn(att_input)
        attention_score = self.score_layer(att_out)

        return attention_score


    def compute_output_shape(self, input_shape):
        return input_shape[1][:2] + (1,)


    def get_config(self):
        config = {'d_model': self.d_model, 'middle_units': self.middle_units, 'dropout_rate': self.dropout_rate}
        base_config = super(LocalActivationUnit, self).get_config()

        return dict(list(base_config.items()) + list(config.items()))



# 构造 Din Attention Layer 层

class DinAttentionLayer(tf.keras.layers.Layer):
    """DIN attention: pools `values` by their relevance to the target item.

    Call inputs: [query (batch, 1, d), keys (batch, T, d),
                  values (batch, T, d), mask (batch, 1, T)].
    Returns the attention-weighted sum of values, shape (batch, 1, d).
    """

    def __init__(self, d_model, middle_units, dropout_rate, **kwargs):
        super(DinAttentionLayer, self).__init__(**kwargs)

        self.d_model = d_model
        self.middle_units = middle_units
        self.dropout_rate = dropout_rate

        self.local_activation_unit = LocalActivationUnit(d_model, middle_units, dropout_rate)


    def call(self, inputs, **kwargs):
        query, keys, values, mask = inputs

        # (batch, T, 1) relevance logits, transposed to (batch, 1, T) so the
        # final matmul against values yields (batch, 1, d).
        scaled_attention_logits = self.local_activation_unit([query, keys])
        scaled_attention_logits = tf.transpose(scaled_attention_logits, perm=[0, 2, 1])

        # Padded positions get a large negative logit so softmax ~ignores them.
        if mask is not None:
            scaled_attention_logits += (mask * -1e9)

        attention_weights = tf.nn.softmax(scaled_attention_logits, axis=-1)
        output = tf.matmul(attention_weights, values)

        return output


    def compute_output_shape(self, input_shape):
        return (input_shape[1][0], 1, input_shape[1][2])


    def get_config(self):
        # Bug fix: the config key for middle_units was mistakenly
        # 'use_bias', so a serialized layer could not be reconstructed
        # with the right constructor arguments.
        config = {'d_model': self.d_model, 'middle_units': self.middle_units, 'dropout_rate': self.dropout_rate}
        base_config = super(DinAttentionLayer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))



if __name__ == "__main__":
    # Smoke test: 10 samples, 50 behaviors of dim 64, one query item each.
    sample_query = tf.random.uniform((10, 1, 64))
    sample_keys = tf.random.uniform((10, 50, 64))

    mask = din_padding_mask(np.random.randint(0, 15, size=(10, 50)))
    print("din_padding_mask_list.shape: ", mask.shape)

    attn_out = DinAttentionLayer(32, 64, 0.1)([sample_query, sample_keys, sample_keys, mask])
    print("output.shape: ", attn_out.shape)


================================================
FILE: BST/transformer.py
================================================
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K


# "Attention is all you need" 中的编码模块

class Encoder(tf.keras.layers.Layer):
    """Transformer encoder: positional encoding + a stack of encoder layers.

    Call inputs: [embeddings (batch, seq_len, d_model), padding mask].
    """

    def __init__(self, n_layers, d_model, num_heads, middle_units,
                max_seq_len, epsilon=1e-6, dropout_rate=0.1, training=False, **kwargs):
        super(Encoder, self).__init__(**kwargs)

        self.n_layers = n_layers
        self.d_model = d_model
        self.pos_embedding = PositionalEncoding(sequence_len=max_seq_len, embedding_dim=d_model)

        # One EncoderLayer per stacked transformer block.
        self.encode_layer = [
            EncoderLayer(d_model=d_model,
                         num_heads=num_heads,
                         middle_units=middle_units,
                         epsilon=epsilon,
                         dropout_rate=dropout_rate,
                         training=training)
            for _ in range(n_layers)
        ]

    def call(self, inputs, **kwargs):
        x, mask = inputs

        # Add fixed positional information, then run the block stack.
        x = self.pos_embedding(x)
        for block in self.encode_layer:
            x = block(x, mask)

        return x




# 编码层
class EncoderLayer(tf.keras.layers.Layer):
    """One transformer encoder block: MHA + FFN, each with residual + LayerNorm."""

    def __init__(self, d_model, num_heads, middle_units, \
                 epsilon=1e-6, dropout_rate=0.1, training=False, **kwargs):
        super(EncoderLayer, self).__init__(**kwargs)

        self.mha = MultiHeadAttention(d_model, num_heads)
        self.ffn = point_wise_feed_forward_network(d_model, middle_units)

        # Bug fix: `epsilon` was accepted but silently ignored; forward it
        # to both normalization layers.
        self.layernorm1 = LayerNormalization(epsilon=epsilon)
        self.layernorm2 = LayerNormalization(epsilon=epsilon)

        self.dropout1 = tf.keras.layers.Dropout(dropout_rate)
        self.dropout2 = tf.keras.layers.Dropout(dropout_rate)

        self.training = training

    def call(self, inputs, mask, **kwargs):

        # Self-attention sub-layer with residual connection.
        att_output = self.mha([inputs, inputs, inputs, mask])
        att_output = self.dropout1(att_output, training=self.training)
        out1 = self.layernorm1(inputs + att_output)  # (batch_size, input_seq_len, d_model)

        # Position-wise feed-forward sub-layer with residual connection.
        ffn_output = self.ffn(out1)
        ffn_output = self.dropout2(ffn_output, training=self.training)
        out2 = self.layernorm2(out1 + ffn_output)   # (batch_size, input_seq_len, d_model)

        return out2



# 层标准化
class LayerNormalization(tf.keras.layers.Layer):
    """Layer normalization over the last axis with learnable scale/shift.

    NOTE: normalizes by (std + eps), not sqrt(var + eps); kept as-is to
    preserve the original numerical behavior.
    """

    def __init__(self, epsilon=1e-6, **kwargs):
        self.eps = epsilon
        super(LayerNormalization, self).__init__(**kwargs)

    def build(self, input_shape):
        # Per-feature scale (gamma) and shift (beta), sized to the last axis.
        self.gamma = self.add_weight(name='gamma', shape=input_shape[-1:],
                                     initializer=tf.ones_initializer(), trainable=True)
        self.beta = self.add_weight(name='beta', shape=input_shape[-1:],
                                    initializer=tf.zeros_initializer(), trainable=True)
        super(LayerNormalization, self).build(input_shape)

    def call(self, x):
        mean = tf.keras.backend.mean(x, axis=-1, keepdims=True)
        std = tf.keras.backend.std(x, axis=-1, keepdims=True)
        return self.gamma * (x - mean) / (std + self.eps) + self.beta

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        # Bug fix: get_config was missing, so a non-default epsilon was
        # silently dropped when the model was serialized and reloaded.
        config = {'epsilon': self.eps}
        base_config = super(LayerNormalization, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))




# 前向网络
def point_wise_feed_forward_network(d_model, middle_units):
    """Position-wise MLP applied to every timestep: d_model -> middle_units -> d_model."""
    expand = tf.keras.layers.Dense(middle_units, activation='relu')
    project = tf.keras.layers.Dense(d_model, activation='relu')
    return tf.keras.Sequential([expand, project])



# dot attention
def scaled_dot_product_attention(q, k, v, mask):
    """Scaled dot-product attention; `mask` flags padded keys with 1.0."""
    logits = tf.matmul(q, k, transpose_b=True)

    # Scale by sqrt(d_k) to keep the softmax in a well-conditioned range.
    key_dim = tf.cast(tf.shape(k)[-1], tf.float32)
    logits = logits / tf.math.sqrt(key_dim)

    # Push padded positions toward -inf so softmax gives them ~zero weight.
    if mask is not None:
        logits += mask * -1e9

    weights = tf.nn.softmax(logits, axis=-1)
    return tf.matmul(weights, v)



# 构造 multi head attention 层
class MultiHeadAttention(tf.keras.layers.Layer):
    """Multi-head attention: project q/k/v, attend per head, merge, project.

    Call inputs: [q, k, v, mask]; returns (batch, seq_len_q, d_model).
    """

    def __init__(self, d_model, num_heads, **kwargs):
        super(MultiHeadAttention, self).__init__(**kwargs)
        self.num_heads = num_heads
        self.d_model = d_model

        # d_model must split evenly across the heads.
        assert d_model % num_heads == 0

        # Per-head feature dimension.
        self.depth = d_model // num_heads

        self.wq = tf.keras.layers.Dense(d_model)
        self.wk = tf.keras.layers.Dense(d_model)
        self.wv = tf.keras.layers.Dense(d_model)

        self.dense = tf.keras.layers.Dense(d_model)

        self.dot_attention = scaled_dot_product_attention

    def split_heads(self, x, batch_size):
        # Split the feature axis into heads and move heads before seq_len.
        x = tf.reshape(x, (batch_size, -1, self.num_heads, self.depth))
        return tf.transpose(x, perm=[0, 2, 1, 3])

    def call(self, inputs, **kwargs):
        q, k, v, mask = inputs
        batch_size = tf.shape(q)[0]

        # Linear projections into query/key/value spaces.
        q = self.wq(q) # (batch_size, seq_len, d_model)
        k = self.wk(k)
        v = self.wv(v)

        # Split into heads.
        q = self.split_heads(q, batch_size) # (batch_size, num_heads, seq_len_q, depth)
        k = self.split_heads(k, batch_size) # (batch_size, num_heads, seq_len_k, depth)
        v = self.split_heads(v, batch_size) # (batch_size, num_heads, seq_len_v, depth)

        # Scaled dot-product attention per head.
        scaled_attention = self.dot_attention(q, k, v, mask) # (batch_size, num_heads, seq_len_q, depth)

        # Move the head axis back next to the features ...
        scaled_attention = tf.transpose(scaled_attention, [0, 2, 1, 3]) # (batch_size, seq_len_q, num_heads, depth)

        # ... and merge heads back into d_model.
        concat_attention = tf.reshape(scaled_attention, (batch_size, -1, self.d_model))

        # Final output projection.
        output = self.dense(concat_attention)

        return output

    def get_config(self):
        # Added for serialization parity with the other custom layers in
        # this file (the layer previously could not round-trip its
        # constructor arguments through save/load).
        config = {'d_model': self.d_model, 'num_heads': self.num_heads}
        base_config = super(MultiHeadAttention, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))




# mask功能
def padding_mask(seq):
    """Return a (batch, 1, 1, seq_len) float mask flagging zero-padded ids."""
    pad = tf.cast(tf.math.equal(seq, 0), tf.float32)

    # Two broadcast axes: one for the heads, one for the query positions.
    return pad[:, np.newaxis, np.newaxis, :]  # (batch_size, 1, 1, seq_len)




# 位置编码
class PositionalEncoding(tf.keras.layers.Layer):
    """Adds fixed sinusoidal position encodings to the input embeddings."""

    def __init__(self, sequence_len=None, embedding_dim=None, **kwargs):
        self.sequence_len = sequence_len
        self.embedding_dim = embedding_dim
        super(PositionalEncoding, self).__init__(**kwargs)

    def build(self, input_shape):
        # Infer the embedding dim from the input if not given explicitly
        # (previously this mutated self.embedding_dim inside call()).
        embedding_dim = self.embedding_dim
        if embedding_dim is None:
            embedding_dim = int(input_shape[-1])

        # Performance fix: the sinusoid table was rebuilt with numpy on
        # every forward pass; compute it once here instead.
        position_embedding = np.array([
            [pos / np.power(10000, 2. * i / embedding_dim) for i in range(embedding_dim)]
            for pos in range(self.sequence_len)])

        position_embedding[:, 0::2] = np.sin(position_embedding[:, 0::2])  # dim 2i
        position_embedding[:, 1::2] = np.cos(position_embedding[:, 1::2])  # dim 2i+1

        self.position_embedding = tf.cast(position_embedding, dtype=tf.float32)

        super(PositionalEncoding, self).build(input_shape)

    def call(self, inputs):
        # Broadcast-add the fixed (seq_len, dim) table over the batch.
        return self.position_embedding + inputs

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        # Added so the layer round-trips through model serialization.
        config = {'sequence_len': self.sequence_len, 'embedding_dim': self.embedding_dim}
        base_config = super(PositionalEncoding, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))




if __name__ == "__main__":

    # Smoke-test hyper-parameters.
    n_layers = 2
    d_model = 512
    num_heads = 8
    middle_units = 1024
    max_seq_len = 60

    samples = 10
    training = False

    encode_padding_mask_list = padding_mask(np.random.randint(0, 108, size=(samples, max_seq_len)))
    input_data = tf.random.uniform((samples, max_seq_len, d_model))

    # Bug fix: `training` was passed positionally and landed in the
    # `epsilon` slot of Encoder.__init__; pass it by keyword.
    sample_encoder = Encoder(n_layers, d_model, num_heads, middle_units,
                             max_seq_len, training=training)
    sample_encoder_output = sample_encoder([input_data, encode_padding_mask_list])

    print(sample_encoder_output.shape)

================================================
FILE: BilinearFFM/BilinearFFM.py
================================================
#-*- coding:utf-8 -*-


import tensorflow as tf
from tensorflow.keras.layers import Input, Embedding, concatenate, Dense, Dropout

from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping
from BilinearInteraction import BilinearInteraction


def BilinearFFM(
    sparse_input_length=1,
    embedding_dim = 64
    ):
    """Bilinear-FFM CTR model over six MovieLens-1M sparse features.

    Builds one Input + Embedding pair per field, crosses all field pairs
    with a BilinearInteraction layer, sums the crossed features and
    squashes the sum with a sigmoid. Returns an uncompiled keras Model.
    """

    # (input layer name, embedding layer name, vocabulary size) per field;
    # vocabulary sizes match the preprocessed MovieLens-1M data.
    field_specs = [
        ("user_id_input_layer", "user_id_embedding_layer", 6040),
        ("gender_input_layer", "gender_embedding_layer", 2),
        ("age_input_layer", "age_embedding_layer", 7),
        ("occupation_input_layer", "occupation_embedding_layer", 21),
        ("zip_input_layer", "zip_embedding_layer", 3439),
        ("item_input_layer", "item_id_embedding_layer", 3706),
    ]

    # 1 + 2. Input and Embedding layers, one pair per sparse field.
    sparse_input_list = []
    sparse_embedding_list = []
    for input_name, embedding_name, vocab_size in field_specs:
        field_input = Input(shape=(sparse_input_length, ), name=input_name)
        # vocab_size + 1 reserves index 0 for padding (mask_zero=True).
        field_embedding = Embedding(vocab_size + 1, embedding_dim,
                                    mask_zero=True, name=embedding_name)(field_input)
        sparse_input_list.append(field_input)
        sparse_embedding_list.append(field_embedding)

    # 3. Bilinear FFM: pairwise bilinear feature crossing.
    bilinear_out = BilinearInteraction()(sparse_embedding_list)

    # Output: sum the crossed features and map to a click probability.
    dot_output = tf.nn.sigmoid(tf.reduce_sum(bilinear_out, axis=-1))

    return Model(inputs=sparse_input_list, outputs=dot_output)

================================================
FILE: BilinearFFM/BilinearInteraction.py
================================================
import tensorflow as tf
from tensorflow.keras.layers import Layer
from tensorflow.keras import backend as K
import itertools
from tensorflow.keras.initializers import (Zeros, glorot_normal, glorot_uniform)
from tensorflow.keras.layers import Concatenate


class BilinearInteraction(Layer):
    """Pairwise bilinear feature interaction (FiBiNET-style).

      Input shape
        - A list of 3D tensors with shape: ``(batch_size,1,embedding_size)``.

      Output shape
        - 3D tensor with shape:
          ``(batch_size,1,field_size*(field_size-1)/2*embedding_size)``.

      Arguments
        - **bilinear_type** : String, one of "all" (single shared weight),
          "each" (one weight per field) or "interaction" (one weight per
          field pair).

        - **seed** : A Python integer to use as random seed.
    """

    def __init__(self, bilinear_type="each", seed=1024, **kwargs):
        self.bilinear_type = bilinear_type
        self.seed = seed

        super(BilinearInteraction, self).__init__(**kwargs)

    def build(self, input_shape):

        # Bug fix: the message previously referenced `AttentionalFM`
        # (copy/paste from another layer).
        if not isinstance(input_shape, list) or len(input_shape) < 2:
            raise ValueError('A `BilinearInteraction` layer should be called on a list of at least 2 inputs')
        embedding_size = int(input_shape[0][-1])

        if self.bilinear_type == "all":
            # One weight matrix shared by every field pair.
            self.W = self.add_weight(shape=(embedding_size, embedding_size), initializer=glorot_normal(
                seed=self.seed), name="bilinear_weight")
        elif self.bilinear_type == "each":
            # One weight matrix per (left) field.
            self.W_list = [self.add_weight(shape=(embedding_size, embedding_size), initializer=glorot_normal(
                seed=self.seed), name="bilinear_weight" + str(i)) for i in range(len(input_shape) - 1)]
        elif self.bilinear_type == "interaction":
            # One weight matrix per field pair.
            self.W_list = [self.add_weight(shape=(embedding_size, embedding_size), initializer=glorot_normal(
                seed=self.seed), name="bilinear_weight" + str(i) + '_' + str(j)) for i, j in
                           itertools.combinations(range(len(input_shape)), 2)]
        else:
            raise NotImplementedError

        super(BilinearInteraction, self).build(
            input_shape)  # Be sure to call this somewhere!

    def call(self, inputs, **kwargs):

        # Bug fix: the message claimed "2 dimensions" while checking for
        # rank 3, and called K.ndim on the input *list* (which itself
        # fails); report the first tensor's rank instead.
        if K.ndim(inputs[0]) != 3:
            raise ValueError(
                "Unexpected inputs dimensions %d, expect to be 3 dimensions" % (K.ndim(inputs[0])))

        # Each pair contributes elementwise (v_i . W) * v_j.
        if self.bilinear_type == "all":
            p = [tf.multiply(tf.tensordot(v_i, self.W, axes=(-1, 0)), v_j)
                 for v_i, v_j in itertools.combinations(inputs, 2)]
        elif self.bilinear_type == "each":
            p = [tf.multiply(tf.tensordot(inputs[i], self.W_list[i], axes=(-1, 0)), inputs[j])
                 for i, j in itertools.combinations(range(len(inputs)), 2)]
        elif self.bilinear_type == "interaction":
            p = [tf.multiply(tf.tensordot(v[0], w, axes=(-1, 0)), v[1])
                 for v, w in zip(itertools.combinations(inputs, 2), self.W_list)]
        else:
            raise NotImplementedError

        return Concatenate(axis=-1)(p)


    def compute_output_shape(self, input_shape):
        field_size = len(input_shape)
        embedding_size = input_shape[0][-1]

        return (None, 1, field_size * (field_size - 1) // 2 * embedding_size)

    def get_config(self, ):
        config = {'bilinear_type': self.bilinear_type, 'seed': self.seed}
        base_config = super(BilinearInteraction, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))


================================================
FILE: BilinearFFM/README.md
================================================
# tf.version == '2.1.0'

# Data format : 

```python
说明:

(1)第1列:user id;
(2)第2列:user gender id;
(3)第3列:user age id;
(4)第4列:user occupation id;
(5)第5列:user zip id;
(6)第6列:item id;
(7)第7列:label;

```


# run model
```shell
sh master.sh

```


# 模型 summary
```python
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to
==================================================================================================
user_id_input_layer (InputLayer [(None, 1)]          0
__________________________________________________________________________________________________
gender_input_layer (InputLayer) [(None, 1)]          0
__________________________________________________________________________________________________
age_input_layer (InputLayer)    [(None, 1)]          0
__________________________________________________________________________________________________
occupation_input_layer (InputLa [(None, 1)]          0
__________________________________________________________________________________________________
zip_input_layer (InputLayer)    [(None, 1)]          0
__________________________________________________________________________________________________
item_input_layer (InputLayer)   [(None, 1)]          0
__________________________________________________________________________________________________
user_id_embedding_layer (Embedd (None, 1, 64)        386624      user_id_input_layer[0][0]
__________________________________________________________________________________________________
gender_embedding_layer (Embeddi (None, 1, 64)        192         gender_input_layer[0][0]
__________________________________________________________________________________________________
age_embedding_layer (Embedding) (None, 1, 64)        512         age_input_layer[0][0]
__________________________________________________________________________________________________
occupation_embedding_layer (Emb (None, 1, 64)        1408        occupation_input_layer[0][0]
__________________________________________________________________________________________________
zip_embedding_layer (Embedding) (None, 1, 64)        220160      zip_input_layer[0][0]
__________________________________________________________________________________________________
item_id_embedding_layer (Embedd (None, 1, 64)        237248      item_input_layer[0][0]
__________________________________________________________________________________________________
bilinear_interaction (BilinearI (None, 1, 960)       20480       user_id_embedding_layer[0][0]
                                                                 gender_embedding_layer[0][0]
                                                                 age_embedding_layer[0][0]
                                                                 occupation_embedding_layer[0][0]
                                                                 zip_embedding_layer[0][0]
                                                                 item_id_embedding_layer[0][0]
__________________________________________________________________________________________________
tf_op_layer_Sum (TensorFlowOpLa [(None, 1)]          0           bilinear_interaction[0][0]
__________________________________________________________________________________________________
tf_op_layer_Sigmoid (TensorFlow [(None, 1)]          0           tf_op_layer_Sum[0][0]
==================================================================================================
Total params: 866,624
Trainable params: 866,624
Non-trainable params: 0
__________________________________________________________________________________________________
None

```


# 参考

```python

1. [FFM及DeepFFM模型在推荐系统的探索](https://zhuanlan.zhihu.com/p/67795161)

2. https://github.com/shenweichen/DeepCTR/blob/master/deepctr/layers/interaction.py
```

================================================
FILE: BilinearFFM/data.py
================================================
#-*- coding:utf-8 -*-

# https://github.com/shenweichen/DeepMatch/blob/master/examples/colab_MovieLen1M_YoutubeDNN.ipynb


#! wget http://files.grouplens.org/datasets/movielens/ml-1m.zip -O ./ml-1m.zip 
#! wget https://raw.githubusercontent.com/shenweichen/DeepMatch/master/examples/preprocess.py -O preprocess.py
#! unzip -o ml-1m.zip 


import pandas as pd
import numpy as np

from preprocess import gen_data_set, gen_model_input
from sklearn.preprocessing import LabelEncoder
from tensorflow.python.keras import backend as K
from tensorflow.python.keras.models import Model



# 1. Load data
# MovieLens-1M .dat files use "::" as the field separator.
unames = ['user_id','gender','age','occupation','zip']
user = pd.read_csv('ml-1m/users.dat',sep='::',header=None,names=unames)

rnames = ['user_id','movie_id','rating','timestamp']
ratings = pd.read_csv('ml-1m/ratings.dat',sep='::',header=None,names=rnames)

mnames = ['movie_id','title','genres']
movies = pd.read_csv('ml-1m/movies.dat',sep='::',header=None,names=mnames)

# Join ratings with movie metadata and user profiles into one frame.
data = pd.merge(pd.merge(ratings, movies), user)



print(data.shape)
# (1000209, 10)



# 2. Label Encoding for sparse features, 
# and process sequence features with `gen_date_set` and `gen_model_input`

sparse_features = ["movie_id", "user_id", "gender", "age", "occupation", "zip"]
SEQ_LEN = 50
negsample = 0


features = ['user_id', 'movie_id', 'gender', 'age', 'occupation', 'zip']
feature_max_idx = {}

# Re-encode every sparse id to 1..N (index 0 is left free for padding).
for feature in features:
    lbe = LabelEncoder()
    data[feature] = lbe.fit_transform(data[feature]) + 1
    feature_max_idx[feature] = data[feature].max() + 1

user_profile = data[["user_id", "gender", "age", "occupation", "zip"]].drop_duplicates('user_id')
item_profile = data[["movie_id"]].drop_duplicates('movie_id')

user_profile.set_index("user_id", inplace=True)
user_item_list = data.groupby("user_id")['movie_id'].apply(list)

# Build chronological train/test sets and pad click histories to SEQ_LEN.
train_set, test_set = gen_data_set(data, negsample)
train_model_input, train_label = gen_model_input(train_set, user_profile, SEQ_LEN)
test_model_input, test_label = gen_model_input(test_set, user_profile, SEQ_LEN)




# 3. Create neg samples

import random
from tqdm import tqdm

train_neg_sample_list = []
test_neg_sample_list = []
all_movie_list = set(data['movie_id'])
neg_sample_num = 10

# For each positive sample, draw `neg_sample_num` movies the user has not
# interacted with (neither in the click history nor as the target item).
#
# Bug fix: the old code computed set(hist_array + target_id). With numpy
# arrays `+` is elementwise ADDITION, not concatenation, so the exclusion
# set held shifted ids and real positives could be sampled as negatives.
for i in tqdm(range(len(train_label))):
    watched = set(train_model_input['hist_movie_id'][i])
    watched.add(train_model_input['movie_id'][i])
    neg_list = random.sample(list(all_movie_list - watched), neg_sample_num)
    train_neg_sample_list.append(np.array(neg_list))

for i in tqdm(range(len(test_label))):
    watched = set(test_model_input['hist_movie_id'][i])
    watched.add(test_model_input['movie_id'][i])
    neg_list = random.sample(list(all_movie_list - watched), neg_sample_num)
    test_neg_sample_list.append(np.array(neg_list))




# 4. Write to .txt
# One positive line (label 1) per sample, followed by one line (label 0)
# per negative movie. Columns: user, gender, age, occupation, zip, movie, label.

with open("train.txt", "w") as train_file:
    for idx in range(len(train_label)):
        user = train_model_input["user_id"][idx]
        gender = train_model_input["gender"][idx]
        age = train_model_input["age"][idx]
        occupation = train_model_input["occupation"][idx]
        zip_code = train_model_input["zip"][idx]

        pos_movie = train_model_input["movie_id"][idx]
        neg_movies = train_neg_sample_list[idx]

        train_file.write(f"{user}\t{gender}\t{age}\t{occupation}\t{zip_code}\t{pos_movie}\t1\n")

        for neg_movie in neg_movies:
            train_file.write(f"{user}\t{gender}\t{age}\t{occupation}\t{zip_code}\t{neg_movie}\t0\n")



# Same layout as train.txt, written for the held-out test interactions.
with open("test.txt", "w") as test_file:
    for idx in range(len(test_label)):
        user = test_model_input["user_id"][idx]
        gender = test_model_input["gender"][idx]
        age = test_model_input["age"][idx]
        occupation = test_model_input["occupation"][idx]
        zip_code = test_model_input["zip"][idx]

        pos_movie = test_model_input["movie_id"][idx]
        neg_movies = test_neg_sample_list[idx]

        test_file.write(f"{user}\t{gender}\t{age}\t{occupation}\t{zip_code}\t{pos_movie}\t1\n")

        for neg_movie in neg_movies:
            test_file.write(f"{user}\t{gender}\t{age}\t{occupation}\t{zip_code}\t{neg_movie}\t0\n")


================================================
FILE: BilinearFFM/data_generator.py
================================================
#-*- coding:utf-8 -*-

import numpy as np


def init_output():
    """Return fresh, empty accumulators for one batch of sample rows.

    Order matches the column layout of the tab-separated input file:
    user_id, gender, age, occupation, zip code, movie_id, label.
    """
    user_id = []
    gender = []
    age = []
    occupation = []
    zip_code = []  # renamed from `zip` to stop shadowing the builtin
    movie_id = []
    label = []

    return user_id, gender, age, occupation, zip_code, movie_id, label


def file_generator(input_path, batch_size):
    """Endlessly yield mini-batches parsed from a tab-separated sample file.

    Each line of ``input_path`` must contain seven integer fields:
    user_id, gender, age, occupation, zip, movie_id, label.

    Args:
        input_path: path to the sample file (read repeatedly, one pass per epoch).
        batch_size: maximum number of rows per yielded batch.

    Yields:
        ([user_id, gender, age, occupation, zip, movie_id], label) — six
        int32 feature arrays plus an int32 label array, each at most
        ``batch_size`` long; the last batch of every pass may be shorter.
    """
    def _fresh_buffers():
        # Seven parallel column buffers, same order as the file fields.
        return [[] for _ in range(7)]

    # Count lines once so the final (possibly short) batch of each pass can
    # be flushed.  `with` closes the handle (the original leaked it).
    with open(input_path) as f:
        num_lines = sum(1 for _ in f)

    while True:
        # BUG FIX: reset the counter every epoch.  Previously `cnt` grew
        # across epochs, so `cnt == num_lines` matched only during the first
        # pass and a trailing partial batch would spill into the next
        # epoch's first batch.
        cnt = 0
        buffers = _fresh_buffers()

        with open(input_path, 'r') as f:
            # Stream line by line instead of materializing f.readlines().
            for line in f:
                fields = line.strip().split('\t')
                for j in range(7):
                    buffers[j].append(int(fields[j]))

                cnt += 1

                # Emit on a full batch, or at end-of-file for the remainder.
                if cnt % batch_size == 0 or cnt == num_lines:
                    arrays = [np.array(col, dtype='int32') for col in buffers]
                    yield arrays[:6], arrays[6]
                    buffers = _fresh_buffers()



================================================
FILE: BilinearFFM/master.sh
================================================
# Step 1: generate train.txt / test.txt from the raw data.
python data.py 

# Step 2: train the BilinearFFM model on the generated files.
python train_BilinearFFM_model.py


================================================
FILE: BilinearFFM/preprocess.py
================================================
import random
import numpy as np
from tqdm import tqdm
from tensorflow.python.keras.preprocessing.sequence import pad_sequences

def gen_data_set(data, negsample=0):
    """Split per-user interaction histories into train/test sample tuples.

    Interactions are ordered by timestamp; each prefix of a user's positive
    list becomes the (reversed) history for predicting the next movie.  The
    user's final interaction is held out as the test sample.

    Args:
        data: DataFrame with user_id, movie_id, rating, timestamp columns
              (sorted in place by timestamp as a side effect).
        negsample: number of random negatives drawn per positive (train only).

    Returns:
        (train_set, test_set): lists of tuples
        (user_id, reversed_history, movie_id, label, history_len[, rating]).
        NOTE(review): negative tuples carry no trailing rating field, so the
        two tuple shapes differ (6 vs 5 elements); downstream gen_model_input
        only reads indices 0-4 — confirm before extending.
    """
    data.sort_values("timestamp", inplace=True)
    item_ids = data['movie_id'].unique()

    train_set = []
    test_set = []
    for reviewerID, hist in tqdm(data.groupby('user_id')):
        pos_list = hist['movie_id'].tolist()
        rating_list = hist['rating'].tolist()

        if negsample > 0:
            # Sample negatives (with replacement) from items the user never saw.
            candidate_set = list(set(item_ids) - set(pos_list))
            neg_list = np.random.choice(candidate_set, size=len(pos_list) * negsample, replace=True)
        for i in range(1, len(pos_list)):
            # Use a dedicated name (the original rebound `hist`, shadowing the
            # groupby frame) and reverse the prefix once instead of per append.
            seq = pos_list[:i][::-1]
            if i != len(pos_list) - 1:
                train_set.append((reviewerID, seq, pos_list[i], 1, len(seq), rating_list[i]))
                for negi in range(negsample):
                    train_set.append((reviewerID, seq, neg_list[i * negsample + negi], 0, len(seq)))
            else:
                # Last interaction per user is the held-out test example.
                test_set.append((reviewerID, seq, pos_list[i], 1, len(seq), rating_list[i]))

    random.shuffle(train_set)
    random.shuffle(test_set)

    print(len(train_set[0]), len(test_set[0]))

    return train_set, test_set

def gen_model_input(train_set, user_profile, seq_max_len):
    """Convert sample tuples into the model-input feature dict plus labels.

    Each tuple is (user_id, history, movie_id, label, hist_len, ...);
    histories are padded/truncated to `seq_max_len`, and the user's static
    profile columns are joined in by user_id.
    """
    uids = np.array([sample[0] for sample in train_set])
    seqs = [sample[1] for sample in train_set]
    iids = np.array([sample[2] for sample in train_set])
    labels = np.array([sample[3] for sample in train_set])
    hist_lens = np.array([sample[4] for sample in train_set])

    # Zero-pad (post) every history to a fixed length.
    padded_seqs = pad_sequences(seqs, maxlen=seq_max_len, padding='post', value=0)

    model_input = {
        "user_id": uids,
        "movie_id": iids,
        "hist_movie_id": padded_seqs,
        "hist_len": hist_lens,
    }

    # Join static profile features onto each sample row by user_id.
    for key in ["gender", "age", "occupation", "zip"]:
        model_input[key] = user_profile.loc[model_input['user_id']][key].values

    return model_input, labels


================================================
FILE: BilinearFFM/train_BilinearFFM_model.py
================================================
#-*- coding:utf-8 -*-

import tensorflow as tf
from tensorflow.keras.optimizers import Adam

from data_generator import file_generator
from BilinearFFM import BilinearFFM




# 1. Load data

train_path = "train.txt"
val_path = "test.txt"
batch_size = 1000

# Count samples with context managers (the original bare `open()` calls
# leaked their file handles).
with open(train_path) as f:
    n_train = sum(1 for _ in f)
with open(val_path) as f:
    n_val = sum(1 for _ in f)

train_generator = file_generator(train_path, batch_size)
val_generator = file_generator(val_path, batch_size)

# Ceiling division: one extra (partial) step when the sample count is not
# an exact multiple of batch_size.
steps_per_epoch = -(-n_train // batch_size)
validation_steps = -(-n_val // batch_size)

print("n_train: ", n_train)
print("n_val: ", n_val)

print("steps_per_epoch: ", steps_per_epoch)
print("validation_steps: ", validation_steps)




# 2. Train model

early_stopping_cb = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)
callbacks = [early_stopping_cb]


model = BilinearFFM()
print(model.summary())
tf.keras.utils.plot_model(model, to_file='BilinearFFM_model.png', show_shapes=True)


# `learning_rate` replaces the deprecated `lr` alias accepted by tf.keras.
model.compile(loss='binary_crossentropy',
              optimizer=Adam(learning_rate=1e-3),
              metrics=['accuracy'])


# NOTE: `shuffle` is ignored by Keras when training from a generator, so
# the original `shuffle=True` was a no-op and has been dropped.
history = model.fit(train_generator,
                    epochs=2,
                    steps_per_epoch=steps_per_epoch,
                    callbacks=callbacks,
                    validation_data=val_generator,
                    validation_steps=validation_steps)



model.save_weights('BilinearFFM_model.h5')


================================================
FILE: DeepFM/DeepFM.ipynb
================================================
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow as tf\n",
    "from tensorflow.keras import backend as K\n",
    "\n",
    "from tensorflow.keras.layers import Input, Embedding, \\\n",
    "    Dot, Flatten, Concatenate, Dense, Layer\n",
    "\n",
    "from tensorflow.keras.models import Model\n",
    "from tensorflow.keras.initializers import Zeros, glorot_normal\n",
    "from tensorflow.keras.optimizers import Adam\n",
    "from tensorflow.keras.regularizers import l2\n",
    "from tensorflow.keras.utils import plot_model\n",
    "\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "from sklearn.model_selection import train_test_split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Linear(Layer):\n",
    " \n",
    "    def __init__(self, l2_reg=0.0, mode=2, use_bias=True, **kwargs):\n",
    " \n",
    "        self.l2_reg = l2_reg\n",
    "        if mode not in [0, 1, 2]:\n",
    "            raise ValueError(\"mode must be 0, 1 or 2\")\n",
    "        self.mode = mode\n",
    "        self.use_bias = use_bias\n",
    "        super(Linear, self).__init__(**kwargs)\n",
    " \n",
    "    def build(self, input_shape):\n",
    "        if self.use_bias:\n",
    "            self.bias = self.add_weight(name='linear_bias',\n",
    "                                        shape=(1,),\n",
    "                                        initializer=tf.keras.initializers.Zeros(),\n",
    "                                        trainable=True)\n",
    "            \n",
    "        if self.mode == 1:\n",
    "            self.kernel = self.add_weight(\n",
    "                'linear_kernel',\n",
    "                shape=[int(input_shape[-1]), 1],\n",
    "                initializer=tf.keras.initializers.glorot_normal(),\n",
    "                regularizer=tf.keras.regularizers.l2(self.l2_reg),\n",
    "                trainable=True)\n",
    "            \n",
    "        elif self.mode == 2 :\n",
    "            self.kernel = self.add_weight(\n",
    "                'linear_kernel',\n",
    "                shape=[int(input_shape[1][-1]), 1],\n",
    "                initializer=tf.keras.initializers.glorot_normal(),\n",
    "                regularizer=tf.keras.regularizers.l2(self.l2_reg),\n",
    "                trainable=True)\n",
    " \n",
    "        super(Linear, self).build(input_shape)\n",
    " \n",
    "    def call(self, inputs, **kwargs):\n",
    "        if self.mode == 0:\n",
    "            sparse_input = inputs\n",
    "            linear_logit = tf.reduce_sum(sparse_input, axis=-1, keepdims=True)\n",
    "        elif self.mode == 1:\n",
    "            dense_input = inputs\n",
    "            fc = tf.tensordot(dense_input, self.kernel, axes=(-1, 0))\n",
    "            linear_logit = fc\n",
    "        else:\n",
    "            sparse_input, dense_input = inputs\n",
    "            fc = tf.tensordot(dense_input, self.kernel, axes=(-1, 0))\n",
    "            linear_logit = tf.reduce_sum(sparse_input, axis=-1, keepdims=False) + fc\n",
    "        if self.use_bias:\n",
    "            linear_logit += self.bias\n",
    " \n",
    "        return linear_logit\n",
    " \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        return (None, 1)\n",
    " \n",
    "    def compute_mask(self, inputs, mask):\n",
    "        return None\n",
    " \n",
    "    def get_config(self, ):\n",
    "        config = {'mode': self.mode, 'l2_reg': self.l2_reg,'use_bias':self.use_bias}\n",
    "        base_config = super(Linear, self).get_config()\n",
    "        return dict(list(base_config.items()) + list(config.items()))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FM(Layer):\n",
    "\n",
    "    def __init__(self, **kwargs):\n",
    "\n",
    "        super(FM, self).__init__(**kwargs)\n",
    "\n",
    "    def build(self, input_shape):\n",
    "        if len(input_shape) != 3:\n",
    "            raise ValueError(\"Unexpected inputs dimensions % d,\\\n",
    "                             expect to be 3 dimensions\" % (len(input_shape)))\n",
    "\n",
    "        super(FM, self).build(input_shape)  # Be sure to call this somewhere!\n",
    "\n",
    "    def call(self, inputs, **kwargs):\n",
    "\n",
    "        if K.ndim(inputs) != 3:\n",
    "            raise ValueError(\n",
    "                \"Unexpected inputs dimensions %d, expect to be 3 dimensions\"\n",
    "                % (K.ndim(inputs)))\n",
    "\n",
    "        concated_embeds_value = inputs\n",
    "\n",
    "        square_of_sum = tf.square(tf.reduce_sum(concated_embeds_value, axis=1, keepdims=True))\n",
    "        sum_of_square = tf.reduce_sum(concated_embeds_value*concated_embeds_value, axis=1, keepdims=True)\n",
    "        cross_term = square_of_sum - sum_of_square\n",
    "        \n",
    "        cross_term = 0.5 * tf.reduce_sum(cross_term, axis=2, keepdims=False)\n",
    "        return cross_term\n",
    "\n",
    "    def compute_output_shape(self, input_shape):\n",
    "        return (None, 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "class DNN(Layer):\n",
    "    def __init__(self, hidden_units, activation='relu', l2_reg=0, \\\n",
    "                 dropout_rate=0, use_bn=False, seed=1024, **kwargs):\n",
    "        self.hidden_units = hidden_units\n",
    "        self.activation = activation\n",
    "        self.dropout_rate = dropout_rate\n",
    "        self.seed = seed\n",
    "        self.l2_reg = l2_reg\n",
    "        self.use_bn = use_bn\n",
    "        super(DNN, self).__init__(**kwargs)\n",
    " \n",
    "    def build(self, input_shape):\n",
    "        input_size = input_shape[-1]\n",
    "        hidden_units = [int(input_size)] + list(self.hidden_units)\n",
    "        self.kernels = [self.add_weight(name='kernel' + str(i),\n",
    "                                        shape=(hidden_units[i], hidden_units[i + 1]),\n",
    "                                        initializer=glorot_normal(seed=self.seed),\n",
    "                                        regularizer=l2(self.l2_reg),\n",
    "                                        trainable=True) for i in range(len(self.hidden_units))]\n",
    "        self.bias = [self.add_weight(name='bias' + str(i),\n",
    "                                     shape=(self.hidden_units[i],),\n",
    "                                     initializer=Zeros(),\n",
    "                                     trainable=True) for i in range(len(self.hidden_units))]\n",
    "        if self.use_bn:\n",
    "            self.bn_layers = [tf.keras.layers.BatchNormalization() for _ in range(len(self.hidden_units))]\n",
    " \n",
    "        self.dropout_layers = [tf.keras.layers.Dropout(self.dropout_rate, seed=self.seed + i) for i in\n",
    "                               range(len(self.hidden_units))]\n",
    " \n",
    "        self.activation_layers = [tf.keras.layers.Activation(self.activation) \\\n",
    "                                  for _ in range(len(self.hidden_units))]\n",
    "        \n",
    "        super(DNN, self).build(input_shape)\n",
    " \n",
    "    def call(self, inputs, training=None, **kwargs):\n",
    " \n",
    "        deep_input = inputs\n",
    " \n",
    "        for i in range(len(self.hidden_units)):\n",
    "            fc = tf.nn.bias_add(tf.tensordot(\n",
    "                deep_input, self.kernels[i], axes=(-1, 0)), self.bias[i])\n",
    "\n",
    "            if self.use_bn:\n",
    "                fc = self.bn_layers[i](fc, training=training)\n",
    " \n",
    "            fc = self.activation_layers[i](fc)\n",
    "            fc = self.dropout_layers[i](fc, training=training)\n",
    "            \n",
    "            deep_input = fc\n",
    " \n",
    "        return deep_input\n",
    " \n",
    "    def compute_output_shape(self, input_shape):\n",
    "        if len(self.hidden_units) > 0:\n",
    "            shape = input_shape[:-1] + (self.hidden_units[-1],)\n",
    "        else:\n",
    "            shape = input_shape\n",
    " \n",
    "        return tuple(shape)\n",
    " \n",
    "    def get_config(self, ):\n",
    "        config = {'activation': self.activation, 'hidden_units': self.hidden_units,\n",
    "                  'l2_reg': self.l2_reg, 'use_bn': self.use_bn, 'dropout_rate': self.dropout_rate, 'seed': self.seed}\n",
    "        base_config = super(DNN, self).get_config()\n",
    "        \n",
    "        return dict(list(base_config.items()) + list(config.items()))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 读取数据\n",
    "df_data = pd.read_csv('train.csv')\n",
    "\n",
    "\n",
    "# 类别变量重新编码\n",
    "# 数值变量,用0填充缺失值\n",
    " \n",
    "sparse_feature_list = [\"Pclass\", \"Sex\", \"Cabin\", \"Embarked\"]\n",
    "dense_feature_list = [\"Age\", \"SibSp\", \"Parch\", \"Fare\"]\n",
    "\n",
    "sparse_feature_reindex_dict = {}\n",
    "for i in sparse_feature_list:\n",
    "    cur_sparse_feature_list = df_data[i].unique()\n",
    "    \n",
    "    sparse_feature_reindex_dict[i] = dict(zip(cur_sparse_feature_list, \\\n",
    "        range(1, len(cur_sparse_feature_list)+1)\n",
    "                                     )\n",
    "                                 )\n",
    "    \n",
    "    df_data[i] = df_data[i].map(sparse_feature_reindex_dict[i])\n",
    "\n",
    "\n",
    "for j in dense_feature_list:\n",
    "    df_data[j] = df_data[j].fillna(0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 分割数据集\n",
    "\n",
    "data = df_data[sparse_feature_list + dense_feature_list]\n",
    "label = df_data[\"Survived\"].values\n",
    "\n",
    "xtrain, xtest, ytrain, ytest = train_test_split(data, label, test_size=0.2, random_state=2020)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "xtrain_data = {\"Pclass\": np.array(xtrain[\"Pclass\"]), \\\n",
    "              \"Sex\": np.array(xtrain[\"Sex\"]), \\\n",
    "              \"Cabin\": np.array(xtrain[\"Cabin\"]), \\\n",
    "              \"Embarked\": np.array(xtrain[\"Embarked\"]), \\\n",
    "              \"Age\": np.array(xtrain[\"Age\"]), \\\n",
    "              \"SibSp\": np.array(xtrain[\"SibSp\"]), \\\n",
    "              \"Parch\": np.array(xtrain[\"Parch\"]), \\\n",
    "              \"Fare\": np.array(xtrain[\"Fare\"])}\n",
    " \n",
    "xtest_data = {\"Pclass\": np.array(xtest[\"Pclass\"]), \\\n",
    "              \"Sex\": np.array(xtest[\"Sex\"]), \\\n",
    "              \"Cabin\": np.array(xtest[\"Cabin\"]), \\\n",
    "              \"Embarked\": np.array(xtest[\"Embarked\"]), \\\n",
    "              \"Age\": np.array(xtest[\"Age\"]), \\\n",
    "              \"SibSp\": np.array(xtest[\"SibSp\"]), \\\n",
    "              \"Parch\": np.array(xtest[\"Parch\"]), \\\n",
    "              \"Fare\": np.array(xtest[\"Fare\"])}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def input_embedding_layer(\n",
    "    shape=1, \\\n",
    "    name=None, \\\n",
    "    vocabulary_size=1, \\\n",
    "    embedding_dim=1):\n",
    "    \n",
    "    input_layer = Input(shape=[shape, ], name=name)\n",
    "    embedding_layer = Embedding(vocabulary_size, embedding_dim)(input_layer)\n",
    "    \n",
    "    return input_layer, embedding_layer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "def deepfm(sparse_feature_list, \\\n",
    "        sparse_feature_reindex_dict, \\\n",
    "        dense_feature_list, \\\n",
    "        dnn_hidden_units=(128, 128), \\\n",
    "        l2_reg_embedding=1e-5, \\\n",
    "        l2_reg_linear=1e-5, \\\n",
    "        l2_reg_dnn=0, \\\n",
    "        init_std=0.0001, \\\n",
    "        seed=1024, \\\n",
    "        bi_dropout=0.2,\n",
    "        dnn_dropout=0.2, \\\n",
    "        dnn_activation='relu', \\\n",
    "        task='binary'):\n",
    "    \n",
    "    sparse_input_layer_list = []\n",
    "    sparse_embedding_layer_list = []\n",
    "    \n",
    "    dense_input_layer_list = []\n",
    " \n",
    "    \n",
    "    # 1. Input & Embedding sparse features\n",
    "    for i in sparse_feature_list:\n",
    "        shape = 1\n",
    "        name = i\n",
    "        vocabulary_size = len(sparse_feature_reindex_dict[i]) + 1\n",
    "        embedding_dim = 64\n",
    "        \n",
    "        cur_sparse_feaure_input_layer, cur_sparse_feaure_embedding_layer = \\\n",
    "            input_embedding_layer(\n",
    "                shape = shape, \\\n",
    "                name = name, \\\n",
    "                vocabulary_size = vocabulary_size, \\\n",
    "                embedding_dim = embedding_dim)\n",
    "        \n",
    "        sparse_input_layer_list.append(cur_sparse_feaure_input_layer)\n",
    "        sparse_embedding_layer_list.append(cur_sparse_feaure_embedding_layer)\n",
    " \n",
    "    \n",
    "    # 2. Input dense features\n",
    "    for j in dense_feature_list:\n",
    "        dense_input_layer_list.append(Input(shape=(1, ), name=j))\n",
    "    \n",
    "    \n",
    "    # === linear ===\n",
    "    sparse_linear_input = Concatenate(axis=-1)(sparse_embedding_layer_list)\n",
    "    dense_linear_input = Concatenate(axis=-1)(dense_input_layer_list)\n",
    "    linear_logit = Linear()([sparse_linear_input, dense_linear_input])\n",
    " \n",
    "    \n",
    "    # === fm cross ===\n",
    "    fm_logit = FM()(Concatenate(axis=1)(sparse_embedding_layer_list))\n",
    "    \n",
    "   \n",
    "    # === DNN cross ===\n",
    "    dnn_input = Concatenate(axis=-1)([Flatten()(sparse_linear_input), dense_linear_input])\n",
    "    dnn_output = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, False, seed)(dnn_input)\n",
    "    dnn_logit = tf.keras.layers.Dense(1, use_bias=False, activation=None)(dnn_output)\n",
    " \n",
    "    \n",
    "    # === finally dense ===\n",
    "    out = Dense(1, activation='sigmoid')(tf.keras.layers.add([linear_logit, fm_logit, dnn_logit]))\n",
    "    deepfm_model = Model(inputs = sparse_input_layer_list + dense_input_layer_list, outputs=out)\n",
    "    \n",
    "    return deepfm_model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "deepfm_model = deepfm(sparse_feature_list, \\\n",
    "              sparse_feature_reindex_dict, \\\n",
    "              dense_feature_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "Pclass (InputLayer)             [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "Sex (InputLayer)                [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "Cabin (InputLayer)              [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "Embarked (InputLayer)           [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "embedding (Embedding)           (None, 1, 64)        256         Pclass[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "embedding_1 (Embedding)         (None, 1, 64)        192         Sex[0][0]                        \n",
      "__________________________________________________________________________________________________\n",
      "embedding_2 (Embedding)         (None, 1, 64)        9536        Cabin[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "embedding_3 (Embedding)         (None, 1, 64)        320         Embarked[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate (Concatenate)       (None, 1, 256)       0           embedding[0][0]                  \n",
      "                                                                 embedding_1[0][0]                \n",
      "                                                                 embedding_2[0][0]                \n",
      "                                                                 embedding_3[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "Age (InputLayer)                [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "SibSp (InputLayer)              [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "Parch (InputLayer)              [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "Fare (InputLayer)               [(None, 1)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)     (None, 4)            0           Age[0][0]                        \n",
      "                                                                 SibSp[0][0]                      \n",
      "                                                                 Parch[0][0]                      \n",
      "                                                                 Fare[0][0]                       \n",
      "__________________________________________________________________________________________________\n",
      "flatten (Flatten)               (None, 256)          0           concatenate[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_3 (Concatenate)     (None, 260)          0           flatten[0][0]                    \n",
      "                                                                 concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_2 (Concatenate)     (None, 4, 64)        0           embedding[0][0]                  \n",
      "                                                                 embedding_1[0][0]                \n",
      "                                                                 embedding_2[0][0]                \n",
      "                                                                 embedding_3[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "dnn (DNN)                       (None, 128)          49920       concatenate_3[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "linear (Linear)                 (None, 1)            5           concatenate[0][0]                \n",
      "                                                                 concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "fm (FM)                         (None, 1)            0           concatenate_2[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dense (Dense)                   (None, 1)            128         dnn[0][0]                        \n",
      "__________________________________________________________________________________________________\n",
      "add (Add)                       (None, 1)            0           linear[0][0]                     \n",
      "                                                                 fm[0][0]                         \n",
      "                                                                 dense[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 1)            2           add[0][0]                        \n",
      "==================================================================================================\n",
      "Total params: 60,359\n",
      "Trainable params: 60,359\n",
      "Non-trainable params: 0\n",
      "__________________________________________________________________________________________________\n",
      "None\n"
     ]
    }
   ],
   "source": [
    "print(deepfm_model.summary())\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAABXAAAANTCAIAAACIBaJbAAAABmJLR0QA/wD/AP+gvaeTAAAgAElEQVR4nOzdaVwT19s//jMkAWRRFgGxIKAii1gFBBfcF2grri2LIEqL4g5S+7Vq67e9ba1LrYqKCwougAv8tNZqa+vWWnGpgNQV12rZRZFFkC2Z/4NzN//cLDEJIZOEz/sBr2SSObkyczGZuebMGYZlWQIAAAAAAAAAIA8drgMAAAAAAAAAAM2DggIAAAAAAAAAyA0FBQAAAAAAAACQGwoKAAAAAAAAACA3FBQAAAAAAAAAQG58rgMAAADF/fHHH6NGjWpoaOA6EFAyPp9/7ty5oUOHch2ISiGf1V9MTMyGDRu4jkKNfPzxxxs3buQ6ClAabdr22tra5uXlcR0FaAwbG5vc3FwFZkRBAQBAgxUWFjY0NKSmpnIdCChZYGBgYWEh11GoGvJZzW3YsAHHJ43k5eUNHDjw448/5joQUA5t2vbm5eXFxMQMGjSI60BAA1y+fFnh2igKCgAAGi8gIIDrEACUBvmsttLS0rgOQR3Z2toiaUE9DRw4EMkJsmBZVuF5MYYCAAAAAAAAAMgNBQUAAAAAAAAAkBsKCgAAAAAAAAAgNxQUAAAAAAAAAEBuKCgAAAAAAAAAgNxQUAAAAAAAAAAAuaGgAAAAAAAAAAByQ0EBAAAAAAAAAOSGggIAAAAAAAAAyA0FBQAAAAAAAACQGwoKAAAAAAAAACA3FBQAAAAAAAAAQG4oKAAAAAAAAACA3FBQAAAAAAAAAAC5oaAAAADNe/bsWVpa2jfffMN1IAAA7VpFRQXXIQDIpPW5Wl9fn56e3spGysvLW9kCyA4FBQAALZeenv7OO+8wDMPj8Xx9fUeNGjVs2LCFCxc+e/ZMylw5OTkrV64MDAxMSkpSQZAXL15ctmwZwzAMw8yYMeP48eNt/Ym//fZbYGAg/cQ5c+ZcunSprT8RlCgxMdHNza1fv342NjZ0Jf72229t8UHITFCitLS08ePHe3h4+Pn5TZw4ccGCBWvXrv3Pf/7T0vuFQuHatWuHDh1qbm7e0nsGDhy4ZMmS1kSFJG+31q9fb2pqyjAMn8/38/MbP368v7//mDFj7OzsGIbJzc2VvSlZcvWNXr58uXz5clNT0yFDhijWQm1t7TfffDN48GBZwkDmKw0LAAAa6/Dhw7JsyfPz8wkhjo6O9GlxcfHo0aNNTEwyMjKkzFVTU0MIcXZ2Vk6sMrCzsyOEVFdXt91H5Obmih9XV1cTQuzs7Nru4xRGCDl8+DDXUaiajPmcmJhICDl06BB9+v3333fq1CkpKantAkNmUgEBAQEBAVxHoV5kXCYlJSUjR47s2bPn1atX6RSRSJScnGxubh4RESFlxtevX5uZmUn5vwgODl6xYoW8YTeFJKe0adsry3cpKCiQ3D2gRCKRv7//o0eP5Pq4N+aqjCwtLVvTiLxhIPMpGX9/m4UeCgAA2q9r166EEB6PR59aWlrGxsaWlZVJv5xBT09PFcFJ6NChg/hvW3jy5ElISIjKPg7ayP79+wkh7777Ln06adKk+Pj4vLy8tvtEZCa0BsuykyZN+uuvv65evert7U0nMgwTGhp65MiRqqoqKfPq6+vT46uWHDx4cOXKla0PEknePllbWxOJ3QOKYZhly5YZGRnJ1dQbc1VGtBygMHnDQOa3HgoKAADtES3J054L7UR+fr6/v39JSQnXgUBriUQiQsjGjRvFU95//31nZ2fuImoVZKbWO3r0aHp6+tKlS5seKQ0fPjwgIICTqFQJSa5ZcnJy3N3dlVIdaOfaSeajoAAA0B79+eefhBAfHx9CSFVV1ddffx0WFhYdHT1ixIjY2NhmZ3nw4EFAQMDSpUunT58+bNiwmzdv0ukZGRkDBw5csGDBf//7X4FAQM+2NTvx/Pnztra2Fy5ckCXC48ePz54929bWtqysLDw8vHPnzn36
9MnMzCSEXLly5ZNPPnFwcCguLv7ggw/Mzc379Olz9OhRQsiuXbt0dHQYhiGEVFZWbtiwQfx07969t2/fLioqmjt3roxLqdmvnJKSYmhoyDDM2rVrhUIhIeTAgQN6enr79u0jhNTU1Kxbt27mzJleXl5jx469deuWSCT6/fffY2JiHBwcCgoKRowYYWdnV1ZWJmMM0NTChQsJIV9++eXEiROLi4sJITweb9KkSfTVpquAEHLjxg1fX1+GYSZMmFBaWrpkyZJu3bqJxwdBZiIz2xTNgdGjRzf76pQpU+iDlrax1MOHDydMmGBmZubt7U1HDBGJRGlpaeHh4cOHDydSM5MgyZHksmFZ9tmzZwsXLqRjK1ZXV6ekpISEhPj4+Fy5csXDw8Pe3j49Pf3+/fuTJ0+2sLBwcXER55hY01wlza1QWdbOd999p6+v/8knn9BhGpvdvBNCXr9+vXjx4tmzZ69YsWL58uWSvX6Q+arIfGVeewEAAKol+zVvhJBevXoJhcIXL14cO3bMzs6uY8eOOTk59fX1I0aMCAsLE4lELMvu2bOHEPLjjz+K5xKPoeDo6NijRw+WZevr601MTNzc3Oj0Xr16mZmZ0cdBQUHPnj1raeIPP/xgYGAgbrwpepKZPs7Ly6P9LVetWvX06dPk5GRCyIABA4RC4YkTJ2h3wYULF164cOHAgQPGxsaEkPT0dJZle/ToIblMJJ+SJkNCNJ0iqaWv/PnnnxNCbt++TZ/+888/kydPpo9nzZqVk5NDH/v6+lpZWT1//vzSpUsGBgaEkNWrV585c2bmzJmvXr1q6UPFgWnNdbyykz2fk5KSTExMCCFmZmY7duwQCoXil5qugoqKCpZlq6qqXF1dHRwcamtrJ0yYcP/+ffEsyEwZMxNjKDQlyzLx8vIihJSXl0t/W0urlabfokWLTp8+vXPnTkNDQx6Pd+PGDZZl//nnH3GqtJSZtBEkuYxJrk3bXhm/S7NHiEVFRSzLikSihw8fEkI6dep08uTJO3fuEELs7e2//fbb8vLy69evE0JGjBghbkpKrjZdobW1tc2uHXEqlpaWhoWF0dmpZjfvDQ0NAwYMmDVrFp3+6NEjPp8vTjxkvoyZ35oxFFBQAADQYHIVFCh9ff1u3brNnDmTHlBt2LCBEHLv3j36toaGhj179rx8+VI8l/hnb8OGDQcPHmRZViQS9ejRQyAQ0OkWFhaEkNjYWJFIdOvWLXrw1uxE2r6UICV/11mWdXJyknxqZWWlp6dHH/fq1YsQUlVVRZ9u2rSJEBIcHNy0Ecmn8v6ut/SVX7x4YWxsLN59Wb169YkTJ1iWvXr1atPdMvoS/S6lpaVSvn6jwLRmp1Z2cu3QPH/+fN68efTSX39/f7qrJGUVsCybkZHB5/MHDRq0Z8+eRq0hM6V8fTEUFJqSZZkMHDiQEFJYWCj9bS2tVpoq4q0o7UQ2Y8YM+lQyVaRkJoskly3JtWnbK+N3kVwRIpGoqKho6NChtKDQ9A1vvfWW5Cq2tLQ0MTERP5WSqy2t0KZrhzby+PHjiIiIkpIS8fSWVvHWrVsJIXfv3hW/k6ao+CkyX8rXF8OgjAAA8Gb0B+z169dPnz7dtWuXo6MjIYR2R7SxsaHv4fF44eHh9NxvIzExMePHj9+2bduqVatqa2vr6+vp9O3btxsbG0dHR3t7e7969YrW7JudSJqM/CQd7TEoZmpqWltbSx/r6OgQQmj1nRAyYcIEQsiDBw9kb1wWLX1lMzOzhQsX7tu3j46Pffbs2XfeeYcQcu3aNfHJBLFx48aJv4upqalyI2zPzM3N4+LiMjMzu3XrduLECXrnPCmrgBDi6en56aefXr161d3dvVFryEzlRgiSXF1dCSF3796V/raWVisl3orSq3voueJGpGQmQZIjyWXAMIyVlVVMTIxAIGj2DeI8pMzMzJp2pG82V1taoS2tnXHjxlVVVXXu3Fk8paVV/OuvvxJC7O3txe+kKSqGzFduhE2h
oAAA0K7RS9Bl+UW8du1anz59unfv/vnnn0sO/vz+++9nZ2f7+fllZGQMHTqUXtHX7MS2Q29jYWtrq6wGS0pKGhoaWvrKhJCPP/5YV1d306ZNmZmZ3t7edH/lxYsXjx8/pjeFEqMjCIKyXLhwgfazpfr27fvbb78xDHPo0CHyplXAsuyjR49sbW3DwsLq6upUEC0yEwghdIyDK1euSH+blNUqycrKihDSrVs35QapMCS59pk8ebKZmdmrV69auQAlc1XG9BZbv3794cOH165dK57S0iqmw0u/ePGiNaEqAJkvhoICAEC71rdvX0LIqlWr2H8vi3j69OnPP//c9J3Tp0+vr6+nFXHJ36ovvviie/fup06dOnjwYH19Pb3Mr9mJhBA6mJDS0T2JMWPGkH9L8vRwkWXZ8vJy8dsYhmloaJClQdqXvqWvTAgxNzefO3fujh07Nm/e/NFHH9GJzs7O1dXVkjtAd+/epb0xQVmMjY0//vhjydXh4OBgZWVFBySXvgrWrVs3ZcqUxMTEW7duffHFF5LNIjOh7UybNs3T0zM2NrawsLDRS7W1tfQ2qKTlbWwjubm5hBB/f395w0CSg1xCQ0Mbna6Xl2SuypjeYu+9997y5cuXL18u3iFpaRXT6wtOnjzZUlPI/DYn1wUSAACgVmS85u3p06eEEDs7u6YvPX782NDQkBAyatSouLi4FStWzJ49mw7QSAve9vb29J2dOnViGObXX39NSUmhB29Xr17Nzc01MDCgYy7U19d36tSJjgHW7MQTJ04YGRn9/PPPLcVJz2OIL1CkPRjFr9JLN+vr69l/L1AUXxi5b98+T09P+tLkyZMJIStWrHjw4MHGjRvpfdpOnTolFAp79uxpaGj4zz//0Llov8G33nqLfl+qvLw8MjJy2rRpUr4yfWdRUZGenp7keFQ1NTXdu3cnhHz00UcpKSmff/65r68vvZqUfpc3jsUoRrToOl7ZyZLPlZWVhJDw8PDKyko65ccffySEJCYmslJXwZUrV6ZOnUpnofttv//+O32KzJQxMzGGQlMyLpO7d+/a2dl179796NGjND2qq6vPnTs3evToK1eu0Pe0tFpdXFyIxIXQ8+bNmzhxIn1M/x26du1Kn0rJTCS5jEmuTdteWb5LUVERIcTBwUFyYk1NTUxMTGBgIMuyr1+/JoQ4OTnRl+hwg+LNL1224mFxpeRqSyu06dpxcHAghIhEooaGhlGjRpmYmFy/fp1teRVnZ2fz+Xxzc/NTp07Rf6uOHTsSQv7++28WmS9z5mNQRgCAdkqWH4CrV68GBgbSIvL8+fPFO69iN2/e9PPzMzU1feuttxYtWkSHIn/8+HFUVBSda9OmTS9fvoyLi+vUqZO3t/eVK1diY2NNTU0nTpxIK/QeHh5r1qwJDQ319/enP+HNTjx9+nTXrl3PnTvXNMg//vhj6dKl9ONCQ0N/+OGHuLg4+vTrr78uLy+nox8RQpYuXfr69Wv6u75+/frnz58/e/ZszZo14p/M+/fvDxgwwNDQ0NfX9/79+0OHDg0LCzt06FBtbe2yZcusra2PHDnCsuy5c+cmTpxI23R2dh45cuTIkSOdnJz09PQIIfv27WNZtqWvLA7b398/KSlJ8os8efKE3i6rS5cukZGRJSUlVVVVK1eupB8UGRlJd4zeSJt2amUn4w6NtbU1IcTc3Hzs2LFjx44dPHjw999/L3616SpgWfbIkSMWFhZz586l71m+fDkhxMTEhI7OiMyUMTNRUGhK9mVSWVm5du3acePGOTg4uLm59evX77PPPpNcay2t1tOnT48fP37EiBGRkZFRUVFxcXH0+K2qqmrZsmV0DW7YsGHNmjVSMhNJLmOSa9O2943f5fz58/RImGEYFxcXPz+/cePGDRkyhA6CEB8fX1xc/PHHHxNC9PT0zpw588svv9AbKERFRb148WLLli30zPy6deueP3/OsmxLuco2t0LHjh27aNEiybVTWlr61Vdf0Ta/
+eab/Px82n+nY8eOq1evLisra3bzzrLshQsXfHx8jI2Nu3fvvmbNmmHDhs2ZM+fs2bNCoRCZL2Pmt6agwLAt3CwEAADUX2pqalBQUDvckru4uNCbJHEYQ3V1dd++fW/cuEFvJaVcDMMcPnxYXAlqJ7Qgn7U7M2lCpqamKrdZjdYOl4l2J7k2bXu16buoA+3O/Nb8/mIMBQAAAEXExcUtXLiwLaoJAK2BzASthySH9kk9M5/PdQAAAAByq6qqon/pABCqdPXq1cjIyOrqaqFQmJOTo+JPBzWHzASthySH9gmZ3xL0UAAAAE1SVVX12Wef0bGjo6Ki3ngnNqUzNDSsqKjQ0dE5cOCArq6uij8d1BYyE7QekhzaJ2S+dOihAAAAmsTQ0HDVqlWrVq3iKgA3N7e///6bq08HtYXMBK2HJIf2CZkvHXooAAAAAAAAAIDcUFAAAAAAAAAAALmhoAAAAAAAAAAAckNBAQAAAAAAAADkhoICAAAAAAAAAMgNBQUAAAAAAAAAkBsKCgAAAAAAAAAgNxQUAAAAAAAAAEBuKCgAAAAAAAAAgNxQUAAAAAAAAAAAuaGgAAAAAAAAAAByQ0EBAAAAAAAAAOSGggIAAAAAAAAAyI3PdQAAAKA4Pp9PCGEYhutAQPnoym1XkM/qLzg4mOsQ1AuPxzt06BCSVptozbaXz+cHBQUFBQVxHQhoBoUzn2FZVrmhAACAytTU1Pz0009CoZDrQFRh48aNhJCYmBiuA1EFHo/33nvv6evrcx2ISmlfPgcGBsbExAwaNIjrQJTGy8vL3t6e6yjUyJMnT65du8Z1FG3u8uXLGzduTE1N5TqQNqdN294//vijqKiI6yjUTrval5BLly5dhg4dqsCMKCgAAIBmCAwMJIS0hz1a0BoMwxw+fJimLoDmSk1NDQoKwlEDaAHsSygdxlAAAAAAAAAAALmhoAAAAAAAAAAAckNBAQAAAAAAAADkhoICAAAAAAAAAMgNBQUAAAAAAAAAkBsKCgAAAAAAAAAgNxQUAAAAAAAAAEBuKCgAAAAAAAAAgNxQUAAAAAAAAAAAuaGgAAAAAAAAAAByQ0EBAAAAAAAAAOSGggIAAAAAAAAAyA0FBQAAAAAAAACQGwoKAAAAAAAAACA3FBQAAAAAAAAAQG4oKAAAAAAAAACA3FBQAAAAAAAAAAC5oaAAAAAAAAAAAHJDQQEAAAAAAAAA5IaCAgAAAAAAAADIDQUFAAAAAAAAAJAbCgoAAAAAAAAAIDcUFAAAAAAAAABAbigoAAAAAAAAAIDcUFAAAAAAAAAAALmhoAAAAAAAAAAAckNBAQAAAAAAAADkhoICAAAAAAAAAMgNBQUAAAAAAAAAkBsKCgAAAAAAAAAgNxQUAAAAAAAAAEBuKCgAAAAAAAAAgNxQUAAAAAAAAAAAufG5DgAAAKB5QqHw4sWL9fX19GlxcTEh5MyZM/SpQCAYMmQIj8fjLD6AJkpKSv766y/JKTdv3jQzM6OPu3Tp4ubmxkVcAHK7detWUVERfXzz5k0isfklhPTt29fCwoKbyADkgX2JtsawLMt1DAAAAM04c+bM2LFjpbzh9OnTY8aMUVk8AG80e/bs+Pj4ll7t1KlTWVmZKuMBUJiJiUl5eXlLr0ZGRu7cuVOV8QAoBvsSbQ2XPAAAgJoaPHhwhw4dWnq1Q4cOgwcPVmU8AG/k6+vLMEyzL/H5fD8/PxXHA6AwPz8/Pr/5vswMw/j6+qo4HgDFYF+iraGgAAAAasrAwGDy5MkCgaDpSwKBYPLkyQYGBqqPCkCKcePGtZSWQqEwNDRUxfEAKCwkJKShoaHZlzp06PDee++pOB4AxWBfoq2hoAAAAOorJCREfN2jpPr6ehybgRrS19f/4IMPdHV1m75kZGSEHgqgQd59992OHTs2nS4QCAIDA6Wc8gVQN9iXaFMoKAAAgPry8/MzNTVtOr1Tp07SL4kE4EpISEhdXV2jiQKBICgoSE9Pj5OQABSg
q6sbEBDQ9LxufX19SEgIJyEBKAb7Em0KBQUAAFBffD4/KCio0flegUAQEhLSbPdFAM6NHj1afFsHMRyDgSZq9ryuqanpyJEjOYkHQDHYl2hTKCgAAIBamzp1aqPzvfX19VOnTuUqHgDpeDxeaGhooz3Xzp07Dxs2jKuQABQzYsSIRveG1NXVDQsLa2mwRgC1hX2JtoOCAgAAqLWhQ4daW1tLTunSpYuPjw9X8QC8UaM9V4FAMH36dNznHDSOjo7OtGnTJE/h1tXV4RgMNBH2JdoOCgoAAKDWGIaZNm2a+Hyvrq7u9OnTdXTw+wXqa+DAgTY2NuKnOA8Gmmvq1KmSVz107dp1wIABHMYDoBjsS7QdLEQAAFB3kud7cX4M1B/DMNOnTxef1+3WrZunpye3IQEoxsvLy8HBgT7W1dX98MMPGYbhNiQAxWBfoo2goAAAAOrO3d29Z8+e9HH37t379evHbTwAbzRt2jR6XlcgEISHh+MYDDSX+KqHurq64OBgrsMBUBD2JdoICgoAAKABwsLCBAKBQCCYMWMG17EAvJmLi4uzszMhpL6+HsdgoNHEVz04OTm5ublxHQ6A4rAv0RZQUAAAAA0QGhra0NCAa9FBg0yfPp0Q0qdPHxcXF65jAVCci4sLrSOEh4dzHQtAq2Bfoi3gpi8AAByrqan56aefhEIh14GoOzs7O0JIdnZ2dnY217GoNR6P99577+nr67eyHWRmK3Xs2JFhmL59+6alpXEdiwbz8vKyt7dvZSNPnjy5du2aMsJpp/r163f79m1jY2Mks8KUtWX+448/ioqKlBJS+4R9idbr0qXL0KFDxU8ZlmU5jAYAAI4ePfr+++9zHQVolSNHjkyZMqWVjSAzQR0EBwcfPHiwlY1MnTr10KFDSokHQGFK2TILBIKGhgalxAOgGD6fL3nzF/RQAADgGN0zQHkXlIVhGKXsbiIzgXOBgYFK6SMjFAoDAgJSU1Nb3xSAYpS4ZT58+HBgYGDrmwJQQGpqalBQkOQUjKEAAAAAAAAAAHJDQQEAAAAAAAAA5IaCAgAAAAAAAADIDQUFAAAAAAAAAJAbCgoAAAAAAAAAIDcUFAAAAAAAAABAbigoAAAAAAAAAIDcUFAAAAAAAAAAALmhoAAAAAAAAAAAckNBAQAAAAAAAADkhoICAAAAAAAAAMgNBQUAAAAAAAAAkBsKCgAAAAAAAAAgNxQUAAAAAAAAAEBuKCgAALQvFRUVrWyhvLxcgZfUH5YMt7D8lQvLk0NY+MqF5ckVLHnl0tbliYICAEC7IBQK165dO3ToUHNzc8VaqK2t/eabbwYPHty0hWZfGjhw4JIlSxSPWKr169ebmpoyDMPn8/38/MaPH+/v7z9mzBg7OzuGYXJzc2VvSsuWjMbRvuVfUFCwZ8+eoKCgwYMHyzgL8lk7aN/CT0hIcHd3NzY27tev3549e2SZBcmsBbRvyd+5c2fSpEmdO3e2sLCYOnVqYWHhG2dBJsuBBQAATh0+fFg1W+PXr1+bmZm15rOktND0peDg4BUrVij8WW9UUFBACHF0dJScKBKJ/P39Hz16JFdTWrZkCCGHDx9ufTvITIX9888/hBBnZ2fZZ0E+NysgICAgIEB92nkjbVr4S5cunTZtWlxcXHR0dIcOHQghW7ZskWVGJHOzlLVlVlY70mnTkr9z587kyZO///7769evh4WFEUJGjx4ty4zI5GY13Tfgt2m1AgAA1Ie+vr6lpWVpaWlbtND0pYMHDyr8QbKwtrYmhPB4PMmJDMMsW7bMyMhIrqa0bMloHO1b/ra2tvLOgnzWDlqz8PPy8nJzc5OTk+nT9957z8/PLzY2dsGCBW+cF8msBbRpyZ8+fTolJYUWxRITE3/88cerV6/KMiMyWUYoKAAAgPbIyclxd3en+w0Amg75DFx5+vTpd999J37q6+trYWHx7NkzhRtEMgNXoqKiJJ82NDREREQo3BoyuSmMoQAAoBlqamrWrVs3c+ZM
Ly+vsWPH3rp1ixBSXV2dkpISEhLi4+Nz5coVDw8Pe3v79PT0+/fvT5482cLCwsXFJTMzs1FTDx8+nDBhgpmZmbe392+//SalfULI69evFy9ePHv27BUrVixfvryqqkrcTksviUSitLS08PDw4cOHE0KOHz8+e/ZsW1vbsrKy8PDwzp079+nTRzKqrVu3hoWFzZs3T19fn/kXIeT8+fO2trYXLlyQZfmwLPvs2bOFCxfSQY+0e8molXaYmYpBPqt/PiOZxXx8fKysrCSn1NXVDR06lD5GMqt5MiOTW/Lf//5306ZNmzZtok+RycrJ5Da6uAIAAGQk45Xqs2bNysnJoY99fX2trKwqKipEItHDhw8JIZ06dTp58uSdO3cIIfb29t9++215efn169cJISNGjBA34uzsTAhZtGjR6dOnd+7caWhoyOPxbty40VL7DQ0NAwYMmDVrFp3+6NEjPp9Po5XyEvt/ryHPy8ujnQNXrVr19OlT2od2wIAB9J1btmzh8XgvXrxgWXb16tWEkMWLF9OXfvjhBwMDgx9//LGlZdLs71pRURHLstq9ZKQjqh1DoR1mpixIkzEUkM8KLE8Vj6GAZG5Jenp6hw4dsrKy6FMkswLLU1lbZlnaQSY39f333w8bNowQ4uDgsHv3bjoRmazA8my6b4CCAgAAx2Q5bGv2er8TJ07QV4nEoctbb70l2ZqlpaWJiYn4Kf09q6iooE9jY2MJITNmzGip/a1btxJC7t69K26hV69etH0pLzWNysnJSfIlKysrPT09+njChAk6Ojp1dXUsy9Kq/MCBA8XvbGhokLJYJD9CJBIVFRUNHTqU/tJr/ZKRvlhUVlBot5n5RqS5QRmRz/IuT1UWFJDMLWloaBg+fPjBgwcbTZQyC5K52WWimoICMrlZL1++vHPnztatWw0MDAghe/fupdORyfIuz6b7BrjkAQBAA1y7ds3Nza3RNn3cuHFN32lsbCz51MzMrKysrKX3TJo0iRBy586dltr/9ddfCSH29nhVYwIAACAASURBVPbieXV0/veHQ8pLTTXqQWdqalpbW0sfjx07ViQSnTx5khCir69PCBk1apT4nY0GQ5KCYRgrK6uYmBiBQNDsG7RsyaiJdpuZikE+q3M+I5lb8j//8z+jR48ODg6WnIhkVttkRiY3y8TExMXFZf78+Tt37iSE7N+/n05HJrc+kzEoIwCABnjx4sXjx4+rq6tpZZ0SiURSfkJkQS+R7datW0vt5+fn00+ndXdJUl6Sy4IFCzp06BAREZGenv7gwYOVK1cuX75c4dYmT55MCHn16pWBgUFrFo72LZk2gsxsU8hnVUIyN+vEiROGhoaffvppawIgSGYVQiZLN3HiREKIrq6uYjEgk5tCDwUAAA3g7OxcXV29du1a8ZS7d+/SPm+tkZubSwjx9/dvqX3aQ49Wr5uG1NJLchEKhbdu3bpy5cq333577NixFStWSJ4uEAqFCrQZGhrayvGx1H/JqIl2m5kKt6nAXMhn1UAyN3X69Om8vDzJasLly5fFbSoQCZJZBZDJ0hUWFhJC3nvvPXGbCkSCTP4/WAAA4JQsV6rX1NR0796dEPLRRx+lpKR8/vnnvr6+9Eq8169fE0KcnJzoO3v06EEIqayspE9pjzihUEifuri4EEJKS0vp03nz5k2cOFFK+9nZ2Xw+39zc/NSpU9XV1efOnevYsSMh5O+//5byEsuylZWVhJCuXbtKhiH+OrSmXl9fz7LsypUre/TokZCQcOrUqUuXLt2/f198QeOJEyeMjIx+/vnnZpdJUVERIcTBwaHRgoqJiQkMDNTuJSMdUeEYCu0zM9+ourqaEOLo6Cg5EfmswPJU5RgKSOZGzpw5M2rUqK3/2rJlS0xMzOeff84imRVansraMr+xHWRyIxs2bEhISCgrK6PBT5o0KSgoSCQSschkhZYnBmUEAFA7Mo6l/+TJE3qroS5dukRGRpaUlLAsW1xc/PHHHxNC9PT0
zpw588svv9CRfqOiol68eLFlyxZaQV+3bt3z589Zlj19+vT48eNHjBgRGRkZFRUVFxcn/qlrtn2WZS9cuODj42NsbNy9e/c1a9YMGzZszpw5Z8+eFQqFLb1UWVm5bNkyWrbesGHDmjVr6OOvv/66vLxcfLumpUuXvn79+vTp041uTmZhYXHkyBEabdeuXc+dO9d0aZw/f552O2QYxsXFxc/Pb9y4cUOGDKFXJ8bHx2v3kpFOWbutyEzFlv/58+cjIyMJIQKBYN26ddnZ2XQ68lmB5aniuzwgmcUuXbok2QGbYhjm0aNHLJJZoWRW1pZZlnaQyZK+/PLLnj17mpqazp07Nzo6+syZM+KXkMkKLE8UFAAA1I6Mh23aKjExcd26dfSxUCjMzc3dv3+/paUlt1GpA4WXjLJ2W5GZyEwlUmx5qrigoK2QzMql2PJUZUFBWyGTlUux5dl03wCDMgIAAGfWrl27dOnSFy9e0Kc6Ojo2NjZDhgxp5ThDWgBLhltSlr+U62ZzcnLoDbqgEeQzh5DMyoVk5goyWbmUmMkYlBEAADhz8eJFQsiOHTvEP2lZWVlLly5NTk7mNC7uYclwS8ryl3LeBrutLUE+cwjJrFxIZq4gk5VLiZmMggIAAHBm3759CxcuTEhIsLGx8fHxCQwMzMrKSk5OdnV15To0jmHJcAvLX7mwPDmEha9cWJ5cwZJXLiUuT4Zl2bYIEQAAZJSamhoUFIStMSgLwzCHDx8ODAxsZTvITOAcTePU1FQ1aQdAYcraMiurHQDFNN03QA8FAAAAAAAAAJAbCgoAAAAAAAAAIDcUFAAAAAAAAABAbigoAAAAAAAAAIDcUFAAAAAAAAAAALmhoAAAAAAAAAAAckNBAQAAAAAAAADkhoICAAAAAAAAAMgNBQUAAAAAAAAAkBsKCgAAAAAAAAAgNxQUAAAAAAAAAEBuKCgAAAAAAAAAgNxQUAAAANA2ly5dun//PsuyXAcCAADKUVBQ8NNPP3EdBUBjfK4DAAAAQghJS0vjOgRQgtevX3fo0IHrKEhcXFxsbKyJiYmXl5e3tzf9a21trUBTyMx2orq62sDAgOsoGsvNzbW1tVVWU+qWzEKhsKGhQU9Pj+tAQE3l5+dnZmZmZWVlZmZmZmYWFhYyDEMIuXLlCn2gPpDM7ceVK1caTUFBAQCAY9bW1nw+PzAwkOtAQEvw+fzTp09bWFhkZmamp6d///33q1evFolE1tbWnv8aPHiwubm59HaQmaAOBg0a1PpGbGxs0tLSkMzAIT6f/8aqbkFBQaaEwsJCQgjddEdGRnp6eg4aNMjd3X3jxo0bN25USdQAzbCxsZF8yqA/JAAAgLJkZWXt3LkzOTmZz+cHBwdHRUX17t2b66BIeXl5RkbGn3/++eeff167di0/P19HR8fJyal///5eXl5eXl79+vXT19fnOkxtQ49gRSKRup1LbNbvv/8eHx9/5MgRfX39adOmRUZGvv3221wHpW0aGhpOnDgRHx//yy+/WFlZffTRRzNnzrS3t+c6rjdLTU0NCgrCUYPSPX36lHZAyMrKysrKKi4u1tHRcXR09PDwoMVfDw+Pjh07ch1mM2gy79y589dff+3SpQtNZjs7O67jAg6goAAAAKBk5eXle/fu3bx58+PHj318fKKjoydPnsznq0uvwIKCAlpZyMjIuHbt2suXLwUCgZubm9e/evfurT7Raq6DBw+GhYU1NDRwHYgcysrKUlNTt2zZcuvWLXpSNDQ01NDQkOu4NF5+fn5ycvK2bdvy8vJGjRoVGRk5adIkgUDAdVyyQkFBWR4/fiyuIGRmZr548YKWd2kFwcPDw93dXT0rCGIFBQVJSUnbt2/Pzc3VxGQGpUNBAQAAoE2IRKJz587Fx8cfPXrUyspq1qxZ8+fPt7Cw4DquxiQ72aanp9P6gqOj45AhQ3x8fDw9PV1cXHR0MIqz3JKTkyMiImpra7kORBGZmZnx8fFJSUm6urpBQUHz
5s3r27cv10FpHvFG4Pvvv+/cufOMGTMiIyO7d+/OdVxyQ0FBYZIb2KtXr5aUlPB4PCcnJ09Pz969e7u6uvr4+JiZmXEd5ptJJrO5uXl4eLiGJjMoHQoKAAAAbevRo0e7du3avXt3ZWXlxIkTIyMjx4wZw3VQzRMKhTk5OZIX8dbU1BgbG7/99tvi8RdcXV01og8/5/bu3Tt//vyqqiquA1Hcy5cv09LSYmNj79y5QzssTJs2TQ3HblRDmt4loREUFGQnWUG4fPnyixcv+Hx+r169xJtQd3d3zer1U1hYuH///h07djx9+nT06NGansygdCgoAAAAqEJtbe3x48c3btx4+fJlDw+P2bNnq/+xWUNDw71798Q7x9euXaurqzMxMenduzftv+Dt7W1lZcV1mGoqISEhJiamoqKC60Bai2XZs2fPxsfHHzt2zMDAICgoaMGCBX369OE6LnWkNV0SGkFBoSW0CHvnzp3bt29nZmZeunSptLS0UQXBw8NDzTf1zZJMZmNj44CAgOjoaFdXV67jArWDggIAAIBKSXYmnzFjRnR0tKYcb1RVVV2/fl1cX7h79y7LsvLePKL92Llz57Jly0pLS7kORGmKi4v37t0bHx//+PFjdFhoRMu6JDSCgoJYozJrdnZ2VVUVvUzMU4I63D9YYUVFRfv27du5c+fff/9N/9PDwsI0+htBm0JBAQAAgAPPnj3bs2eP5LhWU6ZM4fF4XMclh4qKihs3boh3rO/cuUP+vcMZ7b+goefllGXbtm1ffPFFSUkJ14Eomfi85bFjxwwNDQMDAxcuXOjm5sZ1XNzQ1i4JjbTngkKjCsL169erq6sbVRC8vLz09PS4jrS18K8NikFBAQAAgDN0By42NvbkyZPdu3efNWvWzJkzNfQkf9Oxxxr1+9WOfW7Zbd68efXq1fRO8lqpnZ/G1O4uCY20q4JCfX39/fv3xVuzrKys169fGxkZOTk5ubq60q2Zt7e3rq4u15EqTdPOR+3qfxlaCQUFAAAA7j148CAhIWHXrl1VVVUTJkyIiYkZNGgQ10G1Cm4esWHDho0bN+bm5nIdSNtqbxdat5MuCY1od0GhUQWh2cFonZ2dNasHmSwwPAooBQoKAAAA6uLVq1cHDhyIi4u7ceOGNp0manTzCHrGT+tvHvHtt9/GxcU9efKE60BURDwU/JMnT3x8fKKjo7XsjH276pLQiJYVFOrq6h48eCDeImVkZNTW1nbs2LFPnz7iLZJ2VzxxAxdQIhQUAAAA1E5mZmZsbOyhQ4dMTU0//PDDuXPn2tnZcR2U0rzx5hFeXl5dunThOszWWr16dUJCwsOHD7kORKW072b17bNLQiOaXlB49epVdna2eJtz7949oVDYqVMnNze3dlJBEJMcFTgoKGj+/Plvv/0210GBZkNBAQAAQE3Ra9Tj4uLy8/NHjRoVFRXl7++vZafxSZObR+Tk5IhEIi24ecTXX3+dlJR07949rgPhRkFBQVJSkuSwoxp3Pr89d0loROMKCo2GjKVbFVq11OJeUVKUlZWlpqZu2bLl1q1btEtCaGiooaEh13GBNkBBAQAAQK0JhcKffvpp8+bNZ8+e7dmzZ0RExKxZs8zMzLiOq628fPny2rVrGRkZ165du3btWn5+PsMwvXr18vLy6t+/v6enp7u7u0bsB3/55ZdpaWm3b9/mOhAuNT29P3v2bAcHB67jkgZdEppS/4JCeXn5zZs3G1UQTE1NxcMotrcKgpi4S4JAIAgODp47d26/fv24Dgq0CgoKAAAAmiEnJ2f79u2JiYmEkJCQkHbSVbWwsJBWFjIyMjIyMp4/f87j8VxcXPr/q2/fvvr6+lyH2YwVK1b88MMPN27c4DoQtaARZ/s1IkhOqGFBobi4OEsCHazExsbGw8PD09OT/rW2tuY6TM6Ul5cfPnx469atN2/epF0SQkJCjIyMuI4LtBAKCgAAAJqkoqLi0KFDmzdvvn37tqenZ1RU1NSpU9vP
MY/kzSOuXLny/PnzRjen7N+/v5rUF5YvX37q1KmsrCyuA1EjQqHw/Pnz8fHxR48etbS0nD59+pw5c+zt7bmNCl0S3kgdCgq5ubnXr18XVxDy8/MJIQ4ODu7u7h4eHrSCYGlpyWGEaoJ2SUhOTubxeFOnTp0zZ467uzvXQYE2Q0EBAABAI128eHHz5s3iQ6D58+fb2tpyHZSqSdYXLl26VFpaSm9OKa4veHl56enpcRLbkiVLzp8/f+3aNU4+Xc3l5eWlpKRs3bq1oKCA9gWYPHkyn89XcRjokiAjTgoKkv/dGRkZRUVFhBDJ0VUGDBiACoIY7ZIgeZMgdEkA1UBBAQAAQIMVFBTEx8dv27attLT03XffjY6OHj16dDu8TpiiRyDp6ekXL17Mzs6uqqoS1xfo/SNUOZD74sWLL126dPnyZdV8nCaSHB/E2to6LCxMNTc0QZcEeammoCBZQbh69WpJSQmPx3NycurduzcdCkFDx2dta7RLQkpKilAoHD9+fHR0tI+PD9dBQTuCggIAAIDGq6ur++GHH+Lj48+cOePk5DR37tyIiIh2fm5KKBTm5OSIj0+ysrJev35tZGTUt29f1dwoLjo6OjMz8+LFi23UvjZ5+PDh7t279+zZ8/z58zbtsIAuCYppi4KC+D/0zp07t2/fpj2MGl3BpCkjsHKCXv62ffv27OxsV1fX6dOnR0ZGmpqach0XtDsoKAAAAGiPrKysnTt3Jicn8/n84ODgqKio3r17cx2UWmhoaLh3716mhJqamo4dO/bp06eNBoGfP3/+7du3f/vtN2U1qPXEdbGzZ8927dp12rRp8+bN69atW+tbRpeEVlJKQaHR/+D169erq6sbXaPk6enZoUMHZYWtrWiXhAMHDjQ0NIwfPz4yMnLMmDFcBwXtFwoKAAAA2qa8vHzv3r2bN29+/Pixj49PdHQ0Jxeoq7P6+vr79++Lj22uXbtWV1fXqVMnNzc38YFNK2sxc+bMefjw4ZkzZ5QVc/vx4MGDhISExMTE0tLSkSNHRkZGTpkyhcfjKdAUuiQohWIFhUb/ZbSKZ2Rk5OTkJL6bI4ejnGicysrKgwcP7tix4/r1687OzuHh4dp9C2HQFCgoAAAAaCfxWdmjR49aWVnNmjVr/vz5FhYWXMeljurq6m7evHnx4sVGN7H39PT08fGhhz1dunSRq82ZM2fm5ub+8ssvbRSz1qutrT1+/DjtsPDWW2+FhoYuWLDAxsZGlnnRJUG5ZCwovHr1Kjs7W3wVA63TNeoH5OzsrFhtqD0Td0mor6+fMGECuiSAWkFBAQAAQMs9evRo165dCQkJFRUVEydOxM7oG4mPi6i7d++yLCvv8PIffvjhs2fPTp48qZqYtdj9+/cTExMTEhJevnz5xpFH0SWhLbRUUKioqLhx44b4P4VW4kxMTHr37q2akUq0W01NzY8//hgbG5uenu7k5PThhx/OnDkTw1KCukFBAQAAoF2g53s3btx4+fJlDw+P2bNnT5s2zcDAgOu4NECjo6am9YVmB58PCwsrLy8/fvw4JzFrH8kOCz169Jg5c+aHH34oLuugS0KbEhcUysrKbt261fR/QXwjBqWPRdI+3blzZ//+/fHx8dXV1bRLQnu+fQ+oORQUAAAA2hfaezYpKUlXVzcoKGjRokUuLi5cB6VJJI+p0tPTHz9+TAiRrC8MGTLE1NQ0JCSkpqbm6NGjXMerbXJycvbu3bt79+7KysqJEydOmTLlyZMn27dvR5eEtpCfn5+VlZWUlJSWlmZjY5OXl0cIsbe39/DwcHd39/Dw8PDwkPdqIGgJ7ZJA79fTq1evjz76KCIionPnzlzHBSANCgoAAADt0bNnz/bs2bN9+/bc3Fx6GKbwuHftXEFBQYaEkpISHo/n7OxcVVVlYmKybdu2fv36YeB6pauurl6zZk18fHxxcTGPxxsyZMj69ev79+/PdVwa7+nTp1lZWVlZWfRmq8XFxQzDdOnSpbCwcO3a
tbSCgIEAlY6WyXbt2lVVVYUuCaBZUFAAAABov2hH8djY2JMnT3bv3n3WrFk4IdZKT58+zcjIyMzMTEhIKCsrq6ur4/P5Li4utPNC//79+/bti/pCazQaJWHcuHEFBQUJCQk4ElNMQUGB5O1OiouLyf/tcTNo0KBz5861/raR0JT4Qp4zZ844OjpGRER89NFHGDoXNAsKCgAAAPC/N+oTnx+LiYkZNGgQ10FptokTJxobG69bt058tHb16lXaf8HJyUl8tObh4YGRLGQhfZQE9BWXnWQFoWlO9u7du2kfBMVuGwlS3Lt3b8+ePeIrd1AIA82FggIAAAD8r1evXh04cCAuLu7GjRuenp6RkZFhYWE4na6YcePGWVhY7N27V3Ki9GM51BeaJdeNGzCaXSNCofDp06e3b9+mWXf58uUXL17w+fxevXrJnnUoKCiL9LFFATQRCgoAAADQWGZmZmxs7KFDh0xNTT/88MM5c+bY29tzHZSGeeedd2xsbHbv3i3lPZL1hStXrjx//lzeIz0t1pobN9TU1KSmpsbGxmZlZTk7O4eHh7ef++01NDTcu3dPnFfZ2dlVVVUCgcDR0dFTglyFQhQUWk/y7qcjR46Miory9/dvz6Uu0BooKAAAAEDzioqK9u3bFxcXl5+fP2rUKOwBy2XMmDE9evTYuXOn7LOgvkDJ1SVBOnpPkwMHDjQ0NIwfPz4yMnLMmDFKD5hbjSoI169fr66ublRB6N+/v76+vsIfgYKCwiS7JLz11luhoaHz58+3tbXlOi4ApUFBAQAAAKQRCoU//fTT5s2bz54927Nnz4iIiFmzZmGY9zcaOXKkq6trXFycwi28sb4g73lmNdeaLgnSVVZWHjx4cMeOHdevX3dxcZkxY4ZG53B9ff39+/czJdTU1BgZGfXt27d3796urq6enp7e3t66urrK+kQUFBRAB6ZJTEwsLS0dOXIk7qQD2goFBQAAAJDJvXv3tm3blpiYSAgJCQmZP3/+22+/zXVQ6mvo0KHu7u6bN29WVoNaXF9QYpcE6WiHhZSUFKFQqEEdFurq6h48eCBe+xkZGbW1tcbGxm+//bZ41Ts7O7fdwSoKCrKrq6v74YcfaJeErl27Tps2bd68ed26deM6LoC2goICAAAAyKGiouLQoUObN2++ffu2p6dnVFTU1KlT2+LYT9MNHjx44MCBGzZsaKP2JesLzY60p/71hbbrkiAdzeHt27dnZ2e7urpOnz49MjLS1NS0rT9XdpWVlX/99RdduXfu3Ll582ZdXV2nTp3c3NzEK9fFxUVHR0c18aCgIIuHDx/u3r17z549z58/p3WxyZMn8/l8ruMCaFsoKAAAAIAiLl68uHnzZvGhIC4MbsTb23v48OHffvutaj5Os+oLKuuSIJ1kh4WAgIBFixZ5eHioOAaqrKws61+ZmZkPHz4UiUSWlpYeEhwcHDiJjaCgIJVQKDx//nxsbOzJkyetra3DwsLmzp1rZ2fHdVwAKoKCAgAAACiuoKAgPj5+27ZtpaWl7777bnR0dDu/S5+Yp6enr6/v6tWrOfl09awvcNUlQbry8vLDhw9L3i01JCTEyMioTT+0oqLixo0b4nWUk5MjEolMTU3pCAiUq6urmvwroaDQrLy8vJSUFPGwteiSAO0TCgoAAADQWuLLhs+cOePk5DR37tyIiIi2PiRTN1evXt24caOuri49UP/jjz8sLS3d3d319PQIIUZGRkuWLFHiOHly4by+oCZdEqSjHRaSk5N5PN7UqVPnzJnj7u6urMZpHwTxWnj06BHLsm+99ZbkLTysra2V9XGtt2rVqn/++Yc+LiwszMrKGjdunPjVyZMnv/POOxyFxjHaJSE+Pv7o0aOWlpbTp0/HjXWhPUNBAQAAAJQmKytr586dycnJfD4/ODh44cKFbm5uXAelIsnJyWFhYTo6OpJj4zEMwzCMUCgUiURFRUUWFhYcRigmWV+4dOlSaWlpo/pCK+8yKKaeXRKk
KysrS01N3bp1682bN2mHhdDQUENDQ3nbkRwHQdwHwdraWnIhq1UFoZGuXbsWFxc3O9BjfX394sWL169fr/qouKURdTEAFUNBAQAAAJSM9iHftGnT3bt3fXx8oqOj20NP4IqKis6dO9fX1zd9icfjjRw58vTp06qPShZtUV/QgkMv2mEhKSlJIBAEBwfPnTu3X79+Ut6v6RWERpYsWbJp06Zm85kQcu3atf79+6s4JK40rYvNnj2bwyEtANQKCgoAAADQJsR74UePHrWyspo1a9a8efMsLS25jqsNjRs37pdffhEKhY2m6+joJCUlhYSEcBKVvFpTX9DELgnS0Q4L4tuaSHZYaLaCIB4HYciQIUOGDNGgCkIj169fb2mIym7duj19+lTF8XCioKAgKSlp+/btubm5GloXA2hrKCgAAABA23r06NGuXbsSEhIqKiomTpwYGRk5ZswY6bPk5uZq4j0jkpKSwsPDRSJRo+kdOnR49uyZhg4qkZ+fL774X0p9QQu6JEiXmZkZFxd34MABhmFsbW2FQuGTJ0+ajqTYu3dvybmEQmGzVw1ohF69ej148KDRRIFAsHz58i+//JKLiJRAljUiWRczNzcPDw+fNWtWjx49VBMhgGZBQQEAAABUoba29vjx4xs3brx8+bKHh8fs2bOnTZtmYGDQ9J2///77iBEjPvvss6+++kpNRrmXUWVlpbm5eaNe4gKBYOrUqfv27eMqqtZ48ODBu+++O2PGjBUrVtApkv0X0tPTX758yefzDQwMKisrjY2Nx48f/9lnn7m4uHAbtrI02wfBwMBAJBLV1NTY29vPmDFjyZIlzaYxIeSLL77Yv3//qVOnnJycVBy5Unz11VdfffVV06sebt++7erqyklIrbRv37558+ZdvHixpeE2CwsL9+/fv2PHjidPnnh6ekZFRQUHB3M1liqARkBBAQAAAFRKfGm6rq5uUFDQokWLGh1/Tpky5YcffmBZdsaMGbt27dKswRfGjx9/6tSphoYGyYlnz54dNWoUVyEp7NKlS+PGjSsvLzczMyssLGzU3YB2SdiyZUtBQYGDg4Opqenjx49fvnwpEAgcHR2VPr6jajRbQTAxMendu7dkHwSWZc+ePRsfH3/s2DEDA4OgoKAFCxb06dNHsqm6urouXbqUlZUZGxufPHlyyJAhXH0phT169MjR0VHyYIFhGDc3txs3bnAYlcJWrlz55ZdfMgzz4Ycf7t69W/IlcZeEY8eOGRoaBgYGRkVFNepsAgDNYwEAAABUrri4eM2aNfb29jo6OmPGjElNTW1oaGBZNj8/X9whmc/nv/POO1VVVVwHK4ekpCQdHR3JfS1LS0v61TTLkSNHdHV16bpgGOb//b//R6cLhcLTp08HBATw+fwuXbp8+umn9A6IVH5+/vHjxz/99FMfHx860IBAIHB1dQ0LC9u0adMff/xRU1PTysAePHhQXl7eykbEKisr//jjj02bNoWFhbm6utJ1Z2Ji4uPjExUVtW/fvlu3bkmZvbCwcM2aNXSQCE9Pz507d1ZXV9OXDh48SPvX6OjoCASCAwcOKCtmVerXr59kLyGBQPDtt99yHZTc6uvrIyMjxV+kQ4cO4hSia5COsNhoDQKALNBDAQAAADhDTwzGxsaePHmye/fus2bNKi4u3rp1q7iXNZ/Pf/vtt0+dOqUmN1x8o0ZXPQgEgk8++eSbb77hNip5xcbGxsTEEELojiKPxxs+fPj+/fvlGiVBKBTm5OSIz/ZnZ2dXVVU16r/g5eWlp6cne2BCodDU1LShoWH79u3Tp09X4IqYV69eZWdnv7EPglxtNnt+e86cOZcvX5YcofOLL77QuKEHNm7cuGTJEnGPG4Zh/v77bzs7O26jksurV68++OCDM2fOiNcFj8fbsmWLo6Oj5CprV/e4BVAiFBQAAACAezk5Odu2bdu3b9/r16+bjkHQrVu3dD25WAAAIABJREFUs2fPasphzIQJE37++WfxMdjN
mzc16EBFKBQuWrRo69atTV/S0dGxsrL66KOPZs6caW9vr0DLrawvXLx4MSIi4v79+4QQhmE8PT137Njh6ekp/XPbooLQkoKCgoSEhN27d5eUlNC+GJKv0s72O3fu1KCreAoLC21sbOg4ozo6OgMHDkxPT+c6KDkUFhb6+vreu3dPcqvCMEzXrl3z8/OHDRsWGRn5wQcfyFXVAgBJKCgAAACAukhMTJw5c2bTnROBQNCpU6fTp0/369ePk8DkkpKSMn36dJFIxDBMnz59/vrrL64jklVVVVVwcPBPP/3U9EYVAoFgwoQJhw4dUtbBsFz1hcLCwv/85z8HDhzQ0dERn2fm8/lCoTA0NPS7776TvB2pKisIzRKJROHh4YcOHWo6nCGPxxs5cuTRo0eNjY3bLgDlGj58eHp6Or05Qlxc3OzZs7mOSFa3bt3y9fV9/vx50xVBCDl8+HBgYKDqowLQMigoAAAAgLrw9vbOysqS7CUuxufz9fX1T548OWzYMNUHJpfKysrOnTvX1dXx+fzvvvsuKiqK64hk8vz583HjxmVlZTUaUVLMzMysqKioje4E2VJ9oWfPnkZGRn/99ZdIJGo2MIFAoKurGxERYW9vf/369ZYqCK6uriq7Y0htbS0djrHZVwUCQe/evU+dOmVlZaWaeFpp9+7dc+bMoQWFoqKizp07cx2RTM6dOzdhwoTa2tqW0mbatGmJiYmqDwxAy6CgAAAAAGohOzu7pXu5UTo6OjweLzk5Wf3PK06YMOHEiRM6OjqFhYUaMfrDw4cPx44dm5+f3+y5XIphmLS0tPfff18F8TQ0NNy9ezclJSU+Pv7ly5eyzMLj8dzd3f38/GgFoVu3bm0dZEtSUlLCwsKk7GMLBAIrK6vTp087OzurMjDFvHz50tLSUigU+vr6njp1iutwZLJ///6IiAiWZZutTlK6urpFRUWmpqaqDAxA++i8+S0AAAAAbS8uLk76OWR6jnrq1Knbt29XWVSKCQ4OZlnW19dXI6oJFy5c8PT0lF5NIIQwDLNt2zbVhFRcXLx27dq1a9dWVlbKPldGRsb169c9PDw4rCYQQrZt2yY9k+vr64uKigYMGKAR4xGYmpq+8847LMtOnz6d61hksnLlyvDw8IaGBinVBEJIXV1dSkqKyqIC0Fb/p4fCxx9/nJeXx2E0oEQBAQEBAQGtbwdZoVl4PN7q1asVGCurkSdPnixbtkz6LzGoD/y/Q7NsbGw2bNjQ+nbS0tLS0tJa384b3b17lx7T1tfXN3swwDAMwzD08n4PD48ePXqoICrFNDQ0nDhxwtvbu2vXrlzH8gYlJSUXLlwQiUTiu102HUBBzMjI6N133227YHg83ldffXXy5Mnly5c3NDTU1dXJ24JAIODxeAsWLPjnn3+46od76tSpqqqqlhYjTWNCCF3mI0aMMDc3V22AcsvPz8/IyBg3bpz6Dyd58+bNnJwcQgjNZ3pbu2bfyePxevXqxdWAqcraPgNw7v8UFBiGGThwoK2tLYcBgVJcvnx50KBBqamprW8KWaFZ0tLSlDLIUGpqalBQkFKOUaGt4f8dmpWbm3vlyhWlHFAFBgbSNGt9U3JhWZYWF+rq6uqbsLa2lhyHTw1JHqKrs8rKykePHvF4PIFAoKOjw+fz6TE5ncLj8fh8Pp/Ppw/aOpi0tDQLC4uSkhKFW+Dz+eJr5rn9FaMJLBQKG/5FH4sn1tfXi0Si7t27a8QAjZqSz3l5eaWlpTSBdXV1xZlMc1gypTkMUonbZwDONf5fiomJUf/rEuGNlLsSkRUaRLkjTinlGBXaGv7foVm0LKis1pRVtAKQjmGY2tpa8VNdXV16WN7sm+mYGvRVhmEsLS0dHR179+7ds2fP4uLi9evXI2lBPSl3+wzALXXvtgQAAAAA7ceuXbv8/f0fSXjw4MG9e/cKCgrEV8EwDMOyrLOzc0RERM+ePR0dHbt3705vMEmhlAAAoBooKAAAAACAGjEwMOjTp0+f
Pn0kJ9bX19+4cYPeTvLp06cPHjx477335syZw1WQAABAUFAAAAAAAPUnEAjo/SC5DgQAAP5/GjC2CgAAAAAAAACoGxQUAAAAAAAAAEBuKCgAAAAAAAAAgNxQUAAAAAAAAAAAuaGgAAAAAAAAAAByQ0EBAAAAAAAAAOSGggIAAAAAAAAAyA0FBQAAAAAAAACQGwoKAAAAAAAAACA3FBQAAAAAAAAAQG4oKAAAAAAAAACA3NSxoFBRUcF1CAAAoBzYpAMAAABoK/kKCunp6e+88w7DMDwez9fXd9SoUcOGDVu4cOGzZ8+UEs369euHDx9ubm4u74wXL15ctmwZwzAMw8yYMeP48eNKiUeK3377LTAwkH7inDlzLl261NafqLbWr19vamrKMAyfz/fz8xs/fry/v/+YMWPs7OwYhsnNzW3TT8eqVx8ikWjw4MG1tbVt/UFY6ZxLTEx0c3Pr16+fjY0NXSy//fab+NWBAwcuWbJEKBSuXbt26NChUjbp0ttRAHID1FxaWtr48eM9PDz8/PwmTpy4YMGCtWvX/uc//+E6LoBmJCQkuLu7Gxsb9+vXb8+ePXLNi1QHaEdYCYSQw4cPs1Ll5+cTQhwdHenT4uLi0aNHm5iYZGRkSJ9RFjU1NXTXU7HZ7ezsCCHV1dWtj6Qlubm54sfV1dWEEDs7u7b7OIUFBAQEBAQopSlZsqKgoEAyKyiRSOTv7//o0SOlhCEdVr2YLOtLFocPH5b3P/HYsWOEkN27d7f+02WBlS6m4v/3xMREQsihQ4fo0++//75Tp05JSUniNwQHB69YsYJl2devX5uZmbWUSG9sR2HIDUqB/+KWKDHH2pTkelG3lktKSkaOHNmzZ8+rV6/SKSKRKDk52dzcPCIiotUBykedFxSHv2KcUNt1sXTp0mnTpsXFxUVHR3fo0IEQsmXLFllmRKrLQlPyE0AWcl/y0LVrV0IIj8ejTy0tLWNjY8vKyr755hvFKhqS9PT0LCwsFJ6dbu/o37bw5MmTkJAQlX2cBrG2tiYSWUExDLNs2TIjIyMVBIBVrw4SExO7deu2YcMGlmVV8HFY6VzZv38/IeTdd9+lTydNmhQfH5+Xlyd+w8GDB1euXEkI0dfXt7S0VLgdhSE32qdG60WtWmZZdtKkSX/99dfVq1e9vb3pRIZhQkNDjxw5UlVVpYwwZaXOC6q9Udt1kZeXl5ubm5SUNG/evE2bNtETBrGxsW+cEakO0A4pYQwFei6I9lzQYvn5+f7+/iUlJVwHojH++uuvwYMHSzmc0BRY9bK4ceNGz549Fy9efOfOnVOnTnEdTmthpUshEokIIRs3bhRPef/9952dnblqR8WQG+qp7daLUlo+evRoenr60qVLaZ8dScOHDw8ICGhN43JR8wXVrqjzunj69Ol3330nfurr62thYSHLBc5IdYB2SAkFhT///JMQ4uPjQwh58OBBQEDA0qVLp0+fPmzYsJs3b4pEot9//z0mJsbBwaGgoGDEiBF2dnZlZWVVVVVff/11WFhYdHT0iBEjGlU9S0pKPvjgA3Nzczc3t4yMDDrx/Pnztra2Fy5ckCWq48ePz54929bWtqysLDw8vHPnzn369MnMzCSEXLly5ZNPPnFwcCguLqaf0qdPn6NHjxJCdu3apaOjwzAMIaSysnLDhg3ip3v37r19+3ZRUdHcuXNlXDJNlwYhJCUlxdDQkGGYtWvXCoVCQsiBAwf09PT27dtHCKmpqVm3bt3MmTO9vLzGjh1769atlhagjDFwor6+/tatWwsXLqRPZc+Kpl+ftoBVr/6rftu2bYsWLYqIiDAzM5PcCxHbunVrWFjYvHnz9PX1mX+R5r44fT9WutqudPqv/eWXX06cOLG4uJgQwuPxJk2aRAgRiURpaWnh4eHDhw+XnOXhw4cTJkwwMzPz9vYWj5IgpR0pa40gN9Q4N1r6Za+oqPj000+XLVu2ePFiPz+/xYsX08ilrB0prTW7PJuul2a3LdI/
sTUtE6mZSfNq9OjRzS63KVOmtKsFpVbaedK2xMfHx8rKSnJKXV3d0KFD6WOkuiamOkAbkrz+gch23RohpFevXkKh8MWLF8eOHbOzs+vYsWNOTg7Lso6Ojj169GBZtr7+/2PvvuOauvf/gZ+wRMCCIkOGVLQtIhYVBBUElICDIdoCLqy1Co4KWhdWvWqrV8GJFmfdxQGVViEyEgEFnAjYokj9gSwVAdlDCCG/P869+XIRECTkk5DX8w8f5iSc8+KcT0jyzmdw1dTUTExMGhoa7ty5o6SkRFHUrl27OBzO4sWLKyoq7OzsvLy8mpub+Xw+PdFLREQEn8+nv57aunVrbm4ui8WiKGr8+PH0ca9du6akpEQ/rE30z9L/LywspDvb79y5My8v77fffqMoytLSksfjRUZG0n1TV65cefv27YsXL/br14+iqOTkZD6fP3To0JanpeVNiqKMjIxanYpWW1p6/2zQ2zdv3kxR1JMnT+ib+fn5M2fOpP+/ZMkS+kzy+XxHR0ctLa3S0tL3T2BNTU3H10jEY6r5bXVxV1NTo+/qZKuoqal5/9evqqri49J35dJ38np9UJdG95WUlCxevJj+/6ZNmyiKSktLa/mAw4cPy8rKvn37ls/n79q1i6KoNWvW0Hfhorf83SXl+X7hwgU1NTWKogYMGHDs2DEejye4Kz8/v+WJoi/TqlWr2Gz28ePHlZWVZWVl//rrrw7288GrhrbRybYh4jkUuFxum6/s1dXVn3/++bZt2+iHFRcXf/7554aGhhUVFe1dnQ72xm//fLa6Cm3+bengiN3cM7/Dljl27FiKoiorKzs4gdJzojomylcxNNpOSk5O7tu3b2pqKn0TTb37pxdzKEBv8pEFBZqiouLgwYMXL178zz//0Hft37//0qVLfD6/ubl56NCh8vLy9PYvvviCoqiysjLBwyiKysrKom82NTWdOXOmvLyc/983gvQfCz6fr66urqSkJDh0U1NTB8FavokUHFRwU0tLq0+fPvT/P//8c4qiamtr6ZsHDx6kKGr27Nnv76Tlza6+iWzvbLx9+7Zfv35Lliyhb+7atSsyMpLP59+/f//9j+X0Xa1O4AcRKSgITgWPx3v+/LmpqSl9s5OtooNfn49L3+lLL8q3YgI7d+5MT0+n/19UVKSoqOjl5dXyAa6urjIyMo2NjXw+n67cjxs3jo+LLrHPdz6fX1paunz5cnrmFGdn55afbKn3CgqCt1b0F0rffPPNB/fTwVXjo210rm2IuKDQ3is7XWR8/fq14JH09Bnr16/nt391Onif0N75bHkVPngO3z9i9/fMb79ljhs3rtVJeJ9UnagOdP6vUMc60/7RaDujqanJ1taWPkrLjW0+GE29k6cXBQXoTeTefw50hpGRUWZm5vvbV69eXVtbe+TIkbKysoaGBi6XS2+ne4r279+fvkl3edXT06NvysrKLly4sOV+6MdTFKWhofHs2TPB9lbT/nVMsBNa//796V61FEXJyMhQFEV/1UNRlKur66pVq54/f975nXdGe2djwIABK1eu3Lt377Zt23R0dG7evEmvo/Pw4UMTExO6P1Wbv4vgBIo5GRmZYcOGrVixgr7ZyVbRwa9P4dKL8aXncrnBwcH0+wOBy5cv79q1S1dXl77p4OBw/fp1Fovl5uamqKhIUdTkyZMpXHSJvegURamrqwcHB3t7e7u6ukZGRq5fvz44OLi9B9Pf/FMU5ebm5ufn9/Tp0w/up+OrhrYh3IRC0d4re3JyMtWiDVAUZWNjQ1EUvcRme1eng/cJ7Z3Plj54Dt8/Yvf3TLXfMo2Nje/du5eZmamtrd3mAygpO1FiAo22M7Zv325vbz979uyWG9HUO7NnACkhhDkUWnr48OHIkSMNDQ03b97cwfT+9BNV6G/aPhq9dIW+vr6wdlhSUtLU1NTB2fjhhx8UFBQOHjz46NEjCwsL+u/y27dvc3Jy6BXIBOh5yyTRkiVL6P90slUQ+fVx6bsv
LCxs3bp1LeuUISEhXC738OHDgsd8//33v/7663fffbdu3bo1a9b89NNP9CoAuOgSd9Fv376dlpYmuGlqapqQkMBgMC5fvtyZH6cH5Q4ePLir+xH6VRPNUaSqbbT3yk5XbXJzcwVb6Gagqqr6EXujOvea8nHnsOf2TM8qcu/evQ4egxMlemi0HxQZGamsrLxly5bOPJhCU+/0ngF6EyEXFBYsWMDlcqdOnUp1+FwyNTWlKGrnzp38/w6gyMvLi4qK+uD+6ZmrhO7t27cURTGZTOq/dcrGxkaKovh8fmVlpeBhDAajqampMzuke/B2cDbU1dWXLVt27NixQ4cOLVq0iN5oZGRUV1cXEBAgeFhmZuYvv/zSzd+OuE62io5/fVx68dTc3Lxnz5758+e33Pj1119ramoeP368pqaG3sLj8TIyMu7du7dnz54///xzy5Yt9AcnXHSJu+j9+vX74YcfWv6CQ4YM0dLS6uR6LgUFBRRFOTs7d3U/La8ahbYhltp7Zae/e6QnRaLRzUBwNbu0N6r915SW1+XjzmH399xey5w/f76ZmVlQUNDr169b3dXQ0ED395aqEyUm0Gg7xmazCwsLN2zYINhy9+5d+j9o6pLV1AF6VsvvFalOjFvLy8ujKMrAwKDNe1VVVRkMRmxsbEhICP2+8P79+wUFBZ9++ilFUYLBsTk5OcrKyhRFTZ48OTg4eMuWLT4+PvS8CfQXRIIBt4MGDRL8YGRkpIqKSlRUVHvZBg8eTLUYDUsfVHAv3fuay+Xy/zsaVjAA7Ny5c2ZmZvRdM2fOpChqy5Ytz58/P3DgAL3sTXR0NI/HGzZsmLKycn5+Pv1Tr169oihKV1dXMOMD/abT29t7/vz5HZwN+pFFRUV9+vSxs7MT/Oy7d+8MDQ0pilq0aFFISMjmzZsdHR3pU9HqBH6QiMdU058bBw8e3Oa9nWwVHfz6uPSdv/SduV6d0cnRfRcuXLC3t39/O/3RaPv27fTNn376aejQoadOnYqOjr5z584///xDXwJcdIl7vldXV1MUtXDhwurqanpLREQERVGnT59u+QAdHR365vDhw6kWY/6XL18+Y8aMzuyng6uGttHJtiHiORTae2Wvq6szMTHR09MTDJn28/OzsrKiz3N7V6eD9wntnc+W1+WD5/D9I3Z/zx23zMzMTAMDA0NDw/DwcLrJ1dXVxcXF2dvb37t3j74pJSeqY6J8FUOj7QCHw5k8efIv/3X48OHVq1dv3ryZj6YujNOLORSgN+laQeH+/fseHh4URVEUtWLFCvrvQkvBwcGqqqoWFhb37t0LCgrq37+/g4PDqlWr6B/x9vYWzP3+999/T5kypX///rq6uqtWraqsrKS/6qQfuWrVqpqamsDAQPrmmjVrGhoa2Gy2jo5OXFzc+8ESExP9/f3pB8+bN+/atWuC0bw7duyorKykp9qiKMrf37++vp5+E7l3797S0tLi4uLdu3cL3p/9888/lpaWysrKjo6O//zzz8SJE728vC5fvtzQ0LBx48ZBgwZdvXqVz+fHxcXNmDGD3qeRkdGkSZMmTZr0xRdf9OnTh6Koc+fOtXk2ZsyYQU90T3N2dr5w4ULLXyQ3N5deXE1bW9vb27ukpKS2tpbuHN7qBHZMlB8w7ty589133wlOr2ASYIHOt4r3f316Oy595y+9KN+KhYeHa2lpqaurHz16tOX2P/74w8zMjKKovn37BgQE8Pl8NpvdagEqDQ0N+nziokvW853/3zqvurq6g4ODg4PDhAkT/vjjD/qu2trajRs30vn3799fVVXFZrNdXFzs7Oy8vb19fX2Dg4MFS0J0sB/+fz/qt3nV0DY62TZEXFDgt/XKTm+vrq5ev369o6PjmjVr1q9f/9NPPzU0NPD5/I6vTnt7a+98trwu/Hb+tnR8xO7smd9hyxSch4CAACcnpyFDhpiYmIwaNWrTpk0tW4KUnKiOifJVjC/1jbY9gjVlWmIwGNnZ2Xw0dWE0dRQU
oDdh8Fus2sBgMK5cuSIoGfRiw4cPp9d3IZihrq7O1NT0r7/+otctEy76IoaGhnZ/V72sVfT6Sy+s6xUaGurp6SmsE3XmzJnS0lJ6wrnm5uZXr17Fx8evXbtWMO9Rj+r1F71XPt9Fc9V6d9sQ4rNYiG0MoGPi+SoGIFxon9CbfOQqD9B9wcHBK1eu7IlPFyDmpO3SBwQE+Pv706PTKYqSkZHR09OztrYWrAEhDaTtokPnoW0AABGtVjdo6dmzZ/RiigAAHySlBYXa2lr6X3o4lijdv3/f29u7rq6Ox+O1XBETRAOXXvSSkpIoijp27JiPj4+6ujpFUampqQEBAb/99ptoAuCiSyLRXDW0DQCQWvh6HACEQsirPIi/2traTZs20dPJ+vr6drywTU9QVlauqqqSkZG5ePGigoKCiI8uzXDpSTl37tzKlStPnTqlp6dnZWXl4eGRmpr622+/GRsb9/ShcdElkWiuGtoGAAAAQPdJXQ8FZWXlnTt37ty5k1QAExOTFy9ekDq6NMOlJ2XAgAGHDh06dOiQ6A+Niy6JRHPV0DYAAAAAuk/qeigAAAAAAAAAQPehoAAAAAAAAAAAXYaCAgAAAAAAAAB0GQoKAAAAAAAAANBlKCgAAAAAAAAAQJehoAAAAAAAAAAAXYaCAgAAAAAAAAB0GQoKAAAAAAAAANBlKCgAAAAAAAAAQJehoAAAAAAAAAAAXYaCAgAAAAAAAAB0GQoKvVZRUVFcXNy7d+9IBwEAAAAAAIBeSI50AOgpOTk59vb2ioqKVlZWkydPnjx5srm5uZwcrjgAAAAAAAAIQeuPlwcOHPj999+JRAEhunv37oQJEw4dOpSYmMjhcI4fP75p0yZlZeXx48czmUwmkzl69GgZmc72T0GrEJnCwsJBgwbJysqSDvIfHh4epCOIFI/HE5+T33l3794dP368sPYmDs/3mpoaeXn5Pn36kI0h6QoKCoS4t7t370rJH4Ta2lplZWXSKUA4pKTRgsQR7t9nALJkt23bJrhRVVXFYDDIhQGh0dfXd3d3t7S0HDFihIuLy+rVq2fPnj1s2LBXr16FhIQEBQUdO3bs0aNHZWVln3zyibq6ege7QqsQmdra2lu3buXk5DQ3N6uqqn7cJ9svv/zym2++UVNT62YYZWXlN2/e8Pn8bu5Hgrx58+bmzZuGhoYSV1Ogn+8jRozo/q7E4fmel5eXnJzM4/G0tbXJJpF0qqqqU6ZMmTJlilD2VlVVJZT9iLn09PTU1FSh/B14+fJlfHy8kZGRUIJJD7yKEVdRUZGenq6trd35b56gq4T79xmALAb+1Eqb5ubm9PT0uLi4uLi427dv19bW6unpTZo0afLkyZMmTTIwMCAdUKoVFxcfOXLk0KFDXC530aJFa9asGTx4MOlQ0qKmpkZPT2/Tpk3r1q0jnUVKVVVVrVixIiQkZOXKlYGBgeihAKLE4/GWLl169uzZU6dOLViwoPs7DA0N9fT0xLsskDhbt249c+ZMXl4e8foyAEgEFBSkGo/HS09P53A4HA4nOTm5vr5+0KBB1tbWTCZzypQpKC6QUlNTc+rUqX379hUVFbm5ua1fv97c3Jx0KKng6+t7/fr17Oxsieuk0As8ePBg7ty5VVVVZ86ccXJyIh0HpAuXy50/f/61a9cuX77s5uYmlH2ioAASyszMzNLS8siRI6SDAIBkQF8mqSYrK2tmZrZhwwY2m11VVZWSkuLn51deXu7r6/vpp58OHTrUx8fn/PnzhYWFpJNKFxUVFT8/v+zs7F9//fXp06djx461traOiIggnav3W7ZsWX5+fkxMDOkg0qW5uTkoKMja2nrIkCGPHz9GNQFErKGhwcPDg8ViRUZGCquaACChXr9+nZaWhr/DANB56KEAbairq0tNTU1OTuZwOLdv325sbDQ0NKRnc5w8eXLHcy6AcPH5/Js3bwYFBUVGRo4ePXrVqlXz5s3D9+c9h14bhcVikQ4iLQoKCubPn3//
/v3t27evW7cOQ3ZBxGpra93c3FJSUm7cuCHEyU0p9FAAyXTy5Ek/P7/S0lIlJSXSWQBAMqCgAB9QW1t79+5dDoeTlJT04MEDLpcrKC4wmcz+/fuTDigtUlNTDx48ePHiRQMDA19fX29v7759+5IO1QtdvXrV3d09Kyvrs88+I52l9wsPD1+yZImmpubFixdHjx5NOg5InfLy8unTp7948SImJsbU1FS4O0dBASSRm5sbj8dDp0gA6DwUFKALampq7t27R8+5kJaWxmAwRo0aZWVlZW1t7ejoqKqqSjpg75ednX3o0KGTJ0/269dv2bJlvr6+AwYMIB2qV2lqahoyZMicOXMCAwNJZ+nN6uvr/f39Dx065OXldfToUazSB6L35s0bR0fHiooKNpv9+eefC33/KCiAxGloaNDQ0AgMDFy6dCnpLAAgMVBQgI9UWlp69+5delhEamqqjIzMqFGj6G4LVlZW+PK8R9GLQRw+fLixsRGLQQjdzz//fODAgcLCQnT47CEZGRlz5swpKCg4duzY7NmzSccBaZSfn89kMpuamjgcjqGhYU8cAgUFkDjR0dHTpk3Lzc3FtNwA0HkYrQofaeDAgS4uLrt3705JSSkqKrp06ZKVlRWHw3FwcPjkk0/Mzc39/f05HE5DQwPppL2Qpqbmtm3b8vLyduzY8ccffwwbNszDwyMlJYV0rl5iyZIltbW1oaGhpIP0Qnw+PygoyNzcfODAgRkZGagmABFZWVnW1tby8vJJSUk9VE0AkEQsFsvU1BTVBADoEvRQACErKipKTEzkcDhsNvvFixdKSkqjR4+ml6K0sbFRUFAgHbC34XK5ly5dCgwMfPLkiZWV1YYNG1xcXEiHknhz5szJyspKTU0lHaRXKS4u/vbbb2PGVPRCAAAgAElEQVRjYzdt2rRlyxbMLQpEPHnyxNHRUUdHJzo6ukfnGEYPBZA4w4YNmz179o4dO0gHAQBJgoIC9KBXr17RYyKio6Pz8/OVlZXHjx9Pj4mwtLSUl5cnHbD3wGIQwpWYmGhjY/PgwYOxY8eSztJLsNnsb775RkFBISQkxMrKinQckFIPHz6cNm2aiYlJREREv379evRYKCiAZHny5ImJicmdO3eEu9wJAPR6GPIAPUhHR8fd3f348eN5eXnZ2dn79+8fNGjQL7/8MnHixAEDBjg4OAQEBDx69Ki5uZl0UonHYDCYTGZERMSjR49MTEwWLVr0+eefBwUF1dfXk44mkSZOnPjll18GBweTDtIbvHv3zt/ff+rUqdbW1unp6agmACm3bt2yt7cfN25cVFRUT1cTACQOi8VSV1e3sLAgHQQAJAx6KICo8fn8J0+exMXFJSQk3Lp1q6ysTENDw9bWdtKkSXZ2dsbGxqQD9gZYDKL7jh07tmrVqoKCAg0NDdJZJFhmZubcuXOzs7ODg4O9vLxIxwHpFRkZ6eHhMWPGjPPnz4umfxx6KIBksbW1NTAwOH/+POkgACBhUFAAkpqbmx8/fhwfHx8fH3/79u2qqiotLS1bW1s7Ozs7O7vhw4eTDijZsBhEd9TU1Ojp6W3atGndunWks0iq8+fPL1++3NjY+OLFi8OGDSMdB6TX5cuXFyxYMGfOnFOnTsnJyYnmoCgogAQpLy/X1NT87bffPD09SWcBAAmDggKICx6P9+zZM3rOBQ6HQ7+2WVhY0BM6jhkzhsFgkM4okWpqak6dOrVv376ioiI3N7f169ebm5uTDiUZfH19r1+/np2djdkouqqiomLp0qWhoaErV67cu3cvJkwBgk6cOLFs2bLly5cfOnRIlK8jKCiABLl06dKCBQtKSkrU1NRIZwEACYOCAogjHo+Xnp6elJSUnJzMZrMrKio0NTVtbW2trKysra1RXPgIWAziIzx79szY2DgiIsLJyYl0FkkSHx/v5eUlIyNz4cIFW1tb0nFAqh0+fNjPz2/9+vW7d+8W8aFRUAAJMn/+/JcvX8bHx5MOAgCSBwUFEHco
LggRFoPoKnt7e0VFRRaLRTqIZGhqatqxY8eOHTtcXV1PnjzZo2vyAXxQQEDAxo0b9+zZs2bNGtEfHQUFkBQ8Hk9LS8vf33/t2rWkswCA5EFBASTJ+8UFLS0tGxsbFBe6KjU19eDBgxcvXjQwMPD19fX29u7bty/pUOIoPDz866+/zsrK+uyzz0hnEXe5ubnz5s1LS0vbtWuXn58f6Tgg1fh8/tq1a4OCgo4dO7Z48WIiGVBQAEmRlJQ0ceLEzMxMIyMj0lkAQPKgoACSCsWF7sNiEB/U1NQ0ZMiQ2bNn79mzh3QWsRYWFubt7a2rq3vp0qWRI0eSjgNSjcfj+fj4nDt37vTp0wTXFkFBASTFxo0bQ0NDs7OzSQcBAImEggL0Bi2LC7GxsZWVlSgudB4Wg+jYzz//fODAgcLCQiUlJdJZxFFVVdWKFStCQkJWrlwZGBjYp08f0olAqjU2Nnp5eV27du3y5ctubm4Ek6CgAJJi5MiR9vb2Bw8eJB0EACQSCgrQ29DFBQ6Hk5SUlJiYiOJCJ2ExiPYUFRUZGBgcP3584cKFpLOInQcPHsydO7e6uvr06dOYuhKIa2homD17NpvN/vPPP5lMJtkwKCiARMjPzzcwMIiNjXVwcCCdBQAkEgoK0Ju9X1zQ1taeOHEiigvtwWIQbZozZ05WVlZqairpIGKkubn58OHD69ats7W1PX/+/KBBg0gnAmlXU1Pj5uaWmprKYrHGjx9POg4KCiAZgoOD/f39S0tL0b8MAD4OCgogLZqamh4/fkwXF27fvl1VVUUXF5hMppWV1YgRI0gHFCNYDKKVxMREGxub+/fvW1hYkM4iFgoKCubPn3///v3t27evW7dORkaGdCKQduXl5dOnT3/x4kVMTIypqSnpOBSFggJIiOnTp/ft2/fq1aukgwCApEJBAaQRigudhMUgBExNTUePHn327FnSQcgLDw9fsmSJpqbmxYsXR48eTToOAFVUVOTg4FBdXc1ms8VnQRYUFED81dfXDxw48PDhw4sWLSKdBQAkFQoKIO0aGxsfPHgQHx9/69atO3fu1NfX6+npTZo0yc7OzsbGZtiwYaQDkofFICiKOn78uJ+fX35+vqamJuksxNTX1/v7+x86dMjLy+vo0aPKysqkEwFQBQUFTCaT7lelr69POs7/QUEBxF9ERMSMGTMKCwt1dHRIZwEASYWCAsD/aWxsvH//fkJCQkJCwt27d+vr63V1dW1tbW1tbW1sbKR8fWYpXwyipqZGT0/vxx9/XL9+PeksZGRkZMyZM6egoODYsWOzZ88mHQeAoigqNzeXyWQqKChwOBxx+0SEggKIv6VLl6ampj548IB0EACQYCgoALSt5bAIekJHTU1NCwsLa2trJpM5evRo6Rw3Ls2LQfj6+l6/fj07O1vappPg8/mHDh3asGHD+PHjL1y4oKenRzoRAEVRVFZWFpPJ1NDQiImJ0dDQIB2nNRQUQMzx+fzBgwcvXrx469atpLMAgARDQQHgw+jVIpKSkpKTkzkcTnl5uYaGhqWlpdQWF6RzMYjnz59/8cUXERERUrU+YnFx8bfffhsbG7tp06YtW7ZIWzEFxNbTp0+ZTKaurm5MTIx4DsJCQQHEXHp6+ujRox8+fCg9XwwAQE9AQQGga3g83rNnz+jKws2bN8vKyvr162dpaUlP6GhpaSkvL086o4hI4WIQTCazT58+LBaLdBARYbPZ33zzjYKCQkhIiJWVFek4AP+Rmpo6ZcoUY2PjyMjIfv36kY7TNhQUQMzt2LHjyJEjL1++xBLaANAdKCgAfLzm5ubMzEy6uBAfH19aWqqiojJu3DgrKytra2sbGxsFBQXSGUVBehaDCA8P//rrr7OyssRnJvke8u7du23btu3Zs+err746ceKEmpoa6UQA/5GcnOzk5DR+/Pjw8HBx/lODggKIufHjx48YMeLXX38lHQQAJBsKCgBCk5OTQ8+5kJCQUFBQoKysPH78eLq4MHHi
xD59+pAO2LOkYTEIHo83dOhQd3f3PXv2kM7SgzIzM+fOnZuTk/PLL794eXmRjgPwf27duuXi4mJraxsWFqaoqEg6TkdQUABxVlJSoq2t/fvvv8+cOZN0FgCQbCgoAPSInJwces6FmJiYvLw8JSWl0aNH03MuWFtbi/n74O7o9YtB7NixY//+/YWFhUpKSqSz9Ijz588vX77c2Nj44sWLWDYVxEpUVNRXX33l6up64cIF8R9choICiLNz5875+PiUlJSI7aAhAJAUKCgA9DhBcYHNZr948UJOTs7U1JSec8HW1vaTTz4hHVD4evFiECUlJfr6+kePHv32229JZxGyioqKpUuXhoaGrly5cu/eveL/gQ2kSkREhLu7+9dff3327Fk5OTnScT4MBQUQZx4eHpWVlTExMaSDAIDEQ0EBQKRevXpFz7mQlJT09OnTlsUFGxsbVVVV0gGFqbcuBjFnzpysrKzU1FTSQYQpPj7ey8tLRkbmwoULtra2pOMA/I/Lly97eXktXrw4ODhYUlbVQUEBxBaXy9XU1Ny+fbuvry/pLAAg8STjVRmg19DR0XF3dz9+/PiTJ09evXp18eJFMzOziIiIGTNmqKurm5ub+/n5hYWFlZeXk04qBPLy8gsWLPj777/ZbHb//v1dXV3HjBlz/vx5Ho9HOlq3rFixIi0t7cGDB6SDCEdTU9O2bdscHBwsLCzS0tJQTQBxc/LkyXnz5q1evfrIkSOSUk0AEGeJiYkVFRXTp08nHQQAegP0UAAQC0VFRYmJifTIiNTUVBkZmS+++IKec8He3r53zG7YmxaDGDNmzJdffnn27FmKooqLi0+dOiUvL7927VrSubosNzd33rx5aWlpu3bt8vPzIx0HoLWjR49+//3369at2717N+ksXYMeCiC21qxZExUV9fTpU9JBAKA3QEEBQOwUFxffv3+fHhmRlpZGUZSRkRFdXJg0adLAgQNJB+wWwWIQKioqy5cvl9DFII4fP+7n5xceHh4SEhIWFsblcg0MDHJzc0nn6pqwsDBvb29dXd1Lly6NHDmSdByA1gICAjZu3Lh79+7169eTztIpX3755YsXL+j/8/n8pqYmwVwkDAZjzZo1W7duJZcO4D+MjIxcXV0DAwNJBwGA3gAFBQCxVlVV9eDBAw6HQxcXmpubDQ0N6TkXJk2apK+vTzrgR5LoxSAaGhpCQ0P9/PzKy8sVFBQaGxspiurfv39ZWRnpaJ1VVVW1YsWKkJCQlStXBgYG9vo1TUES/fzzz1u3bj148KAEDfO2sLBISUlp850Vg8HYt2/f6tWrRZ8KoKXs7Oxhw4YlJCRggBsACAUKCgASo7q6+v79+/SEjg8ePOByuYaGhlZWVtbW1lOmTDEwMCAdsMskbjGI58+fHz169NSpU7W1tXw+v7m5WXCXgoJCQ0MDwWzvKy0tnT17dmBg4JgxY1puf/Dgwdy5c6urq0+fPu3k5EQqHkAHtm3b9tNPPx06dOj7778nnaULfvnll1WrVrU5TQyDwSgoKNDV1RV9KoCWDh48uG3btpKSEizlAwBCgYICgESqqam5d+8ePefC7du3GxsbBcUFBweHIUOGkA7YBZKyGERNTY26ujqPx2tvUsl3796J1Vf9M2bMuH79+pAhQ/766y8VFRWKong83t69e7ds2WJnZ3fu3LlBgwaRzgjQhi1btvz73/8+efLkokWLSGfpmpKSkkGDBr3/J0JGRmbixIkJCQkkQgH8DwcHh4EDB166dIl0EADoJTBbMoBEUlFRYTKZ27ZtY7PZZWVliYmJ3t7er1+/9vX1NTQ01NHR8fDwOHHixJMnT0gn/bCuLgZx+fJla2vrN2/eiDiniorK+vXrOyjCVlZWijJPx06dOhUREUFRVEFBAd1jvKCgYPLkyVu3bv3555+jo6NRTQAxxOfzV69evWvXrtOnT0tcNYGiKA0NDVtbW1lZ2VbbGQyGl5cXkUgALdXU1CQmJqJvGgAIEXoo
APQqdXV1qamp9ISOSUlJ7969GzRoED2ho5WVlbGxMYPBIJ3xAzpeDKK5uXnYsGEvXrwYOnTorVu3RNx/mM/nL168+Ny5c20WO54/fz5s2DBR5mnPixcvRowYUV9fL9iyevXqc+fOaWpqXrx4cfTo0QSzAbSHz+f7+fkdOXLk7Nmz8+fPJx3nI509e/a7775rOR6Koig5Obk3b95I4gS00MtcvXrV09OzqKhI0id4BgDxgYICQK/V1NT0+PFjurJw+/btqqoqbW3tiRMn0iMjxowZ06XiwrJly6ZPny6ywQjtLQbx+++/u7u7UxQlLy+vqal5+/ZtQ0ND0USi8Xi8WbNm3bhxo6mpqdVdKSkpZmZmogzTpubm5okTJz58+JDL5dJbGAyGrKysh4fHyZMnlZSUyMYDaBOfz//+++9//fXXS5cuzZo1i3Scj1dVVaWhoUHP1UqTk5ObNm3a9evXCaYCoC1atCgrKys5OZl0EADoPVBQAJAKLYsLSUlJFRUVmpqaFhYWdOeF0aNHy8h0NAAqKyvLyMiIoihPT8/Dhw9raGiIJvb7i0HMmjUrPT2d7iAgJyenoaFx+/ZtEfcLqK+vt7OzS0tLE3xip928eXPy5MmiTNKmHTt2bN26tdUXpPLy8mZmZklJSe93xgYgjsfjLV68+OLFi1euXHFzcyMdp7tmzZoVEREhqDkyGIzLly97eHiQTQXQ3Nysq6u7cuXKH3/8kXQWAOg9UFAAkDo8Hi89PZ2e0JHD4ZSXl3/yyScWFhZMJrO94sKJEyeWL1/O4/Hk5OT69u27d+9eb29vkQWuqqo6ceJEUFBQSUlJq5UU5OXlVVVVExISRowYIbI8FEW9fft23LhxeXl5LWsK4eHhM2fOFGWM96WmplpaWr7fe4KiKFlZ2X/961//+te/RJ8KoAM8Hu/bb78NDQ0NCwsTzwlZu+rq1avu7u6C91eKioqlpaXKyspkUwE8ePDA0tLy8ePHX375JeksANB7oKAAINV4PN6zZ8/oykJcXNzbt2/79etnaWlJz7lgaWlJLys1f/78K1eu0B9T6YESDg4OJ0+eHDx4sMiiNjY2Wltbp6ent+oXICcn169fv4SEBBG/QyosLBw7dmxpaSl9WmRlZU+dOvXNN9+IMkMrdXV1pqamubm5bRYUKIqSkZG5ffu2lZWViIMBtIfL5c6ZMycqKuratWtMJpN0HOF49+7dwIEDa2trKYqSl5f39PS8cOEC6VAA1NatW8+cOZOXlyf+sykBgATBKg8AUk1WVnbEiBHe3t6hoaHFxcVpaWk///xzv3799u7dO3HixIEDBzo7OwcGBsbExAg+o/L5fD6fHx8fP3z48KCgoFZd63tOVlZWSkpKq2oCRVFNTU3V1dXW1tYPHjwQTRKanp4em81WVFSkO3TIysoSX+Vh/fr1HVQTFBQUmpubz549K9pQAO1qbGz08PCIjo6OiIjoNdUEiqIUFRW//vprBQUFiqK4XO7cuXNJJwKgKIqKjIx0dnZGNQEAhAs9FACgbTk5OfScCxwO5/Xr120+RkZGZuzYsWfPnqVnWOhR8+fPDw0Nfb+gQJOVle3bty+bzR43blxPJ2kpISHB0dGxqalJQUFh06ZNW7ZsEeXRW4qOjp4+fXqrP+n0dIxNTU36+vqzZs1ydna2sbGhP+cAkNXQ0ODh4XHr1q2oqKjx48eTjiNksbGxU6ZMoShKVVW1pKSE7uoFQNDr1691dXUjIiKwZiQACBcKCgDwAefOnVu0aFF7PRHoN8o//vjjpk2beu5Nc0FBwZAhQ9pcrFFAVla2T58+MTEx1tbWPRSjTVeuXJkzZw5FUT/88MPevXtFeWiB0tLS4cOHl5WV0ZdJXl6ePldjx46dOXMmk8kUh+UnAATq6upmzJjx6NGjmJiYsWPHko4jfE1NTVpaWmVlZcuWLTty5AjpOADUyZMn/fz8SktLsdYPAAgXhjwAwAfcvn27g6UBuFwul8v9+eef
R40a9ejRox7K8PLlSzU1tZZb5OTk+vTp07KEwePx6uvrHRwc4uLieihGmzw9Pffv38/n8wkOefD29i4tLaX/r6qq6unpeenSpbKysnv37m3YsAHVBBArdXV1zs7OaWlpN2/e7JXVBIqi5OTk6JEOdLURgDgWi2Vvb49qAgAIHXooAEAb9PX1CwsLSacAaJucnFxcXNzEiRNJB4Euq6urc3FxSUtLY7PZHZe6EhMTJ0+e3N6cICACenp6BQUFpFOAEDQ0NGhoaAQGBi5dupR0FgDobeRIBwAAcVRYWLh69erx48eXl5f7+PjIyMjIysryeDzBwAcGg6GioqKmpjZw4MABAwb0799fVVVVS0vL1NSU4IRPNTU1FRUVDAZDV1eXVAYiuFyuVA3S9vDwaG9eDxBndDXh77//7syyLK9fv25qagoNDRVNNmjl7t27Bw4cIJ0ChCM+Pr66unratGmkgwBAL4SCAgC0bdy4ce7u7pWVlYmJiZ988omWlpaurq62traOjo62trampmYH4yAAAFqhRzpkZGTcvHlz5MiRnfwpd3f3Hk0F7UEP1t6ExWKZmpoaGBiQDgIAvRAKCgDQEVVVVaygDgDdVFtb6+Li0tVqAgAIRVRUlKenJ+kUANA7oaAAAAAAPai2ttbZ2fnp06eoJgCI3tOnT7Ozs7FaJAD0EBQUAAAAoKe0rCaYmJiQjgMgdSIjI9XV1S0tLUkHAYDeCQUFAAAA6BG1tbVOTk6ZmZmoJgCQwmKxpk+fjmmPAKCHyJAOAAAAAL1QbW3t9OnTnz17FhcXh2oCABHl5eV37tzBeAcA6DnooQAAAABCVlVVNWXKlBcvXty8eXPEiBGk4wBIqejoaIqipkyZQjoIAPRa6KEAAAAAwlRZWUlXE+Li4lBNACCIxWJZW1urqamRDgIAvRYKCgAAACA0dDUhNzc3Li7O2NiYdBwA6cXj8aKjozHeAQB6FIY8AAAAgHDQ1YS8vLy4uLjhw4eTjgMg1e7evfv27VtnZ2fSQQCgN0NBAQAAAISgsrLS0dHx1atXiYmJw4YNIx0HQNqxWCxDQ0MjIyPSQQCgN8OQBwAAAOiuiooKupoQHx+PagKAOIiMjHRxcSGdAgB6ORQUAAAAoFvoasLr169RTQAQE/n5+RkZGZhAAQB6GoY8AAAAwMejqwlFRUXx8fFDhw4lHQcAKIqiIiMjVVRUbGxsSAcBgF4OPRQAAADgI9HVhDdv3qCaACBWWCyWo6Njnz59SAcBgF4OBQUAAAD4GOXl5Q4ODuJWTSguLg4LC/v3v/9NOkgXSGJmEGf19fUJCQkY7wAAIoAhDwAAwvfq1auYmJjo6OiCgoI7d+506WfDwsLOnz//8uVLDQ0NRUVFfX19fX390tLSPXv29FBagI9QXl7u6OhYXFwcHx9vaGhINszhw4dfvnz54MGDjIyMUaNGsdlsIyOjH3/8kWwqgb179+7cubOiokJWVtbe3l5BQYHP57979+758+f5+fmxsbHXrl0LDg4Wq8wg0TgcTn19/dSpU0kHAYDeDwUFAJBUhYWFenp64rlnHR0dJpO5aNGiLq3XVVpa6uHhUVBQEBISYmFhQVEUn8+/ePGin5+fm5tbd/J8BHE+vUAc3TehtLQ0ISFhyJAhZMMcOnRo06ZNFRUVNTU1ixYtWr16NZvN7swPtmqKPdcy165dO2/ePB0dHUNDw5iYGMF2Pp/v6uo6dOjQffv2BQcHd2ZXIssMEo3FYpmbm+vo6JAOAgC9H4Y8AIBEys3NnTt3rjjvWV9fv0uP5/P5bm5ujx8/vn//Pl1NoCiKwWDMmzfv6tWrtbW13Y/UeeJ/eoGgkpISOzu70tLS+Ph44tUEiqKOHj2qq6srKyurqqp69epVa2vrzvxUq6bY0y1z0KBBFEXJysq23MhgMDZu3KiiotLJge4izgySKyoqCuMdAEA00EMBACTPy5cvnZ2deTyeBO35g8LDw5OTkwMDAwcMGNDqLltb
27dv34osSa88vSAsJSUl9vb2VVVVYlJNoCiqoKCgq/W7Vk2RVMt8/PjxhAkTOvlgMckM4i89PT0/Px8FBQAQDfRQAICPVFtbu2PHDi8vLz8/Pzs7u6CgIHp7VVXVhg0bNm7cuGbNmilTpqxZs6aiooKiqOvXr/v4+Ojr61dUVCxcuHDgwIEjR4589OhRx3t7/vy5u7u7v7//ggULbGxs/v77b4qizp49++TJk6KiomXLltEPe/fuXWBg4OLFi8eOHevg4JCRkfHBI3Znzx8tPj5eX1//9u3b798VHh5OUZS9vX2bPzhr1iycXiCOribU1dUlJiaKQzWBxWItW7astraWblT0/1s9pjNNUShtvoNn9/u4XG5GRsbKlSvbvFdkmaFXioyM1NbWNjMzIx0EAKQDHwDgPRRFXblypYMHcLlcOzs7Ly+v5uZmPp9/5swZiqIiIiKqq6s///zzbdu20Q8rLi7+/PPPDQ0NKyoqCgsLVVRUKIrauXNnXl7eb7/9RlGUpaVlB3vj8/mfffbZ0KFD6ceoqamZmJgIEhoZGQnyLFmy5NmzZ/T/HR0dtbS0qqqqOjhiN/fc+dPYcld8Pv/atWtKSkr0r9bK2LFjKYqqrKzsYIc4vYKddNw+oSeUlZWZmZkZGBi8ePFCBIe7cuVKJ9+lvP9Ea7mlk02x+22+g2e34BCtqKmpkc3cgc6ffxA348aN++6770inAABpgZcKAGjDBz+w7d+/n6KorKws+mZTU9OZM2fKy8s3bdpEUdTr168Fjzx//jxFUevXr+fz+V988UXLd6haWlp9+vTpYG/0XZcuXeLz+c3NzUOHDpWXlxckFLyTvn///vvv1CMjIzs4Yvf33MnT2OpzDv3btfngcePGtTp178PpFewEBQURKy8vNzc3Hzx4cE5OjmiOKKyCQmeaIl8YbZ7f/rP7/UPweLznz5+bmpoSz9weFBQkVHFxsaysbHh4OOkgACAtMIcCAHyMhIQEiqIEs4vLysouXLiQoqjk5GSKovr16yd4pI2NDUVR9NKJDAaj5U769+//5s2bDvZGUdTq1atra2uPHDlSVlbW0NDA5XLfD/Pw4UMTExO6V3Ar7R2x+3v+aK1mZRMwNja+d+9eZmamtrZ2ez+L0wtEVFRU0CtEisOaDl3VmabYyse1ear9Z/f7ZGRkhg0btmLFCuKZoZe5ceOGrKwsk8kkHQQApAXmUACAj0G/H33+/Hmr7TIyMhRF5ebmCrZoaWlRFKWqqvoRe6Mo6uHDhyNHjjQ0NNy8eTPdcfd9b9++zcnJqaura7mxubm541+h5/b8cWxtbSmKunfvXgePwekF0ausrJwyZUpRUZH4zMLYJZ1piq2IrGUuWbKkze3inBnEHIvFsrOza1l3BgDoUSgoAMDHMDU1pShq586d/P8ODM7Ly4uKiqK/MGexWIJHFhQUUBTV8bcl7e2NoqgFCxZwudypU6dS//vmmMFgNDU10f83MjKqq6sLCAgQ3JuZmfnLL790/Cv03J471t6s7PPnzzczMwsKCnr9+nWruxoaGuihDTi9IGKVlZWOjo6vXr2Kj483NDQkHedjdKYpUkJqmcJac0GUmaE34XK5bDYb6zsAgEgRHnIBAGKJ+tAY9ZycHGVlZYqiJk+eHBwcvGXLFh8fn+bm5rq6OhMTEz09PcE4fz8/PysrKy6Xy+fzP/3005Z/dnR1dSmK4nK57e2Nz+erqqoyGIzY2NiQkBBNTU2Kou7fv19QUDBs2DBlZeX8/Hw+n//u3Tv6o86iRYtCQkI2b97s6OhIz+3X3hG7v+cPor8q/Oyzz1pujIyMVFFRiYqKavNHMquZ0n4AACAASURBVDMzDQwMDA0Nw8PD6cHYdXV1cXFx9vb29+7do2/i9PIxh4KoVFZWWlpaamtrZ2Zmiv7onRzDX1ZWRlGUoaGhYAv91Pv000/pm51pinw+v/ttvuNnd01NDUVRgwcPbvNeUpk7gDkUJNHNmzcpinr+
/DnpIAAgRfBSAQBt6MwHtr///nvKlCn9+/fX1dVdtWqVYHmC6urq9evXOzo6rlmzZv369T/99FNDQwOfzw8ODqbrmDt27KisrDx48CB909/fv76+vr29BQcHq6qqWlhY3Lt3LygoqH///jNmzHj79u3GjRsHDRp09epV+mG5ubmurq4DBgzQ1tb29vYuKSn54BG7s+cPio+P9/b2pihKXl4+MDAwPT2d3s5ms3V0dOLi4tr7werq6oCAACcnpyFDhpiYmIwaNWrTpk1v375t+QCcXhQURKCmpsbGxkZLS+vp06dEAnTmA+3ff/+9dOlSiqJkZGS2b9/++PHjnJwcX19fuikePHiwvLy8k02x+22+g2f3nTt3vvvuO8GDU1NTW95LMHM3zz+Imx9++GH48OGkUwCAdGHw21rHCACkHIPBuHLlioeHB+kgAG1A++xptbW1Tk5OmZmZ8fHxxsbGRDKEhoZ6enriXQopOP+SyMjIyNXVNTAwkHQQAJAimEMBAKDLGO3LysoinQ6gW+rq6pydnTMzM+Pi4khVEwCgq7Kzs7OysjCBAgCIGJaNBADoMnxrB70VXU148uRJXFzciBEjSMcBgM6KiIhQVVWdMGEC6SAAIF3QQwEAAAAoiqLq6upcXFwyMjJu3rxpYmJCOg4AdAGLxZo6daq8vDzpIAAgXVBQAAAAAKq+vt7V1TUtLS06OnrkyJGk4wBAF9TU1CQmJmK8AwCIHoY8AAAASLuGhgZ3d/dHjx5xOJwxY8aQjgMAXRMTE9PU1DRt2jTSQQBA6qCHAgAAgFRrbGz8+uuvk5OT2Wy2mZkZ6TgA0GUsFsvS0nLgwIGkgwCA1EFBAQAAQHo1NjZ+9dVXiYmJsbGx5ubmpOMAQJc1NzdHRUVhvAMAEIGCAgAAgJRqbGx0d3enqwljx44lHQcAPkZKSkpRUZGzszPpIAAgjTCHAgAAgDTicrkeHh4JCQmxsbEWFhak4wDAR2KxWPr6+phLFQCIQEEBAABA6tDVBA6Hc+PGDUtLS9JxAODjRUZGOjs7MxgM0kEAQBphyAMAAIB04fF4CxYsYLPZLBbLxsaGdBwA+HivX79OS0vDBAoAQAp6KAAAAEgRHo/n5eV1/fp1Fotla2tLOg4AdEtkZKSiouKkSZNIBwEAKYUeCgAAANKC7ptw7dq1yMhIOzs70nEAoLtYLJa9vb2SkhLpIAAgpVBQAAAAkAo8Hm/hwoV//vlnREQEvs8E6AUaGhri4uIw3gEACMKQBwAAgN6Px+N988034eHhERERkydPJh0HAIQgISGhurp62rRppIMAgPRCQQEAAKCXa25uXrx48dWrV69fv25vb086DgAIB4vFMjU1NTAwIB0EAKQXhjwAAAD0Znw+f8WKFRcvXgwLC3NwcCAdBwCE5saNGxjvAABkoYcCALRBTk7O09PT09OTdBCAtsnJ4fWrU/h8/sqVK0+dOvX77787OzuTjtMF9CVmMBikg0gvPMvE3NOnT7Ozs1FQAACy8FIBAG2Ii4srKioinQK65sqVK+Hh4WZmZt7e3mpqagSTVFZWXr58OT09/e3bt/369TM1NR09evSXX36pqqoqlP3LyspOnz5dKLvq9fz9/Y8dOxYSEuLq6ko6S9dMnz796tWrPB6PdJCPdPfu3QMHDoSGhpIO8vG0tbVJR4COREZGqqurW1pakg4CAFKNwefzSWcAAADhuHPnzsKFC0tKSgICAry9vUnHoXJycjgcTkREBJvNbmhoMDY2dnFxYTKZtra28vLypNP1fj/++GNgYOD58+fnzp1LOovUCQ0N9fT0xLss6Dm2trYGBgbnz58nHQQApBrmUAAA6D0mTJiQnp7u4+OzbNkyZ2fnV69ekc1jaGjo7e0dERFRVlbGZrNdXFyuXbvm4OAwYMAAFxeXEydOFBYWkk3Yi23ZsiUgIODs2bOoJgD0PuXl5Xfu3MF4BwAgDgUFAIBeRUlJaffu3QkJCVlZWaNGjfr9
999JJ6IoilJSUmIymbt3787MzMzOzt63b1/fvn3XrFmjr68/dOhQPz8/DofT2NhIOmbvsX379p07dx49enT+/PmkswCA8EVHR1MUNWXKFNJBAEDaoaAAANALTZw48dGjRzNnzvTw8PDw8CgtLSWd6P/Q3RZCQ0OLi4vZbLa7u3tycjLdbcHBwSEoKCg/P590Rsm2b9++7du3BwcHi8OwFwDoCSwWy9ramux0OQAAFOZQAADo3WJjYxcvXtzQ0HD06NFZs2aRjtMuerYFDocTHR1dXV1taGjIZDKdnZ0dHR379OlDOp0kOXjw4A8//HD48OEVK1aQziLVMIcC9Bwej6etrb1hw4a1a9eSzgIA0g49FAAAejNHR8e///7bzc3tq6++8vDwePv2LelEbRN0WygrK0tMTHR3d3/06JGrqyvdbSEgIODZs2ekM0qAw4cPr169evfu3agmAPRid+/eLS0tlayFYAGgt0IPBQAAqRAdHb148WIej3fs2LEZM2aQjtMpubm5sbGxHA4nNja2srKS7rbAZDKnTZumoqJCOp3YOXXq1JIlS3bu3Llx40bSWQA9FKAHbdy4MTQ0NDs7m3QQAAD0UAAAkA5Tp07NyMhwdXV1c3Pz8PAoKysjnejDPv30U7rbQmlpqaDbgqenp6amJt1t4dGjR6QziouzZ896e3v/9NNPqCYA9HosFsvFxYV0CgAAikIPBQAAaXPjxo0lS5bw+fzjx49L4lvSN2/exMTEREZGstnsiooKQbeFqVOn9uvXj3Q6MkJDQ+fOnbt58+Zt27aRzgL/gR4K0EPy8/MNDAxiY2MdHBxIZwEAQA8FAAApM3369IyMDBcXF1dX1wULFlRXV5NO1DVaWloLFiyguy2kpKR4eXlJebeF33//fd68eX5+fqgmAEiDyMhIFRUVGxsb0kEAACgKPRQAAKRWWFjYihUrlJSUTp06ZW9vTzpOt5SUlCQkJERERLBYrLKysk8//dTR0ZHJZDo6OqqqqpJO14PCw8M9PT2///77AwcOkM4C/wM9FKCHODk5KSoqXr16lXQQAACKQg8FAACp5e7unpGRYW5u7uDg4OPjU1NTQzrRx9PQ0HB3dz9//nxxcXFKSsrSpUufPHkye/bsgQMHWltb090Wet9Hu6ioqLlz5y5btmz//v2kswCAKNTX1yckJDg5OZEOAgDwH+ihAAAg7cLCwpYtW9avX7/Tp09PmjSJdByhKS0tjY+P53A4ERERr1+/1tLScnR0dHFxcXBwUFNTI52uu2JiYmbMmDF//vyTJ08yGAzScaA19FCAnhARETFjxozCwkIdHR3SWQAAKAo9FAAAwN3d/cmTJ6NGjbK3t/fx8amtrSWdSDgGDhzo7u5+/PjxwsLClJSU1atXv379et68eQMHDjQ3N9+2bZvkdltgs9lubm5z5sw5ceIEqgkA0oPFYpmZmaGaAADiAz0UAADgP8LCwpYuXaqmpnb69GlbW1vScXpEWVnZzZs3ORwOi8V6+fKlhoaGnZ2ds7Ozs7PzgAEDSKfrlKSkpKlTpzo5OV28eFFWVpZ0HGgbeihATzAwMPj2228xAysAiA/0UAAAgP+guyqYmJhMnjzZx8enrq6OdCLhGzBggKDbQkZGxpo1a8rLy5csWaKpqWlubu7v75+UlNTc3Ew6Zrvu3Lkzbdq0adOmhYSEoJoAIFXS09Pz8/OdnZ1JBwEA+D/ooQAAAK2FhYX5+PgMGDDgzJkzEydOJB2nx9XW1sbFxUVGRkZFRRUUFAwcOHDSpElMJtPZ2Vmsuhbfu3fP0dHRwcHhypUrcnJypONAR9BDAYRux44dwcHBr169wkAnABAf6KEAAACtubu7p6enDxkyZPLkyf7+/g0NDaQT9SxlZWUXF5fjx4/n5+dnZ2dv3ry5vLzc19dXX1+f7rbA4XC4XC7ZkGlpadOnT7e2tr548SKqCQBSiMViOTk5oZoAAGIFPRQAAKBtfD7/5MmTa9euHTx4
8NmzZ83NzUknEqm6uro7d+5ERERcu3YtLy9vwIAB9vb2TCbTyclJV1dXxGHS0tLs7e3Hjx//xx9/KCgoiPjo8BHQQwGEq6SkZNCgQWFhYTNnziSdBQDg/6CHAgAAtI3BYHh7e//1119aWlrjx4/39/dvbGwkHUp0lJSUmExmUFBQbm5udnb2rl276uvr/fz89PT0RowYQXdbEM0JycjIcHR0HDt27NWrV1FNAJBON27ckJWVZTKZpIMAAPwP9FAAAIAPoLsqrFmzZsiQIWfPnh0zZgzpRMTU19cnJydzOJzr169nZmYqKytPmjTJxcVl2rRp+vr6PXHE58+f29raGhoaRkdHq6io9MQhoCeghwIIl4eHR2VlZUxMDOkgAAD/Az0UAADgAwRdFdTV1ceNGydtXRVa6tu3L5PJ3L1799OnT7Ozs/fv39+3b196VMjQoUP9/Pw4HI4Qp5zIy8tzcHAwMDCIiopCNQFAanG5XDab7eTkRDoIAEBr6KEAAACdRXdV+OGHH4YOHXru3LlRo0aRTiQWBN0WOBzOo0ePlJSUJkyY4Ozs7ObmZmBg8NG7LSgosLW1VVVVjYuL69+/vxADgwighwIIUVxcnL29/fPnz4cNG0Y6CwDA/0APBQAA6Cy6q8Ljx4/V1NQsLCy2bdvG4/FIhyJP0G0hJSUlJyfnwIED/fv3/9e//vXpp58OHTrUx8cnIiLi3bt3XdrnmzdvHBwc+vXrx+FwUE0AkHIsFmv48OGoJgCAGEIPBQAA6LLm5ubDhw9v2LBh1KhRZ8+eNTIyIp1I7DQ1Nd27dy8yMpLD4aSmpioqKlpZWTGZTFdX1+HDh3f8s8XFxZMmTeLxeAkJCdra2qIJDMKFHgogREZGRi4uLnv27CEdBACgNfRQAACALpORkfHz80tNTW1ubh49enRAQAC6KrQiJydnbW1Nd1t4/fr1sWPH+vfvv3v3bmNjY7rbQlhYWHV19fs/WF5ePnXq1MbGxri4OFQTACAnJycrKwsTKACAeEJBAQAAPpKxsfGdO3e2bdu2devWiRMnZmVlkU4kprS0tBYsWBAaGlpaWpqSkuLl5fXo0SNPT08tLS0HB4eAgIBHjx7Rj6ysrHR0dHz79i2bzdbR0SEbGwDEwfXr11VVVa2srEgHAQBoA4Y8AABAd2VkZCxcuPDp06dbt25dt26djAyq1R9WXFx869atiIiIyMjI8vLyIUOG2NraJicn19bWJiYmGhoakg4I3YIhDyAsDg4O6urqly9fJh0EAKANKCgAAIAQNDU17du371//+tfYsWPPnDnz2WefkU4kMXg8Xnp6elRU1N69eysrK2VlZceNG+fi4sJkMseMGcNgMEgHhI+BggIIRU1NzcCBA0+ePOnl5UU6CwBAG/AlEgAACIGcnNyGDRsePnxYV1c3atSogICA5uZm0qEkg6ysrLGxcXx8vIKCQmJi4qVLl0aMGBEUFGRubj5o0KAFCxaEhYVVVFSQjgkABMTExHC53ClTppAOAgDQNvRQAAAAYeJyufv379+yZcu4ceNOnz6Ndc4+qLGxcdasWcnJyTdv3hwzZgy9sbm5OS0tjcPhcDichIQEPp8/atQoZ2dnFxcXdFuQCOihAEKxaNGirKys5ORk0kEAANqGHgoAACBM8vLyGzZsSEpKKi0tNTMzO3HiBD5TdYDL5Xp4eCQlJcXGxgqqCRRFycjImJmZbdiwgc1mFxUVXbp0yczM7OTJk+bm5tra2h4eHufPny8vLyeYHAB6WnNzc1RUFNZ3AABxhoICAAAIn4WFRWpq6rJly5YvXz516tSCggLSicQRj8f75ptvOBxORETE2LFj23uYurq6u7v78ePHX758mZGR8cMPP5SXly9evFhDQ8Pc3Nzf3z8pKQkDTAB6n5SUlKKiImdnZ9JBAADahYICAAD0CEVFxd27dyclJeXn548cOfLEiROkE4mX5ubmhQsX/vnnn5GRkRMnTuzkT40YMYLutlBW
VvbHH3+YmZmFhIRMnDiR7rZw4sSJoqKiHo0NACLDYrH09fVHjhxJOggAQLswhwIAAPSs+vr67du3792718HB4ddff9XV1SWdiDw+n79s2bIzZ8788ccf06dP7+becnJy6OUnb9++3dTUNHr0aCaTyWQy7ezs5OTkhBIYOiMjI0NQ0Ll169aOHTvYbLbgXlNTUw0NDULRQCKZm5tbWFgcOXKEdBAAgHahoAAAAKJw586db7/9tri4OCAgwNvbm3Qc0SkoKKisrDQxMRFs4fP5K1euPHHixNWrV11cXIR4rNra2rt370ZERPz555/5+fnq6uqTJ09mMpnOzs46OjpCPBC0SU1NrbKysr17vb29jx8/Lso8INFev36tq6sbERGBORQAQJxhyAMAAIjChAkT0tPTfXx8li1b5uTk9OrVK9KJRGTevHlmZmZRUVGCLRs3bjx27NiFCxeEW02gKEpZWZnJZAYFBeXl5WVnZ//73/+ur6/39fXV1dUdMWKEv78/h8PhcrnCPSgITJkypb0uIQwGw9HRUcR5QKKxWCxFRcVJkyaRDgIA0BH0UAAAAJFKSkr69ttv3759u3v37lZdFZqamuiVEVesWEEqnnClpKSMHTuWwWDIysqGhobOnDlz8+bNu3fvPn/+/Ny5c0WToa6u7s6dOxwO59q1a8+ePVNRUbGzs3NxcZk+fbqenp5oMkiJa9euubm5tXmXkpJSaWlp3759RRwJJNfMmTObmpoiIiJIBwEA6AgKCgAAIGp1dXUbN248fPjw119/HRwcLBhYvnPnzs2bN8vKyiYlJY0bN45sSKFwd3e/du0al8tlMBgMBuO77747derUqVOnFi5cSCRPTk4Oh8PhcDhRUVE1NTWGhoZ0BcfGxkZBQYFIpN6ksbFRQ0Ojqqqq1XZ5efl58+adOXOGSCqQRA0NDRoaGoGBgUuXLiWdBQCgIygoAAAAGWw2+7vvvmtoaDh69OisWbP++usvMzOzpqYmWVlZDQ2NjIwMdXV10hm75cWLF8OGDWu5oKOMjMz3338fFBREMBWtvr4+OTmZLi48evRIWVl5/Pjxzs7OM2fOHDx4MOl0Emzx4sXnz59/f1xJbGysg4MDkUggiWJiYqZOnZqbm2tgYEA6CwBAR1BQAAAAYqqqqtatW3fy5MmvvvoqMzMzKyurqamJoih5eXkmk8lisRgMBumMH+/7778/ceJEy8+W9K9z8OBBX19fcrlaE3RbiI6Orq6uNjQ0pOdxdHR07NOnT2f2wOVyg4KCFixYoKmp2dNpxVxcXJy9vX2rjf379y8uLsaKG9B5vr6+t2/fTk9PJx0EAOADUFAAAADCWCzWnDlzamtrW32Zv2/fvlWrVhEM1h1lZWW6urrv3r1r8969e/euWbNGxJE+6N27d0lJSYJuC0pKShMmTGAymTNmzDAyMurgB2/cuOHk5DRgwIBz5845OzuLLLAYam5u1tbWLikpEWxRUFBYunSpOHRLAQkybNgwT0/PnTt3kg4CAPABWOUBAAAI09XVraura1lNoCiqubl57dq1d+/eJZWqm3755Rcej9fevWvXrr169aoo83SGoqIik8ncvXt3SkrKixcvDhw40L9//127dg0fPnzo0KE+Pj5hYWE1NTXv/2B0dLS8vHx5ebmLi8vSpUvr6upEH15MyMjIzJ8/X15eXrClsbFxzpw5BCOBxHn69Gl2djZWiwQAiYAeCgAAQFJjY6Opqen/+3//jx7s0JKsrKympmZGRsaAAQOIZPto9fX1urq65eXl798lJyfH4/Hc3NwOHz6sq6sr+mxd1dTUdO/evcjISA6Hk5qaqqioaGVlxWQyXVxcjI2N6ccYGBjk5+fT/5eTkxs0aNDly5cnTJhALjVJDx8+tLCwENzU0dEpLCyU6ME7IGKBgYEBAQHF/5+9+w6L4lzcPj7DLmIQBGwogmJL0GCPvffeFaKi0WPE3kANYI1RsCBC7MYWo1ExmlhWY0ONsddE
I0c9KqLErohKW2DfP/b3cjgWQtndZ8v384fX7uzwzO0sly43zzPz+LFCoRCdBQD+ATMUAAAiff311zdv3ny3TZAkKT09/cmTJ4MHDza57nvdunXvXupfqVTKstymTZvz58/v2LHDJNoESZKUSmXjxo210xZiY2O//fZbBweHkJCQTz/9tFKlSmPGjFm7dm1mmyBJUlpa2oMHD5o0aTJz5sxs5miYsTp16pQrV077uECBAoMHD6ZNQK6oVKpOnTrRJgAwCcxQAAAIk56ebmdnp71s4Yd++JRl2diuYpi99PT0ChUqxMbGZv4Pq1Qq09LSWrRosWDBgtq1a4uNpxPp6emXL1/evXv3nj17Ll68aGVl9e7bp1AoatasuXnz5ooVKwoJKdD06dPnzp2r/ca+cuWKp6en6EQwGS9evChRosTGjRu9vb1FZwGAf0ahAAAQ6caNG7/++uvhw4ejoqJev35tY2OjVqvfup6CUqk8derUZ599Jipkrmzbts3b21v736u1tbVarW7evPmCBQtMJX9utWjR4rfffnvrLdOytrZWKpXh4eG+vr6GDyZQdHS0dj3IJ5988u9//1t0HJiSzZs3Dxw48MmTJ46OjqKzAMA/o1AAABiF9PT08+fPR0VFHThw4NSpUykpKTY2NikpKZIkWVlZlS5d+sqVKw4ODqJj/rNatWpdvnxZoVCkpaW1bdt29uzZderUER1KX1JSUhwdHT90MwstWZa7deu2evXqokWLGiyYcFWrVr169WpISEhAQIDoLDAlPj4+cXFxR44cER0EAHKEQgEAoBfJycl79+7N2yp6tVp9/fr1q1evXr58OSYmRvvb7wYNGkyYMEHXMXUsOjp6xowZkiRVrVr1888/r1SpkgEOWrJkySZNmuRzkLy9X3/88UcO72zn4ODg5+dXuXLlPKUzPbt27dq0adPixYtLlCghOouZ0Mn3uZFLT08vWbLkV199NXHiRNFZACBHKBQAAHqxY8eOXr16iU5hEZRKpXa5fn7wfsHI6eT73Mj9/vvvTZo0iY6O9vDwEJ0FAHJEKToAAMA8aW/cQG2tb5GRkTq5eFve3q/Fixdv3769WLFijo6ODg4ODg4O2geZTx0cHJycnBwdHbnTAfJDV9/nRk6lUpUvX542AYAJoVAAAAB5NGbMmDFjxohOAZgJlUrVpUsX0SkAIBesRAcAAAAALF1sbOyVK1c6deokOggA5AKFAgAAACDYnj177OzsmjZtKjoIAOQChQIAAAAgmEqlatOmjY2NjeggAJALFAoAAACASElJSUePHmW9AwCTQ6EAAAAAiHTo0KGkpKQOHTqIDgIAuUOhAAAAAIikUqlq167t4uIiOggA5A6FAgAAACDSvn37WO8AwBRRKAAAAADCXL58OTY2tnPnzqKDAECuUSgAAAAAwuzZs6dkyZK1a9cWHQQAco1CAQAAABBGpVJ16tRJlmXRQQAg1ygUAAAAADGePHly7tw5LqAAwERRKAAAAABi7Nu3T6FQtG7dWnQQAMgLCgUAAABADJVK1bx5c3t7e9FBACAvKBQAACI9fvx427ZtwcHBooMAgKGp1eoDBw6w3gGA6aJQAAAI8+9//3vWrFleXl4//PBD5sb69etPnjxZYCpJko4ePerl5SXLsizLw4cPP3ny5Ht3M4aoMB7Xrl3r3r17sWLFihcv3rdv3wcPHuT8a7dt29alS5datWq1a9euW7duo0ePnjdv3qRJk/SXFsbg999/j4+P79ixo+ggAJBHFAoAAGE8PDwWLlz41sZy5coVLFhQSJ5MzZs3//777yVJKlu27IoVKxo2bPje3YwhqqW5f/++cY4cHR09derUQYMGHTp0qEOHDlu2bBkwYEBOvvDp06ctW7YMCgqaNm3axYsX9+/f/8svvzRo0GDBggUvXrzIT6Q8MNrTa65UKlXlypUrVqwoOggA5JFSdAAAgEWzsbF5a8vmzZuFJHnLRx99lPnnhxhJVMsRExMz
cODA3377zQhHPnjw4KZNm7TfMGvXrt29e/eZM2f+8as0Gk337t2jo6Nv3rxZpEgR7UZZlvv37+/q6rpixYo858kDYz695mrPnj1dunQRnQIA8o5CAQAAmIC4uLjOnTunp6cb58hjx47N+jQtLW3IkCH/+FU7duw4ceLE/PnzM9uETM2aNXv27Fl+IuWKkZ9es3T79u3r168buDYCAN1iyQMAwFhkZGRs27Zt0KBBzZo1kyRp165dw4YNc3Nzi4+PHzRoULFixapWrXrhwgXtzsnJyfPnz//yyy/r1KnTpk2bq1evarffvHmzT58+AQEBAwcObNq06ZUrVzIyMo4dOzZhwoRy5cr9/fffzZs3L1u2bHx8/JEjR9zc3PL2W1MDR83nidWhN2/ezJ49e8CAAePGjWvevHlERIR2e0JCwldffRUYGOjv79+uXTt/f39t7OzPzIdGe/fMSJK0fv36v/766+HDhyNGjNDu9t4Tm/0R8zNyzk2fPj08PDw8PFz7NJvvtB07dkiS1KpVq/eO07NnT06vGdu1a5eDg0OjRo1EBwGAfNAAAKAHW7duzeH/MpIkeXh4aB/HxsZmPr1//76dnZ0kSXPmzLl79+7GjRslSapXr552z6FDh/773//WPm7btq2zs3NCQoJGo6lUqVKFChU0Go1arXZ0dPT09ExJSTl58qStra0kSSEhIYcOHfryyy9fv369c+dOW1vb3bt35yTYuwwZNZuzl/PznL2cjKNWq5s3bz5gwICMjAyNRrNu3TpJunxDrAAAIABJREFUknbv3v3q1auPP/545syZ2t0eP3788ccfly9fPj4+Ppsz86HR3ntmtCO/9Y6898Rm/17kZ+ScnMaff/65adOmkiSVK1du9erV2o3ZfKfVqVNHkqSXL19mMyanV6O773Oj0rp1a29vb9EpACBfzO2fZgCAkchbofDW008++STrIM7OzjY2NhqN5r2r0/fs2aPRaMLCwjZv3qzRaDIyMipUqGBtbZ11qOfPn2c9dFpaWs6DZb+DvqN+iCELhbCwMEmSrl+/rn2alpa2bt26Fy9eTJkyRZKkBw8eZO65YcMGSZImT56s+fCZ+dBomg+fmawnPJsT+6Ej5n/kf/TixYtr164tWbJE2wqtX78+82/33v3r16//1ql7F6dXY46FwqtXr2xsbDZs2CA6CADkC9dQAAAYL1mWsz51cnJ69OiRJEnnzp3z9PTUTqh+y4QJE968ebNs2bLnz5+npKSo1eqsQzk5OWXdWaFQmEpUY3D06FFJklxdXbVPFQrFoEGDJEk6ceKEJEn29vaZe2p/S6+93eaHzsyHRpM+fGayyubEfuiI+R/5Hzk6Ojo6OlauXNnBwWHAgAEbNmz44osvpA9/p1WpUuX06dPR0dElS5b80JicXrO0f/9+tVrdrl070UEAIF+4hgIAwPQ8e/bs9u3biYmJWTdmZGRIknTu3LmqVauWL19+6tSp2rnZOvfkyZO0tLQc7iw2qm5pf268efPmW9utrKwkSYqJicnc4uzsLEmSg4NDHkaTcnZmsjmx2dDfyG/p1q2bJEkFChTIfjftNThOnz6dzT6cXrOkUqnq169fokQJ0UEAIF8oFAAApsfDwyMxMXHevHmZW6Kjo5csWSJJ0sCBA9Vqdfv27aUc/JSStyvPjxw5MudTG3QV1RhUr15dkqQ5c+ZoNBrtlrt37+7bt0/7C3OVSpW557179yRJat26dR5Gkz58ZmRZzqxysjmx2dDfyG958OCBJEkdO3bUPv3Qd5qPj0/t2rUjIiK0+2eVkpKiXdrA6TU/GRkZ+/bt69Spk+ggAJBvgpdcAADMVA7XPGt/Uenu7q59+urVK0mSXFxctE/d3d2zDlK6dGlJktRqdXJycvny5SVJ+te//rVp06apU6e2bdtWe103BwcHWZYPHDiwadMm7W//zpw5c+/ePe1QWS9wuGfPHjs7u3379r032N9//y1JUunSpbXXtNN6+fKl
r6+vj4+PgaPm/zzrZJzbt28XKlRIkqSWLVsuXbp02rRpw4YNy8jISExM9PT0dHV1zVznP27cuEaNGqnVas2Hz8yHRtN8+MxUrFixUKFCsbGxGo0mmxP7oSPmf+RshIWFrVmzJj4+XjtC9+7dvb29tX+d7L/ToqOjy5YtW758+R07dmgvtZCYmBgVFdWqVavTp09rn3J6zewaCtoLSfzxxx+igwBAfpnPP80AAKOSwx9Qx44dqy24w8PD4+LiAgMDtU/DwsLmzp2rfTx79uyXL19m3oQvICAgKSkpJiama9euRYoUKVmypK+v75MnT7RjLl261MHBoW7duqdPn46IiHBycmrTps348eO1X+vr63vp0iXtngcPHnRxcYmKino3WFRUlHbKuiRJHh4eLVq0aNGixSeffGJjYyNJ0vfff//mzRtDRs3nec6JHI5z5cqVdu3aOTk5lS5devz48Zm3J3j16tXkyZPbtm3r7+8/efLkWbNmpaSkaDSapUuXZnNmPjTau2emW7duz549CwwMLFWq1Pbt27W7vffEZn/E/IycvZkzZ1asWNHJyWnEiBHjxo07dOhQ5kvZfKdlnr158+Z16tSpXLlynp6eNWrUmDJlyrNnz7LuYOGn18wKhenTp7u5uWUtKwHARMma/z8RDgAAHYqMjNTeEU10EDOnq/PM+wVjZmbfn5999lndunWXLVsmOggA5BfXUAAAAPhn8oddv35ddDqYjAcPHly8eJELKAAwD9w2EgAA4J+Zza/HIZZKpSpYsGCLFi1EBwEAHWCGAgAAAGAgKpWqVatWtra2ooMAgA5QKAAAAACGkJKScvjwYdY7ADAbFAoAAACAIRw9evTVq1cdOnQQHQQAdINCAQAAADAElUpVrVq1smXLig4CALpBoQAAAAAYwt69ezt37iw6BQDoDIUCAAAAoHfXrl27desWF1AAYE4oFAAAAAC927NnT5EiRerVqyc6CADoDIUCAAAAoHcqlapTp04KhUJ0EADQGQoFAAAAQL9evHhx8uRJ1jsAMDMUCgAAAIB+/frrr5IktWvXTnQQANAlCgUAAABAv1QqVePGjR0dHUUHAQBdolAAAAAA9Cg9PX3//v2sdwBgfigUAAAAAD06derU06dPO3fuLDoIAOgYhQIAAACgRyqVqnz58h4eHqKDAICOUSgAAAAAeqRSqZieAMAsKUUHAACYs23btomOYOZOnz6tw9F4v2CcdPt9bmD37t27cuVKaGio6CAAoHsUCgAAvShVqpRSqfTy8hIdxPy5urrmfxDeLxg5nXyfC7F79247O7tmzZqJDgIAuidrNBrRGQAAMA29e/d+9uzZkSNHRAeBsYuMjPT29uZTFiRJ6tSpk42NzY4dO0QHAQDd4xoKAADk1KhRo44ePXry5EnRQQCYhqSkpKNHj3LDSADmikIBAICcatGiRYMGDRYsWCA6CADTcOjQoaSkpA4dOogOAgB6QaEAAEAuTJo0aefOnX/99ZfoIABMgEqlql27touLi+ggAKAXFAoAAORC9+7dq1SpwgXbAeTEvn37WO8AwIxRKAAAkAuyLPv7+2/atOnu3buiswAwapcvX46Nje3cubPoIACgLxQKAADkjo+PT6lSpcLDw0UHAWDUVCpVyZIla9euLToIAOgLhQIAALljbW09fvz4VatWPX36VHQWAMZLpVJ16tRJlmXRQQBAXygUAADINV9fX1tb2yVLlogOAsBIPXny5OzZs1xAAYB5o1AAACDXChUqNGrUqMWLF79+/Vp0FgDGaN++fQqFonXr1qKDAIAeUSgAAJAXY8eOTU1NXb16teggAIyRSqVq3ry5vb296CAAoEcUCgAA5EWRIkWGDBmycOHC1NRU0VkAGBe1Wn3gwAHWOwAwexQKAADk0cSJEx8/fvzjjz+KDgLAuPz+++/x8fEdO3YUHQQA9ItCAQCAPHJ1de3bt++8efMyMjJEZwFgRFQqVeXKlStWrCg6CADoF4UC
AAB5FxAQcOPGjV27dokOAsCI7Nmzh/UOACwBhQIAAHnn4eHRpUuXkJAQ0UEAGIvbt29fv36dQgGAJaBQAAAgX4KCgs6ePXv06FHRQQAYhV27djk4ODRq1Eh0EADQOwoFAADypW7dus2bN2eSAgAtlUrVvn17a2tr0UEAQO8oFAAAyK/AwMADBw6cPHlSdBAAgr1+/fr48eOsdwBgISgUAADIr7Zt2zZs2HDu3LmigwAQbP/+/Wq1ul27dqKDAIAhUCgAAKADAQEBu3fvvnDhguggAERSqVT169cvUaKE6CAAYAgUCgAA6ECXLl1q167NJAXAkmVkZOzbt4/1DgAsB4UCAAC6ERgYuH379qtXr4oOAkCM8+fPP3z4sHPnzqKDAICBUCgAAKAbPXv2/PTTT5mkAFgslUrl5uZWtWpV0UEAwEAoFAAA0A1ZlgMDA7ds2XLjxg3RWQAIoFKpOnfuLMuy6CAAYCAUCgAA6Iy3t3eFChWYpABYoAcPHly8eJELKACwKBQKAADojEKhCAgI2Lhx4507d0RnAWBQKpWqYMGCLVq0EB0EAAyHQgEAAF3y8fFxdXWdP3++6CAADEqlUrVs2dLW1lZ0EAAwHAoFAAB0ydraevLkyevWrYuLixOdBYCBpKSkHD58mPUOACwNhQIAADr2r3/9y9nZecGCBaKDADCQo0ePvnr1qmPHjqKDAIBBUSgAAKBjBQoU8Pf3X7ly5YMHD0RnAWAIKpWqWrVqZcuWFR0EAAyKQgEAAN3z9fV1cnIKDw8XHQSAIezdu7dz586iUwCAoVEoAACgewULFpwwYcLSpUufPHkiOgsA/bp27dqtW7e4gAIAC0ShAACAXowcOdLW1vbbb78VHQSAfu3Zs6dIkSL16tUTHQQADI1CAQAAvShUqNDYsWO//fbb+Ph40VkA6JFKperUqZNCoRAdBAAMTdZoNKIzAABgnhISEtzd3f38/KZOnSo6C/Ruzpw5sbGx2scPHjy4ePFi1jnwPXr0aN++vaBo0LEqVapoNJru3bt37ty5cuXKJUuW/OGHH7y9vUXnAgBDo1AAAECPpk+fvnTp0piYGHt7e9FZoF8uLi6PHj1676+p1Wq1v79/aGio4VNBH0qWLPno0SNra2u1Wm1nZ+fg4PD111/37NnTyclJdDQAMCiWPAAAoEfjx49Xq9XLly8XHQR65+Pjo1Ao1O8jSdLnn38uOiB0xs7OTpIk7Tv7+vXrx48fDx06tFixYg0bNgwNDU1MTBQdEAAMhBkKAADoV0BAwPr162/fvm1rays6C/To0qVLtWrVeu9LZcqUuXv3roHzQH9q1qx5+fLld7fLsqzRaI4dO9a0aVPDpwIAw2OGAgAA+uXn5/f69WsmKZi9mjVrVqpU6d3t1tbWgwcPNnwe6E/hwoXfu12pVPbu3Zs2AYDloFAAAEC/SpQoMWrUqLlz57569Up0FujXgAEDrK2t39qoVqu9vLyE5IGeODg4vLtRoVA4OzuvWbPG8HkAQBQKBQAA9G7y5MkpKSnLli0THQT61a9fv7S0tKxbZFmuWrVqlSpVREWCPjg4OFhZvf0pWqPRREZGfmjyAgCYJQoFAAD0rmjRomPGjFmwYAGTFMxbhQoVqlevLsty5halUjlw4ECBkaAP9vb2b93OQ6FQzJo1q0GDBqIiAYAQFAoAABjCpEmT0tPTv/32W9FBoF8DBw7M+qNmWlpanz59BOaBPtjZ2WWdoWBtbV23bt2AgACBkQBACAoFAAAMwdHRcezYsaGhofHx8aKzQI8+//zzjIwM7WMrK6sGDRqULVtWbCTonL29feZjWZZtbGw2b9781pwFALAEFAoAABiIn5+fLMsRERGig0CPSpUq1bhxY+3PlrIss97BLNnZ2WXeeV2j0axevZraCIBlolAAAMBAHBwcxo8fv2jRoufPn4vOAj0aMGBA5uNevXoJTAI9sbe3185DUSqVQ4YM8fb2Fp0IAMSgUAAAwHD8/PwK
FCgQHh4uOgj0qFevXrIsy7LcunXrYsWKiY4D3bO3t09PT1cqlWXLlmXOEQBLRqEAAIDh2NnZaScpPHnyRHQW6IuTk1P79u01Gg3rHcyVdsmDLMvbtm0rVKiQ6DgAIIycuQAMAAAYwJs3b8qXLz9kyJDg4GDRWczEtm3btm3bJjrF/4iLizt//nynTp2USqXoLP+lUChCQkLc3d1FB8kjPz+/+/fvi04hSZL09OnTI0eOVK9e/eOPP9bfUUz9/QJgCZihAACAQRUqVMjf3//bb799/Pix6CxmYtu2badOnRKd4n+ULl26S5cuRtUmSJK0ZcuWs2fPik6Rd4sWLbp3757oFJIkSUWKFGnQoIFe2wTJ9N8vAJbAuP6fAwDAEowaNSosLCw0NHT+/Pmis5iJBg0aREZGik5h7GRZFh0hvyZMmODl5SU6hYGYwfsFwOwxQwEAAEMrVKjQ5MmTlyxZ8vfff4vOAgAAkEcUCgAACDBy5MgiRYqEhoaKDgIAAJBHFAoAAAhQsGDByZMnL1++PC4uTnQWAACAvKBQAABAjGHDhhUrVmzevHmigwAAAOQFhQIAAGLY2NgEBgauWrXKSC5cDwAAkCsUCgAACDN06FAXF5eQkBDRQQAAAHKNQgEAAGGsra0DAgJWr159584d0VkAAAByh0IBAACRBg8e7ObmxiQFAABgcigUAAAQydraesqUKevXr799+7boLAAAALlAoQAAgGBffPFF+fLl58yZIzoIAABALlAoAAAgmEKhCAoK+v77769fvy46CwAAQE5RKAAAIF7//v0rVar0zTffiA4CAACQUxQKAACIp1Aovv76682bN1++fFl0FgAAgByhUAAAwCj06dOnRo0aM2bMEB0EAAAgRygUAAAwCrIsz5kzZ9euXSdPnhSdxVK8fPlSdAQAAEwYhQIAAMaiffv2zZs3DwgIEB3EzKWkpAQHBzds2LBo0aJ6OsShQ4c6duwoy7Isyy1btmzZsmWdOnW6deu2Zs2a1NTUzN0iIyNr1Kghy7Knp2dSUlLm9sOHD7dv316W5Tp16kRGRuZwNz39XUzR4sWLAwICWrZs2bRp0xs3buhkzNDQUCcnJ1mWlUplu3btunTp0rlz59atW5ctW1aW5W+//dbLy0v7jh87duzdLz958qT21d69ex89elQnkQBAPA0AADAav//+uyRJ+/fvFx3ElPTp06dPnz65+pKkpKQiRYro9YNQXFycJEnlypXTPs3IyNi9e3eFChUqVar0119/Ze6WOUvC19c365fHxMRIknT9+vVc7ZY9SZK2bt2ar7+VUDnMHxERYWdnl5aWFh8f37Nnz7Nnz+oqwN9//y1JUqVKlbJuzMjI6Ny5861btxITE7XvUdeuXd/92r59+9ra2kqS9PDhwxweztTfLwCWgBkKAAAYkUaNGnXq1CkoKEij0YjOYs4KFixYokQJvR7CxcVFkiQbGxvtU1mWO3fufPz48devX3ft2jU5OVm7vXDhwpIkNW3adNWqVVlnGZQuXVqSpHLlyuVqN0iStHz58tKlSysUCgcHh+3bt9epU0dXI5cqVUqSJIVCkXWjLMuBgYF2dnYfffSRJEmNGjXas2fPf/7zn6z7PHz48Pnz52XKlJEkydnZWVd5AEA4CgUAAIxLSEjIpUuXduzYIToIdK9UqVLffPPNrVu3Fi5cmHX71q1bS5UqNXTo0Dt37mi3KJVKSZKsra3zsJuFu3fvnizLBjvcH3/80bBhw8x+avz48RkZGREREVn3WbVq1YgRIwwWCQAMhkIBAADjUrVqVS8vr6CgoLS0NNFZzEpSUpK/v/+wYcOmTZsWFBT05s0b7fZdu3YNGzbMzc0tPj5+0KBBxYoVq1q16oULF7J/SZKkI0eOuLm5/fbbb7mK0bt3b4VCceDAgawbS5YsGRkZmZiY6O3trVarP/S1OdzNYqlUqhEjRrx58+bhw4cjRowYMWLEkydPNm3a1K9fv0aNGp0+fbpW
rVru7u4nTpy4ceNGjx49ihcvXrly5cw3VMrle6pWq69evTpmzJisG3v06FG2bNl169bFx8dn7rZ///4uXbro8G8KAMZC9JoLAADwths3blhbW69bt050ENOQk2sopKWl1atXb+jQodqnt27d0v5uX6PR3L9/387OTpKkOXPm3L17d+PGjZIk1atXL/uXNBrNzp07bW1td+/e/aGDSpLk4eHx7vZSpUoVLVo0627aB4sWLZIkaeLEiW9tz9Vu2ZBMfE1+DvNnPe0ZGRna1QcODg4qleratWuSJLm7uy9YsODly5eXLl2SJKl58+aZX5uT9/Qtjo6OWV/VaDShoaGSJM2fP1+7ccuWLaGhoRqNxsPDI1efvU39/QJgCSgUAAAwRkOHDi1btmxycrLoICYgJ4XCkiVLJEmKjo7O3PLxxx9n/nT3ySefZP1Jz9nZ2cbG5h9f0mg0aWlp2Rz0Q4WCm5ubi4tL1t0yH2vvFKBSqTQfLhSy3y37PCb9A2oeCoV3t2ivOpH5UokSJbI2AprcvKfp6ek3b96sXr161lc1Gk18fLydnZ2bm5tardZoNG3btn3+/LmGQgGAOWLJAwAAxmjmzJmPHz9euXKl6CBmQrvEwN3dPXOLldV/PwW9teTeyckpJSXlH1+S3rk+X06o1epHjx7VqFHjva+uWbPGw8Nj0KBB2hsKfEgOd8O77O3tsz4tUqRI5toErZy/p1ZWVhUrVhw1atRb2x0cHAYPHnzv3r3t27f/8ccf5cuXd3Jyyk9mADBaFAoAABgjFxeXkSNHBgcHv379WnQWc6C9ieOzZ89EB5GioqJSU1NbtWr13lft7Ox27NiRlJTk4+OTzSA53A0GMHTo0Hc3jh071srKatGiRUuWLHnrIgsAYE4oFAAAMFIBAQFJSUnaBfPIJ+1sc5VKpdth09PTc7V/ampqUFBQzZo1x44dq92SkZGR+aeWh4fH2rVrjxw5kvULc7gb8i+372mmrO9RxYoVO3fufObMmbi4uCpVqmh30HAvWABmh0IBAAAjVaxYsUmTJs2fP//Ro0eis5i8SZMmKZXKoKCg/fv3JyUlHTlyRLtYICYmRpKk5OTkrDu/evVKkiTtXTayeUmlUjk6Ov7666/vPWJSUtJbX37p0qU2bdq8ePFi06ZN2ktCSpL0+PFjSZLeeov79OkzYcKErFtyuBtevHghSVJqamrmFu1bkPnDvPbuGJkTf7SvZjY12b+n2juDJCYmvvfVt94j7VszcuTIt778re8oADBpFAoAABgvf39/R0fH4OBg0UFMXvXq1aOiojw8PPr06ePp6Xn27NkaNWoMHz789u3bS5Ys0dYKc+bMSUhIiIiI0K6PmDZtWlhY2IdeSk5OtrGxKVy4sI2NzbuHO3HihHaie0xMTIsWLdq3b9+tW7c5c+Z4e3tfuXKlcuXK2t1+/vnnIUOGSJLk6+v7+++/Zx1h/vz5jRs3ztVuuHr1alBQkCRJMTExs2bN+vPPPx8/fjxlyhTtlsOHDx84cODu3buSJE2ZMuX58+dLlizRPl24cKF2OUw27+mpU6fGjRsnSVJsbGxgYKD2DhGZdu3a5evrK0mSr69vVFSUJEnNmzfv1atXx44dJUmKjo6eOnXq/fv3JUn64osvjh49qtfzAAAGIzP5CgAAY7ZixYqxY8deu3atYsWKorMYKS8vL0mSIiMjRQcxdrIsb926VXu6TJGp588tS/v7AjBFzFAAAMCoDR06tGLFitOnTxcdBAAA4H9QKAAAYNQUCsU333yzZcuWCxcuiM4CAADwXxQKAAAYu169ejVo0GDixImigwAAAPwXhQIAACZg7ty5R48ePXjwoOggAAAA/4dCAQAAE9CkSZNOnTpNmjQp8/52AAAAYlEoAABgGubOnXv16tUtW7aIDgIAACBJFAoAAJgKT0/PAQMGTJkyJSUlRXQWAAAACgUAAEzH7NmzHz16tGLFCtFBAAAAKBQAADAdpUuXHjVq1OzZ
sxMSEkRnAQAAlo5CAQAAUxIYGJienj5//nzRQQAAgKWjUAAAwJQUKVIkMDAwLCwsNjZWdBYAAGDRKBQAADAx48aNc3FxmTFjhuggAADAolEoAABgYgoUKDB79uwNGzZcuHBBdBYAAGC5KBQAADA93t7e9evXnzhxouggAADAclEoAABgemRZDg0NPXbsmEqlEp0FAABYKAoFAABMUoMGDXr06DFx4sS0tDTRWQAAgCWiUAAAwFQtWLDg9u3ba9asER0EAABYIqXoAAAAII/Kly8/fPjw6dOn9+3bt3DhwqLjiHTq1CkvLy/RKaB3ixYt+umnn0SnAAD8H8XMmTNFZwAAAHlUv3798PDwtLS0Fi1aiM4iUkJCgugI/yMuLu7IkSMeHh6ig/yPatWqffHFF46OjqKD5FFCQoIsyzoZSq1WHz9+3NHRsWDBgjoZUB9M/f0CYAlkjUYjOgMAAMi7+fPnz5w58/r1625ubqKz4P9ERkZ6e3vzKctojRo1auvWrdHR0cWLFxedBQBMGNdQAADAtI0fP97FxWX69OmigwCm4dy5cytXrgwLC6NNAIB8olAAAMC0FShQYNasWRs2bLhw4YLoLICxS0tLGzZsWOPGjQcMGCA6CwCYPAoFAABMXt++fWvXrh0QECA6CGDswsLCrl27tmLFCl1djgEALBmFAgAAJk+W5YULFx46dGjPnj2iswDG6+7du7NmzZo6daqxXS8TAEwUhQIAAOagSZMmffr0GT9+fEpKiugsgJEaPXq0q6vrpEmTRAcBADNBoQAAgJlYuHDhgwcPli1bJjoIYIy2bNmiUqmWL19uY2MjOgsAmAkKBQAAzISbm9v48eNnzZr19OlT0VkA45KQkODv7z9kyJAWLVqIzgIA5oNCAQAA8xEYGGhra8stJIG3TJ48OTU1NSQkRHQQADArFAoAAJgPOzu72bNnr1q16sqVK6KzAMbizJkz3333XURERLFixURnAQCzIms0GtEZAACAzmRkZNSvX9/e3v7w4cOis1iuyMhIb29vPmUZg7S0tM8++6xIkSKHDx/mVpEAoFvMUAAAwKxYWVmFh4cfOXJk165dorMA4s2fP//GjRvfffcdbQIA6ByFAgAA5qZhw4Z9+vTx8/PjFpKwcDExMcHBwdOmTatQoYLoLABghigUAAAwQ6GhoQ8ePFiyZInoIIBIo0ePdnd3nzhxouggAGCeKBQAADBDbm5uEyZMmDVr1sOHD0VnAcTYtGnTvn37Vq5caW1tLToLAJgnCgUAAMxTQECAnZ3d119/LToIIMDz58/9/f19fX0bNWokOgsAmC0KBQAAzJOdnd2cOXO+++67P//8U3QWwNC++uorWZaDg4NFBwEAc0ahAACA2Ro4cGDNmjX9/PxEBwEM6vfff1+zZk14eLiTk5PoLABgzigUAAAwW9pbSEZFRW3fvl10FsBAUlNThw0b1rZtW29vb9FZAMDMUSgAAGDOGjVq1L9//wkTJrx580Z0FsAQ5s2bFxMTs2zZMtFBAMD8USgAAGDmQkNDExIS5s+fLzoIoHc3b94MDg6eMWNG+fLlRWcBAPNHoQAAgJlzdnaeOnXqvHnzbt68KToLoEcajWbo0KGffPLJhAkTRGcBAItAoQAAgPkbP358pUqVJk+eLDoIoEcrVqw4ceLEmjVrrK2tRWcBAItAoQAAgPlTKpXh4eG//PLLvn37RGdKhWFrAAAgAElEQVQB9OLvv/8OCgqaOHFi7dq1RWcBAEtBoQAAgEVo1apVjx49xo4dm5KSIjoLoHsjR44sUaLE9OnTRQcBAAtCoQAAgKUIDw9/8OBBRESE6CCAjm3atGn37t3ffffdRx99JDoLAFgQCgUAACxFmTJlJk2a9M0338TFxYnOAujM06dP/fz8hg8f3rRpU9FZAMCyUCgAAGBBJk+eXLx48a+++kp0EEBnxo4dq1Qq58yZIzoIAFgcCgUAACzIRx99FBYW9uOPPx47dkx0FkAH
9u7du3nz5uXLlzs6OorOAgAWR9ZoNKIzAAAAg+rYseO9e/cuXbqkVCpFZzFPkZGR3t7efMrSt4SEBE9Pz2bNmv3www+iswCAJWKGAgAAFufbb7+9efPmqlWrRAcB8mXSpElJSUkLFy4UHQQALBSFAgAAFqdixYrjxo2bNm3a06dPRWcB8ujYsWPffffd4sWLS5QoIToLAFgoCgUAACzR1KlTbW1tJ0+eLDoIkBdJSUlffvllx44dP//8c9FZAMByUSgAAGCJ7O3tFy1atH79+qNHj4rOAuTatGnTnjx5smLFCtFBAMCiUSgAAGChevfu3bFjx9GjR6vVatFZgFw4depUeHh4WFiYq6ur6CwAYNG4ywMAAJbr1q1bVatW/frrrydNmiQ6i8mrVq3anTt3tI81Gk1aWpq1tbX2qSzL/v7+M2bMEJfOfKSkpNSqVcvFxeXAgQOyLIuOAwAWjZtFAQBguSpUqBAYGDhz5sw+ffq4u7uLjmPaChYs+ObNm6y/qklJSdE+kGW5cOHCgnKZm6CgoPv37+/bt482AQCEY8kDAAAW7auvvipTpoyfn5/oICZv4MCBVlYf/GTl5eVlyDDm6vTp0xEREWFhYWXKlBGdBQDAkgcAACzesWPHWrRosXPnzi5duojOYsKePHlSqlSp9PT0t7ZbWVk1adKEi1/mH4sdAMDYMEMBAABL16xZM29v79GjR79580Z0FhNWvHjxZs2aKRSKt7bLsjxgwAAhkczMlClT7t+/v2bNGtoEADASFAoAAEBatGhRQkJCcHCw6CCmbcCAAe/O/ZRluUePHkLymJPTp09r7+zAYgcAMB4seQAAAJIkSYsXL544ceLly5crV64sOoupSkhIKF68eGpqauYWpVLZoUOHXbt2CUxlBrSLHUqVKnXw4EGmJwCA8WCGAgAAkCRJGjVqVI0aNYYPH84vG/KscOHCnTp1Uir/exet9PR0Hx8fgZHMw9SpU+/fv7927VraBAAwKhQKAABAkiTJyspq6dKlJ06c2LRpk+gsJqx///5Zr8toY2PTqVMngXnMwOnTpxctWsRiBwAwQix5AAAA/zVy5MgdO3ZER0c7OTmJzmKSkpOTixUrpr28pbW1tbe39w8//CA6lAljsQMAGDNmKAAAgP/SXpcxKChIdBBTVbBgwd69excoUECSJLVa3a9fP9GJTBuLHQDAmFEoAACA/3J0dAwLC1u1atXvv/8uOoup6tevn/a6jA4ODq1btxYdx4SdOHFi0aJFCxcuZLEDABgnljwAAIC3de3a9caNG5cvXy5YsKDoLKYnLS3N2dn5+fPnI0aMWLZsmeg4purNmzc1a9YsV67cr7/+yvQEADBOzFAAAABvW7JkSVxc3Ny5c0UHMUlKpVK70qFv376is5gwPz+/58+fr1+/njYBAIwWMxQAAMB7LFq0KCAg4OLFi59++qnoLJKfn9+iRYtEpzBzSqUyKiqqSZMmooNIkiQdOHCgffv2W7Zs8fLyEp0FAPBBFAoAAOA9MjIyGjdunJGRcfLkSSsrwVMavby87t275+fnJzaGefPy8tq6dasx/AD/4sWLatWqNW/enBtkAICRU4oOAAAAjJGVldXKlStr1669cuXKESNGiI4jubm59enTR3QKGMLw4cM1Gk1ERIToIACAf8A1FAAAwPtVrVrV398/ICDg/v37orPAUmzatGnbtm2rV68uUqSI6CwAgH9AoQAAAD5o5syZLi4uw4cPFx0EFiEuLm7s2LGjR49u37696CwAgH9GoQAAAD7IxsZmxYoVe/fu3b59u+gsMHMajWbo0KFOTk7BwcGiswAAcoRCAQAAZKdZs2aDBw8ePXr0ixcvRGeBOVu6dOnBgwc3bdpkZ2cnOgsAIEcoFAAAwD8ICwuzsrIKCAgQHQRm69atW4GBgUFBQfXq1ROdBQCQUxQKAADgHzg4OISFhX333XdRUVGis8AMpaWl9e/fv1KlSlOmTBGdBQCQCxQKAADg
n3l7e3fp0mXEiBHJycmis8DcBAcHX758ecOGDQUKFBCdBQCQCxQKAAAgRxYvXvzgwYNZs2aJDgKzcv78+dmzZ4eEhHh6eorOAgDIHQoFAACQI2XKlJk3b96CBQvOnTsnOgvMxJs3b/r379+0adNx48aJzgIAyDUKBQAAkFPDhw9v2bLlF198wcIH6MTYsWOfPHmydu1aKys+lAKA6eHfbgAAkFOyLK9cufL+/fuzZ88WnQUmb8eOHWvXrl2xYkWZMmVEZwEA5AWFAgAAyAV3d/e5c+fOmzePhQ/Ij7i4OF9f32HDhnl5eYnOAgDIIwoFAACQOyNGjGjRogULH5BnGRkZAwcOdHJyCg0NFZ0FAJB3FAoAACB3ZFletWrV/fv358yZIzoLTNK8efOOHz/+448/2tnZic4CAMg7CgUAAJBr7u7uwcHBc+fOPX/+vOgs2Xn58qXoCHjbhQsXZs6cGRISUqdOHdFZAAD5QqEAAADyYtSoUdqFDykpKaKzvC0lJSU4OLhhw4ZFixYVnUU6dOhQx44dZVmWZblly5YtW7asU6dOt27d1qxZk5qaKjqdoWnvE9mgQYMJEyaIzgIAyC8KBQAAkBfahQ+xsbFGuPDBxsbGz8/v+vXr6enporNIrVu3Xr16tSRJ5cqVi4qKioqKOnv27NChQ0NCQjw9Pa9duyY6oEGNGTPm2bNnP/74I/eJBAAzwD/lAAAgj7QLH0JCQoxw4UPBggVLlCghOsX/cXFxkSTJxsZG+1SW5c6dOx8/fvz169ddu3a1nGtbbt++ff369atXr9aeEACAqaNQAAAAeTdq1KhGjRoNGTLEAmfv51OpUqW++eabW7duLVy4UHQWQ7h//772PpHdunUTnQUAoBsUCgAAIO+srKzWrVt3+/bt4OBg0VmkpKQkf3//YcOGTZs2LSgo6M2bN5kvJScnz58//8svv6xTp06bNm2uXr0qSdKuXbuGDRvm5uYWHx8/aNCgYsWKVa1a9cKFC9ovOX/+fP369UePHj19+nRra2vtaO8dR5KkI0eOuLm5/fbbb7kK3Lt3b4VCceDAAcOEFEh7n0hnZ2cLaU8AwFJoAAAA8iciIsLa2vrixYt6Gr9Pnz59+vTJfp+0tLR69eoNHTpU+/TWrVtKpTLzo87QoUP//e9/ax+3bdvW2dk5ISHh/v372tsWzpkz5+7duxs3bpQkqV69etrdPv744yJFimgfe3t7P378+EPjaDSanTt32tra7t69+0PxJEny8PB4d3upUqWKFi1qmJDZkyRp69at/7hb3syePdvGxubSpUt6Gh8AIASFAgAAyK/09PRmzZp9+umnSUlJ+hg/J4XCkiVLJEmKjo7O3PLxxx9rC4UzZ868+zuVPXv2aDSaTz75JOvvV5ydnW1sbLSPixcvLklSRERERkbG1atXExISshlHo9GkpaVlE+9DhYKbm5uLi4vBQmafUE+FwvHjx5VKZVhYmD4GBwAIxJIHAACQX1ZWVhs3bvz7778DAwNFZdAuHHB3d8+aSvvg3Llznp6eb30G6tSpkyRJsixnHcTJySnzLpjLly+3t7cfN25c3bp1X79+bW9vn804kiQpFIrcZlar1Y8ePapRo4bBQhre8+fPfXx82rVrN378eFEZAAB6QqEAAAB0wNXVNTw8PCIi4tdffxUSIC4uTpKkZ8+evfvSs2fPbt++nZiYmHVjRkZG9gP26tXr8uXL7dq1O3/+fJMmTb7//vu8jZONqKio1NTUVq1aGXPI/NBoNIMHD87IyPj+++/fqkUAAGaAQgEAAOjGwIEDvby8vvzyy+fPnxv+6B4eHpIkqVSq976UmJg4b968zC3R0dHaJRLZmDFjRvny5X/99dfNmzer1eqpU6dmP056enquAqempgYFBdWsWXPs2LEGC2lg8+fP37t37+bNm4sWLSokAABAr2SNRiM6AwAAMBPx8fHVqlWrW7fuTz/9pMNhvby8JEmKjIzMZp8//vjjs88+
c3Bw2LRpU9OmTU+fPt29e/eEhIQ7d+6UKlWqSpUqt2/f/te//tWqVavo6OizZ8/+9NNP9vb25cqVi4mJyfw45OrqGhcXp1arlUploUKF4uLiHB0d09LSihUr5uHhcezYsQ+No1KpPv/8823btrVv3/7dbElJSba2tu7u7nfu3NFuuXTp0vjx4+/du6dSqSpXrixJUkpKir5DZn+SZVneunWr9lTrxJkzZ5o0aRISEuLv76+rMQEARoUZCgAAQGccHR3XrFmzY8eOH3/80cCHrl69elRUlIeHR58+fTw9Pc+ePVujRo3hw4ffvn3b2to6Kiqqa9euv/zyi7+//+PHjzdt2mRvb79s2bKYmBhJkubMmZOQkBAREaFdNzFt2rTk5OTExMRWrVrNmzdv0KBBTZo02bJli42NzXvHkSTJxsamcOHCNjY27wY7ceLEmDFjJEmKiYlp0aJF+/btu3XrNmfOHG9v7ytXrmjbBO0I+g5pSC9evPD29m7Tpo2fn5+BDw0AMBhmKAAAAB0bM2bMDz/88Oeff5YpU0YnA+ZkhgLySYczFDQaTY8ePS5evHjx4sVixYrlf0AAgHFihgIAANCxBQsWuLm5+fj4iLoWIMRasGCBSqXavHkzbQIAmDcKBQAAoGMFCxbcsGHDmTNnwsPDRWeBoZ09e3batGnBwcGNGjUSnQUAoF8UCgAAQPdq1qw5ffr0oKCgP//8U3QWGI720gmtW7eeOHGi6CwAAL2jUAAAAHoRGBhYv379fv36JScni84CQ9BoNIMHD05LS/v+++9lWRYdBwCgdxQKAABAL6ysrNatWxcbGzt9+nTRWWAICxcu5NIJAGBRKBQAAIC+lCtXbtGiRQsXLjx27JjoLNCv06dPBwUFzZkzp3HjxqKzAAAMhEIBAADo0ZAhQ7p27TpgwIBnz56JzgJ9efz4sZeXV9u2bSdNmiQ6CwDAcCgUAACAfq1du9bKymrgwIEajUZ0FuheWlqat7e3QqHg0gkAYGkoFAAAgH45OTn98MMPBw4cWLx4segs0L3JkyefOXNm+/btRYsWFZ0FAGBQFAoAAEDvmjRpMn369MmTJ1+8eFF0FujSli1bFi1atHz58lq1aonOAgAwNAoFAABgCFOmTGnSpIm3t3dCQoLoLNCNK1eufPnll+PHj//iiy9EZwEACEChAAAADMHKymrjxo2vX7/29fUVnQU6EB8f37Nnz+rVq8+bN090FgCAGBQKAADAQJydndetWxcZGbl+/XrRWZAvGRkZPj4+b9682bZtW4ECBUTHAQCIQaEAAAAMp3379v7+/qNHj46OjhadBXn39ddfHzhwIDIy0sXFRXQWAIAwFAoAAMCgQkJCqlWr5uXllZSUJDoL8mLPnj2zZ89esmRJ48aNRWcBAIhEoQAAAAxKqVRu2bIlLi5u0qRJorMg127evDlgwID+/ftzLQwAAIUCAAAwtDJlyqxatWrp0qVbtmwRnQW58Pr16549e5YrV27lypWiswAAxFOKDgAAACxR7969fX19R4wYUb9+fXd39+x3VigUW7ZskWXZINEsl1KZ3SdDjUYzZMiQBw8enDt37qOPPjJYKgCA0ZI1Go3oDAAAwBIlJSXVrVvX3t7+2LFj1tbW2ewZExNz7tw5gwXLv1OnTi1atCgyMlJ0kFxQKBQdO3YsWLDgh3ZYsGBBUFDQ/v37W7ZsachgAACjRaEAAACEuXbtWr169YYMGRIeHi46iy5FRkZ6e3ub06esXbt29ejRIzQ0dMKECaKzAACMBddQAAAAwlSpUmX16tUREREbNmwQnQUfdPny5f79+w8aNIg2AQCQFddQAAAAInl7e586dWrkyJG1atXy9PQUHQdve/DgQdeuXWvXrr18+XLRWQAAxoUZCgAAQLDQ0NBatWr17Nnz5cuXorPgfyQlJXXv3r1QoUI///xzgQIFRMcBABgXCgUAACCYUqncsmXLq1evBg8ebE7XHTB1Go1m8ODBt27d
2r17t5OTk+g4AACjQ6EAAADEc3Fx2bp16549e7755hvRWfB/AgICfv75559++qlixYqiswAAjBGFAgAAMApNmzYNDw+fOXOmad1t0VytX79+wYIFq1evbt68uegsAAAjxUUZAQCAsRg5cuTVq1eHDBni4eFRrVo10XEs1/Hjx4cPHz516tQBAwaIzgIAMF7MUAAAAEYkIiKibt26Xbt2ffz4segsFur27ds9e/bs0qXL119/LToLAMCoUSgAAAAjYm1tvW3bNmtr6549e6ampoqOY3GePXvWvn37ihUrbtiwQZZl0XEAAEaNQgEAABiXIkWK/Pzzz3/++aevry83fTCkxMTELl26qNXqX3755aOPPhIdBwBg7CgUAACA0fH09Pzll182b948Y8YM0VksRXp6uo+Pz82bN/ft2+fs7Cw6DgDABHBRRgAAYIxatmy5du3aAQMGlCxZcuTIkaLjmDmNRjNs2LD9+/cfPHjQw8NDdBwAgGmgUAAAAEaqf//+N2/eHDt2bOnSpbt16yY6jjmbOnXqhg0bdu7c2bBhQ9FZAAAmg0IBAAAYrxkzZsTGxvbr1y8qKqpevXqi45inFStWhISErFmzpkOHDqKzAABMCddQAAAAxkuW5ZUrVzZp0qRLly7/+c9/RMcxQzt37hw9enRISMjgwYNFZwEAmBgKBQAAYNS0N5J0dXXt0KHDkydPRMcxK0ePHv3888+HDRv21Vdfic4CADA9FAoAAMDY2dvbq1QqtVrdq1ev5ORk0XHMxJUrV3r06NGtW7fFixeLzgIAMEkUCgAAwASUKlVq7969V69eHThwYEZGhug4Ju/evXsdO3asUaPG999/b2XFB0IAQF7w/wcAADANVapU+eWXX3bt2jV58mTRWUzb06dPW7duXbx48Z07d9rY2IiOAwAwVRQKAADAZDRt2nTdunVhYWFhYWGis5iqly9ftm/fPi0tbe/evYULFxYdBwBgwrhtJAAAMCV9+/Z99OiRn5+fvb390KFDRccxMYmJiV27dn3w4MHx48dLliwpOg4AwLRRKAAAABMzfvz4ly9fDh8+3M7Orm/fvqLjmIzU1NTevXtfu3bt2LFj5cuXFx0HAGDyKBQAAIDpmTFjRmJi4oABAwoUKNCrVy/RcUxAenq6j4/PiRMnoqKiqlSpIjoOAMAcUCgAAACTNHfu3JcvX/br12/Xrl3t2rUTHceoaTQaX19flUq1b9++2rVri44DADATFAoAAMAkybK8bNmyV69e9ezZc//+/Y0bNxadyEhpNJpRo0Zt3Ljxl19+adq0qeg4AADzwV0eAACAqbKystqwYUPHjh07d+588eJF0XGMVGBg4KpVqzZu3NihQwfRWQAAZoVCAQAAmDCFQrFp06YGDRq0b98+OjpadByj88033yxYsGDDhg19+vQRnQUAYG4oFAAAgGkrUKDA9u3bPTw82rZte+fOHdFxjMiSJUtmzJixdOnSfv36ic4CADBDFAoAAMDk2dra7t69u0SJEm3atPn7779FxzEKa9asGTt27IIFC4YPHy46CwDAPFEoAAAAc+Dg4LBv3z5ra+s2bdo8evRIdBzBNmzY4OvrO336dH9/f9FZAABmi0IBAACYiRIlShw9elSSpGbNmlnyPIVt27YNGTJkwoQJM2fOFJ0FAGDOKBQAAID5cHZ2joqKUigULVu2tMxOYfv27f369RszZkxoaKjoLAAAM0ehAAAAzIoldwo7duzo27fvqFGjwsLCRGcBAJg/WaPRiM4AAACgY48ePWrZsmVaWtqRI0dcXFwMcMSrV68+fPhQ+/jYsWOzZ88+ePBg5qvVq1cvXry4XgPs27evR48eAwcOXLlypSzLej0WAAAShQIAADBXjx49atWqlVqtNkyn4Ojo+PLlyw+96uvru3LlSp0cKDExMTo6unbt2lk3/vrrr927d/fx8fnuu+9oEwAAhsGSBwAAYJ6cnZ0PHz5sbW3dokWLrGsfYmNj3d3d
ly1bptvDtWvXTqlUvvclWZbbtm2rqwPNnDmzTp06P/zwQ+aWAwcO9OjRo1+/fqtWraJNAAAYDDMUAACAOXv8+HHLli3VanVUVFTp0qUfPnzYsGHDmJgYR0fH2NhYOzs7XR1o586d3bt3f+9Ltra2T58+/eijj/J/lKdPn7q5uSUnJ1tZWW3cuLFv374HDx7s2rWrt7f32rVrraz4XREAwHD4XwcAAJizEiVKREVFaecpREdHt2nT5v79+xqN5tWrV4sXL9bhgTp06FC4cOF3t1tbW3t5eemkTZAkKSwsLD09XZKkjIwMHx+fGTNm9OjRo2vXrqtXr6ZNAAAYGDMUAACA+Xv48GHz5s1fvnz57NkztVqt3WhnZ3fv3j1HR0ddHeXLL7/csGFD5viZDhw40KZNm/yP//z5c1dX16SkpMwtsiw3adJEe1eL/I8PAECu0GQDAADz5+Dg4Ojo+PTp06w/7ScnJ4eHh+vwKP369Xu3TXBycmrRooVOxg8PD09LS3tr44kTJ/bs2aOT8QEAyBVmKAAAADOXmpratWvXqKiod3/at7W1jY2NLVq0qE4OlJGRUbJkySdPnmRuKVCgwPDhwyMiIvI/+MuXL11dXV+/fv3WdlmWFQrFjh07unTpkv+jAACQc8xQAAAA5iwtLc3Ly+vw4cPvtgmSJKWmpoaGhurqWFZWVj4+PtbW1lnH79u3r04Gj4iISE5Ofne7RqNJT0/v1avX6dOndXIgAAByiBkKAADAnJ05c6Z+/foKhUJ7LcN3FSxYMCYmxtnZWSeHO3fuXN26dTOfuri43L9/P/+3ckxISHB1dX316tW7L2mvxViyZMnt27fXr18/nwcCACDnmKEAAADMWb169c6fP//5558rFAqlUvnuDunp6fPmzdPV4erUqVOuXDnt4wIFCgwePDj/bYIkSYsXL856LUYthUIhy7Kbm9vy5ctjYmJoEwAABsYMBQAAYBHu3LmzcuXKJUuWpKSkvHVpQ2tr69u3b7u6uurkQNOnT587d652hcWVK1c8PT3zOeCbN29cXV3j4+MztyiVyrS0tKpVq06cOLF///7c4gEAIAQzFAAAgEUoV67c3Llz//7779DQ0JIlS8qyrF0soBUcHKyrA/Xt21fbJnzyySf5bxMkSVqyZEnmYgftBRpq1669a9euP//8c+DAgbQJAABRmKEAAAAsTmpq6pYtW+bPn//XX39ZW1ur1WqlUnnz5k13d3edjF+1atWrV6+GhIQEBATkc6jExEQ3N7fnz59bW1unp6f36NFjypQpNWvW1ElOAMD/a+/uY6us7jiAn0vblThhYrHKSwnCcJ0BQSO+EDUqznWDgAtCcVGiRuNLfEHRPwTcYrQ4RUmMytRkGhOrUpaIuibOLbDNGBGZ8mJkzFDrakGNEKy8ldveuz/u0iHQ0gO3PKX9fP56es5zzvk9vX/06fee516OhEABAOhR3nnnnS+//LKTJ69fv/71119ft25dCOHiiy++9dZb81LDG2+8UV1d/eSTT5aWlh7hVLW1tS+++GJBQcHFF188derUU045JWr4+PHj85WSAMB+BAoAQI9SVFS030ck9GYzZ8585ZVXkq4CgJ7JZygAAD1KS0vLkiVLsmSz06dPb+/LMgHgyAkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAoF3ffvvtYXQd60sDQGcIFAAA9tfc3LxgwYIJEyaUlJR0vuuQMpnMhAkTmpubj/7SAJB3AgUAgP0VFxfffffdGzdubG1t7XzXIb355pvvvffeSy+9dPSXBoC8EygAABxE3759S0tLY7s69vzzzw8bNmzRokXZbPYoLw0AeSdQAAA4GtatW/fjH/94zpw5n3zyyVtvvZV0OQBwpAqTLgAA4Gj79NNP586d
O3LkyM2bN9fX1z/99NNjxowJIezevXv+/Pk7duwoLS1tbW3duXNn25AOulasWDFr1qzq6uqLLrqog0UXL148b968E0888YEHHnj88cd/8Ytf7Nt7eEsDQIIECgBArzNp0qRMJrN06dKWlpaTTjrp17/+9fr161tbWy+55JIzzjjjueeeCyHU1dUtXLgwd34HXSGE7777btu2bU1NTR2s+M0337S2tpaVlYUQbrnllqqqqjVr1owbN+6Q83e8NAAkyCMPAECvc8sttzz00EMhhIKCgpKSko0bN4YQnnnmmffff//uu+/OnTNixIgRI0bkjjvoCiFMmTKlqalp8uTJHaz43HPP3Xbbbbnj22+/vW/fvosWLWrrPeylASBBdigAAL3OXXfdtXPnzsWLF2/btq25uTmdTocQ3n777RDC8OHD207r0+d/b7100JVTUFDQwXLpdPrpp5+eN2/evo2vvvrqww8/PGTIkCNcGgCS4g8SANDrfPDBB2PGjBkxYsT8+fOPP/74XGNjY2MIYevWrQee30FXZyxduvTee+/N7qO6ujqdTj/55JNdvTQAdB2BAgDQ68yaNSudTldUVIQQMplMrrG8vDyEUFtbe+D5HXTltLa2tteVyWQWLlx49dVX79t45ZVXlpaWPui6Yu8AAAsoSURBVPvsszt27DjCpQEgKQIFAKDX2bJlS2Nj41/+8peXX355+/btIYRVq1ZdddVVhYWFc+fO/fOf/7x79+4VK1Zs3rw5hFBfX3/vvfe21xVCqK2tPeGEE9r7JsiXX365pKRk4MCB+zb+4Ac/mDx58vbt23OfpNDB/B0vDQAJEigAAL3OggUL+vfvP3/+/JEjR86bN2/AgAELFiw4//zzly9fXl5ePn369NGjR69atWrcuHE333xzXV3dmDFj2uvKZDLFxcX9+/cvLi4+cKHXXnvtnnvuWbNmzTPPPLNv+7Jly9auXRtC+N3vfvfoo4+OHTv28JY+Sr8vADiYVDabTboGAIC8SaVSS5YsmTFjRtKFJC/3S6ipqUm6EAB6JjsUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGiFSRcAAJBnK1euTKVSSVeRvIaGhrKysqSrAKDHSmWz2aRrAADIm7Kysi+++CLpKrqLu+66a9GiRUlXAUDPJFAAAMizmpqayspKd1kA9Gw+QwEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAogkUAAAAgGgCBQAAACCaQAEAAACIJlAAAAAAoqWy2WzSNQAAHPOqqqr+85//5I63bNny4YcfTpo0qa33V7/6VUVFRUKlAUCXECgAAOTB4MGDv/rqq4KCggO70un0nDlzHnvssaNfFQB0HY88AADkwdVXX11QUJA+mBDCzJkzky4QAPLMDgUAgDz46KOPzjrrrIN2DRs27PPPPz/K9QBAV7NDAQAgD84888xRo0Yd2F5UVHTdddcd/XoAoKsJFAAA8uOaa64pKirarzGdTs+YMSORegCgS3nkAQAgPzZt2jRq1Kh9b65SqdTo0aPXrVuXYFUA0EXsUAAAyI+RI0eOHTs2lUq1tRQWFs6aNSvBkgCg6wgUAADyZtasWft+c2RLS8v06dMTrAcAuo5HHgAA8mbLli1Dhw7NZDIhhD59+px3
3nnvvvtu0kUBQJewQwEAIG8GDRp0wQUX5DYppFIpzzsA0IMJFAAA8umaa65pO542bVqClQBAlxIoAADk07Rp01KpVCqVuuyyywYOHJh0OQDQVQQKAAD5NGDAgIqKimw263kHAHo2H8oIAHQL9fX19913X2tra9KF5EFjY+Pq1asnTZpUWFiYdC15MHTo0EWLFiVdBQDdjkABAOgWampqKisre8yXLGYymT59esJW0IaGhpUrV7pjBOBAPSE1BwB6jJqamqRL4HtyQU/SVQDQHfWE4BwAAAA4ygQKAAAAQDSBAgAAABBNoAAAAABEEygAAAAA0QQKAAAAQDSBAgAAABBNoAAAAABEEygAAAAA0QQKAAAAQDSBAgAAABBNoAAAAABEEygAAAAA0QQKAAAAQDSBAgBwbPv222+TLgEAeiOBAgBwTGpubl6wYMGECRNKSkqSriWEEDZv3vzCCy9UVlZOmDChk0P++te//vKXv0ylUqlU6tJLL7300kvHjx8/derUP/zhD3v37u3SagHgyKWy2WzSNQAAhJqamsrKyqg7kz179gwZMmTbtm3d5H6moaFh2LBh5eXlGzZs6OSQzZs3Dxky5NRTT62rqwshZLPZ2tra2bNn9+nTZ9myZaeffnpX1tsph/G6ANBL2KEAAByr+vbtW1pamnQV/1dWVhY7ZPDgwSGE4uLi3I+pVGry5MnvvPPOjh07pkyZsmfPnjyXCAD5I1AAAOheBg0a9OCDD27atOnxxx9PuhYAaJdAAQA4luzevXvOnDk33XTT/fffP3fu3J07d7Z17dmz59FHH73hhhvGjx//s5/97OOPPw4hvPHGGzfddFNZWdn27duvvfbagQMHjhkz5p///GduyOrVq88777zbbrvtN7/5TVFRUW62g85z2FasWFFWVvaPf/wjatSVV15ZUFDw9ttvd+dLA6C3ywIAdANLliw55J1JS0vLueeee+ONN+Z+3LRpU2FhYduoG2+88V//+lfu+PLLLz/55JObmpq++OKL448/PoRQVVX1+eefv/TSSyGEc889N3faaaedduKJJ+aOKysrv/766/bm6eRVhBDKy8v3bXn99dePO+64N998s/NDcgYNGlRSUpL4pXXmdQGgd/LnAQDoFjrzj+tTTz0VQtiwYUNby2mnnZYb9f777x/4xsmf/vSnbDb7k5/8ZN+ZTz755OLi4tzxSSedFEJ44oknMpnMxx9/3NTU1ME8nXHQdKClpSV2SDabLSsrGzx4cOKXJlAAoD0eeQAAjhm5RwCGDx/e1tKnz/9uZj744IPRo0fvd6MzadKkEEIqldp3kgEDBjQ3N+eOf//73/fr1+/OO+8855xzduzY0a9fvw7mOWwFBQWxQ9Lp9FdffTVu3LhufmkA9GYCBQDgmNHY2BhC2Lp164FdW7duraur27Vr176NmUym4wmnTZu2Zs2an//856tXr77wwgtffPHFw5sn75YvX753796JEyeGHndpAPQYAgUA4JhRXl4eQqitrT1o165dux555JG2lg0bNuQekejAb3/72xEjRrz11luvvPJKOp2eP3/+4c3TsdbW1qjz9+7dO3fu3DPPPPOOO+4I3fvSAOjNUtlsNukaAABCTU1NZWVlx3cma9euPfvss3/0ox9VV1dfdNFFK1euvOKKK5qamj777LNBgwadfvrpdXV1119//cSJEzds2LBq1ao//vGP/fr1O/XUU+vr69tmHjp0aGNjYzqdLiws/OEPf9jY2HjCCSe0tLQMHDiwvLz873//e3vzHPISdu/efdxxx40aNerf//53W2Ntbe3MmTOXLl1aUVHR3pDhw4d/9tlnuZaPPvpo9uzZDQ0NtbW1P/3pT0MIzc3NCV5aZ14XAHonOxQAgGPG2LFjly9fXl5ePn369NGjR69atWrcuHE333xzXV1dUVHR8uXLp0yZsmzZsjlz5nz99dfV1dX9+vVbvHhxfX19CKGqqqqpqemJJ57IPTdx//33
79mzZ9euXRMnTnzkkUeuvfbaCy+88NVXXy0uLj7oPIes7W9/+9vs2bNDCPX19QsXLly7dm2uvbi4uH///sXFxQcOeffdd2+//fbckEsuuaSiomLq1KlVVVWVlZXr16/PpQm5GZK9NAA4KDsUAIBuwTvh3ZPXBYD22KEAAHBoqfZt3Lgx6eoAIAGFSRcAAHAM8BY9AOzHDgUAAAAgmkABAAAAiCZQAAAAAKIJFAAAAIBoAgUAAAAgmkABAAAAiCZQAAAAAKIJFAAAAIBoAgUAAAAgmkABAAAAiCZQAAAAAKIJFAAAAIBoAgUAAAAgmkABAAAAiCZQAAAAAKIVJl0AAMD/zZgxI+kS+J6GhoakSwCgmxIoAADdwjnnnDNz5szW1takC+F7ysrKzj///KSrAKA7SmWz2aRrAAAAAI4xPkMBAAAAiCZQAAAAAKIJFAAAAIBoAgUAAAAgmkABAAAAiPZfldgpPo/pp18AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<IPython.core.display.Image object>"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "plot_model(deepfm_model, to_file='deepfm_model.png')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 712 samples, validate on 179 samples\n",
      "Epoch 1/5\n",
      "712/712 [==============================] - 2s 3ms/sample - loss: 1.2442 - accuracy: 0.5758 - val_loss: 0.7062 - val_accuracy: 0.6425\n",
      "Epoch 2/5\n",
      "712/712 [==============================] - 0s 153us/sample - loss: 0.7108 - accuracy: 0.7135 - val_loss: 0.6843 - val_accuracy: 0.6872\n",
      "Epoch 3/5\n",
      "712/712 [==============================] - 0s 153us/sample - loss: 0.5470 - accuracy: 0.7430 - val_loss: 0.6003 - val_accuracy: 0.7151\n",
      "Epoch 4/5\n",
      "712/712 [==============================] - 0s 151us/sample - loss: 0.5368 - accuracy: 0.7612 - val_loss: 0.5636 - val_accuracy: 0.7542\n",
      "Epoch 5/5\n",
      "712/712 [==============================] - 0s 148us/sample - loss: 0.5056 - accuracy: 0.7963 - val_loss: 0.6268 - val_accuracy: 0.7039\n"
     ]
    }
   ],
   "source": [
    "deepfm_model.compile(loss='binary_crossentropy', \\\n",
    "        optimizer=Adam(lr=1e-3), \\\n",
    "        metrics=['accuracy'])\n",
    " \n",
    "history = deepfm_model.fit(xtrain_data, ytrain, epochs=5, batch_size=32, validation_data=(xtest_data, ytest))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3de3RV5Z3/8feXiyAXQUMUJWBAHbkLMQVciAG8FMTLQhkHRK2Oluqv6ljH+ZVRWy1TVq3jTy0OY8s4atUodeloLV5op9KirSABFVREKBeNoAQUBIOXwPf3x7OTnIST5AROck52Pq+1zuLsvZ+z9zc75HOe8+x99jZ3R0REWr42mS5ARETSQ4EuIhITCnQRkZhQoIuIxIQCXUQkJhToIiIxoUCXpMysrZntNrM+6WybSWZ2vJml/TxdMzvDzDYmTK8xszGptD2AbT1gZjcf6OvrWe9PzezhdK9Xmle7TBcg6WFmuxMmOwFfAXuj6e+5e3Fj1ufue4Eu6W7bGrj7ielYj5ldBVzi7mMT1n1VOtYt8aRAjwl3rwrUqAd4lbv/b13tzaydu1c0R20i0jw05NJKRB+pf2NmT5jZLuASMzvFzJaY2Q4z22Jmc8ysfdS+nZm5meVH049Fy180s11m9pqZ9W1s22j5RDN738x2mtl9ZvYXM7u8jrpTqfF7ZrbOzD4zszkJr21rZveY2XYzWw9MqGf/3GJm82vNm2tmd0fPrzKz1dHP87eo91zXukrNbGz0vJOZPRrV9g5wcq22t5rZ+mi975jZedH8IcB/AGOi4axtCfv29oTXXx397NvN7FkzOzqVfdMQM5sc1bPDzF42sxMTlt1sZpvN7HMzey/hZx1lZiui+Z+Y2b+nuj1JE3fXI2YPYCNwRq15PwW+Bs4lvJEfCnwLGEn4pNYPeB+4NmrfDnAgP5p+DNgGFALtgd8Ajx1A2yOBXcD50bIbgW+Ay+v4WVKp8bdANyAf+LTyZweuBd4B8oAcYHH4L590O/2A3UDnhHVvBQqj6XOjNgaMB/YAQ6NlZwAbE9ZVCoyNnt8F/Ak4HDgWeLdW24uAo6PfycVRDUdFy64C/lSrzseA26PnZ0U1DgM6Av8JvJzKvkny8/8UeDh6PiCqY3z0O7oZWBM9HwRsAnpGbfsC/aLny4Bp0fOuwMhM/y20tod66K3Lq+7+O3ff5+573H2Zuy919wp3Xw/MA4rqef1T7l7i7t8AxYQgaWzbc4A33f230bJ7COGfVIo1/szdd7r7RkJ4Vm7rIuAedy919+3AHfVsZz3wNuGNBuBM4DN3L4mW/87d13vwMvBHIOmBz1ouAn7q7p+5+yZCrztxu0+6+5bod/I44c24MIX1AkwHHnD3N939S2AmUGRmeQlt6to39ZkKPOfuL0e/ozsIbwojgQrCm8egaNhuQ7TvILwxn2BmOe6+y92XpvhzSJoo0FuXDxMnzKy/mT1vZh+b2efALKBHPa//OOF5OfUfCK2r7TGJdbi7E3q0SaVYY0rbIvQs6/M4MC16fnE0XVnHOWa21Mw+NbMdhN5xffuq0tH11WBml5vZW9HQxg6gf4rrhfDzVa3P3T8HPgN6JbRpzO+srvXuI/yOern7GuCfCb+HrdEQXs+o6RXAQGCNmb1uZmen+HNImijQW5fap+z9itArPd7dDwN+TBhSaEpbCEMgAJiZUTOAajuYGrcAvROmGzqt8kngDDPrReipPx7VeCjwFPAzwnBId+D3KdbxcV01mFk/4H7gGiAnWu97Cett6BTLzYRhnMr1dSUM7XyUQl2NWW8bwu/sIwB3f8zdRxOGW9oS9gvuvsbdpxKG1f4f8LSZdTzIWqQRFOitW1dgJ/CFmQ0AvtcM21wAFJjZuWbWDvgnILeJanwSuMHMeplZDvDD+hq7+8fAq8DDwBp3Xxst6gAcApQBe83sHOD0RtRws5l1t3Ce/rUJy7oQ
QruM8N72XUIPvdInQF7lQeAkngCuNLOhZtaBEKyvuHudn3gaUfN5ZjY22va/EI57LDWzAWY2Ltrenuixj/ADXGpmPaIe/c7oZ9t3kLVIIyjQW7d/Br5D+GP9FeHgZZNy90+AfwDuBrYDxwFvEM6bT3eN9xPGulcRDtg9lcJrHicc5KwabnH3HcAPgGcIBxanEN6YUnEb4ZPCRuBF4JGE9a4E7gNej9qcCCSOO/8BWAt8YmaJQyeVr3+JMPTxTPT6PoRx9YPi7u8Q9vn9hDebCcB50Xh6B+BOwnGPjwmfCG6JXno2sNrCWVR3Af/g7l8fbD2SOgtDmCKZYWZtCR/xp7j7K5muR6QlUw9dmp2ZTYiGIDoAPyKcHfF6hssSafEU6JIJpwLrCR/nvw1Mdve6hlxEJEUachERiQn10EVEYiJjF+fq0aOH5+fnZ2rzIiIt0vLly7e5e9JTfTMW6Pn5+ZSUlGRq8yIiLZKZ1fmNZw25iIjEhAJdRCQmFOgiIjGhOxaJtBLffPMNpaWlfPnll5kuRVLQsWNH8vLyaN++rkv57E+BLtJKlJaW0rVrV/Lz8wkXuZRs5e5s376d0tJS+vbt2/ALIi1qyKW4GPLzoU2b8G9xo257LNK6ffnll+Tk5CjMWwAzIycnp9GfplpMD724GGbMgPLyML1pU5gGmH7Q15cTaR0U5i3HgfyuWkwP/ZZbqsO8Unl5mC8iIi0o0D/4oHHzRSS7bN++nWHDhjFs2DB69uxJr169qqa//jq1y6ZfccUVrFmzpt42c+fOpThN47Gnnnoqb775ZlrW1RxazJBLnz5hmCXZfBFJv+Li8An4gw/C39ns2Qc3vJmTk1MVjrfffjtdunThpptuqtGm6u71bZL3NR966KEGt/P973//wIts4VpMD332bOjUqea8Tp3CfBFJr8pjVps2gXv1MaumOBFh3bp1DBw4kOnTpzNo0CC2bNnCjBkzKCwsZNCgQcyaNauqbWWPuaKigu7duzNz5kxOOukkTjnlFLZu3QrArbfeyr333lvVfubMmYwYMYITTzyRv/71rwB88cUXXHjhhQwcOJApU6ZQWFjYYE/8scceY8iQIQwePJibb74ZgIqKCi699NKq+XPmzAHgnnvuYeDAgQwdOpRLLrkk7fusLi2mh17ZM0hnj0FEkqvvmFVT/M299957PPLIIxQWFgJwxx13cMQRR1BRUcG4ceOYMmUKAwcOrPGanTt3UlRUxB133MGNN97Igw8+yMyZM/dbt7vz+uuv89xzzzFr1ixeeukl7rvvPnr27MnTTz/NW2+9RUFBQb31lZaWcuutt1JSUkK3bt0444wzWLBgAbm5uWzbto1Vq1YBsGPHDgDuvPNONm3axCGHHFI1rzm0mB46hP9IGzfCvn3hX4W5SNNo7mNWxx13XFWYAzzxxBMUFBRQUFDA6tWreffdd/d7zaGHHsrEiRMBOPnkk9m4cWPSdV9wwQX7tXn11VeZOnUqACeddBKDBg2qt76lS5cyfvx4evToQfv27bn44otZvHgxxx9/PGvWrOH6669n4cKFdOvWDYBBgwZxySWXUFxc3KgvBh2sFhXoItI86jo21VTHrDp37lz1fO3atfziF7/g5ZdfZuXKlUyYMCHp+diHHHJI1fO2bdtSUVGRdN0dOnRosM2BysnJYeXKlYwZM4a5c+fyve99D4CFCxdy9dVXs2zZMkaMGMHevXvTut26KNBFZD+ZPGb1+eef07VrVw477DC2bNnCwoUL076N0aNH8+STTwKwatWqpJ8AEo0cOZJFixaxfft2KioqmD9/PkVFRZSVleHu/P3f/z2zZs1ixYoV7N27l9LSUsaPH8+dd97Jtm3bKK89ftVEWswYuog0n0wesyooKGDgwIH079+fY489ltGjR6d9G9dddx2XXXYZAwcOrHpUDpckk5eXx7/9278xduxY3J1zzz2XSZMmsWLFCq688krcHTPj5z//ORUVFVx88cXs2rWL
ffv2cdNNN9G1a9e0/wzJZOyeooWFha4bXIg0n9WrVzNgwIBMl5EVKioqqKiooGPHjqxdu5azzjqLtWvX0q5ddvVxk/3OzGy5uxcma99g9Wb2IHAOsNXdBydZPh34IWDALuAad3/rAGoXEWkWu3fv5vTTT6eiogJ351e/+lXWhfmBSOUneBj4D+CROpZvAIrc/TMzmwjMA0ampzwRkfTr3r07y5cvz3QZaddgoLv7YjPLr2f5XxMmlwB5B1+WiIg0VrrPcrkSeLGuhWY2w8xKzKykrKwszZsWEWnd0hboZjaOEOg/rKuNu89z90J3L8zNzU3XpkVEhDSdtmhmQ4EHgInuvj0d6xQRkcY56B66mfUB/ge41N3fP/iSRCSOxo0bt9+XhO69916uueaael/XpUsXADZv3syUKVOSthk7diwNnQZ977331viCz9lnn52W66zcfvvt3HXXXQe9nnRoMNDN7AngNeBEMys1syvN7Gozuzpq8mMgB/hPM3vTzHRyuYjsZ9q0acyfP7/GvPnz5zNt2rSUXn/MMcfw1FNPHfD2awf6Cy+8QPfu3Q94fdmowUB392nufrS7t3f3PHf/b3f/pbv/Mlp+lbsf7u7DokfSE95FpHWbMmUKzz//fNXNLDZu3MjmzZsZM2ZM1XnhBQUFDBkyhN/+9rf7vX7jxo0MHhy+CrNnzx6mTp3KgAEDmDx5Mnv27Klqd80111Rdeve2224DYM6cOWzevJlx48Yxbtw4APLz89m2bRsAd999N4MHD2bw4MFVl97duHEjAwYM4Lvf/S6DBg3irLPOqrGdZN58801GjRrF0KFDmTx5Mp999lnV9isvp1t5UbA///nPVTf4GD58OLt27TrgfVup5Z9JLyKNdsMNkO4b8QwbBlEWJnXEEUcwYsQIXnzxRc4//3zmz5/PRRddhJnRsWNHnnnmGQ477DC2bdvGqFGjOO+88+q8r+b9999Pp06dWL16NStXrqxx+dvZs2dzxBFHsHfvXk4//XRWrlzJ9ddfz913382iRYvo0aNHjXUtX76chx56iKVLl+LujBw5kqKiIg4//HDWrl3LE088wX/9139x0UUX8fTTT9d7ffPLLruM++67j6KiIn784x/zk5/8hHvvvZc77riDDRs20KFDh6phnrvuuou5c+cyevRodu/eTceOHRuxt5PTxblEpNkkDrskDre4OzfffDNDhw7ljDPO4KOPPuKTTz6pcz2LFy+uCtahQ4cydOjQqmVPPvkkBQUFDB8+nHfeeafBC2+9+uqrTJ48mc6dO9OlSxcuuOACXnnlFQD69u3LsGHDgPov0Qvh+uw7duygqKgIgO985zssXry4qsbp06fz2GOPVX0jdfTo0dx4443MmTOHHTt2pOWbquqhi7RC9fWkm9L555/PD37wA1asWEF5eTknn3wyAMXFxZSVlbF8+XLat29Pfn5+0kvmNmTDhg3cddddLFu2jMMPP5zLL7/8gNZTqfLSuxAuv9vQkEtdnn/+eRYvXszvfvc7Zs+ezapVq5g5cyaTJk3ihRdeYPTo0SxcuJD+/fsfcK2gHrqINKMuXbowbtw4/vEf/7HGwdCdO3dy5JFH0r59exYtWsSmZDcQTnDaaafx+OOPA/D222+zcuVKIFx6t3PnznTr1o1PPvmEF1+s/p5j165dk45TjxkzhmeffZby8nK++OILnnnmGcaMGdPon61bt24cfvjhVb37Rx99lKKiIvbt28eHH37IuHHj+PnPf87OnTvZvXs3f/vb3xgyZAg//OEP+da3vsV7773X6G3Wph66iDSradOmMXny5BpnvEyfPp1zzz2XIUOGUFhY2GBP9ZprruGKK65gwIABDBgwoKqnf9JJJzF8+HD69+9P7969a1x6d8aMGUyYMIFjjjmGRYsWVc0vKCjg8ssvZ8SIEQBcddVVDB8+vN7hlbr8+te/5uqrr6a8vJx+/frx0EMPsXfvXi655BJ27tyJu3P99dfTvXt3
fvSjH7Fo0SLatGnDoEGDqu6+dDB0+VyRVkKXz215Gnv5XA25iIjEhAJdRCQmFOgirUimhlil8Q7kd6VAF2klOnbsyPbt2xXqLYC7s3379kZ/2UhnuYi0Enl5eZSWlqJ7EbQMHTt2JC+vcfcLUqCLtBLt27enb9++mS5DmpCGXEREYkKBLiISEwp0EZGYUKCLiMSEAl1EJCYU6CIiMZHKPUUfNLOtZvZ2Hcv7m9lrZvaVmd2U/hJFRCQVqfTQHwYm1LP8U+B6IDtuey0i0kqlcpPoxYTQrmv5VndfBnyTzsJERKRxmnUM3cxmmFmJmZXo68ciIunVrIHu7vPcvdDdC3Nzc5tz0yIisaezXEREYkKBLiISEw1ebdHMngDGAj3MrBS4DWgP4O6/NLOeQAlwGLDPzG4ABrr7501WtYiI7KfBQHf3aQ0s/xho3EV7RUQk7TTkIiISEwp0EZGYUKCLiMSEAl1EJCYU6CIiMaFAFxGJCQW6iEhMKNBFRGJCgS4iEhMKdBGRmFCgi4jEhAJdRCQmFOgiIjGhQBcRiQkFuohITCjQRURiQoEuIhITDQa6mT1oZlvN7O06lpuZzTGzdWa20swK0l+miIg0JJUe+sPAhHqWTwROiB4zgPsPviwREWmsBgPd3RcDn9bT5HzgEQ+WAN3N7Oh0FSgiIqlJxxh6L+DDhOnSaJ6IiDSjZj0oamYzzKzEzErKysqac9MiIrGXjkD/COidMJ0XzduPu89z90J3L8zNzU3DpkVEpFI6Av054LLobJdRwE5335KG9YqISCO0a6iBmT0BjAV6mFkpcBvQHsDdfwm8AJwNrAPKgSuaqlgREalbg4Hu7tMaWO7A99NWkYiIHBB9U1REJCYU6CIiMaFAFxGJCQW6iEhMKNBFRGJCgS4iEhMKdBGRmFCgi4jEhAJdRCQmFOgiIjGhQBcRiQkFuohITCjQRURiQoEuIhITCnQRkZhQoIuIxIQCXUQkJhToIiIxkVKgm9kEM1tjZuvMbGaS5cea2R/NbKWZ/cnM8tJfqoiI1KfBQDeztsBcYCIwEJhmZgNrNbsLeMTdhwKzgJ+lu1AREalfKj30EcA6d1/v7l8D84Hza7UZCLwcPV+UZLmIiDSxVAK9F/BhwnRpNC/RW8AF0fPJQFczy6m9IjObYWYlZlZSVlZ2IPWKiEgd0nVQ9CagyMzeAIqAj4C9tRu5+zx3L3T3wtzc3DRtWkREANql0OYjoHfCdF40r4q7bybqoZtZF+BCd9+RriJFRKRhqfTQlwEnmFlfMzsEmAo8l9jAzHqYWeW6/hV4ML1liohIQxoMdHevAK4FFgKrgSfd/R0zm2Vm50XNxgJrzOx94ChgdhPVKyIidTB3z8iGCwsLvaSkJCPbFhFpqcxsubsXJlumb4qKiMSEAl1EJCYU6CIiMaFAFxGJCQW6iEhMKNBFRGJCgS4iEhMKdBGRmFCgi4jEhAJdRCQmFOgiIjGhQBcRiQkFuohITCjQRURiQoEuIhITCnQRkZhQoIuIxIQCXUQkJlIKdDObYGZrzGydmc1MsryPmS0yszfMbKWZnZ3+UkVEpD4NBrqZtQXmAhOBgcA0MxtYq9mthJtHDwemAv+Z7kJFRKR+qfTQRwDr3H29u38NzAfOr9XGgcOi592AzekrUUREUpFKoPcCPkyYLo3mJboduMTMSoEXgOuSrcjMZphZiZmVlJWVHUC5IiJSl3QdFJ0GPOzuecDZwKNmtt+63X2euxe6e2Fubm6aNi0iIpBaoH8E9E6YzovmJboSeBLA3V8DOgI90lGgiIikJpVAXwacYGZ9zewQwkHP52q1+QA4HcDMBhACXWMqIiLNqMFAd/cK4FpgIbCacDbLO2Y2y8zOi5r9M/BdM3sLeAK43N29qYoWEZH9tUulkbu/QDjYmTjvxwnP3wVGp7c0ERFpDH1TVEQkJhToIiIxoUAXEYkJBbqISEwo0EVEYkKBLiISEwp0EZGYUKCLiMSEAl1E
JCYU6CIiMaFAFxGJCQW6iEhMKNBFRGJCgS4iEhMKdBGRmFCgx1xxMeTnQ5s24d/i4kxXJCJNJaUbXEjLVFwMM2ZAeXmY3rQpTANMn565ukSkaaiHHmO33FId5pXKy8N8EYmflHroZjYB+AXQFnjA3e+otfweYFw02Qk40t27p7PQSqtWwaOPQk4O9OhR85GTA4cfDm3bNsWWW54PPmjcfBFp2RoMdDNrC8wFzgRKgWVm9lx0H1EA3P0HCe2vA4Y3Qa0AvP8+zJkDX31VV71wxBE1Q7526Nd+I+jePYwxx02fPmGYJdl8EYmfVHroI4B17r4ewMzmA+cD79bRfhpwW3rK29+FF8IFF4Shg23bwmP79urntac3boTly6GsDL7+Ovk627Sp/00g2bxu3bL/TWD27Jpj6ACdOoX5IhI/qQR6L+DDhOlSYGSyhmZ2LNAXeLmO5TOAGQB9DqKbaAadO4fHscem9hp3+OKL1N4E1q+H118Pz7/5Jvn62rRJrfefOK9bt1B7c6k88HnLLWGYpU+fEOY6ICoST+k+y2Uq8JS770220N3nAfMACgsLPc3brpcZdOkSHvn5qb3GHXbvrvtNIHHe2rXw2mthuq43gXbtan4SSGVY6LDDDu5NYPp0BbhIa5FKoH8E9E6YzovmJTMV+P7BFpUtzKBr1/Do2ze117jDrl119/4Tp9esgb/8JTzfm/QtMLwJpHosoHJe167N+0lARLJDKoG+DDjBzPoSgnwqcHHtRmbWHzgceC2tFbYwZqFXfdhh0K9faq9xh88/r/9NoHLe6tXVz+t6E2jfvmbIH3UUjB0LkyZB797JXyMiLV+Dge7uFWZ2LbCQcNrig+7+jpnNAkrc/bmo6VRgvrs361BKHJiF8fVu3eC441J7zb59+78J1PVp4K9/hd/8Jrxu6FA455zwGDFCp3iKxIllKn8LCwu9pKQkI9tubdzhvfdgwYLw+MtfQu++Rw+YODGE+1lnhdM3RSS7mdlydy9MukyB3vp89hksXAjPPw8vvACffhp66mPGhGGZc86BE0/UOLxINlKgS5327oWlS6t776tWhfnHHVcd7qedBh06ZLZOEQkU6JKyDz4IPffnn4c//hG+/DKc6nnmmSHczz4bevbMdJUirZcCXQ5IeTm8/HII9wULoLQ0zC8sDOE+aRIUFGT/N2ZF4kSBLgfNPQzHVA7NLFkS5vXsGXrt55wDZ5wRzoEXkaajQJe0KyuDl14KvfeXXoKdO+GQQ6CoqLr3nuopmCKSOgW6NKlvvgmnQlYOzbz3Xpjfv391uI8eHb7wJCIHR4Euzepvf6sO9z//OVzlsls3mDAhhPvEieEceBFpPAW6ZMyuXfC//1t95szHH4fz20eNqv7G6pAhOuddJFUKdMkK+/bBihXVvffKX39eXvXQzPjx4ZrtIpKcAl2y0pYt8OKLIdz/8IdwqeKOHeH000O4T5qkuyuJ1KZAl6z31VeweHH1aZHr14f5Q4dWf2N15EhdTExEgS4tinu4Vnzl0Mwrr4RLFOTkhAOqkybBt78dbggu0pKUl4ehxtxcGDDgwNahQJcWbccO+P3vQ7i/+GK4LHDbtnDqqdW99/79dWBVsos7bNgQ7mRW+XjrrdA5ueEGuOeeA1uvAl1iY+/ecL/XBQtCD/6tt8L8fv2qw72oSBcTk+ZX2ftODPCtW8Oyzp3D/QdOOaX6kZNzYNtRoEtsffhhuATwggXhYmJ79oQ/nsSLiR19dKarlLhxh40b9+99V1SE5SecEEJ71Kjw7+DB4XaS6aBAl1Zhzx5YtKi69/7BB2H+ySdX995PPlkXE5PGq937XrIEPvkkLKvd+x41qmm/OHfQgW5mE4BfEG5B94C735GkzUXA7YADb7n7fvcdTaRAl6bkDm+/XX1g9bXXwnnwRx1VfTGxM8/UxcRkfw31vo8/vubQSTp736k4qEA3s7bA+8CZQCnhptHT3P3dhDYnAE8C4939MzM70t231rde
Bbo0p+3bw0XEFiwI/+7YEa4tk3gxseOPz3SVkgl79uw/9l3Z++7Uaf/ed25uZus92EA/Bbjd3b8dTf8rgLv/LKHNncD77v5AqkUp0CVTKirCjbMrh2bejbomJ55YPe4+ZEj42KwzZ+Klsve9ZEl1eL/5Zvb0vlNxsIE+BZjg7ldF05cCI9392oQ2zxJ68aMJwzK3u/tL9a1XgS7ZYv366mvNLFoULiYGYWy0b9+6HxquyX61e99LloTrCUF29r5TUV+gp+u9px1wAjAWyAMWm9kQd99Rq5AZwAyAPvpOt2SJfv3guuvCY/fu8EWmtWvDOcSVjz/9KVxoLFFOTgj2fv32D/tjjw3Xh5fm4w6bNtUcOknsfR93XLgJS2WADxmSfb3vg5XKj/MR0DthOi+al6gUWOru3wAbzOx9QsAvS2zk7vOAeRB66AdatEhT6dIlfBt14sSa893h009DuK9fXzPs33gDnn22umcPYaimV6+6e/fHHKPLGBysPXtg+fKaAV67933TTdW97yOPzGy9zSGVQF8GnGBmfQlBPhWofQbLs8A04CEz6wH8HbA+nYWKZJJZ6JHn5IR7qta2bx9s3lwz6CsfixbBo4+GN4VK7dtDfn7dgZ+To/H7RLV730uWhDfS1tT7TkWDP7K7V5jZtcBCwvj4g+7+jpnNAkrc/blo2Vlm9i6wF/gXd9/elIWLZJM2bcJlgPPyYMyY/Zd/9VU4Lz5Z4K9YES5nkKhLl/rH77t0aZ6fK1Ma6n1/61utr/edCn2xSCQL7NoVzr6oDPnawzpffFGzfW5u3WHfp0/LGr93D292tce+v/kmLO/Xr+aZJ0OHts7edyV9U1SkBXMPPfhkvfsNG8JQRGX4Qfi00ND4fSa/LbtnT/hUkhjgW7aEZZW978QzT9T7rqk5znIRkSZiFnrkubnhQF9te/eG8fvavfoNG8Lt/zZvrjl+f8gh9Y/fH3FE+sbvU+l9jx9fc+xbNxM/cAp0kRaubVvo3Ts8ior2X/7VV6EXn6x3v2xZOHsnUdeuyYO+X7/wRtC5c921fPnl/mPflb3vQw8Nve8bb6zufR91VNp2g6BAF4m9Dh3g7/4uPJL5/PPkYb9uXbg1YHl5zfZHHlkz6PPy4P33QyH5TZQAAAZ+SURBVHi/8UbN3ve4cTXHvtX7bloaQxeROrlDWVndB2s/+CCcOljZ+04c+1bvu2loDF1EDohZ6JEfeWS4p2ttFRXhQlZHHqnedzbQlaFFEhQXh3HiNm3Cv8XFma4ou7VrF86oUZhnB/XQRSLFxTBjRvWY8aZNYRpg+vTM1SWSKvXQRSK33LL/AcDy8jBfpCVQoItEKm9Zl+p80RBVtlGgi0TquqKzrvScXOUQ1aZN1RfPmjFDoZ5JCnSRyOzZ4avniTp1CvNlfxqiyj4KdJHI9Okwb164OYVZ+HfePB0QrYuGqLKPznIRSTB9ugI8VX36hGGWZPMlM9RDF5EDoiGq7KNAF5EDoiGq7KMhFxE5YBqiyi7qoYuINJOmPm8/pUA3swlmtsbM1pnZzCTLLzezMjN7M3pcld4yRURatuY4b7/BQDeztsBcYCIwEJhmZgOTNP2Nuw+LHg+kr0QRkZavOc7bT6WHPgJY5+7r3f1rYD5wfvpKEBGJv+Y4bz+VQO8FfJgwXRrNq+1CM1tpZk+ZWe9kKzKzGWZWYmYlZWVlB1CuiEjL1ByXlkjXQdHfAfnuPhT4A/DrZI3cfZ67F7p7YW5ubpo2LSKS/ZrjvP1UAv0jILHHnRfNq+Lu2939q2jyAeDk9JQnIhIPzXHefirnoS8DTjCzvoQgnwpcnNjAzI529+je3pwHrE5fiSIi8dDU5+03GOjuXmFm1wILgbbAg+7+jpnNAkrc/TngejM7D6gAPgUub7qSRUQkGXP3jGy4sLDQS0pKMrJtEZGWysyWu3thsmX6pqiISEwo0EVEYkKBLiISExkbQzezMiDJ5fFT0gPYlsZy0iVb64LsrU11
NY7qapw41nWsuyf9Ik/GAv1gmFlJXQcFMilb64LsrU11NY7qapzWVpeGXEREYkKBLiISEy010OdluoA6ZGtdkL21qa7GUV2N06rqapFj6CIisr+W2kMXEZFaFOgiIjGR1YFuZg+a2VYze7uO5WZmc6J7na40s4IsqWusme1MuMfqj5uhpt5mtsjM3jWzd8zsn5K0afb9lWJdmdhfHc3sdTN7K6rrJ0nadDCz30T7a6mZ5WdJXRm7h6+ZtTWzN8xsQZJlzb6/Uqwrk/tro5mtira738Wr0v436e5Z+wBOAwqAt+tYfjbwImDAKGBpltQ1FljQzPvqaKAget4VeB8YmOn9lWJdmdhfBnSJnrcHlgKjarX5P8Avo+dTCffNzYa6Lgf+ozn3V8K2bwQeT/b7ysT+SrGuTO6vjUCPepan9W8yq3vo7r6YcDneupwPPOLBEqC7mR2dBXU1O3ff4u4roue7CNekr32rwGbfXynW1eyifbA7mmwfPWqfIXA+1Xffego43cwsC+rKCDPLAyYRbmKTTLPvrxTrymZp/ZvM6kBPQar3O82EU6KPzS+a2aDm3HD0UXc4oXeXKKP7q566IAP7K/qY/iawFfiDu9e5v9y9AtgJ5GRBXZDCPXybwL3A/wX21bE8I/srhbogM/sLwpvx781suZnNSLI8rX+TLT3Qs9UKwvUWTgLuA55trg2bWRfgaeAGd/+8ubbbkAbqysj+cve97j6McFvFEWY2uDm225AU6krpHr7pZGbnAFvdfXlTb6sxUqyr2fdXglPdvQCYCHzfzE5ryo219EBv8H6nmeDun1d+bHb3F4D2ZtajqbdrZu0JoVns7v+TpElG9ldDdWVqfyVsfwewCJhQa1HV/jKzdkA3YHum6/LM3MN3NHCemW0E5gPjzeyxWm0ysb8arCtD+6ty2x9F/24FngFG1GqS1r/Jlh7ozwGXRUeKRwE7vfrephljZj0rxw7NbARhPzfpf+xoe/8NrHb3u+to1uz7K5W6MrS/cs2se/T8UOBM4L1azZ4DvhM9nwK87NGRrEzWVWuMtVnu4evu/+ruee6eTzjg+bK7X1KrWbPvr1TqysT+irbb2cy6Vj4HzgJqnxmX1r/JVG4SnTFm9gThDIgeZlYK3EY4SIS7/xJ4gXCUeB1QDlyRJXVNAa4xswpgDzC1qf9jE3oqlwKrovFXgJuBPgl1ZWJ/pVJXJvbX0cCvzawt4Q3kSXdfYDXvlfvfwKNmto5wEHxqE9eUal1Zcw/fLNhfqdSVqf11FPBM1FdpBzzu7i+Z2dXQNH+T+uq/iEhMtPQhFxERiSjQRURiQoEuIhITCnQRkZhQoIuIxIQCXUQkJhToIiIx8f8B4c5AbDKWZ+0AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "None\n"
     ]
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    " \n",
    "loss = history.history['loss']\n",
    "val_loss = history.history['val_loss']\n",
    "epochs = range(1, len(loss) + 1)\n",
    "plt.figure()\n",
    "plt.plot(epochs, loss, 'bo', label='Training loss')\n",
    "plt.plot(epochs, val_loss, 'b', label='Validation loss')\n",
    "plt.title('Training and validation loss')\n",
    "plt.legend()\n",
    "print(plt.show())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}


================================================
FILE: DeepFM/README.md
================================================
# tf.version == '2.1.0'


# Run the model

Open and execute the notebook:

```
DeepFM.ipynb
```


# 模型 summary
```python
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
Pclass (InputLayer)             [(None, 1)]          0                                            
__________________________________________________________________________________________________
Sex (InputLayer)                [(None, 1)]          0                                            
__________________________________________________________________________________________________
Cabin (InputLayer)              [(None, 1)]          0                                            
__________________________________________________________________________________________________
Embarked (InputLayer)           [(None, 1)]          0                                            
__________________________________________________________________________________________________
embedding (Embedding)           (None, 1, 64)        256         Pclass[0][0]                     
__________________________________________________________________________________________________
embedding_1 (Embedding)         (None, 1, 64)        192         Sex[0][0]                        
__________________________________________________________________________________________________
embedding_2 (Embedding)         (None, 1, 64)        9536        Cabin[0][0]                      
__________________________________________________________________________________________________
embedding_3 (Embedding)         (None, 1, 64)        320         Embarked[0][0]                   
__________________________________________________________________________________________________
concatenate (Concatenate)       (None, 1, 256)       0           embedding[0][0]                  
                                                                 embedding_1[0][0]                
                                                                 embedding_2[0][0]                
                                                                 embedding_3[0][0]                
__________________________________________________________________________________________________
Age (InputLayer)                [(None, 1)]          0                                            
__________________________________________________________________________________________________
SibSp (InputLayer)              [(None, 1)]          0                                            
__________________________________________________________________________________________________
Parch (InputLayer)              [(None, 1)]          0                                            
__________________________________________________________________________________________________
Fare (InputLayer)               [(None, 1)]          0                                            
__________________________________________________________________________________________________
concatenate_1 (Concatenate)     (None, 4)            0           Age[0][0]                        
                                                                 SibSp[0][0]                      
                                                                 Parch[0][0]                      
                                                                 Fare[0][0]                       
__________________________________________________________________________________________________
flatten (Flatten)               (None, 256)          0           concatenate[0][0]                
__________________________________________________________________________________________________
concatenate_3 (Concatenate)     (None, 260)          0           flatten[0][0]                    
                                                                 concatenate_1[0][0]              
__________________________________________________________________________________________________
concatenate_2 (Concatenate)     (None, 4, 64)        0           embedding[0][0]                  
                                                                 embedding_1[0][0]                
                                                                 embedding_2[0][0]                
                                                                 embedding_3[0][0]                
__________________________________________________________________________________________________
dnn (DNN)                       (None, 128)          49920       concatenate_3[0][0]              
__________________________________________________________________________________________________
linear (Linear)                 (None, 1)            5           concatenate[0][0]                
                                                                 concatenate_1[0][0]              
__________________________________________________________________________________________________
fm (FM)                         (None, 1)            0           concatenate_2[0][0]              
__________________________________________________________________________________________________
dense (Dense)                   (None, 1)            128         dnn[0][0]                        
__________________________________________________________________________________________________
add (Add)                       (None, 1)            0           linear[0][0]                     
                                                                 fm[0][0]                         
                                                                 dense[0][0]                      
__________________________________________________________________________________________________
dense_1 (Dense)                 (None, 1)            2           add[0][0]                        
==================================================================================================
Total params: 60,359
Trainable params: 60,359
Non-trainable params: 0
__________________________________________________________________________________________________
None

```


# 参考

```python

1. DeepFM: A Factorization-Machine based Neural Network for CTR Prediction

2. https://github.com/shenweichen/DeepCTR
```

================================================
FILE: DeepFM/train.csv
================================================
PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked
1,0,3,"Braund, Mr. Owen Harris",male,22,1,0,A/5 21171,7.25,,S
2,1,1,"Cumings, Mrs. John Bradley (Florence Briggs Thayer)",female,38,1,0,PC 17599,71.2833,C85,C
3,1,3,"Heikkinen, Miss. Laina",female,26,0,0,STON/O2. 3101282,7.925,,S
4,1,1,"Futrelle, Mrs. Jacques Heath (Lily May Peel)",female,35,1,0,113803,53.1,C123,S
5,0,3,"Allen, Mr. William Henry",male,35,0,0,373450,8.05,,S
6,0,3,"Moran, Mr. James",male,,0,0,330877,8.4583,,Q
7,0,1,"McCarthy, Mr. Timothy J",male,54,0,0,17463,51.8625,E46,S
8,0,3,"Palsson, Master. Gosta Leonard",male,2,3,1,349909,21.075,,S
9,1,3,"Johnson, Mrs. Oscar W (Elisabeth Vilhelmina Berg)",female,27,0,2,347742,11.1333,,S
10,1,2,"Nasser, Mrs. Nicholas (Adele Achem)",female,14,1,0,237736,30.0708,,C
11,1,3,"Sandstrom, Miss. Marguerite Rut",female,4,1,1,PP 9549,16.7,G6,S
12,1,1,"Bonnell, Miss. Elizabeth",female,58,0,0,113783,26.55,C103,S
13,0,3,"Saundercock, Mr. William Henry",male,20,0,0,A/5. 2151,8.05,,S
14,0,3,"Andersson, Mr. Anders Johan",male,39,1,5,347082,31.275,,S
15,0,3,"Vestrom, Miss. Hulda Amanda Adolfina",female,14,0,0,350406,7.8542,,S
16,1,2,"Hewlett, Mrs. (Mary D Kingcome) ",female,55,0,0,248706,16,,S
17,0,3,"Rice, Master. Eugene",male,2,4,1,382652,29.125,,Q
18,1,2,"Williams, Mr. Charles Eugene",male,,0,0,244373,13,,S
19,0,3,"Vander Planke, Mrs. Julius (Emelia Maria Vandemoortele)",female,31,1,0,345763,18,,S
20,1,3,"Masselmani, Mrs. Fatima",female,,0,0,2649,7.225,,C
21,0,2,"Fynney, Mr. Joseph J",male,35,0,0,239865,26,,S
22,1,2,"Beesley, Mr. Lawrence",male,34,0,0,248698,13,D56,S
23,1,3,"McGowan, Miss. Anna ""Annie""",female,15,0,0,330923,8.0292,,Q
24,1,1,"Sloper, Mr. William Thompson",male,28,0,0,113788,35.5,A6,S
25,0,3,"Palsson, Miss. Torborg Danira",female,8,3,1,349909,21.075,,S
26,1,3,"Asplund, Mrs. Carl Oscar (Selma Augusta Emilia Johansson)",female,38,1,5,347077,31.3875,,S
27,0,3,"Emir, Mr. Farred Chehab",male,,0,0,2631,7.225,,C
28,0,1,"Fortune, Mr. Charles Alexander",male,19,3,2,19950,263,C23 C25 C27,S
29,1,3,"O'Dwyer, Miss. Ellen ""Nellie""",female,,0,0,330959,7.8792,,Q
30,0,3,"Todoroff, Mr. Lalio",male,,0,0,349216,7.8958,,S
31,0,1,"Uruchurtu, Don. Manuel E",male,40,0,0,PC 17601,27.7208,,C
32,1,1,"Spencer, Mrs. William Augustus (Marie Eugenie)",female,,1,0,PC 17569,146.5208,B78,C
33,1,3,"Glynn, Miss. Mary Agatha",female,,0,0,335677,7.75,,Q
34,0,2,"Wheadon, Mr. Edward H",male,66,0,0,C.A. 24579,10.5,,S
35,0,1,"Meyer, Mr. Edgar Joseph",male,28,1,0,PC 17604,82.1708,,C
36,0,1,"Holverson, Mr. Alexander Oskar",male,42,1,0,113789,52,,S
37,1,3,"Mamee, Mr. Hanna",male,,0,0,2677,7.2292,,C
38,0,3,"Cann, Mr. Ernest Charles",male,21,0,0,A./5. 2152,8.05,,S
39,0,3,"Vander Planke, Miss. Augusta Maria",female,18,2,0,345764,18,,S
40,1,3,"Nicola-Yarred, Miss. Jamila",female,14,1,0,2651,11.2417,,C
41,0,3,"Ahlin, Mrs. Johan (Johanna Persdotter Larsson)",female,40,1,0,7546,9.475,,S
42,0,2,"Turpin, Mrs. William John Robert (Dorothy Ann Wonnacott)",female,27,1,0,11668,21,,S
43,0,3,"Kraeff, Mr. Theodor",male,,0,0,349253,7.8958,,C
44,1,2,"Laroche, Miss. Simonne Marie Anne Andree",female,3,1,2,SC/Paris 2123,41.5792,,C
45,1,3,"Devaney, Miss. Margaret Delia",female,19,0,0,330958,7.8792,,Q
46,0,3,"Rogers, Mr. William John",male,,0,0,S.C./A.4. 23567,8.05,,S
47,0,3,"Lennon, Mr. Denis",male,,1,0,370371,15.5,,Q
48,1,3,"O'Driscoll, Miss. Bridget",female,,0,0,14311,7.75,,Q
49,0,3,"Samaan, Mr. Youssef",male,,2,0,2662,21.6792,,C
50,0,3,"Arnold-Franchi, Mrs. Josef (Josefine Franchi)",female,18,1,0,349237,17.8,,S
51,0,3,"Panula, Master. Juha Niilo",male,7,4,1,3101295,39.6875,,S
52,0,3,"Nosworthy, Mr. Richard Cater",male,21,0,0,A/4. 39886,7.8,,S
53,1,1,"Harper, Mrs. Henry Sleeper (Myna Haxtun)",female,49,1,0,PC 17572,76.7292,D33,C
54,1,2,"Faunthorpe, Mrs. Lizzie (Elizabeth Anne Wilkinson)",female,29,1,0,2926,26,,S
55,0,1,"Ostby, Mr. Engelhart Cornelius",male,65,0,1,113509,61.9792,B30,C
56,1,1,"Woolner, Mr. Hugh",male,,0,0,19947,35.5,C52,S
57,1,2,"Rugg, Miss. Emily",female,21,0,0,C.A. 31026,10.5,,S
58,0,3,"Novel, Mr. Mansouer",male,28.5,0,0,2697,7.2292,,C
59,1,2,"West, Miss. Constance Mirium",female,5,1,2,C.A. 34651,27.75,,S
60,0,3,"Goodwin, Master. William Frederick",male,11,5,2,CA 2144,46.9,,S
61,0,3,"Sirayanian, Mr. Orsen",male,22,0,0,2669,7.2292,,C
62,1,1,"Icard, Miss. Amelie",female,38,0,0,113572,80,B28,
63,0,1,"Harris, Mr. Henry Birkhardt",male,45,1,0,36973,83.475,C83,S
64,0,3,"Skoog, Master. Harald",male,4,3,2,347088,27.9,,S
65,0,1,"Stewart, Mr. Albert A",male,,0,0,PC 17605,27.7208,,C
66,1,3,"Moubarek, Master. Gerios",male,,1,1,2661,15.2458,,C
67,1,2,"Nye, Mrs. (Elizabeth Ramell)",female,29,0,0,C.A. 29395,10.5,F33,S
68,0,3,"Crease, Mr. Ernest James",male,19,0,0,S.P. 3464,8.1583,,S
69,1,3,"Andersson, Miss. Erna Alexandra",female,17,4,2,3101281,7.925,,S
70,0,3,"Kink, Mr. Vincenz",male,26,2,0,315151,8.6625,,S
71,0,2,"Jenkin, Mr. Stephen Curnow",male,32,0,0,C.A. 33111,10.5,,S
72,0,3,"Goodwin, Miss. Lillian Amy",female,16,5,2,CA 2144,46.9,,S
73,0,2,"Hood, Mr. Ambrose Jr",male,21,0,0,S.O.C. 14879,73.5,,S
74,0,3,"Chronopoulos, Mr. Apostolos",male,26,1,0,2680,14.4542,,C
75,1,3,"Bing, Mr. Lee",male,32,0,0,1601,56.4958,,S
76,0,3,"Moen, Mr. Sigurd Hansen",male,25,0,0,348123,7.65,F G73,S
77,0,3,"Staneff, Mr. Ivan",male,,0,0,349208,7.8958,,S
78,0,3,"Moutal, Mr. Rahamin Haim",male,,0,0,374746,8.05,,S
79,1,2,"Caldwell, Master. Alden Gates",male,0.83,0,2,248738,29,,S
80,1,3,"Dowdell, Miss. Elizabeth",female,30,0,0,364516,12.475,,S
81,0,3,"Waelens, Mr. Achille",male,22,0,0,345767,9,,S
82,1,3,"Sheerlinck, Mr. Jan Baptist",male,29,0,0,345779,9.5,,S
83,1,3,"McDermott, Miss. Brigdet Delia",female,,0,0,330932,7.7875,,Q
Download .txt
gitextract_0m5oq6rv/

├── AFM/
│   ├── AFMLayer.py
│   ├── AFM_Model.py
│   ├── README.md
│   ├── data.py
│   ├── data_generator.py
│   ├── master.sh
│   ├── preprocess.py
│   └── train_AFM_model.py
├── BST/
│   ├── README.md
│   ├── bst_model.py
│   ├── din.py
│   └── transformer.py
├── BilinearFFM/
│   ├── BilinearFFM.py
│   ├── BilinearInteraction.py
│   ├── README.md
│   ├── data.py
│   ├── data_generator.py
│   ├── master.sh
│   ├── preprocess.py
│   └── train_BilinearFFM_model.py
├── DeepFM/
│   ├── DeepFM.ipynb
│   ├── README.md
│   └── train.csv
├── ESMM/
│   ├── README.md
│   ├── data/
│   │   └── esmm_raw_sample_data
│   ├── master.sh
│   ├── run_train_esmm_model.sh
│   ├── split_train_val.py
│   ├── split_train_val.sh
│   ├── tar_model.py
│   ├── train_esmm_finetune.py
│   └── write_tfrecord.py
├── MIND/
│   ├── CapsuleLayer.py
│   ├── README.md
│   ├── data.py
│   ├── data_generator.py
│   ├── master.sh
│   ├── mind.py
│   ├── predict.py
│   ├── preprocess.py
│   └── train_mind.py
├── MMoE/
│   └── README.md
├── NFM/
│   ├── NFM.ipynb
│   ├── README.md
│   └── train.csv
├── README.md
├── Wide&Deep/
│   ├── README.md
│   └── wide_and_deep.py
├── Word2vec/
│   ├── README.md
│   ├── data/
│   │   └── w2v_order_seq
│   ├── train_w2v_model.sh
│   └── word2vec
├── YouTubeNet/
│   ├── README.md
│   ├── SequencePoolingLayer.py
│   ├── YouTubeNet.py
│   ├── data.py
│   ├── data_generator.py
│   ├── load_YouTubeNet_model_to_predict.py
│   ├── master.sh
│   ├── preprocess.py
│   └── train_YouTubeNet_model.py
└── vgg16_figure_search_annoy/
    ├── README.md
    ├── build_figure_ann.py
    ├── download_jd_figures.py
    ├── extract_figure_feature.py
    ├── search_topN_figure.py
    └── threadings_download_txt.py
Download .txt
SYMBOL INDEX (115 symbols across 28 files)

FILE: AFM/AFMLayer.py
  class AFMLayer (line 11) | class AFMLayer(Layer):
    method __init__ (line 37) | def __init__(self, attention_factor=4, l2_reg_w=0, dropout_rate=0, see...
    method build (line 44) | def build(self, input_shape):
    method call (line 86) | def call(self, inputs, training=None, **kwargs):
    method compute_output_shape (line 118) | def compute_output_shape(self, input_shape):
    method get_config (line 125) | def get_config(self, ):

FILE: AFM/AFM_Model.py
  function AFM (line 13) | def AFM(

FILE: AFM/data_generator.py
  function init_output (line 6) | def init_output():
  function file_generator (line 19) | def file_generator(input_path, batch_size):

FILE: AFM/preprocess.py
  function gen_data_set (line 6) | def gen_data_set(data, negsample=0):
  function gen_model_input (line 36) | def gen_model_input(train_set,user_profile,seq_max_len):

FILE: BST/bst_model.py
  function bst_model (line 14) | def bst_model(sparse_input_length = 1, \

FILE: BST/din.py
  function din_padding_mask (line 7) | def din_padding_mask(seq):
  class LocalActivationUnit (line 16) | class LocalActivationUnit(tf.keras.layers.Layer):
    method __init__ (line 18) | def __init__(self, d_model, middle_units, dropout_rate, **kwargs):
    method build (line 26) | def build(self, input_shape):
    method call (line 37) | def call(self, inputs, training=None, **kwargs):
    method compute_output_shape (line 50) | def compute_output_shape(self, input_shape):
    method get_config (line 54) | def get_config(self):
  class DinAttentionLayer (line 64) | class DinAttentionLayer(tf.keras.layers.Layer):
    method __init__ (line 65) | def __init__(self, d_model, middle_units, dropout_rate, **kwargs):
    method call (line 75) | def call(self, inputs, **kwargs):
    method compute_output_shape (line 90) | def compute_output_shape(self, input_shape):
    method get_config (line 94) | def get_config(self):

FILE: BST/transformer.py
  class Encoder (line 9) | class Encoder(tf.keras.layers.Layer):
    method __init__ (line 10) | def __init__(self, n_layers, d_model, num_heads, middle_units,
    method call (line 24) | def call(self, inputs, **kwargs):
  class EncoderLayer (line 37) | class EncoderLayer(tf.keras.layers.Layer):
    method __init__ (line 38) | def __init__(self, d_model, num_heads, middle_units, \
    method call (line 53) | def call(self, inputs, mask, **kwargs):
  class LayerNormalization (line 70) | class LayerNormalization(tf.keras.layers.Layer):
    method __init__ (line 71) | def __init__(self, epsilon=1e-6, **kwargs):
    method build (line 75) | def build(self, input_shape):
    method call (line 82) | def call(self, x):
    method compute_output_shape (line 87) | def compute_output_shape(self, input_shape):
  function point_wise_feed_forward_network (line 94) | def point_wise_feed_forward_network(d_model, middle_units):
  function scaled_dot_product_attention (line 103) | def scaled_dot_product_attention(q, k, v, mask):
  class MultiHeadAttention (line 120) | class MultiHeadAttention(tf.keras.layers.Layer):
    method __init__ (line 121) | def __init__(self, d_model, num_heads, **kwargs):
    method split_heads (line 140) | def split_heads(self, x, batch_size):
    method call (line 145) | def call(self, inputs, **kwargs):
  function padding_mask (line 177) | def padding_mask(seq):
  class PositionalEncoding (line 188) | class PositionalEncoding(tf.keras.layers.Layer):
    method __init__ (line 189) | def __init__(self, sequence_len=None, embedding_dim=None, **kwargs):
    method call (line 194) | def call(self, inputs):
    method compute_output_shape (line 210) | def compute_output_shape(self, input_shape):

FILE: BilinearFFM/BilinearFFM.py
  function BilinearFFM (line 13) | def BilinearFFM(

FILE: BilinearFFM/BilinearInteraction.py
  class BilinearInteraction (line 9) | class BilinearInteraction(Layer):
    method __init__ (line 23) | def __init__(self, bilinear_type="each", seed=1024, **kwargs):
    method build (line 29) | def build(self, input_shape):
    method call (line 51) | def call(self, inputs, **kwargs):
    method compute_output_shape (line 72) | def compute_output_shape(self, input_shape):
    method get_config (line 78) | def get_config(self, ):

FILE: BilinearFFM/data_generator.py
  function init_output (line 6) | def init_output():
  function file_generator (line 19) | def file_generator(input_path, batch_size):

FILE: BilinearFFM/preprocess.py
  function gen_data_set (line 6) | def gen_data_set(data, negsample=0):
  function gen_model_input (line 36) | def gen_model_input(train_set,user_profile,seq_max_len):

FILE: ESMM/split_train_val.py
  function split_train_val (line 5) | def split_train_val(sample_path, train_path, val_path, summary_path, tra...

FILE: ESMM/tar_model.py
  function tar (line 9) | def tar(input_paths, output_path):
  function md5_file (line 18) | def md5_file(name):

FILE: ESMM/train_esmm_finetune.py
  function parse_example (line 10) | def parse_example(proto):
  function get_tfrecord_dataset (line 28) | def get_tfrecord_dataset(tf_path, batch_size = None, num_parallel_calls ...
  function train_finetune (line 40) | def train_finetune(train_path, val_path, model_path, \
  function build_model (line 105) | def build_model():

FILE: ESMM/write_tfrecord.py
  function _parse_line (line 5) | def _parse_line(line):
  function _to_float_feature (line 15) | def _to_float_feature(value_list):
  function _to_int64_feature (line 19) | def _to_int64_feature(value):
  function serialize_example (line 23) | def serialize_example(pin_vec, sku_vec, ctr_label, cvr_label):
  function make_tfrecord (line 39) | def make_tfrecord(input_path, tf_path):

FILE: MIND/CapsuleLayer.py
  class SequencePoolingLayer (line 7) | class SequencePoolingLayer(Layer):
    method __init__ (line 26) | def __init__(self, mode='mean', supports_masking=False, **kwargs):
    method build (line 36) | def build(self, input_shape):
    method call (line 41) | def call(self, seq_value_len_list, mask=None, **kwargs):
    method compute_output_shape (line 73) | def compute_output_shape(self, input_shape):
    method compute_mask (line 79) | def compute_mask(self, inputs, mask):
    method get_config (line 82) | def get_config(self, ):
  class LabelAwareAttention (line 89) | class LabelAwareAttention(Layer):
    method __init__ (line 90) | def __init__(self, k_max, pow_p=1, **kwargs):
    method build (line 95) | def build(self, input_shape):
    method call (line 100) | def call(self, inputs, training=None, **kwargs):
    method compute_output_shape (line 124) | def compute_output_shape(self, input_shape):
    method get_config (line 127) | def get_config(self, ):
  class CapsuleLayer (line 134) | class CapsuleLayer(Layer):
    method __init__ (line 135) | def __init__(self, input_units, out_units, max_len, k_max, iteration_t...
    method build (line 145) | def build(self, input_shape):
    method call (line 154) | def call(self, inputs, **kwargs):
    method compute_output_shape (line 177) | def compute_output_shape(self, input_shape):
    method get_config (line 180) | def get_config(self, ):
  function squash (line 188) | def squash(inputs):

FILE: MIND/data_generator.py
  function init_output (line 6) | def init_output():
  function file_generator (line 22) | def file_generator(input_path, batch_size):

FILE: MIND/mind.py
  function tile_user_otherfeat (line 13) | def tile_user_otherfeat(user_other_feature, k_max):
  function mind (line 17) | def mind(

FILE: MIND/preprocess.py
  function gen_data_set (line 6) | def gen_data_set(data, negsample=0):
  function gen_model_input (line 37) | def gen_model_input(train_set,user_profile,seq_max_len):

FILE: Wide&Deep/wide_and_deep.py
  function wide_and_deep (line 9) | def wide_and_deep(

FILE: YouTubeNet/SequencePoolingLayer.py
  class SequencePoolingLayer (line 8) | class SequencePoolingLayer(Layer):
    method __init__ (line 10) | def __init__(self, mode="mean", support_mask=True, sequence_mask_lengt...
    method build (line 23) | def build(self, input_shape):
    method call (line 27) | def call(self, input_hist_seq_list, **kwargs):
    method compute_output_shape (line 78) | def compute_output_shape(self, input_shape):
    method config (line 82) | def config(self):

FILE: YouTubeNet/YouTubeNet.py
  function YouTubeNet (line 13) | def YouTubeNet(

FILE: YouTubeNet/data_generator.py
  function init_output (line 6) | def init_output():
  function file_generator (line 22) | def file_generator(input_path, batch_size):

FILE: YouTubeNet/preprocess.py
  function gen_data_set (line 6) | def gen_data_set(data, negsample=0):
  function gen_model_input (line 36) | def gen_model_input(train_set,user_profile,seq_max_len):

FILE: vgg16_figure_search_annoy/build_figure_ann.py
  function build_ann (line 9) | def build_ann(name_path=None, \

FILE: vgg16_figure_search_annoy/download_jd_figures.py
  function download_images (line 30) | def download_images(inx, key):
  function main (line 69) | def main():

FILE: vgg16_figure_search_annoy/search_topN_figure.py
  function load_ann (line 10) | def load_ann(ann_path=None, index_to_name_dict_path=None, dim=64):
  function query_ann (line 19) | def query_ann(ann=None, index_to_name_dict=None, query_vec=None, topN=No...

FILE: vgg16_figure_search_annoy/threadings_download_txt.py
  function download_image (line 9) | def download_image(url, filename):
  function execute_thread (line 16) | def execute_thread(i):
  function main (line 22) | def main():
Condensed preview — 67 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (3,151K chars).
[
  {
    "path": "AFM/AFMLayer.py",
    "chars": 5256,
    "preview": "import tensorflow as tf\nfrom tensorflow.keras.layers import Layer\nfrom tensorflow.keras import backend as K\nimport itert"
  },
  {
    "path": "AFM/AFM_Model.py",
    "chars": 2542,
    "preview": "#-*- coding:utf-8 -*-\n\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Embedding, concatenate, Dense"
  },
  {
    "path": "AFM/README.md",
    "chars": 3928,
    "preview": "# tf.version == '2.1.0'\n\n# Data format : \n\n```python\n说明:\n\n(1)第1列:user id;\n(2)第2列:user gender id;\n(3)第3列:user age id;\n(4)"
  },
  {
    "path": "AFM/data.py",
    "chars": 3979,
    "preview": "#-*- coding:utf-8 -*-\n\n# https://github.com/shenweichen/DeepMatch/blob/master/examples/colab_MovieLen1M_YoutubeDNN.ipynb"
  },
  {
    "path": "AFM/data_generator.py",
    "chars": 1599,
    "preview": "#-*- coding:utf-8 -*-\n\nimport numpy as np\n\n\ndef init_output():\n    user_id = []\n    gender = []\n    age = []\n    occupat"
  },
  {
    "path": "AFM/master.sh",
    "chars": 43,
    "preview": "python data.py \n\npython train_AFM_model.py\n"
  },
  {
    "path": "AFM/preprocess.py",
    "chars": 2049,
    "preview": "import random\nimport numpy as np\nfrom tqdm import tqdm\nfrom tensorflow.python.keras.preprocessing.sequence import pad_se"
  },
  {
    "path": "AFM/train_AFM_model.py",
    "chars": 1690,
    "preview": "#-*- coding:utf-8 -*-\n\nimport tensorflow as tf\nfrom tensorflow.keras.optimizers import Adam\n\nfrom data_generator import "
  },
  {
    "path": "BST/README.md",
    "chars": 8202,
    "preview": "# tf.version == '2.1.0'\n\n# 运行\n\n+ python bst_model.py \n\n\n# 模型 summary\n\n```python\n________________________________________"
  },
  {
    "path": "BST/bst_model.py",
    "chars": 5702,
    "preview": "import tensorflow as tf\nfrom tensorflow.keras.layers import Input, Embedding, concatenate, Flatten, Dense, Dropout\n\nfrom"
  },
  {
    "path": "BST/din.py",
    "chars": 3410,
    "preview": "import numpy as np\nimport tensorflow as tf\nfrom tensorflow.keras import backend as K\n\n\n\ndef din_padding_mask(seq):\n    #"
  },
  {
    "path": "BST/transformer.py",
    "chars": 7588,
    "preview": "import numpy as np\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras import backend as K\n\n\n# \"A"
  },
  {
    "path": "BilinearFFM/BilinearFFM.py",
    "chars": 2384,
    "preview": "#-*- coding:utf-8 -*-\n\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Embedding, concatenate, Dense"
  },
  {
    "path": "BilinearFFM/BilinearInteraction.py",
    "chars": 3408,
    "preview": "import tensorflow as tf\nfrom tensorflow.keras.layers import Layer\nfrom tensorflow.keras import backend as K\nimport itert"
  },
  {
    "path": "BilinearFFM/README.md",
    "chars": 3990,
    "preview": "# tf.version == '2.1.0'\n\n# Data format : \n\n```python\n说明:\n\n(1)第1列:user id;\n(2)第2列:user gender id;\n(3)第3列:user age id;\n(4)"
  },
  {
    "path": "BilinearFFM/data.py",
    "chars": 3979,
    "preview": "#-*- coding:utf-8 -*-\n\n# https://github.com/shenweichen/DeepMatch/blob/master/examples/colab_MovieLen1M_YoutubeDNN.ipynb"
  },
  {
    "path": "BilinearFFM/data_generator.py",
    "chars": 1599,
    "preview": "#-*- coding:utf-8 -*-\n\nimport numpy as np\n\n\ndef init_output():\n    user_id = []\n    gender = []\n    age = []\n    occupat"
  },
  {
    "path": "BilinearFFM/master.sh",
    "chars": 51,
    "preview": "python data.py \n\npython train_BilinearFFM_model.py\n"
  },
  {
    "path": "BilinearFFM/preprocess.py",
    "chars": 2049,
    "preview": "import random\nimport numpy as np\nfrom tqdm import tqdm\nfrom tensorflow.python.keras.preprocessing.sequence import pad_se"
  },
  {
    "path": "BilinearFFM/train_BilinearFFM_model.py",
    "chars": 1724,
    "preview": "#-*- coding:utf-8 -*-\n\nimport tensorflow as tf\nfrom tensorflow.keras.optimizers import Adam\n\nfrom data_generator import "
  },
  {
    "path": "DeepFM/DeepFM.ipynb",
    "chars": 119743,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 1,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n "
  },
  {
    "path": "DeepFM/README.md",
    "chars": 6548,
    "preview": "# tf.version == '2.1.0'\n\n\n# run model\n```python\nDeepFM.ipynb\n\n```\n\n\n# 模型 summary\n```python\n_____________________________"
  },
  {
    "path": "DeepFM/train.csv",
    "chars": 60302,
    "preview": "PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked\n1,0,3,\"Braund, Mr. Owen Harris\",male,22,"
  },
  {
    "path": "ESMM/README.md",
    "chars": 2950,
    "preview": "# tf.version == '2.1.0'\n\n# Data format : \n\n```python\n说明:\n\n(1)第1列:ctr 的值,0表示曝光后未点击,1表示曝光后点击;\n(2)第2列:ctcvr 的值,0表示(曝光后未点击、或"
  },
  {
    "path": "ESMM/data/esmm_raw_sample_data",
    "chars": 2434245,
    "preview": "0,0,-0.861100,0.113800,-0.142900,0.067500,0.666300,-0.942800,-1.392300,-0.373300,0.173300,0.935200,-0.116300,0.143900,-0"
  },
  {
    "path": "ESMM/master.sh",
    "chars": 125,
    "preview": "today=`date '+%Y-%m-%d %H:%M:%S'`\necho $today\n\n\n\nsh split_train_val.sh\n\npython write_tfrecord.py\n\nsh run_train_esmm_mode"
  },
  {
    "path": "ESMM/run_train_esmm_model.sh",
    "chars": 325,
    "preview": "train_data_path=data/train_data.tfrecord\nval_data_path=data/val_data.tfrecord\nmodel_path=__model/esmm_finetune.model\ntra"
  },
  {
    "path": "ESMM/split_train_val.py",
    "chars": 950,
    "preview": "import sys\nimport numpy as np\n\n\ndef split_train_val(sample_path, train_path, val_path, summary_path, train_percent = Non"
  },
  {
    "path": "ESMM/split_train_val.sh",
    "chars": 598,
    "preview": "raw_sample_data_path=data/esmm_raw_sample_data\nsample_data_path=data/esmm_sample_data\n\n# shuffle\necho `date`\necho 'Start"
  },
  {
    "path": "ESMM/tar_model.py",
    "chars": 1209,
    "preview": "import sys\nimport datetime\nimport tarfile\nfrom hashlib import md5\nimport time\nimport os\n\n\ndef tar(input_paths, output_pa"
  },
  {
    "path": "ESMM/train_esmm_finetune.py",
    "chars": 4087,
    "preview": "import sys\nimport time\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Dense, Lamb"
  },
  {
    "path": "ESMM/write_tfrecord.py",
    "chars": 2061,
    "preview": "import tensorflow as tf\nimport numpy as np\n\n\ndef _parse_line(line):\n    buf = line.split(',')\n    ctr_label = int(buf[0]"
  },
  {
    "path": "MIND/CapsuleLayer.py",
    "chars": 7844,
    "preview": "import tensorflow as tf\n\nfrom tensorflow.keras.layers import Layer\nfrom tensorflow.keras.initializers import RandomNorma"
  },
  {
    "path": "MIND/README.md",
    "chars": 8286,
    "preview": "# tf.__version__ == '2.1.0'\n\n\n# 运行\n``` shell\nsh master.sh\n\n```\n\n\n# mind's summary\n\n```python\n\nModel: \"model_1\"\n_________"
  },
  {
    "path": "MIND/data.py",
    "chars": 4017,
    "preview": "#-*- coding:utf-8 -*-\n\n# https://github.com/shenweichen/DeepMatch/blob/master/examples/colab_MovieLen1M_YoutubeDNN.ipynb"
  },
  {
    "path": "MIND/data_generator.py",
    "chars": 2241,
    "preview": "#-*- coding:utf-8 -*-\n\nimport numpy as np\n\n\ndef init_output():\n    user_id = []\n    gender = []\n    age = []\n    occupat"
  },
  {
    "path": "MIND/master.sh",
    "chars": 55,
    "preview": "python data.py\n\npython train_mind.py\n\npython predict.py"
  },
  {
    "path": "MIND/mind.py",
    "chars": 6628,
    "preview": "# create model\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Embedding, concatenate, Flatten, Dens"
  },
  {
    "path": "MIND/predict.py",
    "chars": 1923,
    "preview": "import tensorflow as tf\nimport numpy as np\nfrom tensorflow.keras.models import Model\n\nfrom mind import mind\nfrom data_ge"
  },
  {
    "path": "MIND/preprocess.py",
    "chars": 2050,
    "preview": "import random\nimport numpy as np\nfrom tqdm import tqdm\nfrom tensorflow.python.keras.preprocessing.sequence import pad_se"
  },
  {
    "path": "MIND/train_mind.py",
    "chars": 1743,
    "preview": "import os\nimport tensorflow as tf\nfrom data_generator import file_generator\nfrom mind import mind\n\nfrom tensorflow.keras"
  },
  {
    "path": "MMoE/README.md",
    "chars": 90,
    "preview": "# 参考\n\n[https://github.com/drawbridge/keras-mmoe](https://github.com/drawbridge/keras-mmoe)"
  },
  {
    "path": "NFM/NFM.ipynb",
    "chars": 132533,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 44,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n"
  },
  {
    "path": "NFM/README.md",
    "chars": 6634,
    "preview": "# tf.version == '2.1.0'\n\n\n# run model\n```python\nNFM.ipynb\n\n```\n\n\n# 模型 summary\n```python\n________________________________"
  },
  {
    "path": "NFM/train.csv",
    "chars": 60302,
    "preview": "PassengerId,Survived,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked\n1,0,3,\"Braund, Mr. Owen Harris\",male,22,"
  },
  {
    "path": "README.md",
    "chars": 4210,
    "preview": "# 点击预估模型\n\n\n## 1. Recall\n\n| 算法        | 论文    |  公众号或知乎文章介绍  |\n| --------    | -----  | ----            |\n| Word2vec     "
  },
  {
    "path": "Wide&Deep/README.md",
    "chars": 10396,
    "preview": "# tf.__version__ == '2.1.0'\n\n\n# Data format:\n\n```python\n\n\"user_id\",\"gender\",\"age\",\"item_id\",\"click_count\",\"sales_count\","
  },
  {
    "path": "Wide&Deep/wide_and_deep.py",
    "chars": 3731,
    "preview": "import tensorflow as tf\n\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras.layers import Input, Embedding,"
  },
  {
    "path": "Word2vec/README.md",
    "chars": 113,
    "preview": "# 添加权限\n\n```shell\nchmod +x train_w2v_model.sh\n\nchmod +x word2vec\n\n```\n\n# 运行\n\n```shell\n\nsh train_w2v_model.sh\n```\n\n"
  },
  {
    "path": "Word2vec/data/w2v_order_seq",
    "chars": 137749,
    "preview": "1062015 1187782 2179315 2210183 2617504 2945853 4389188 630867 7280688 1196619906 31004597695 31004616702 100006902552 1"
  },
  {
    "path": "Word2vec/train_w2v_model.sh",
    "chars": 582,
    "preview": "# trainData: skus, split by space\n\ntrainData=data/w2v_order_seq\noutputData=data/w2v_order_item_vec_128dim\n\n\n\nsize=128\nwi"
  },
  {
    "path": "YouTubeNet/README.md",
    "chars": 6936,
    "preview": "# tf.__version__ == '2.1.0'\n\n\n# 运行\n``` shell\nsh master.sh\n\n```\n\n\n# YouTubeNet's summary\n\n```python\n\nModel: \"model_1\"\n___"
  },
  {
    "path": "YouTubeNet/SequencePoolingLayer.py",
    "chars": 2763,
    "preview": "#-*- coding:utf-8 -*-\n\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Lambda, Layer\n\n\nclass SequencePoolin"
  },
  {
    "path": "YouTubeNet/YouTubeNet.py",
    "chars": 4826,
    "preview": "#-*- coding:utf-8 -*-\n\n\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Embedding, concatenate, Dense"
  },
  {
    "path": "YouTubeNet/data.py",
    "chars": 4017,
    "preview": "#-*- coding:utf-8 -*-\n\n# https://github.com/shenweichen/DeepMatch/blob/master/examples/colab_MovieLen1M_YoutubeDNN.ipynb"
  },
  {
    "path": "YouTubeNet/data_generator.py",
    "chars": 2241,
    "preview": "#-*- coding:utf-8 -*-\n\nimport numpy as np\n\n\ndef init_output():\n    user_id = []\n    gender = []\n    age = []\n    occupat"
  },
  {
    "path": "YouTubeNet/load_YouTubeNet_model_to_predict.py",
    "chars": 3344,
    "preview": "#-*- coding:utf-8 -*-\n\nimport tensorflow as tf\nimport numpy as np\nfrom tensorflow.keras.models import Model\n\nfrom YouTub"
  },
  {
    "path": "YouTubeNet/master.sh",
    "chars": 164,
    "preview": "\n# 1. data preprocession\npython data.py\n\n\n# 2. train mdoel\npython train_YouTubeNet_model.py\n\n\n# 3. load model to predict"
  },
  {
    "path": "YouTubeNet/preprocess.py",
    "chars": 2049,
    "preview": "import random\nimport numpy as np\nfrom tqdm import tqdm\nfrom tensorflow.python.keras.preprocessing.sequence import pad_se"
  },
  {
    "path": "YouTubeNet/train_YouTubeNet_model.py",
    "chars": 1740,
    "preview": "#-*- coding:utf-8 -*-\n\nimport tensorflow as tf\nfrom tensorflow.keras.optimizers import Adam\n\nfrom data_generator import "
  },
  {
    "path": "vgg16_figure_search_annoy/README.md",
    "chars": 674,
    "preview": "# vgg16 model extract figure feature, Annoy to search similar figures\n\n### 推荐阅读:[Annoy最近邻检索技术之 “图片检索”](https://zhuanlan."
  },
  {
    "path": "vgg16_figure_search_annoy/build_figure_ann.py",
    "chars": 1558,
    "preview": "# encoding:utf-8\nfrom annoy import AnnoyIndex\nimport pickle\nimport numpy as np\nnp.random.seed(20200601)\nimport sys, time"
  },
  {
    "path": "vgg16_figure_search_annoy/download_jd_figures.py",
    "chars": 2746,
    "preview": "# encoding=\"utf-8\"\n\nfrom requests_html import HTMLSession\nimport re\nimport os\nimport time\nimport threading\nimport urllib"
  },
  {
    "path": "vgg16_figure_search_annoy/extract_figure_feature.py",
    "chars": 942,
    "preview": "from tensorflow.keras.applications.vgg16 import VGG16\nfrom tensorflow.keras.preprocessing import image\nfrom tensorflow.k"
  },
  {
    "path": "vgg16_figure_search_annoy/search_topN_figure.py",
    "chars": 2202,
    "preview": "# encoding:utf-8\nfrom annoy import AnnoyIndex\nimport numpy as np\nnp.random.seed(20200601)\nimport pickle\nimport sys\nfrom "
  },
  {
    "path": "vgg16_figure_search_annoy/threadings_download_txt.py",
    "chars": 580,
    "preview": "# http://www.uml.org.cn/python/201901221.asp\n\n\nimport threading\nimport urllib.request\nimport time\n\n\ndef download_image(u"
  }
]

// ... and 1 more file (download for full content)

About this extraction

This page contains the full source code of the wziji/deep_ctr GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 67 files (3.0 MB), approximately 784.5k tokens, and a symbol index with 115 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!