Repository: pplonski/keras2cpp
Branch: master
Commit: ce407cc06ca9
Files: 14
Total size: 98.7 KB
Directory structure:
gitextract_rvc94csq/
├── LICENSE
├── README.md
├── dump_to_simple_cpp.py
├── example/
│ ├── dumped.nnet
│ ├── mnist_cnn_one_iteration.py
│ ├── my_nn_arch.json
│ └── my_nn_weights.h5
├── example_main.cc
├── keras_model.cc
├── keras_model.h
├── test_compare.py
├── test_run.sh
├── test_run_cnn.cc
└── test_run_cnn.py
================================================
FILE CONTENTS
================================================
================================================
FILE: LICENSE
================================================
The MIT License (MIT)
Copyright (c) 2016 Piotr
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================================
FILE: README.md
================================================
# keras2cpp
This is a bunch of code to port Keras neural network model into pure C++. Neural network weights and architecture are stored in plain text file and input is presented as `vector<vector<vector<float> > >` in case of image. The code is prepared to support simple Convolutional network (from MNIST example) but can be easily extended. There are implemented only ReLU and Softmax activations.
It is working with the Theano backend.
## Usage
1. Save your network weights and architecture.
2. Dump network structure to plain text file with `dump_to_simple_cpp.py` script.
3. Use network with code from `keras_model.h` and `keras_model.cc` files - see example below.
## Example
1. Run one iteration of simple CNN on MNIST data with `example/mnist_cnn_one_iteration.py` script. It will produce files with architecture `example/my_nn_arch.json` and weights in HDF5 format `example/my_nn_weights.h5`.
2. Dump network to plain text file `python dump_to_simple_cpp.py -a example/my_nn_arch.json -w example/my_nn_weights.h5 -o example/dumped.nnet`.
3. Compile example `g++ -std=c++11 keras_model.cc example_main.cc` - see code in `example_main.cc`.
4. Run binary `./a.out` - you should get the same output as in step one from Keras.
## Testing
If you want to test dumping for your network, please use the `test_run.sh` script. Provide your network architecture and weights in it. The script does the following:
1. Dump network into text file.
2. Generate random sample.
3. Compute predictions from keras and keras2cpp on generated sample.
4. Compare predictions.
## Similar repositories
- Keras to C++ with usage of Tensorflow C API https://github.com/aljabr0/from-keras-to-c
================================================
FILE: dump_to_simple_cpp.py
================================================
"""Dump a Keras model (architecture JSON + HDF5 weights) into a plain-text
format readable by the pure-C++ runtime (keras_model.h / keras_model.cc).

Layers written with parameters: Convolution2D, Activation, MaxPooling2D,
Dense. Dropout and Flatten are emitted as bare layer markers only.
"""
import numpy as np
np.random.seed(1337)  # keep seeding for reproducibility with the original examples
from keras.models import Sequential, model_from_json
import json
import argparse

# Print full arrays: the dump format relies on numpy's textual repr of each row,
# so truncation with '...' would corrupt the output file.
np.set_printoptions(threshold=np.inf)

parser = argparse.ArgumentParser(description='This is a simple script to dump Keras model into simple format suitable for porting into pure C++ model')
parser.add_argument('-a', '--architecture', help="JSON with model architecture", required=True)
parser.add_argument('-w', '--weights', help="Model weights in HDF5 format", required=True)
parser.add_argument('-o', '--output', help="Output file name", required=True)
parser.add_argument('-v', '--verbose', help="Verbose", required=False)
args = parser.parse_args()

print('Read architecture from', args.architecture)
print('Read weights from', args.weights)
print('Writing to', args.output)

arch = open(args.architecture).read()
model = model_from_json(arch)
model.load_weights(args.weights)
model.compile(loss='categorical_crossentropy', optimizer='adadelta')
arch = json.loads(arch)

with open(args.output, 'w') as fout:
    fout.write('layers ' + str(len(model.layers)) + '\n')
    layers = []
    for ind, l in enumerate(arch["config"]):
        if args.verbose:
            print(ind, l)
        # Every layer gets a header line: "layer <index> <class name>".
        fout.write('layer ' + str(ind) + ' ' + l['class_name'] + '\n')
        if args.verbose:
            print(str(ind), l['class_name'])
        layers += [l['class_name']]
        if l['class_name'] == 'Convolution2D':
            # Kernel tensor; with the Theano backend the shape is
            # (nb_filter, stack_size, nb_row, nb_col) -- TODO confirm for your backend.
            W = model.layers[ind].get_weights()[0]
            if args.verbose:
                print(W.shape)
            fout.write(str(W.shape[0]) + ' ' + str(W.shape[1]) + ' ' + str(W.shape[2]) + ' ' + str(W.shape[3]) + ' ' + l['config']['border_mode'] + '\n')
            # One text line per kernel row (the innermost axis is printed by numpy).
            for i in range(W.shape[0]):
                for j in range(W.shape[1]):
                    for k in range(W.shape[2]):
                        fout.write(str(W[i,j,k]) + '\n')
            # Bias vector on its own line.
            fout.write(str(model.layers[ind].get_weights()[1]) + '\n')
        if l['class_name'] == 'Activation':
            fout.write(l['config']['activation'] + '\n')
        if l['class_name'] == 'MaxPooling2D':
            fout.write(str(l['config']['pool_size'][0]) + ' ' + str(l['config']['pool_size'][1]) + '\n')
        if l['class_name'] == 'Dense':
            # Weight matrix (input_dim, output_dim); one text line per input row.
            W = model.layers[ind].get_weights()[0]
            if args.verbose:
                print(W.shape)
            fout.write(str(W.shape[0]) + ' ' + str(W.shape[1]) + '\n')
            for w in W:
                fout.write(str(w) + '\n')
            # Bias vector on its own line.
            fout.write(str(model.layers[ind].get_weights()[1]) + '\n')
================================================
FILE: example/dumped.nnet
================================================
layers 12
layer 0 Convolution2D
4 1 3 3 same
[ 0.13240439 -0.39469701 -0.05886053]
[-0.16273086 0.27228925 0.70811206]
[-0.0382412 0.91429299 0.72160912]
[ 0.41376248 -0.13718128 -0.55548537]
[ 0.06595665 -0.43903521 0.04035483]
[ 0.34481722 0.63220054 0.07781094]
[ 0.41419399 -0.13306008 0.72000468]
[ 0.28247169 0.10319189 0.50113165]
[-0.14225948 0.33037826 -0.4037863 ]
[ 0.21228614 -0.69273335 0.45505175]
[ 0.63414061 -0.75804955 -0.62078679]
[-0.5811435 -0.20503305 0.65040439]
[ 0.04864343 -0.02648391 -0.00558628 0.06567492]
layer 1 Activation
relu
layer 2 Convolution2D
4 4 3 3 same
[-0.10421877 -0.02785102 0.38203281]
[ 0.2905634 0.15197834 0.04869499]
[-0.09692651 -0.30311567 -0.26422366]
[-0.00276754 -0.19394568 -0.39375487]
[ 0.06125383 0.13863547 0.20455685]
[ 0.01164767 -0.23414099 -0.00473231]
[-0.06518493 -0.19672309 0.14905775]
[ 0.15651399 0.2891407 0.38651308]
[ 0.1900083 -0.37353948 -0.21590027]
[-0.03375062 -0.04178833 -0.2734594 ]
[-0.45986223 -0.19825426 -0.0696836 ]
[ 0.05288092 0.21991098 -0.36576423]
[-0.01610908 0.11497457 -0.61316365]
[-0.01072869 0.14255261 -0.4019627 ]
[ 0.38004246 0.19172479 0.06467063]
[ 0.59239095 0.29410571 0.20647256]
[ 0.09890065 0.51240301 0.27240333]
[-0.102652 -0.17938389 0.4384366 ]
[ 0.08311749 -0.21888258 -0.61088938]
[ 0.26572421 0.08327031 0.01123066]
[-0.13556297 -0.17235672 -0.00744808]
[-0.58282799 -0.2371998 -0.02106583]
[-0.374268 -0.43444765 -0.41671872]
[-0.48330817 -0.07184441 -0.24444637]
[-0.60360354 -0.47681275 -0.34594122]
[ 0.20064028 0.17831512 -0.02612569]
[ 0.45589581 0.4922587 -0.06326807]
[-0.29128221 0.09931409 0.24801998]
[-0.06581956 0.19527058 0.2431576 ]
[ 0.45251682 -0.0852524 0.35469437]
[ 0.11536153 -0.55935317 0.05506861]
[-0.17236997 -0.33490059 -0.68386561]
[-0.19986324 0.06262327 -0.37508488]
[ 0.13902199 -0.30721149 0.18993607]
[-0.13360961 -0.13865279 -0.38592908]
[-0.03786371 0.2630626 0.14453529]
[-0.06631006 -0.4063293 -0.2363375 ]
[ 0.19549578 -0.00670204 -0.32390189]
[-0.09911008 -0.36811453 -0.29338896]
[-0.27110478 0.43789902 0.40986046]
[-0.2690371 0.42322639 0.17070536]
[ 0.21714966 0.11392772 -0.21328911]
[ 0.3063466 0.04640941 -0.02356246]
[ 0.11048113 0.24086125 -0.04183687]
[ 0.14844583 0.25013149 -0.21424711]
[ 0.37809837 0.09550973 0.29315791]
[-0.05270186 0.12204736 0.32245034]
[ 0.35050899 0.36823237 -0.04089836]
[-0.12815939 -0.17673898 0.0116122 -0.07211183]
layer 3 Activation
relu
layer 4 MaxPooling2D
2 2
layer 5 Dropout
layer 6 Flatten
layer 7 Dense
784 6
[-0.03906742 0.06698083 -0.08050424 -0.00319093 0.01833775 0.00991163]
[-0.01495006 0.03293357 0.0197208 -0.04033855 -0.03371154 -0.04916968]
[-0.0560285 -0.06582659 0.00927044 -0.02810042 -0.04538313 -0.01811428]
[-0.04213877 -0.06204742 0.00786292 -0.08450737 0.06187303 0.02497161]
[ 0.04896151 0.02935885 0.09386393 -0.04570646 0.05239167 -0.08469187]
[ 0.01835042 0.04916618 0.12470287 0.00891534 -0.04932225 0.04197544]
[ 0.06273581 0.02127488 0.0401251 -0.06060422 -0.13253379 0.05847419]
[ 0.12982321 0.1010593 0.2134286 -0.08732843 -0.12440202 -0.0320878 ]
[ 0.07623585 0.0563375 0.07737941 -0.0296917 -0.14714883 -0.02166067]
[ 0.16959247 0.11041992 0.19505848 0.00311863 -0.04162547 0.04950376]
[ 0.0729512 0.07153659 0.07287914 -0.09814236 -0.11967845 -0.01919617]
[ 0.03707012 0.09931304 -0.03107041 0.02655093 -0.01263645 -0.08081397]
[ 0.04423667 -0.00867222 0.01667016 -0.04046617 -0.05045989 -0.07810739]
[ 0.00049676 0.03291544 -0.05188211 -0.03523895 -0.0487449 0.00134809]
[ 0.01925284 0.04327489 0.07676191 -0.06858168 0.05246273 0.08637736]
[-0.03913829 0.04089814 -0.04338754 -0.06232202 0.03502903 -0.00492453]
[-0.01337897 0.08926103 -0.04653127 0.00476701 0.08402187 -0.0500262 ]
[-0.0044559 0.00838482 -0.04894469 0.06226534 0.03247495 0.05359224]
[ 0.11980098 -0.08101542 0.07371085 0.10186744 0.0188344 0.02669876]
[ 0.19923729 0.04178629 0.10129342 0.00315007 -0.02827051 -0.03766487]
[ 0.07194332 -0.06360228 -0.02254308 0.08250529 -0.15655711 0.02502301]
[ 0.09679929 0.05437199 0.04281991 -0.00609906 -0.16727532 0.03721406]
[ 0.00803989 0.08278463 0.06190167 -0.00692337 -0.08304318 0.02458697]
[ 0.08096614 0.16214547 0.11075521 -0.05861256 -0.0690367 -0.03747399]
[ 0.21584366 0.20894378 0.20348011 -0.03105162 -0.08168395 -0.10425199]
[ 0.10633728 0.10531231 0.05532461 0.00492275 -0.01648618 -0.02858951]
[ 0.07343981 0.0795544 0.00406283 -0.1350463 0.09071051 -0.06666093]
[-0.04112834 0.0801154 -0.02094875 0.04578911 -0.08394425 0.04739556]
[ 0.04340751 -0.04323349 -0.03977324 0.029741 -0.03849278 0.09510033]
[-0.07096316 0.07336214 0.01099949 0.04040894 0.07377584 -0.00709548]
[ 0.07882775 -0.07016723 -0.07656394 -0.06225074 0.03239924 0.0848458 ]
[-0.03906078 -0.07604289 -0.07036381 0.05022649 0.06826855 0.16075383]
[ 0.05868277 -0.0029423 -0.09052019 0.08513133 0.06857975 0.0548847 ]
[ 0.07210821 0.028421 0.03118186 0.02777781 -0.06508817 0.03011957]
[ 0.02577195 -0.0021653 -0.04538134 0.03983229 -0.04444971 0.02814031]
[ 0.00989027 -0.01719115 -0.05992389 0.04526545 -0.06981328 0.05890024]
[ 0.01345126 -0.03591003 -0.01485783 0.00824048 -0.06265052 0.08473093]
[ 0.02871292 0.02361192 0.04635924 -0.00073332 -0.11096975 0.03686479]
[-0.05461308 0.13399795 -0.04151184 -0.01471005 -0.03385759 -0.02356914]
[-0.07658672 0.08487934 0.06285738 -0.11803006 0.02261236 0.00323268]
[-0.05935308 0.1054464 0.07836062 -0.06944308 0.02371275 -0.17641465]
[-0.06491436 0.06459507 0.05346636 -0.02125865 -0.00230505 -0.03430885]
[-0.02145559 0.04702919 0.04046459 -0.03417374 0.00791908 -0.05758655]
[-0.12696458 -0.08001456 -0.08423816 -0.03338545 0.03814339 0.04256224]
[-0.06056114 -0.00051539 -0.04495636 -0.00070647 0.0030581 0.02429797]
[-0.07350745 -0.02889902 -0.04967271 -0.07581174 -0.04283836 0.03076509]
[-0.02569569 -0.09465789 -0.00548012 0.04420721 -0.02736876 -0.00089581]
[-0.05284071 -0.07590015 0.02237807 -0.02403216 0.00089716 0.00054248]
[-0.014353 -0.11837757 -0.01349876 0.00988213 -0.01841527 0.00264359]
[-0.05818343 -0.09152924 -0.03935111 0.0623118 -0.00294162 -0.01540728]
[-0.03307559 -0.10525655 -0.02241764 0.02028148 0.02561427 0.00384658]
[-0.02743094 -0.05859468 -0.01221527 -0.00461497 0.027101 0.02186194]
[-0.10393144 0.00701244 -0.02056715 -0.04508474 -0.06323299 0.04851193]
[-0.12013954 -0.03573343 0.04121689 -0.06994867 -0.01653158 -0.02954953]
[-0.11202008 0.00544483 0.04861621 -0.11717936 0.00877294 0.05966207]
[-0.10761394 -0.12025759 0.09506065 -0.09846672 -0.06385981 -0.00706128]
[-0.04523962 -0.05461986 0.00158466 0.05548372 -0.00193037 -0.05045158]
[-0.06085176 -0.05391665 -0.01233329 -0.01210866 -0.08770373 0.03171697]
[-0.13669041 -0.10556038 -0.0083451 -0.03938511 -0.14809524 -0.0092752 ]
[-0.03547235 0.00632177 -0.10543172 0.01324754 -0.05205651 -0.05102921]
[-0.044781 -0.06328677 0.03889268 -0.01418422 -0.01351375 -0.02115388]
[-0.02551365 -0.05330036 -0.06344371 -0.07193805 0.02519399 0.01447381]
[-0.0156537 -0.06582764 -0.03264342 -0.00252054 0.03280666 -0.03166911]
[-0.04884146 -0.09733901 -0.07943926 -0.00302826 -0.04236387 -0.06903206]
[-0.09653606 -0.0540468 -0.05871857 0.01528029 -0.0458906 -0.00639062]
[-0.12743062 -0.06528249 -0.15508506 -0.10875963 -0.02613571 0.03022596]
[-0.10855629 -0.07260169 -0.07472604 0.00578795 0.02758324 0.06854044]
[-0.02120412 -0.05679055 0.09603125 0.0175001 -0.09669294 -0.07805043]
[-0.09110473 -0.06463159 0.06585391 0.00330143 -0.08988428 -0.01761026]
[ 0.02224932 -0.0066196 0.13158537 -0.09908723 -0.10756366 -0.02889675]
[-0.0445697 -0.0108994 -0.08682603 0.05705853 -0.00622619 0.00903386]
[-0.12899421 -0.01250589 -0.11728453 -0.10747638 -0.0724315 0.06194278]
[-0.08139972 0.02385198 -0.01709059 -0.01669967 -0.1100228 -0.00961974]
[-0.0432987 -0.11630281 0.11211184 -0.07596958 -0.00239594 -0.06798765]
[-0.07364786 -0.04755174 -0.02912027 -0.00833957 0.04815742 -0.18895435]
[-0.0347453 0.01868596 -0.0147765 -0.0446967 0.0478966 -0.12075885]
[-0.06177768 -0.04607397 -0.01522131 -0.02732979 0.04139465 -0.02353299]
[-0.133301 -0.00655711 -0.03392529 -0.11689362 -0.0883434 0.07187817]
[-0.06189934 0.0524984 -0.11142992 -0.1036801 0.02576936 0.07690011]
[ 0.02531234 -0.01139269 -0.12611471 -0.09724379 0.04059971 0.04154509]
[-0.01147614 -0.04213808 -0.11520851 -0.01658878 0.08237385 0.04917121]
[-0.0494155 -0.05302509 0.06465708 0.05598482 0.00447672 -0.09627396]
[-0.04844014 -0.00902561 0.18258154 0.11114652 -0.15210725 -0.18368633]
[ 0.04977537 -0.04202075 0.04880532 0.09028839 -0.07794982 -0.0135695 ]
[ 0.0737197 0.02134143 0.03950929 -0.0462447 -0.06542954 -0.04404024]
[-0.11784814 0.05312875 -0.12023604 0.03744375 0.01453782 0.04277182]
[-0.0362859 0.04788436 -0.03567404 -0.05353031 0.10789248 -0.11958148]
[-0.03160278 0.0486227 0.07829643 -0.02335911 -0.00032572 -0.19590537]
[-0.09877094 0.05633143 0.03377883 0.03512298 0.05049034 -0.13365418]
[ 0.00762662 0.01745776 0.09184489 -0.02832299 0.03992055 0.0698674 ]
[-0.00207301 0.07375712 0.02974467 -0.07909034 0.04247067 0.10213128]
[ 0.00723242 0.01831096 -0.08343405 -0.15346451 0.04382225 0.01230231]
[ 0.10217623 0.01167687 0.00540794 -0.02553524 0.05552289 -0.01947977]
[ 0.10517532 -0.00784067 -0.08542134 -0.11327109 0.09972257 -0.03549052]
[ 0.05870317 0.11366854 -0.07332972 -0.04271904 0.11874869 -0.13107273]
[ 0.06873229 0.09667565 0.03740055 -0.01186924 0.06541399 -0.15281917]
[-0.00027914 -0.02707825 0.12996851 0.02023274 -0.10725679 -0.12667494]
[ 0.05440604 0.03363384 0.12773462 -0.0225022 -0.11709606 -0.08246228]
[-0.07478495 -0.01820215 -0.06289016 0.00661487 -0.02890409 0.07456721]
[ 0.05315993 -0.00774269 -0.01374839 0.0367431 -0.00135519 -0.06750096]
[-0.03885182 0.07873096 0.02371075 0.02000866 0.16509888 -0.07409362]
[ 0.02051857 0.0620492 -0.00213286 0.10097467 0.06414621 -0.13120499]
[ 0.06285419 0.05325737 0.04649446 0.14846779 0.04864058 -0.0202658 ]
[ 0.13457911 -0.05248253 -0.05584732 0.0658607 -0.00522313 -0.05087093]
[ 0.12267354 0.05394118 -0.06588896 -0.01165381 0.04335695 -0.0288139 ]
[ 0.14390378 0.10954358 -0.04838676 -0.02016603 0.03786809 -0.05666351]
[ 0.14462852 0.06843998 0.02745682 -0.05879001 0.04786909 -0.07078739]
[ 0.12860374 0.0628712 -0.04283768 -0.02921913 0.08406699 0.00698227]
[ 0.00307443 0.07492833 0.00517122 0.02832262 0.0014128 -0.03315274]
[-0.0366415 0.18652259 0.09298028 0.0114279 -0.05174063 -0.01810233]
[ 0.06353159 0.02525968 0.10372189 0.02731322 -0.09390891 0.06205925]
[ 0.07241279 0.01047202 0.08987788 0.02021185 0.02901615 -0.00858951]
[-0.06564526 0.02128737 -0.02443759 -0.07290538 -0.01963917 -0.05623203]
[-0.10410903 0.04836389 -0.07412664 -0.01642562 0.01443276 0.02051037]
[ 0.00953599 -0.03415989 0.00981615 0.04472892 0.06925561 0.10029602]
[ 0.01554269 -0.03229945 -0.01203572 0.12803426 0.10587206 0.03710327]
[ 0.11929016 -0.05155375 -0.00730276 0.05756969 0.05656901 -0.10001393]
[ 0.13459331 -0.028423 -0.16193444 0.02671673 0.04575602 -0.06020094]
[ 0.03446808 0.06116208 -0.10255814 -0.0269738 0.06089609 -0.05435224]
[ 0.15360135 0.07809708 -0.07819074 -0.0441137 0.10515328 -0.04624659]
[ 0.12455522 -0.00274724 -0.03359824 -0.06094872 0.06885184 0.01613074]
[ 0.05241013 0.03465469 0.0544696 0.02889933 -0.07364102 0.01123893]
[-0.01323363 0.05176437 0.02000826 0.11144416 -0.13222131 0.04050557]
[ 0.01396541 0.13095745 0.02711691 -0.03186902 -0.10700434 0.05891183]
[ 0.03830038 -0.10976595 0.05091108 0.07822291 0.00051992 0.07743204]
[ 0.15484604 -0.01745577 -0.12746601 0.04840966 -0.00309741 0.03579752]
[ 0.02922957 0.05066233 0.04701658 -0.05638792 0.05541473 0.03763079]
[-0.04514857 0.02194637 -0.02062281 0.03265412 -0.07245879 0.08555832]
[ 0.06741564 -0.08316622 -0.03128064 0.04865873 0.01566836 0.12975709]
[-0.02963226 -0.10759581 -0.01098184 -0.02771188 -0.02080985 0.13046084]
[ 0.03023411 -0.07968377 -0.01689306 -0.01264298 -0.10188069 -0.02617635]
[-0.0503275 0.06735608 -0.08736092 0.05937959 -0.00524964 -0.0202806 ]
[-0.0342137 0.10130821 -0.06394593 0.0444742 -0.00839805 -0.03509626]
[ 0.03010297 -0.05151312 -0.04433104 0.03548456 0.03975218 0.007585 ]
[ 0.00854354 -0.02776004 0.05805048 0.06378321 -0.08829919 0.03157675]
[ 0.04181591 0.00249214 0.08059046 0.09673768 -0.19465317 0.04820249]
[ 0.0700449 0.03535937 0.05061055 0.09118696 -0.21229593 -0.02301093]
[ 0.03088937 0.01046988 0.03040583 0.04241165 -0.17937416 0.03388511]
[ 0.11100613 -0.0860538 -0.04198904 0.1901321 0.00919448 0.04254381]
[ 0.13026188 -0.11809216 0.03116922 0.14583004 0.05218115 0.03234338]
[ 0.05639847 -0.0844645 -0.08791742 0.06192544 -0.03236663 0.07176448]
[-0.03110873 0.03326483 -0.09342306 0.00719841 -0.02181715 0.13209145]
[ 0.02926968 -0.06007582 0.01810193 -0.021138 -0.10274869 0.14875817]
[-0.01407827 -0.03772659 0.00997541 0.00261242 -0.04543411 0.14778784]
[-0.00864227 -0.01674397 0.01345735 0.03611076 -0.13224488 0.03311513]
[-0.06951351 -0.0015919 0.02223685 0.01239971 -0.04158155 0.00971505]
[-0.09682007 0.01379257 -0.00054124 0.00312394 -0.01066519 -0.03672417]
[ 0.05549898 -0.08749791 0.04858857 0.06707751 0.01995481 -0.0036293 ]
[ 0.09252531 -0.01970577 0.08262435 0.02110152 -0.10228407 0.05909617]
[-0.05498303 -0.02594341 -0.0035772 0.05643748 -0.17565481 -0.02449322]
[ 0.04412676 -0.06540321 0.10419527 0.11233973 -0.19555743 0.10897983]
[ 0.15166388 0.01486791 0.01769827 0.12870763 0.00991944 0.00663956]
[ 0.10962525 -0.00414004 -0.09437564 0.15096906 0.08112747 -0.01150652]
[ 0.02475246 -0.03095607 -0.00947252 0.1118011 -0.02173202 0.07033352]
[ 0.04599787 0.06665192 -0.06099332 -0.06268867 0.05021445 0.01119815]
[-0.02999268 0.04205724 -0.11774008 -0.03122759 -0.04538905 0.16326749]
[ 0.04934228 -0.0953588 0.01141531 0.06725168 -0.11418029 0.04892549]
[-0.06355075 -0.1086745 -0.00195693 0.14663446 -0.07739544 0.10336433]
[-0.02612478 -0.01453772 -0.03547798 0.044515 -0.02110003 0.10322534]
[-0.05150209 -0.0180339 0.00939813 -0.01876376 -0.05246141 0.04234632]
[-0.08762245 -0.11673234 0.05118603 0.01297363 -0.01001009 0.10524361]
[-0.08567178 -0.14249459 0.13776262 0.01439468 -0.14940049 0.07299089]
[-0.06279238 -0.10569572 0.09934897 0.04906239 -0.16361082 0.06625141]
[ 0.05229718 -0.09052116 0.13404104 0.04930389 -0.17941342 0.04353089]
[-0.02686829 -0.12129239 0.04753835 0.15620217 -0.10866011 -0.02461487]
[ 0.03397669 0.06308611 0.01684876 0.11957835 0.01646716 -0.12103692]
[ 0.13224643 -0.04496242 0.05914445 0.12491267 0.07710338 0.05791619]
[ 0.0412988 -0.0768477 -0.06245616 0.04562742 0.06254521 -0.06498001]
[ 0.03063324 0.03106824 -0.02559988 0.06763268 -0.0651234 0.05155628]
[ 0.0418902 -0.07840691 0.02341092 0.035845 0.03472321 0.05307869]
[ 0.01396064 -0.00728493 0.01153168 -0.07698955 0.05941084 0.13373546]
[-0.06957011 0.07046435 -0.11192854 -0.06017029 -0.05490563 0.03300064]
[-0.14343464 0.08020713 0.01546191 -0.04416481 -0.00722431 -0.01022952]
[-0.09709038 0.01148857 -0.09649117 -0.05650258 -0.0684473 0.04289802]
[-0.09163192 -0.00832356 -0.00961536 -0.04997683 -0.08259019 0.01100062]
[-0.17413211 -0.03966004 0.00783409 -0.00765945 -0.00658657 -0.04461117]
[-0.07055373 0.024131 0.03841666 -0.11819246 -0.166894 0.05523549]
[-0.07620355 -0.07380182 0.01279752 -0.12449677 -0.01981781 -0.0294143 ]
[ 0.04515965 -0.12187946 -0.09689888 -0.13681799 0.06321786 -0.12640236]
[-0.04023914 0.00545566 -0.07380436 -0.03177956 0.04133968 -0.09878676]
[ 0.03876712 0.04780795 0.06613705 -0.0798581 -0.07041597 0.02009777]
[-0.01466717 -0.04095791 -0.06117681 0.017744 -0.02853148 0.02935289]
[-0.06306019 -0.0588049 -0.07670173 -0.02984815 -0.04393519 -0.01071882]
[-0.01691479 -0.03707276 -0.04221364 -0.03778722 -0.00076601 0.06874673]
[-0.07801819 -0.06891399 0.06429972 -0.00189043 0.03712595 0.0220933 ]
[ 0.02979842 -0.11590263 -0.14524511 -0.03976246 -0.0851257 0.03161072]
[-0.00075146 -0.05422279 -0.16947828 -0.09824593 0.07455433 -0.09075713]
[-0.03539257 -0.0077349 -0.07038279 -0.02245235 0.05691826 0.03691422]
[-0.10431595 -0.06712238 -0.18116839 0.01729527 0.05096266 -0.01168822]
[-0.00102106 -0.12141351 -0.15861224 -0.10181027 0.07429059 -0.10902476]
[ 0.00775752 -0.04790719 -0.07350548 -0.06977136 0.08977472 -0.10640195]
[-0.01784684 0.01781068 -0.10203142 0.04107929 -0.02521095 -0.05587351]
[-0.05395092 0.01020161 0.06169144 -0.0250671 -0.01564677 -0.03906799]
[ 0.04229081 0.01736699 -0.04419591 0.02952868 -0.06702921 0.0220938 ]
[ 0.06377815 -0.01866848 -0.00872547 0.05797683 0.05467021 0.0118074 ]
[-0.02050604 -0.0856597 -0.07597944 -0.00963104 -0.04709589 -0.00769084]
[-0.00340328 0.07873099 -0.0219969 -0.05687838 0.06580792 0.00297902]
[ 0.04022289 0.00317212 0.04879058 0.03770782 -0.03577699 -0.03475757]
[-0.00706839 0.00817533 -0.03345953 0.08339515 0.06095733 -0.08682188]
[ 0.02722257 0.02425286 -0.0345313 0.08355992 0.0522751 -0.0449243 ]
[ 0.0099801 -0.01105665 -0.0232964 -0.02217771 0.03242119 0.06061608]
[ 0.08399611 -0.02406195 0.02454117 0.07061335 -0.03615051 -0.02929047]
[-0.03101282 0.02415061 -0.03012299 0.04266731 0.02002877 0.00579005]
[ 0.08879535 0.02304467 -0.02114121 0.04518966 -0.06163095 -0.0645394 ]
[ 0.0874828 -0.01495793 0.02253911 0.07239142 -0.00243468 -0.05371052]
[-0.02884641 -0.04605388 0.08200453 0.04786541 0.01582255 -0.01059281]
[-0.06646664 0.02130977 -0.02211558 -0.06502445 0.02839461 -0.00296204]
[-0.0063191 0.02135059 -0.03310223 -0.0723725 -0.01088556 0.02342578]
[ 0.03307476 -0.00205816 -0.06744242 -0.00648706 0.0194298 0.0109417 ]
[ 0.07308613 -0.04531072 -0.06914005 -0.02438866 0.03880366 0.0083587 ]
[ 0.02373182 0.04986393 0.0046019 -0.05207612 -0.00573088 0.07560703]
[-0.00279333 -0.00783953 0.08252762 0.07336356 0.00140709 -0.0343411 ]
[-0.04199142 -0.00874342 -0.03191872 0.05077265 -0.02291996 0.06468597]
[ 0.03074639 0.00323415 0.07363073 0.01344377 -0.0344721 -0.07490882]
[ 0.05864273 0.06454518 -0.06300256 0.03239371 -0.00249341 0.02659346]
[-0.0344055 0.06376082 -0.02887102 0.08223609 -0.03262004 0.02870097]
[ 0.01010473 -0.0090417 -0.01612562 0.08783603 -0.08671906 -0.02188782]
[ 0.07311525 -0.02435566 0.06883435 0.03148881 -0.11076438 -0.04429966]
[ 0.06303325 0.01264509 0.14135678 0.01972907 0.00872923 -0.07572689]
[ 0.04516434 0.01409933 0.12629771 -0.07746307 -0.11591296 0.01541979]
[ 0.11733777 0.05183874 0.02602149 -0.09596024 0.00783244 -0.01830325]
[-0.05513093 -0.0005425 -0.05468076 -0.05329437 -0.02899629 -0.03885645]
[-0.02990598 0.05032407 -0.02790986 0.01764168 0.07079449 0.06969906]
[ 0.07217807 -0.0403676 0.04199512 -0.04355459 -0.00424795 0.03450686]
[ 0.01203644 -0.07774626 0.05507602 -0.04055535 -0.07002122 0.01328743]
[-0.04818821 -0.01758004 0.05088845 -0.00749813 0.04058031 0.10315325]
[ 0.06443591 -0.05923147 0.02365677 0.00316905 0.03135337 0.13334037]
[ 0.06809917 0.01315044 -0.01585105 0.04918423 0.00850209 0.00988356]
[ 0.0736881 0.01316321 -0.09731589 0.01891903 0.00801433 0.12068653]
[ 0.09107485 -0.05991253 -0.04163438 0.01160709 -0.10218006 0.13151526]
[ 0.00759288 -0.08449205 -0.16844754 0.01386801 -0.09983791 0.08254603]
[ 0.00546961 -0.12577212 -0.2370593 0.00632897 -0.05404766 0.21107449]
[ 0.04833199 -0.15973663 -0.12746678 0.01278205 -0.00261998 0.09800395]
[ 0.08713182 -0.03916506 -0.06439253 -0.0138543 0.05141735 -0.08583855]
[ 0.05299447 0.14401291 0.01017158 -0.04149881 0.0276635 -0.0918515 ]
[ 0.00038663 0.02805049 0.06326583 -0.07504413 -0.04173167 -0.10145886]
[ 0.05976153 -0.01742465 0.06341676 0.05602439 -0.07059325 -0.09018681]
[ 0.01108022 -0.03864975 0.02851808 -0.06720294 -0.0641618 0.0656224 ]
[ 0.02856216 0.07055982 -0.0516327 0.06126515 -0.06335178 0.06992488]
[-0.02264741 0.01859816 -0.08388671 0.01682108 -0.0259085 0.08928236]
[-0.1091122 0.01057275 0.0226247 -0.03718273 0.00135818 -0.03461534]
[-0.04477059 -0.07221311 -0.03108195 -0.00183363 0.01866931 0.03458804]
[ 0.00115266 -0.06592103 -0.13075645 -0.07985996 -0.10431947 0.06145226]
[-0.01576591 0.00800673 -0.09192831 -0.01888154 0.02111752 0.06716861]
[-0.03456827 0.06472316 -0.21096472 -0.03178708 -0.04704238 0.12340169]
[-0.00635043 -0.05227181 -0.09891172 0.05748896 0.02813034 0.09038568]
[ 0.01646154 -0.15380681 -0.06830674 0.00648034 -0.06387944 0.01933319]
[-0.06033361 -0.09666492 -0.0347028 0.06034534 -0.0975316 0.0525537 ]
[-0.03687568 -0.09934153 -0.08847664 0.08158989 0.00141661 -0.01972735]
[-0.00287533 0.04690229 -0.02634276 0.00471038 -0.01305853 -0.07336445]
[-0.08552708 -0.08550128 0.14672536 -0.14150299 -0.09082367 -0.01985498]
[ 0.02511594 0.03920835 -0.08183341 -0.00175885 -0.02925517 -0.06423401]
[ 0.04209926 -0.08868414 0.04160867 -0.02163641 0.02538342 -0.07373472]
[-0.1010308 -0.09864283 0.00304896 0.04347901 -0.06171658 0.0357643 ]
[ 0.00675949 0.02017584 -0.11227395 -0.07224586 -0.06970908 0.02627237]
[-0.01575015 -0.0068797 -0.02497365 -0.05909395 -0.11599399 0.00620197]
[-0.00582996 -0.10990653 0.00341506 -0.07777496 -0.12476213 -0.09949174]
[-0.1146011 -0.07505476 -0.09607791 -0.03088069 -0.16680256 -0.03121797]
[-0.0869001 -0.0560614 -0.13494053 -0.02908197 -0.10780253 0.03288222]
[-0.04528867 -0.10762697 -0.08480581 0.07087144 -0.02079575 -0.04462719]
[-0.15111348 -0.16765293 0.02092774 0.0520954 0.08606736 0.00502715]
[-0.07411221 -0.10411596 0.03319677 0.08239169 0.10579952 0.02374489]
[-0.00763384 -0.12334859 0.08693864 0.10098316 -0.03707935 -0.00459614]
[-0.07790284 -0.07505847 0.11518796 0.02719008 -0.0498294 0.02567977]
[-0.01085752 -0.02108232 0.1166356 -0.07947682 -0.03691655 -0.08533392]
[ 0.01284698 0.04319083 0.09651497 -0.11431589 0.01230511 -0.00311542]
[ 0.01906602 -0.08105581 -0.05718938 0.03989894 -0.00492739 -0.07123473]
[-0.01209347 0.05918923 -0.00647449 -0.02324463 -0.04660026 -0.0172042 ]
[-0.03998321 -0.05774198 0.00606982 -0.01544447 -0.01927958 0.0078071 ]
[-0.16435644 -0.04937918 0.0327552 -0.08888155 -0.04324656 -0.10150621]
[-0.1195085 -0.09279716 0.05081519 -0.07332635 -0.01160537 -0.11926854]
[-0.1242918 -0.0516049 0.09920215 -0.15690064 -0.01110534 -0.16299208]
[-0.11686368 -0.03242994 -0.01932491 -0.10553797 -0.06686535 -0.1331961 ]
[-0.14782013 -0.08558808 -0.0254719 0.05517821 -0.10985656 -0.06093979]
[-0.12791874 -0.18109022 -0.01371315 0.1069378 0.01094283 -0.05869334]
[-0.07214297 -0.1171359 0.04347658 0.11057278 -0.00463003 -0.08695465]
[-0.12860602 -0.10304763 0.12716153 0.18252239 0.04018192 -0.07817193]
[-0.01159176 -0.02519655 0.16372627 0.01329133 -0.02916689 -0.09670461]
[-0.12614983 -0.07885019 0.18875229 -0.17262574 -0.08638599 0.03756535]
[ 0.00875268 -0.10847837 0.06044701 -0.09786192 0.00160857 -0.02058518]
[ 0.06909539 -0.04950211 -0.00288423 0.02066973 -0.05376922 -0.07762389]
[-0.0743156 -0.0328237 -0.049174 -0.05797269 -0.07078674 -0.00918986]
[-0.08920635 -0.0545126 0.00686068 -0.05586634 0.04931222 -0.03772788]
[-0.10032107 0.05828014 0.06278351 -0.07885226 0.09543183 -0.19087875]
[-0.11047766 0.00621466 0.07614939 -0.1074705 0.09285117 -0.13230141]
[-0.10786755 -0.10159543 0.07893702 -0.16805226 0.11080868 -0.1021629 ]
[-0.20107403 0.01537381 0.05700956 -0.21065055 -0.1344308 -0.0115444 ]
[-0.18496245 0.08148756 -0.15201768 -0.13335752 -0.02321573 -0.011833 ]
[-0.11402065 -0.06084957 -0.03581726 -0.03327167 0.03821984 -0.04533262]
[-0.01624171 -0.0779098 0.04551651 0.01745802 -0.02982147 -0.12480359]
[ 0.01769885 -0.04275391 0.15404399 0.16352718 -0.00707891 -0.2204005 ]
[ 0.01117152 0.05379866 0.24829371 0.17850298 -0.02290346 -0.08285065]
[-0.041535 0.02230919 0.11049987 -0.09049127 -0.13301167 -0.0365572 ]
[-0.03630528 0.01687351 0.00366041 -0.0079483 0.01041406 0.03271633]
[-0.04517176 0.06207936 0.03069755 0.06896499 0.01395736 -0.04435897]
[-0.09688669 -0.08676144 -0.05385501 -0.12382041 0.08324644 -0.03318273]
[-0.13742276 0.02406721 0.04640939 -0.01621931 0.06602018 -0.05023581]
[-0.07473897 0.04026653 0.06028711 -0.10495933 0.1385542 -0.1276679 ]
[-0.10651979 0.03429695 0.04538415 -0.10229702 0.08979802 -0.04196034]
[-0.13032119 -0.04043769 0.11250041 -0.1974812 0.14628579 0.02734789]
[-0.22818984 0.11965807 -0.03218153 -0.2079038 0.16446579 0.06393486]
[-0.07006577 0.10467544 0.02071472 -0.18452962 0.06919638 0.03106326]
[-0.03559869 0.02725672 0.03675464 -0.20351875 0.0232321 -0.04322459]
[-0.05047734 0.0274868 -0.00218126 -0.08532499 -0.10196719 -0.06309893]
[-0.04747172 0.14822915 0.23301753 0.04975881 -0.0221993 -0.13814203]
[ 0.11581982 0.1191036 0.3581551 0.00564652 0.05509095 -0.15107015]
[ 0.10870871 0.06199756 -0.00569341 -0.00700572 0.01480637 -0.01425133]
[ 0.07747076 -0.09602888 0.04439135 -0.01127195 0.03467308 -0.07501526]
[ 0.02426844 -0.05566151 0.00642693 -0.05767361 -0.08362609 0.02029405]
[-0.09816962 -0.02658927 -0.01496939 -0.10540568 0.0186364 0.08163155]
[-0.12812944 0.06017309 0.05948135 0.0422884 0.02789039 -0.04322787]
[ 0.00902574 0.07005555 0.04662625 -0.00793589 0.11771545 -0.09565743]
[ 0.00804521 0.05894405 0.09210335 -0.01404389 0.10033886 -0.00780966]
[ 0.05670874 -0.05379384 0.08547706 -0.01229658 0.12814467 0.03154401]
[ 0.07123813 0.15409248 -0.08224371 -0.16866289 0.06741117 0.09393467]
[ 0.0585053 0.13926491 0.00874373 -0.17216343 0.01771168 0.05755534]
[-0.08185123 -0.05465635 0.02915151 -0.10413721 0.05234181 0.07561123]
[-0.04534185 0.03318586 0.09185749 -0.09925884 -0.00177414 0.09427547]
[-0.00602874 0.0983023 0.16971703 -0.05565074 -0.06988478 -0.0038655 ]
[-0.06408705 0.06344847 0.18636818 -0.01032426 -0.0274127 0.03033029]
[ 0.05786566 -0.03625295 -0.00456099 0.07288183 0.06470373 -0.05072778]
[ 8.11063126e-02 -5.44092171e-02 5.30217998e-02 -5.36613427e-02
-9.26311463e-02 -5.50363911e-05]
[ -3.53034362e-02 7.34884739e-02 -2.10967101e-02 8.20038840e-05
-5.33712730e-02 9.64356810e-02]
[ 0.05584727 -0.07564959 -0.15007757 0.05104481 0.02999229 0.12481848]
[-0.0232985 -0.06109238 0.03707458 0.08997541 -0.02453282 -0.06030597]
[-0.00600583 0.03801842 0.00207624 -0.00619094 0.08539897 -0.02032562]
[ 0.05329441 0.01990469 0.07880558 0.03468892 0.08287051 -0.12279335]
[ 0.17008583 -0.02420184 0.10956061 0.03707284 0.08192559 -0.13542266]
[ 0.00689564 0.16682151 -0.0718221 -0.01286904 -0.01786523 -0.05141586]
[ 0.01732595 0.02000139 -0.09730851 -0.13509241 0.01723163 -0.04194546]
[-0.00428402 -0.08683665 -0.01248466 -0.1095397 -0.03168366 0.00840248]
[ 0.03923729 -0.03976212 0.06191822 0.02809906 -0.00890674 0.06491708]
[-0.00244019 -0.02708972 0.04302517 0.02165691 -0.11158521 0.06028541]
[ 0.06191679 0.0346404 -0.01934946 0.05341758 -0.10207887 -0.02903006]
[ 0.18077523 0.04433471 0.00292609 0.08369821 -0.06648031 0.00327894]
[ 0.06779093 -0.02453737 0.03696441 0.10139662 0.0057073 0.06533521]
[-0.08305582 0.00392416 0.03467768 0.04304846 -0.02512644 0.00148449]
[ 0.0237844 -0.044475 -0.13727956 0.03278318 -0.02362916 0.23397957]
[-0.09663746 -0.11980815 0.00401888 0.04943704 -0.07798276 0.11937261]
[ 0.01367956 -0.04242942 0.05436371 0.06366228 -0.10846025 0.01615124]
[ 0.04725935 -0.03958574 0.09791607 0.0699836 -0.05288411 0.01660228]
[ 0.06723899 0.05828278 0.13796987 0.12947816 -0.0848249 -0.07867571]
[ 0.09004601 0.11790036 0.07024731 0.03698653 -0.08884452 -0.05824902]
[ 0.0608949 0.12994705 -0.03104216 0.04728473 -0.00590138 -0.06956135]
[ 0.10000875 0.0184104 -0.05483288 0.05167157 -0.01115394 0.08897478]
[ 0.09150535 0.02289857 -0.0336893 0.03180741 -0.0349074 0.06696738]
[ 0.11248624 -0.12722918 -0.04288004 0.0402756 -0.02649304 0.07953005]
[ 0.12766029 -0.04065711 -0.04956887 0.10743555 -0.02775034 -0.04729911]
[ 0.15002055 -0.06368937 -0.10485335 0.0755353 -0.06773707 0.02015029]
[-0.02768589 -0.03655818 0.02317311 0.00530616 0.08075526 0.02742437]
[-0.05100085 -0.01623904 -0.02910977 0.02831373 -0.00286221 -0.06059257]
[-0.01781596 -0.07093476 -0.05223223 0.06764248 0.05424672 0.09459551]
[-0.0613246 -0.06580082 -0.01659304 0.02639491 0.01538513 0.02701483]
[-0.00202238 -0.11767528 0.02013254 0.00271924 -0.05775877 0.08950236]
[-0.02002476 -0.02891426 0.05198368 0.04681492 -0.141717 0.04950737]
[-0.00093731 -0.00549905 0.07312111 0.04672351 -0.0818921 0.02488073]
[ 0.04781969 0.06862619 -0.00383235 0.00413803 -0.07412501 -0.01267266]
[ 0.05001335 0.13050383 -0.05133763 0.08162124 -0.04806522 -0.08597045]
[ 0.04842478 0.0747736 -0.0505995 0.03143326 0.00115336 -0.07796593]
[ 0.08966991 0.00203328 -0.15589206 0.03598305 0.08351294 0.04372056]
[ 0.06886587 -0.06364698 -0.1483838 0.10187615 0.07967211 0.00440404]
[ 0.1710425 -0.09286346 -0.08643477 0.07932941 0.00098841 -0.02698534]
[-0.02433348 0.04172625 0.05226521 0.00533783 0.05660186 -0.03382249]
[ 0.02411989 0.06922956 0.00014566 -0.03333139 -0.01545451 -0.03078027]
[-0.07563546 -0.0329041 -0.0369438 0.01354204 -0.04418798 0.07539021]
[ 0.05991993 -0.01107481 -0.07549372 -0.05445573 -0.03596645 0.16255426]
[-0.01958104 -0.09585484 0.01032085 -0.00696564 0.00264835 0.00019101]
[-0.05710143 -0.07729269 0.01816921 0.12392949 -0.05271572 0.03375493]
[-0.06439681 -0.04730009 0.01827502 0.01467785 -0.06931528 0.06886269]
[-0.12057208 -0.04666622 0.05454067 -0.00199253 -0.02283627 -0.01716528]
[-0.08040415 -0.02855227 0.01631452 -0.01662366 -0.02278931 -0.01928622]
[-0.12489829 -0.00914164 -0.03296525 -0.08766359 -0.06126074 -0.08506618]
[-0.08119691 0.07733595 -0.01320531 -0.05800965 0.06421506 -0.12369088]
[ 0.02596982 0.07830025 -0.08262688 -0.04046429 0.10851005 -0.03363471]
[-0.00607743 0.03464349 -0.00704505 0.06975123 0.17377134 -0.04914957]
[ 0.04593702 -0.08941098 -0.08495176 -0.02143899 0.03919432 -0.02518678]
[-0.03638298 -0.0792419 -0.08857468 0.05829025 0.02684897 -0.04104811]
[-0.00682425 -0.08390704 -0.0186637 -0.06851941 0.00654578 -0.0746432 ]
[-0.01915401 -0.0474208 0.05034585 -0.03819183 -0.01545872 0.0060174 ]
[-0.00467668 -0.07371803 0.04443135 0.02383116 -0.01439601 -0.05091343]
[-0.0306581 0.01656961 0.04239885 -0.09936392 0.03832189 0.10983934]
[-0.10524937 -0.09753571 -0.01143074 -0.10657636 -0.03071666 0.01464091]
[-0.05763919 -0.04856155 -0.02525212 -0.17501558 -0.02490427 0.01284005]
[-0.13940194 -0.11334521 -0.0966932 -0.03793513 -0.05040701 0.01652947]
[-0.15320279 -0.06436833 -0.0271381 -0.02419725 -0.03192935 0.00675967]
[-0.128921 -0.246473 -0.00221251 0.01958969 -0.04113596 -0.06354882]
[-0.00943925 -0.13827267 -0.10736376 -0.11224121 -0.03527088 0.01452933]
[-0.05145054 -0.07337625 -0.09671575 -0.12290615 0.08592106 -0.05064091]
[-0.12557201 -0.06504656 -0.11401266 0.01201283 0.10882793 -0.045607 ]
[ 0.00047309 0.05889156 -0.07642809 -0.05545363 0.04504377 -0.06428661]
[ 3.79813090e-02 7.34318001e-03 -3.93701484e-05 4.99136522e-02
-4.93356064e-02 2.73512211e-02]
[-0.03118901 -0.01200502 -0.01040418 0.03281386 0.07756227 0.0802449 ]
[ 0.01769503 0.05020934 -0.04753393 0.05950855 -0.07089911 -0.04519154]
[ 0.0443761 0.00151219 -0.07124611 -0.00134795 -0.02688657 0.06165159]
[ 0.04120163 -0.02048206 -0.00353376 0.02275546 0.04434854 -0.01490622]
[ 0.08151054 -0.0254981 -0.04692166 -0.07468412 0.06345793 0.0210215 ]
[-0.03123961 -0.08669706 0.0343455 0.07274178 0.01763093 0.0685271 ]
[-0.06190425 0.01233341 -0.05290114 -0.03021464 -0.07366998 -0.02637992]
[ 0.10249367 -0.02711898 0.04624803 0.06023959 0.00280379 0.05387839]
[-0.00551062 -0.01144353 0.00691785 0.07808731 0.00190173 0.03206439]
[ 0.08520673 0.04504014 0.00060269 0.01337086 -0.02961163 -0.00405518]
[ 0.08741169 -0.00704759 0.10708498 -0.05164173 -0.01963125 -0.08817019]
[ 0.05587859 0.04446787 0.04108514 0.0769354 0.04998524 -0.06743234]
[-0.02597478 0.04292867 0.04225266 -0.01232554 0.00249567 -0.04766686]
[-0.04557613 -0.06089809 0.00499027 0.01751164 -0.03621696 -0.03983154]
[ 0.01469613 -0.04527127 -0.06079877 -0.00719674 -0.07625341 -0.01494268]
[-0.08249182 0.04329181 0.07580782 -0.07192215 -0.05699712 -0.08432283]
[ 0.07269385 0.03542061 -0.00866005 0.07044493 0.00493744 0.03871492]
[-0.00480785 0.04063975 0.04584704 0.03682789 -0.07058839 -0.06206352]
[-0.0248164 0.07458694 -0.07981014 -0.00428526 0.05047368 -0.03993575]
[-0.05692981 0.01625342 -0.12072356 -0.03726922 -0.03386045 0.00283743]
[-0.02498297 0.07712871 0.01916334 0.03441772 0.06204141 -0.08501631]
[-0.1055309 0.04614376 0.0120049 0.12080041 -0.04429146 -0.03026236]
[ 0.04406772 0.02364833 0.08039239 0.03211306 -0.06560377 -0.0514333 ]
[-0.06159844 0.17557676 0.03713775 0.05793579 -0.05753848 -0.04570324]
[-0.06796383 0.2176924 0.02069011 0.00361046 -0.08295631 -0.05174478]
[ 0.02155103 0.1026372 -0.00995247 0.03972985 -0.08807784 -0.06252069]
[-0.09874402 0.04289055 0.05443001 -0.05626391 -0.03924071 -0.00148924]
[ 0.05142415 0.10364208 -0.06108316 0.04274806 -0.04291839 -0.02966257]
[ 0.07196663 0.00560528 -0.04168062 0.00132644 0.07660192 0.00333562]
[ 0.00013542 0.03840497 -0.05121634 -0.03069201 -0.06030351 -0.03031237]
[-0.04191826 -0.05238188 0.03718313 -0.08851282 0.05312361 0.09282121]
[ 0.02798723 -0.0438569 -0.02080902 -0.01092144 -0.05213049 0.01647401]
[-0.0165485 -0.0365198 -0.09573365 0.04300163 -0.09133686 0.04295415]
[-0.00216795 -0.09183475 0.04201121 0.07137991 -0.02369085 0.06095942]
[-0.01534411 0.01977289 -0.00932657 0.07173731 0.04840126 0.04417225]
[-0.074004 -0.16776744 -0.07289654 0.06248488 -0.04314394 0.04261868]
[-0.03569035 -0.08790234 -0.00229335 0.01375446 0.02520715 0.07956246]
[-0.04862782 0.06932572 -0.01669175 -0.01396045 -0.04241837 0.03965073]
[ 0.05289885 0.117036 0.03062664 -0.03531889 0.04497956 0.01854477]
[-0.00769493 0.04494339 -0.03419326 -0.04736501 -0.04951311 -0.03950291]
[ 0.02906445 -0.04711517 -0.0454364 -0.07976387 0.04484801 0.03170849]
[-0.04130537 -0.03226221 -0.03359691 -0.08511429 -0.00897318 -0.0983621 ]
[ 0.03924594 -0.02916579 -0.06255648 -0.02740895 -0.0377712 0.05868147]
[-0.0329107 -0.01577795 -0.00565499 0.01373254 -0.00711384 -0.08724231]
[-0.00495188 -0.00154927 -0.03164452 0.05839494 -0.01135823 0.07375232]
[-0.04731026 0.01787225 -0.12093417 0.03104019 0.0728953 0.09238551]
[ 0.0130134 -0.06200435 -0.10512212 0.05057555 -0.06693162 0.10173878]
[ 0.05626862 -0.18613601 -0.23153642 -0.03318107 0.04771827 0.1001261 ]
[ 0.06938198 -0.18837951 -0.189422 0.05777368 0.00080361 0.16981642]
[-0.00789124 -0.20842065 -0.17656729 0.01014619 -0.1255115 0.1304343 ]
[-0.04647372 -0.26197448 -0.08452164 0.02480382 -0.16322449 0.02428417]
[ 0.02942501 -0.23362935 0.06261629 0.0492407 -0.06489585 -0.02855127]
[ 0.10123306 -0.0937614 0.11829423 -0.05942305 -0.0638321 -0.00767653]
[ 0.03515032 -0.06457788 0.27184325 -0.08996352 -0.16159408 -0.01089757]
[-0.07372177 -0.0090925 0.15810782 -0.1326541 -0.03745582 -0.00502236]
[-0.02260247 -0.06947938 0.15360887 -0.00746727 -0.05963999 -0.11189066]
[-0.09204135 0.02806653 -0.01532012 0.03676529 -0.00136964 0.05402471]
[-0.04058576 0.06207782 0.00441554 -0.06069969 0.06625783 -0.08017676]
[-0.03886466 0.06770531 -0.10361184 -0.07814255 0.06383438 -0.06547377]
[-0.07178563 -0.06216462 -0.0826185 -0.01841482 0.00875618 0.08441661]
[ 0.00514941 -0.13517484 -0.14302835 -0.05045532 -0.16351539 0.03881734]
[ 0.06038361 -0.10357808 -0.26816589 0.02287425 -0.10810837 0.10897718]
[-0.00469722 -0.16116744 -0.23600103 -0.01966639 -0.06966556 0.09714681]
[ 0.00323771 -0.26162848 -0.12383876 -0.01363802 -0.01340331 0.07708447]
[ 0.03174062 -0.31414208 0.01348246 -0.01330318 -0.04444208 0.08569098]
[-0.0372158 -0.29225239 0.08628132 -0.01368504 -0.0604109 -0.00511595]
[-0.03769875 -0.13700223 0.16922884 0.02004614 -0.1250495 -0.10593883]
[ 0.00898364 -0.05084655 0.23351337 -0.06909657 -0.18425065 -0.07234947]
[ 0.01060263 -0.07058497 0.27787721 -0.23051545 -0.13470556 -0.03363731]
[ 0.05730317 -0.09995969 0.20089324 -0.17654829 -0.08293732 0.04954606]
[ 4.92709056e-02 3.30025293e-02 -4.64077712e-05 -9.06658247e-02
-1.36768771e-02 -6.39049038e-02]
[ 0.01839902 -0.06888157 -0.03553672 0.0634606 0.01123822 0.00511846]
[ 0.04469841 0.02552879 -0.03906132 -0.07057896 -0.00561385 -0.05002847]
[-0.0266485 -0.04922157 -0.12058857 -0.03268656 0.00513991 0.06110254]
[ 0.01578491 -0.09232703 -0.12197913 0.03432452 -0.07400176 0.13910373]
[ 0.0259084 -0.13964759 -0.27677381 0.05767421 -0.15414582 0.19262943]
[ 0.02410723 -0.20292684 -0.20245947 -0.02236412 -0.10315355 0.16249098]
[ 0.0020604 -0.1993482 -0.19578572 0.02237512 -0.02299276 0.11855898]
[-0.01057898 -0.33508417 -0.06773041 -0.00834153 -0.08541429 -0.0064417 ]
[-0.04980646 -0.15825674 0.09098524 0.01356584 -0.11530922 -0.05147254]
[-0.0138434 -0.14391924 0.18955825 -0.00921918 -0.1295817 0.0038868 ]
[-0.11109211 -0.05249695 0.19168107 -0.05689886 -0.09197777 0.07958837]
[-0.11979652 -0.05910861 0.26285806 -0.1363706 -0.13857168 0.03443174]
[-0.07265623 -0.08622662 0.28751302 -0.08210479 -0.12759681 0.02263789]
[-0.08330109 -0.09738417 -0.01186525 0.01900351 -0.03624653 -0.0469721 ]
[-0.06011249 0.00646945 -0.02051205 -0.08167917 -0.00371891 -0.02599418]
[ 0.00410909 -0.08946645 -0.05230439 -0.07754093 -0.03394083 0.04376241]
[-0.08882749 -0.05604849 -0.06795415 -0.03447219 -0.06124071 0.03394322]
[-0.01954887 -0.15327506 -0.11060452 0.07712906 -0.06319876 0.00760951]
[ 0.00530279 -0.192081 -0.15943152 -0.00636008 -0.06855785 0.09275523]
[-0.00188405 -0.17166963 -0.20277663 -0.05625352 -0.12082655 0.01291196]
[-0.07836711 -0.20520724 -0.11228264 -0.01602561 -0.17870305 0.1060649 ]
[-0.10098472 -0.2474573 0.08438078 -0.02799721 -0.20860061 0.06785383]
[-0.05343942 -0.09845677 0.15171385 -0.01145344 -0.17055483 0.04457689]
[-0.03875558 -0.12813036 0.15463637 -0.03472103 -0.14800641 -0.0263137 ]
[-0.0291168 -0.03924006 0.16405782 -0.09091195 -0.05825635 0.02404475]
[-0.12416057 -0.06044127 -0.01125654 -0.04537313 -0.06262229 0.13447532]
[-0.09889081 -0.1556879 0.02548652 -0.02218553 -0.05300447 0.01801485]
[ 0.06525732 0.03582808 0.10920662 0.0644175 -0.0022815 -0.07423581]
[ 0.07823211 -0.02774064 -0.03544968 0.06075738 0.0553913 -0.05242233]
[ 0.00123041 -0.08780491 -0.03489149 -0.08350885 -0.04109978 -0.0534561 ]
[-0.1149976 0.0177169 -0.10534903 -0.04952307 0.02255956 -0.05585439]
[-0.0871532 -0.12651753 -0.02503573 -0.04426679 -0.10098818 -0.12002897]
[-0.11770146 -0.12337702 -0.13315602 -0.13782211 -0.03842288 0.03779097]
[-0.15434819 -0.19969378 -0.0533765 -0.09697416 0.03262857 0.13533725]
[-0.15810129 -0.03373437 -0.01062032 -0.11541918 0.09179009 0.17744531]
[-0.02556911 -0.02208714 -0.02074615 -0.17632903 0.13939235 0.05378716]
[-0.00859233 0.0665143 0.08448694 -0.15354057 -0.11360283 0.07342695]
[ 0.03170392 0.01226973 0.06161458 -0.10677943 -0.09538125 0.02955556]
[ 0.01637571 0.02291869 -0.00486508 -0.10978864 0.03196978 -0.00119864]
[-0.01123583 -0.10243765 -0.10375879 0.03156911 -0.00312853 -0.10900303]
[ 0.01390369 -0.04178841 -0.08232404 -0.01377539 -0.04055586 0.06383023]
[ 0.08507586 -0.06774803 -0.04459132 0.03536529 -0.06980737 -0.08377846]
[ 1.83119476e-02 -2.20378788e-05 -4.70558703e-02 -7.15567619e-02
-5.27981557e-02 -9.16424915e-02]
[-0.03484254 0.04082989 0.00369357 -0.10849848 0.03957698 -0.05245356]
[-0.09881279 0.01745884 -0.11592711 -0.09311091 0.10624602 -0.08507068]
[-0.11038984 -0.06384718 -0.16289976 -0.1089168 -0.03206481 0.03839753]
[ 0.02752457 -0.00907848 -0.12478393 -0.10536714 0.07663055 0.11762825]
[-0.08858506 -0.06922511 -0.05703901 -0.04144585 0.10630862 0.13778536]
[-0.02427102 -0.0106544 -0.08189183 -0.11311279 0.13016868 0.1357471 ]
[-0.02505998 0.08577369 -0.00567453 -0.14956892 0.02537528 0.12645851]
[ 0.06954842 0.10040838 0.00845767 -0.18121123 -0.11243245 0.05786859]
[-0.04285837 -0.02493123 -0.0229837 -0.16576533 -0.03796538 -0.05438682]
[ 0.05705964 -0.04449693 -0.11074064 -0.04516108 -0.01780244 -0.12226619]
[ 0.03289556 0.04605675 -0.04799105 0.04821578 0.0250967 -0.08030977]
[ 0.06883781 -0.07500488 -0.11142156 0.17076261 0.00628945 -0.01696148]
[ 0.06842478 -0.11456108 0.0262215 -0.01819566 0.06242902 -0.03736234]
[-0.08205154 -0.07980271 -0.02614263 0.00192974 -0.05448024 -0.05101029]
[-0.07054337 0.02778933 -0.05840603 -0.02397775 0.06783634 -0.02285083]
[ 0.03394033 0.02750584 -0.06408812 -0.06291281 0.03478998 -0.02012466]
[ 0.00486957 0.0586816 -0.11000957 -0.11570497 0.14010166 0.01395124]
[-0.01512818 0.03020184 -0.08333265 -0.12020911 0.09289031 0.11576611]
[-0.04718693 -0.10320611 -0.12724544 -0.13121018 0.16500162 0.16109243]
[ 0.0115124 -0.02343783 -0.11267095 -0.15439481 0.16024344 0.12133294]
[-0.00924116 -0.03999001 -0.02101704 -0.05275147 0.05602563 0.15410069]
[ 0.049056 0.01651078 0.00811297 0.04126582 0.02335404 0.05333965]
[ 5.76205105e-02 9.63858794e-03 8.02360773e-02 5.70908412e-02
1.22992657e-02 -6.92026006e-05]
[ 0.14396952 0.00972819 -0.04323345 0.06040709 0.04499682 0.01710355]
[ 0.08502851 0.08263845 -0.08156404 0.06395511 0.00099856 -0.10946572]
[ 0.10113055 -0.10002855 0.01485239 0.09571408 -0.01877613 0.03666028]
[ 0.01146812 -0.06143152 -0.09492616 0.12954885 -0.00065532 -0.04009742]
[ 0.01266196 0.0819984 0.047096 -0.02935063 0.02346306 -0.0196944 ]
[-0.05512476 -0.01332361 -0.06817065 0.02236891 -0.10267596 0.09189562]
[-0.083919 -0.03605526 -0.02932199 -0.02719379 0.03852296 0.07188715]
[ 0.00600483 -0.02655954 0.06322473 -0.15873766 0.04515571 0.09676984]
[-0.01862936 0.03461575 0.03958502 -0.02610168 0.01909079 0.11907221]
[ 0.05270803 -0.01999879 0.08816964 -0.03710591 0.03879414 0.11751094]
[ 0.05399841 -0.02176004 0.02660166 0.07033206 -0.00021014 0.06643411]
[ 0.08539724 -0.04394952 -0.01331997 0.04805927 -0.00060164 0.12453128]
[ 0.15583102 -0.01959829 0.11361287 0.13277112 -0.05170732 0.08145317]
[ 0.17910284 0.05089704 0.00528917 0.11413533 -0.05781568 0.10377657]
[ 0.08245552 0.10090733 -0.03479505 0.07159761 -0.05795054 0.01136379]
[ 0.0389607 0.05964078 -0.07295477 0.1281978 -0.02180543 0.04948157]
[ 0.1229486 -0.07319279 -0.05997281 0.0844834 -0.10655858 0.0894669 ]
[ 0.10541876 -0.05354195 0.00028371 0.08102857 0.04574807 0.05318305]
[ -6.65398408e-03 7.25593865e-02 -2.95798054e-05 -2.36942172e-02
-1.78858750e-02 -5.77889122e-02]
[-0.05108573 -0.12077842 -0.00262608 0.04244742 -0.05529458 0.04374898]
[ 0.01275766 -0.08263015 -0.00399919 0.06124596 0.02406947 0.07931511]
[-0.02696937 -0.12176222 0.03671923 0.00481847 -0.1173773 0.06500722]
[ 0.00104482 -0.02869925 0.02321416 -0.00014381 -0.08755617 0.08489241]
[ 0.03704666 -0.00397471 0.06798993 0.05955146 -0.12600242 -0.00396218]
[ 0.0788386 0.01444275 0.10939837 0.01515214 -0.15604103 0.00290358]
[ 0.09905066 0.01313802 0.08741435 0.0919837 -0.13316198 0.04869229]
[ 0.13331567 0.03671496 0.07464004 0.01444172 -0.09916789 0.03276154]
[ 0.08488057 0.00595172 0.00338763 0.12884003 -0.15842536 0.03127812]
[ 0.07457466 -0.08717185 -0.04588981 0.08657996 -0.05788819 0.08176958]
[ 0.07824977 -0.06394845 -0.04273864 0.10819834 -0.00835586 0.01183544]
[ 0.02201765 0.02059264 0.00864112 0.06972165 0.05695596 0.08902124]
[-0.01544811 -0.04681647 -0.04810363 0.10080346 -0.02184247 0.02373272]
[ 0.01104253 0.02739184 0.08262788 0.02268107 0.04819253 -0.00672995]
[-0.07060029 -0.0334276 0.02591504 -0.00728519 0.04244732 0.02189313]
[ 0.01874634 0.03752931 -0.12131198 0.05733044 -0.1290354 0.10302898]
[-0.0681495 0.04893824 0.01150581 -0.02956781 -0.12430092 0.04321539]
[ 0.01895567 -0.03944237 0.07842116 0.05792803 -0.0446355 0.06940101]
[-0.02470793 0.02999764 0.0454248 0.02464112 -0.03739325 0.08014808]
[-0.09868927 -0.01630697 0.08584633 0.05076698 -0.0462268 0.09092548]
[-0.00414749 0.07093184 0.02632718 0.07258747 -0.07827809 0.04215058]
[-0.01547547 -0.00078162 0.04392213 0.0094031 -0.0615246 0.02153521]
[ 0.01396228 0.08888188 -0.01357533 0.02293648 0.00327463 -0.02963009]
[ 0.08573545 0.00517351 0.00172319 0.15778035 0.05273307 0.04189184]
[ 0.02804283 0.02621911 0.00711008 0.03531394 0.10086582 0.05177085]
[-0.03802873 -0.04024641 0.02080545 0.10669859 -0.02659208 0.08730263]
[-0.00162489 -0.01129435 -0.02461695 0.00053571 0.003333 0.02515261]
[-0.05390078 0.13838756 -0.03254241 -0.07774678 -0.01354587 -0.06902446]
[ 0.00385146 0.00226919 -0.03537548 -0.05522361 0.02386935 0.01565781]
[-0.01194499 0.06804617 -0.08183085 0.01053591 -0.087484 -0.04261578]
[-0.05986556 -0.01158776 0.00818894 -0.02896479 0.04920187 0.02398697]
[-0.16522004 0.01502649 -0.03463824 -0.14270571 0.03288426 0.03527961]
[-0.0461283 -0.10288361 -0.00793323 0.00420822 -0.02979923 0.03530048]
[-0.15853932 -0.06390239 0.02377473 -0.04934523 -0.00271464 0.01473798]
[-0.11196984 -0.01549585 0.00038989 -0.05385035 -0.06825149 -0.0008967 ]
[-0.13769379 -0.02528614 -0.04569443 -0.02795361 0.00405588 -0.08145855]
[-0.05022296 0.01584918 -0.01828052 -0.0534338 0.02049299 -0.04450252]
[-0.0434696 0.04685648 0.0577606 -0.09859888 0.0667012 -0.12948473]
[ 0.02107473 0.10285812 -0.07627611 -0.03532748 0.11291386 -0.11796299]
[ 0.01511768 0.01312367 -0.03995832 -0.0690352 0.06163998 -0.08577722]
[-0.04931452 0.0947738 0.02728596 -0.0716171 -0.08045644 -0.0952751 ]
[ 0.01380203 -0.06715139 -0.00573948 0.02941873 -0.02511296 -0.06128195]
[ 0.02050374 -0.05374135 -0.00234578 0.0395844 -0.00243795 0.08625361]
[ 0.04770144 0.08085485 -0.07950226 0.01149448 0.05483034 0.08702312]
[ 0.01026897 0.05670335 0.087634 0.10011385 0.04838173 -0.04394186]
[ 0.14458971 -0.07542491 0.02961972 0.03574933 -0.1111038 0.03269218]
[ 0.11596359 -0.03247185 0.05235395 -0.0600836 -0.06662521 0.0349807 ]
[ 0.10323525 0.01158974 0.1745612 0.00085632 -0.13926363 0.02639571]
[ 0.00666897 0.07704429 0.11052977 0.04496747 -0.12271487 -0.01487018]
[ 0.08590381 0.06928587 0.13328457 -0.03483072 -0.16092479 -0.08771002]
[ 0.15286379 0.06087631 0.14771245 -0.09948061 -0.07283711 -0.13333012]
[ 0.20099333 0.09366754 0.08945104 -0.05917395 -0.08368565 -0.00934734]
[ 0.04065054 0.00358804 -0.01808859 -0.02077919 0.0212387 -0.08033948]
[ 0.08313444 0.08640381 -0.00160073 -0.02923647 -0.03433564 0.06350448]
[-0.02020158 -0.05308037 0.02985067 -0.03921992 -0.03118743 0.03228926]
[-0.06683315 0.03690299 -0.00696427 0.03112697 -0.02011655 0.01792667]
[ 0.05010377 -0.07447167 -0.00803838 -0.01391037 0.05990709 -0.04307606]
[ 0.00203963 -0.00067048 0.05554594 0.07511309 0.06928112 -0.04575794]
[ 0.03747524 -0.06519578 -0.04098183 -0.040111 0.00267218 0.0914906 ]
[ 0.17611992 0.0696538 0.07026698 0.08044767 -0.04419657 0.10615183]
[ 0.06416123 0.0410319 -0.05785089 0.09637652 -0.0672601 0.07314874]
[ 0.05260746 0.04596101 -0.00825373 0.00708481 -0.1175796 0.06192071]
[ 0.07304839 0.14654826 0.0156169 0.05453049 -0.1017371 0.02541858]
[ 0.02187769 0.07340283 0.08459067 0.025456 -0.16767789 0.00702915]
[ 0.09360152 0.17671131 0.08358616 -0.07923687 0.02500546 -0.04286424]
[-0.0473164 0.1387646 -0.00739351 0.00696312 -0.06374584 -0.07322901]
[ 0.00139427 0.1119215 0.06723893 0.03898719 0.07849869 -0.04315067]
[-0.06237566 0.1041767 0.075775 -0.08432952 0.00538813 -0.08120499]
[-0.05326359 0.04191865 -0.05832578 -0.0892785 -0.01206145 -0.02470583]
[ 0.02981657 0.0460008 0.04505556 -0.06547883 0.06372488 0.03433738]
[ 0.02344203 0.07300235 0.03939307 -0.03525153 -0.00249784 0.06868308]
[-0.02707121 -0.01241245 0.03510102 -0.10612556 -0.05960274 -0.05593782]
[ 0.03699509 0.06634322 -0.0383116 -0.0911908 -0.02236472 0.08784393]
[-0.03526923 -0.00932716 0.03537239 -0.02251676 0.08861982 0.03043855]
[-0.02106779 0.10961135 0.07732417 -0.02145649 0.0029918 -0.02276084]
[-0.01878962 0.07995333 -0.02555212 0.05929609 -0.03645477 -0.03330115]
[-0.00434183 0.05053777 0.01905963 0.03820831 0.00653686 -0.00958953]
[-0.05529717 0.02882817 0.04341602 -0.02230118 0.05950479 0.02378485]
[-0.04353496 0.04051781 0.00497058 0.01333212 0.12444106 -0.06053591]
[-0.03716992 0.05778565 -0.03715798 -0.13301019 0.10687227 -0.08828854]
[-0.04016401 -0.00630084 0.00936579 0.01140711 -0.03411772 0.00126085]
[-0.06845575 0.0690188 0.09891833 -0.13741623 0.00302645 -0.05983498]
[-0.10868749 -0.04376632 -0.02390319 0.01988708 0.03176391 0.00045184]
[-0.04874357 -0.00131884 0.06689507 -0.00192092 -0.0231897 -0.04948901]
[-0.04148264 0.03610456 -0.07548529 0.02080825 -0.10622376 -0.00255498]
[-0.13710162 -0.04210557 -0.02226113 -0.07067752 0.01055378 0.01697848]
[ 0.02569921 0.06085956 0.09240148 -0.02902308 -0.00673824 -0.05969966]
[-0.00299338 0.04747175 0.11120199 -0.0211431 -0.04272969 -0.00016657]
[-0.02298886 0.00853862 0.06887483 -0.04512478 0.0101483 -0.08736951]
[-0.0625679 0.06749941 -0.01505921 0.07652339 -0.02619012 -0.06052216]
[ -2.54116729e-02 8.55673701e-02 -8.18568049e-04 -1.87312253e-05
6.77334964e-02 -1.17370918e-01]
[-0.12737264 0.17444804 0.00369026 -0.09669803 0.10768119 -0.10697158]
[-0.14050037 0.18770652 -0.09177223 -0.02691669 0.01427296 -0.09950489]
[-0.10784652 0.11378074 -0.03330221 -0.13316873 0.08811253 0.00634981]
[-0.03255222 0.07455693 0.08990514 -0.02002538 -0.08740292 -0.01427763]
[-0.13758343 0.00388414 0.05839653 -0.06891268 -0.09607254 0.00150393]
[-0.00805338 -0.00641551 0.10732695 0.00825777 -0.09013713 -0.09018692]
[ 0.00634707 0.03748396 -0.04069734 -0.01871664 0.02729673 -0.07527903]
[-0.1054848 -0.05112318 0.00019624 0.002072 -0.06174811 -0.06164182]
[-0.13917063 -0.06504253 0.03217362 0.08001704 0.01362474 -0.00809617]
[-0.1513183 0.03943324 0.09324314 0.02170712 0.0524634 -0.12414398]
[ 0.01536033 -0.02915555 0.06137927 -0.04326545 0.05121922 -0.05635652]
[-0.01775774 0.00028998 0.15172511 -0.04325743 0.08811694 -0.10613346]
[ 0.02413365 0.10830846 -0.0262336 -0.07824454 -0.05339748 -0.05170796]
[-0.00627984 0.19934681 -0.13096158 -0.14640474 -0.05769848 -0.03953461]
[ 0.01103548 0.16765593 -0.09106661 -0.14263006 0.11891822 -0.07948778]
[-0.05621858 0.13540868 -0.10386599 0.02738478 0.04738396 0.00300363]
[ 0.03961071 0.05910172 -0.01274582 -0.05281831 0.0127644 -0.08616414]
[-0.0197908 0.04225454 0.02377734 -0.03850439 -0.17292088 -0.13920163]
[-0.07787354 -0.11750272 0.10362615 -0.05911998 -0.1646657 -0.1171345 ]
[-0.05528364 -0.06021412 0.05757664 0.03915655 -0.02556406 0.00693995]
[ 0.03379917 -0.00753838 -0.00580442 0.05051072 0.06037804 -0.05273503]
[-0.01339275 -0.10471368 0.00201666 -0.04782136 0.0287921 -0.07339031]
[-0.06092492 0.00549107 0.05160923 -0.02539773 0.01820634 -0.12731265]
[-0.07634158 -0.00296087 0.07508548 -0.03017025 0.04381514 -0.20406139]
[-0.03333542 0.11771797 0.10484347 -0.03779281 0.1043502 -0.15738863]
[ 0.00725201 0.00533028 0.12898615 0.01564245 0.100384 0.0501099 ]
[-0.0838731 0.2192701 0.04951849 -0.16150431 -0.08024314 -0.00244305]
[-0.05749775 0.13449143 -0.17180091 0.01945794 0.06970881 -0.01357811]
[ 0.09988805 0.12277343 -0.1578104 -0.02371587 0.15475827 0.05720674]
[ 0.0939876 0.09390745 -0.08190562 -0.01558519 0.16216686 0.0577699 ]
[ 0.02930515 0.14202896 -0.0498645 0.09410011 -0.0905753 -0.09945953]
[-0.02268049 0.02339502 0.03950382 -0.0517937 -0.11349598 -0.08833327]
[ 0.08516674 -0.01251649 0.06132861 -0.03194287 -0.08439503 0.04220618]
[-0.01899105 -0.1045885 0.077949 -0.01958727 0.02808487 0.05675743]
[-0.02527532 -0.05968197 -0.00986177 -0.09452567 0.04833149 -0.03405481]
[-0.00804637 0.04327626 -0.00891493 0.0404686 0.02445388 0.02132383]
[-0.10686646 0.16725922 0.05255186 0.07999355 0.08804691 -0.23960486]
[-0.06965735 0.09843512 0.07647634 0.03474835 0.07674371 -0.15451641]
[ 0.03860416 0.07051504 0.10006531 0.13294117 0.05803839 -0.05538081]
[ 0.06737103 0.03752292 0.04055005 0.06125275 -0.01747492 0.11705406]
[-0.08042438 0.22490498 -0.07153002 -0.00587896 -0.03975414 0.0298453 ]
[ 0.11747472 0.07065479 -0.18580684 0.01055077 -0.00294106 -0.09013598]
[ 0.16344641 0.09852085 -0.02280455 0.06350011 0.10644891 -0.07841627]
[ 0.04090171 0.0539231 0.03191345 0.08975121 0.07354202 -0.05085464]
[ 0.06949573 0.02994953 0.11174608 0.14790806 0.01851488 -0.03612482]
[ 0.00102438 -0.03996995 0.04026007 0.00085368 -0.09988602 0.07002623]
[ 0.09183715 0.06067352 -0.05980634 -0.01438377 -0.09251653 0.11715078]
[-0.00386922 -0.10703073 0.07787523 0.02702085 0.08162405 -0.0717581 ]
[-0.07778512 -0.06356189 0.05408532 -0.07408235 -0.09237519 0.0507946 ]
[-0.07788907 0.04865259 -0.1389758 0.0692102 0.05811313 0.07969563]
[ 0.07354986 -0.03002382 0.0805547 0.2104647 0.02959449 -0.17809092]
[ 0.08451089 0.06469505 0.09785924 0.11381891 -0.0139894 -0.03917938]
[ 0.13918306 -0.00193001 0.09200746 0.16640072 -0.06110357 -0.10224576]
[ 0.16877943 -0.02561516 0.00987721 0.08890694 -0.10235725 -0.02215788]
[ 0.02761451 0.1507888 -0.18001552 0.00864334 -0.06495474 -0.03670621]
[ 0.11245865 0.04967188 -0.07628254 0.03296975 0.04564534 -0.17311384]
[ 0.07160819 0.09903321 -0.04564312 0.02617879 0.00071424 -0.09162144]
[ 0.03731268 0.07952689 0.08011412 0.1758664 0.07917697 -0.14210425]
[ 0.00843566 -0.01549498 0.10385882 0.12704365 -0.00305744 -0.13962168]
[ 0.03056654 0.01458561 0.05228808 0.03149624 -0.14009424 0.07575957]
[ 0.03985079 -0.02655031 -0.12152576 0.07518446 0.03604449 0.06937316]
[-0.04599643 -0.05598318 -0.07937098 -0.00095775 0.03727443 0.01474119]
[ 0.04567473 0.01753204 -0.06741867 -0.06794508 -0.10253347 0.04230488]
[ 0.04462462 0.01107366 -0.08735126 0.06885814 0.03471483 0.01292751]
[ 0.00354577 -0.10288887 0.04419841 0.070103 0.09000523 -0.00721082]
[ 0.03796019 0.00535762 -0.0348375 0.12293369 -0.00022215 -0.04892039]
[ 0.1114773 -0.02669583 0.02275812 0.13235387 -0.06369523 -0.14881867]
[ 0.06753314 0.09824273 -0.04225203 -0.00379097 -0.0951763 -0.12207072]
[-0.03115888 0.20993769 0.03000314 -0.09485421 -0.0499488 -0.14387868]
[-0.04423478 -0.00696093 0.03348728 0.03983349 0.04265827 -0.11249048]
[-0.04539219 0.01402776 0.01623834 0.12802741 0.04629254 -0.07377387]
[-0.06900846 -0.00894165 0.1098303 0.11562915 -0.05746885 -0.01682089]
[ 0.01336491 -0.06197079 0.0806931 0.17345402 0.00053334 0.02890994]
[ 0.09189578 0.014551 -0.01489017 0.10595137 -0.00745042 -0.03500335]
[ 0.12131631 -0.04129434 -0.02708133 0.20030521 -0.01266863 -0.00490865]
[ 0.06412533 -0.07589801 -0.07554442 0.09276785 0.0441382 0.0288954 ]
[ 0.07487261 -0.05734253 -0.00203311 -0.0025931 -0.01919963 0.11948007]
[ 0.06480353 -0.07415973 -0.1177642 0.01564089 0.06338034 0.09492156]
[ 0.00965498 -0.02282054 0.06747879 0.05969336 0.01784871 0.02390275]
[-0.02003992 0.00335185 -0.04478868 0.03818144 -0.0128112 0.01287017]
[-0.04065459 -0.02603371 -0.02571965 0.03965176 -0.09204208 -0.03935078]
[-0.04766786 0.1082798 -0.01631091 0.00945273 -0.05310407 -0.02537967]
[-0.12904282 0.07982964 -0.02384206 -0.03279427 0.04572337 -0.15440276]
[-0.09009448 0.01916551 0.05756131 0.02075357 0.03775262 -0.06985762]
[-0.00639182 0.01354402 0.11129715 0.1652976 -0.0169815 0.00177091]
[ 0.01051939 0.01672464 0.01024784 0.10512043 -0.04685514 -0.01185065]
[ 0.12266921 -0.05120954 0.09076186 0.11903457 -0.02679647 0.06785123]
[ 0.19854587 -0.117159 -0.06767677 0.107568 -0.08477832 0.02565571]
[ 0.14433281 -0.03654353 -0.03057992 0.13306198 0.02715246 0.01270683]
[-0.04808082 0.03179479 0.03737011 0.04716397 0.00207957 -0.00929967]
[-0.02339549 -0.06695335 -0.0845779 -0.05967411 -0.00260638 0.13282208]
[ 0.06550924 0.00990529 -0.06323579 0.06805041 -0.01212648 0.18428057]
[-0.01365907 -0.04004657 -0.05474987 0.00434948 -0.12250189 0.1412147 ]
[-0.07735828 0.00387685 -0.02766323 0.026003 -0.02776339 0.04378674]
[-0.03873186 -0.02842149 -0.00020299 -0.05387518 -0.02083273 0.06436604]
[-0.04522833 0.0598625 -0.04362006 -0.019587 0.02535262 -0.00996901]
[-0.21301249 0.00997172 -0.02179454 -0.06571198 0.01145528 -0.10527243]
[-0.10750125 -0.08637699 0.02337158 0.04361615 0.00198748 0.0242748 ]
[-0.0601953 0.02206491 0.14650838 0.02445182 -0.06929463 0.04244595]
[-0.07250794 0.00332874 0.1676188 -0.04659299 -0.01623251 -0.00857764]
[ 0.12495214 -0.06444436 0.15579903 0.12007456 0.02670288 -0.0063992 ]
[ 0.08592647 -0.08190694 -0.00897303 0.0336335 0.04025805 -0.02552569]
[ 0.10188218 -0.00358185 -0.05461309 0.01297983 0.04758385 -0.06159959]
[ 0.01163076 -0.04872379 -0.06203174 0.02760573 -0.05980317 -0.04863126]
[ 0.03646083 -0.07539698 0.01410214 -0.0709289 -0.08435534 0.04741009]
[ 0.00470874 -0.06506602 -0.08467245 -0.02716547 -0.01306493 0.01949632]
[-0.00986365 -0.05028993 0.01529244 -0.07219611 -0.0805352 0.00282447]
[ 0.00424298 0.07829684 0.05605292 -0.01308339 0.0289993 0.02352933]
[-0.01316212 -0.00870331 -0.06443617 -0.05398602 0.03620742 0.03515423]
[-0.10057551 0.1030671 -0.13465314 -0.0894946 0.00834181 -0.08463646]
[-0.14764331 0.12393782 -0.07032378 -0.22490115 0.00489908 -0.04571831]
[-0.07405813 0.03586433 0.01903501 -0.14590773 0.05224146 0.02445566]
[-0.12631416 0.06733298 0.08068271 -0.05959218 0.06698295 0.08723918]
[-0.00939874 0.11931802 -0.01462062 -0.04392191 0.02908198 -0.00908836]
[ 0.07750757 0.06759952 -0.06454051 0.07551301 0.0246576 -0.05272722]
[-0.03650379 0.00216671 0.05520288 0.05594345 0.10305379 -0.01561263]
[ 0.06591907 -0.00408788 -0.00857377 0.08742893 -0.06154362 -0.00725429]
[ 0.0087805 -0.04396345 0.03727139 -0.06167185 0.06584529 0.05273139]
[ 0.02052966 -0.05413838 -0.07269059 -0.07978906 0.00905581 0.08197328]
[-0.013017 -0.08677133 -0.03951392 -0.05108723 0.03192541 0.00130963]
[ 0.02195974 0.01471327 -0.041569 -0.12173618 0.05705425 0.04977322]
[ 0.03240337 0.02462504 -0.04132796 -0.1322763 -0.02947292 0.06270567]
[-0.13428003 -0.05802382 -0.05476358 -0.08751541 -0.08120316 -0.06807917]
[-0.11075317 0.03772194 -0.14089853 -0.15388365 0.0321135 -0.04277958]
[-0.00949441 -0.06845162 -0.2019366 -0.07418422 0.01424402 -0.12506512]
[-0.0238974 -0.02651614 -0.18607937 -0.04616588 0.05341812 -0.04659864]
[-0.05222721 -0.03627942 -0.03076171 -0.07781958 -0.03211349 -0.07249965]
[-0.03002287 0.0497722 0.03813708 -0.03756813 0.09162919 0.0187449 ]
[-0.08644168 -0.07621741 -0.01266451 -0.10259596 -0.05855577 -0.07815471]
[-0.08951465 0.0759464 -0.02579235 -0.0925295 -0.04764029 0.07599392]
[ 0.01192029 0.07872926 -0.00607419 -0.03133556 -0.04657785 -0.02395956]
[-0.01545587 0.05089727 -0.05282832 0.07024089 0.02201224 0.00413834]
[-0.02432071 -0.08790541 0.04755025 -0.08306738 -0.001583 0.0578929 ]
[ 0.00583684 0.02882309 0.0197444 0.06982818 -0.03355337 -0.00238172]
[ 0.01387064 0.06662367 0.0507287 0.00070776 0.06749933 -0.02065942]
[ 0.02084344 0.01149368 -0.07983389 -0.05000533 -0.00404061 -0.06852283]
[-0.0201003 -0.03622016 -0.03217192 -0.04475833 -0.08523814 0.00879569]
[-0.0176454 -0.07809188 -0.06454822 -0.02917211 0.03038744 -0.09238425]
[-0.00707183 -0.06940052 -0.14832528 -0.01780364 -0.00063082 -0.07328011]
[-0.0002371 0.05508889 -0.12647893 -0.03493948 0.10342947 -0.07538689]
[ 0.04803584 0.02707655 -0.00957456 0.05970713 0.0101542 0.00793176]
[-0.09571744 -0.07614208 -0.05025434 -0.02070732 0.00741233 -0.07638752]
[-0.05636562 -0.01113752 -0.05606962 -0.01220792 0.09568168 0.09122528]
[ 0.06123243 0.03073761 0.05206808 -0.02998531 0.05541601 0.0519431 ]
[ 0.01458779 -0.05481991 -0.03217587 0.04852049 -0.02968468 0.01751289]
[-0.08481023 -0.03858817 -0.08736905 -0.07551552 -0.0264373 -0.00050544]
[-0.0509212 0.21145831 0.02917097 -0.01802795 -0.04289549 -0.11567215]
layer 8 Activation
relu
layer 9 Dropout
layer 10 Dense
6 10
[-0.23694457 -0.26670206 0.5543111 -0.21170883 0.14601716 -0.41281283
0.5825159 -0.61968309 -0.19427252 -0.33199763]
[-0.52713776 0.65657002 -0.45421809 -0.71776617 0.57379711 -0.51777285
0.51330757 -0.64001513 -0.2265407 -0.15891935]
[ 0.31625733 -0.79113078 -0.53154278 -0.37023902 -0.448488 0.24989481
0.26083246 -0.65155941 0.05267058 -0.56062621]
[ 0.49898064 -0.5559637 0.3755132 -0.38629225 -0.48913252 -0.5279811
0.27443361 -0.63590944 -0.41930044 -0.63219696]
[-0.16015369 -0.84459484 -0.03653246 -0.20003419 0.74036539 -0.1439731
-0.30219018 -0.09053656 -0.10922217 0.69687885]
[-0.44375637 -0.60822946 0.24794576 0.37305298 -0.44981813 0.12429843
-0.7043792 -0.54847002 -0.0192339 -0.58583635]
[-0.64684528 0.31195766 -0.46403262 0.07613805 -0.43853578 0.09954185
-0.82225633 0.90443391 0.60570228 0.24354704]
layer 11 Activation
softmax
================================================
FILE: example/mnist_cnn_one_iteration.py
================================================
'''
Save CNN network and one sample of train data.
Run one iteration of training of convnet on the MNIST dataset.
'''
from __future__ import print_function
import numpy as np
np.random.seed(1337)  # for reproducibility
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.utils import np_utils

# Training configuration (Keras 1.x API: nb_* names, border_mode).
batch_size = 128
nb_classes = 10
nb_epoch = 1
# input image dimensions
img_rows, img_cols = 28, 28
# number of convolutional filters to use
nb_filters = 4
# size of pooling area for max pooling
nb_pool = 2
# convolution kernel size
nb_conv = 3

# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# reshape to (samples, channels, rows, cols) -- Theano 'th' dim ordering
X_train = X_train.reshape(X_train.shape[0], 1, img_rows, img_cols)
X_test = X_test.reshape(X_test.shape[0], 1, img_rows, img_cols)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255  # scale pixel values into [0, 1]
X_test /= 255
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices (one-hot)
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)

# Small CNN: conv-relu-conv-relu-pool-dropout-flatten-dense-relu-dropout-dense-softmax.
# This is the architecture the C++ side (keras_model.cc) is written to support.
model = Sequential()
model.add(Convolution2D(nb_filters, nb_conv, nb_conv, border_mode='same',
                        input_shape=(1, img_rows, img_cols)))
model.add(Activation('relu'))
model.add(Convolution2D(nb_filters, nb_conv, nb_conv, border_mode='same'))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(6))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer='adadelta')
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch,
          verbose=1, validation_data=(X_test, Y_test))

# store model: architecture as JSON, weights as HDF5
with open('./my_nn_arch.json', 'w') as fout:
    fout.write(model.to_json())
model.save_weights('./my_nn_weights.h5', overwrite=True)

# store one sample in text file: header "depth rows cols", then one image
# row per line (format read back by DataChunk2D::read_from_file in C++)
with open("./sample_mnist.dat", "w") as fin:
    fin.write("1 28 28\n")
    a = X_train[0,0]
    for b in a:
        fin.write(str(b)+'\n')  # numpy row rendered via str() as "[ v1 v2 ... ]"

# get prediction on saved sample
# c++ output should be the same ;)
print('Prediction on saved sample:')
print(str(model.predict(X_train[:1])))
# on my pc I got:
#[[ 0.03729606  0.00783805  0.06588034  0.21728528  0.01093729  0.34730983
#   0.01350389  0.02174525  0.26624694  0.01195715]]
================================================
FILE: example/my_nn_arch.json
================================================
{"class_name": "Sequential", "config": [{"class_name": "Convolution2D", "config": {"b_regularizer": null, "W_constraint": null, "b_constraint": null, "name": "convolution2d_1", "activity_regularizer": null, "trainable": true, "dim_ordering": "th", "nb_col": 3, "subsample": [1, 1], "init": "glorot_uniform", "nb_filter": 4, "input_dtype": "float32", "border_mode": "same", "batch_input_shape": [null, 1, 28, 28], "W_regularizer": null, "activation": "linear", "nb_row": 3}}, {"class_name": "Activation", "config": {"activation": "relu", "trainable": true, "name": "activation_1"}}, {"class_name": "Convolution2D", "config": {"W_constraint": null, "b_constraint": null, "name": "convolution2d_2", "activity_regularizer": null, "trainable": true, "dim_ordering": "th", "nb_col": 3, "subsample": [1, 1], "init": "glorot_uniform", "nb_filter": 4, "border_mode": "same", "b_regularizer": null, "W_regularizer": null, "activation": "linear", "nb_row": 3}}, {"class_name": "Activation", "config": {"activation": "relu", "trainable": true, "name": "activation_2"}}, {"class_name": "MaxPooling2D", "config": {"name": "maxpooling2d_1", "trainable": true, "dim_ordering": "th", "pool_size": [2, 2], "strides": [2, 2], "border_mode": "valid"}}, {"class_name": "Dropout", "config": {"p": 0.25, "trainable": true, "name": "dropout_1"}}, {"class_name": "Flatten", "config": {"trainable": true, "name": "flatten_1"}}, {"class_name": "Dense", "config": {"W_constraint": null, "b_constraint": null, "name": "dense_1", "activity_regularizer": null, "trainable": true, "init": "glorot_uniform", "bias": true, "input_dim": null, "b_regularizer": null, "W_regularizer": null, "activation": "linear", "output_dim": 6}}, {"class_name": "Activation", "config": {"activation": "relu", "trainable": true, "name": "activation_3"}}, {"class_name": "Dropout", "config": {"p": 0.5, "trainable": true, "name": "dropout_2"}}, {"class_name": "Dense", "config": {"W_constraint": null, "b_constraint": null, "name": "dense_2", 
"activity_regularizer": null, "trainable": true, "init": "glorot_uniform", "bias": true, "input_dim": null, "b_regularizer": null, "W_regularizer": null, "activation": "linear", "output_dim": 10}}, {"class_name": "Activation", "config": {"activation": "softmax", "trainable": true, "name": "activation_4"}}]}
================================================
FILE: example_main.cc
================================================
#include "keras_model.h"
#include <iostream>
using namespace std;
using namespace keras;
// Step 1
// Dump keras model and input sample into text files
// python dump_to_simple_cpp.py -a example/my_nn_arch.json -w example/my_nn_weights.h5 -o example/dumped.nnet
// Step 2
// Use text files in c++ example. To compile:
// g++ keras_model.cc example_main.cc
// To execute:
// a.out
int main() {
cout << "This is simple example with Keras neural network model loading into C++.\n"
<< "Keras model will be used in C++ for prediction only." << endl;
DataChunk *sample = new DataChunk2D();
sample->read_from_file("./example/sample_mnist.dat");
std::cout << sample->get_3d().size() << std::endl;
KerasModel m("./example/dumped.nnet", true);
m.compute_output(sample);
delete sample;
return 0;
}
================================================
FILE: keras_model.cc
================================================
#include "keras_model.h"
#include <iostream>
#include <fstream>
#include <algorithm>
#include <math.h>
using namespace std;
// Reads a bracketed row "[ v1 v2 ... vN ]" from the stream and returns
// the N float values.  Relies on operator>> skipping whitespace.
std::vector<float> keras::read_1d_array(std::ifstream &fin, int cols) {
  std::vector<float> values;
  values.reserve(cols);
  char bracket;
  fin >> bracket;                 // consume the leading '['
  for(int i = 0; i < cols; ++i) {
    float v;
    fin >> v;
    values.push_back(v);
  }
  fin >> bracket;                 // consume the trailing ']'
  return values;
}
// Loads a 3D sample from a text file: a "depth rows cols" header line,
// followed by depth*rows bracketed rows of cols floats each.
void keras::DataChunk2D::read_from_file(const std::string &fname) {
  std::ifstream fin(fname.c_str());
  fin >> m_depth >> m_rows >> m_cols;
  for(int d = 0; d < m_depth; ++d) {
    std::vector<std::vector<float> > plane;
    plane.reserve(m_rows);
    for(int r = 0; r < m_rows; ++r) {
      plane.push_back(keras::read_1d_array(fin, m_cols));
    }
    data.push_back(plane);
  }
  fin.close();
}
// Parses one Convolution2D layer from the dump: a header line
// "kernels depth rows cols border_mode", then kernels*depth bracketed
// weight matrices, then one bracketed bias vector (one bias per kernel).
void keras::LayerConv2D::load_weights(std::ifstream &fin) {
  char tmp_char = ' ';
  string tmp_str = "";
  float tmp_float;
  bool skip = false;
  fin >> m_kernels_cnt >> m_depth >> m_rows >> m_cols >> m_border_mode;
  // Older dumps omit the border-mode token, so the first '[' of the
  // weights gets read into it instead; default to "valid" and remember
  // that the opening bracket was already consumed (skip flag).
  if (m_border_mode == "[") { m_border_mode = "valid"; skip = true; }

  // reading kernel weights
  for(int k = 0; k < m_kernels_cnt; ++k) {
    vector<vector<vector<float> > > tmp_depths;
    for(int d = 0; d < m_depth; ++d) {
      vector<vector<float> > tmp_single_depth;
      for(int r = 0; r < m_rows; ++r) {
        if (!skip) { fin >> tmp_char; } // for '['
        else { skip = false; }          // bracket already consumed above
        vector<float> tmp_row;
        for(int c = 0; c < m_cols; ++c) {
          fin >> tmp_float;
          tmp_row.push_back(tmp_float);
        }
        fin >> tmp_char; // for ']'
        tmp_single_depth.push_back(tmp_row);
      }
      tmp_depths.push_back(tmp_single_depth);
    }
    m_kernels.push_back(tmp_depths);
  }
  // reading kernel biases
  fin >> tmp_char; // for '['
  for(int k = 0; k < m_kernels_cnt; ++k) {
    fin >> tmp_float;
    m_bias.push_back(tmp_float);
  }
  fin >> tmp_char; // for ']'
}
// Reads the activation name for this layer (e.g. "relu", "softmax").
void keras::LayerActivation::load_weights(std::ifstream &fin) {
  fin >> m_activation_type;
}
// Reads the pooling window size (height, width) for this layer.
void keras::LayerMaxPooling::load_weights(std::ifstream &fin) {
  fin >> m_pool_x >> m_pool_y;
}
// Parses one Dense layer: a header "input_cnt neurons", then input_cnt
// bracketed weight rows of `neurons` floats each, then one bracketed
// bias vector of `neurons` floats.
void keras::LayerDense::load_weights(std::ifstream &fin) {
  fin >> m_input_cnt >> m_neurons;
  float tmp_float;
  char tmp_char = ' ';
  for(int i = 0; i < m_input_cnt; ++i) {
    vector<float> tmp_n;
    fin >> tmp_char; // for '['
    for(int n = 0; n < m_neurons; ++n) {
      fin >> tmp_float;
      tmp_n.push_back(tmp_float);
    }
    fin >> tmp_char; // for ']'
    m_weights.push_back(tmp_n);
  }
  fin >> tmp_char; // for '['
  for(int n = 0; n < m_neurons; ++n) {
    fin >> tmp_float;
    m_bias.push_back(tmp_float);
  }
  fin >> tmp_char; // for ']'
}
// Builds the model by parsing the dumped-network text file; verbose
// enables progress logging during loading.
keras::KerasModel::KerasModel(const string &input_fname, bool verbose)
    : m_verbose(verbose) {
  load_weights(input_fname);
}
// Flattens a depth x rows x cols chunk into a single 1D vector in
// (depth, row, col) order.  Returns a newly allocated chunk the caller
// must delete.
keras::DataChunk* keras::LayerFlatten::compute_output(keras::DataChunk* dc) {
  vector<vector<vector<float> > > const & im = dc->get_3d();
  size_t rows = im[0].size();
  size_t cols = im[0][0].size();
  keras::DataChunkFlat *out = new DataChunkFlat(im.size() * rows * cols);
  vector<float> & flat = out->get_1d_rw();
  size_t dst = 0;
  for(size_t d = 0; d < im.size(); ++d) {
    for(size_t r = 0; r < rows; ++r) {
      for(size_t c = 0; c < cols; ++c) {
        flat[dst++] = im[d][r][c];
      }
    }
  }
  return out;
}
// Max pooling over non-overlapping m_pool_x x m_pool_y windows.
// Trailing rows/cols that do not fill a complete window are dropped
// (integer division), matching Keras border_mode='valid' pooling.
// Returns a newly allocated chunk the caller must delete.
// Fixes vs. original: the input is taken by const reference instead of
// a full deep copy, and the per-window temporary vector + max_element
// is replaced by a running maximum -- same results, no allocations.
keras::DataChunk* keras::LayerMaxPooling::compute_output(keras::DataChunk* dc) {
  vector<vector<vector<float> > > const & im = dc->get_3d();
  size_t out_rows = im[0].size() / m_pool_x;
  size_t out_cols = im[0][0].size() / m_pool_y;
  vector<vector<vector<float> > > y_ret(im.size(),
      vector<vector<float> >(out_rows, vector<float>(out_cols, 0.0)));
  for(size_t d = 0; d < im.size(); ++d) {
    for(size_t x = 0; x < out_rows; ++x) {
      size_t start_x = x * m_pool_x;
      for(size_t y = 0; y < out_cols; ++y) {
        size_t start_y = y * m_pool_y;
        // running max over the window
        float best = im[d][start_x][start_y];
        for(size_t i = start_x; i < start_x + m_pool_x; ++i) {
          for(size_t j = start_y; j < start_y + m_pool_y; ++j) {
            if(im[d][i][j] > best) best = im[d][i][j];
          }
        }
        y_ret[d][x][y] = best;
      }
    }
  }
  keras::DataChunk *out = new keras::DataChunk2D();
  out->set_data(y_ret);
  return out;
}
// Reports an unsupported activation name on stdout and terminates the
// whole process with exit status 1.
void keras::missing_activation_impl(const string &act) {
  cout << "Activation " << act << " not defined!" << endl
       << "Please add its implementation before use." << endl;
  exit(1);
}
// Applies the activation element-wise and returns a new chunk.
// 3D chunks support only "relu"; 1D chunks support "relu", "softmax",
// "sigmoid" and "tanh".  Any other name aborts the process via
// missing_activation_impl().
keras::DataChunk* keras::LayerActivation::compute_output(keras::DataChunk* dc) {
  if (dc->get_data_dim() == 3) {
    // copy on purpose: modified in place, then handed to the new chunk
    vector<vector<vector<float> > > y = dc->get_3d();
    if(m_activation_type == "relu") {
      for(unsigned int i = 0; i < y.size(); ++i) {
        for(unsigned int j = 0; j < y[0].size(); ++j) {
          for(unsigned int k = 0; k < y[0][0].size(); ++k) {
            if(y[i][j][k] < 0) y[i][j][k] = 0;
          }
        }
      }
      keras::DataChunk *out = new keras::DataChunk2D();
      out->set_data(y);
      return out;
    } else {
      keras::missing_activation_impl(m_activation_type); // exits the process
    }
  } else if (dc->get_data_dim() == 1) { // flat data, use 1D
    vector<float> y = dc->get_1d();
    if(m_activation_type == "relu") {
      for(unsigned int k = 0; k < y.size(); ++k) {
        if(y[k] < 0) y[k] = 0;
      }
    } else if(m_activation_type == "softmax") {
      // exponentiate then normalize; no max-subtraction stabilization
      float sum = 0.0;
      for(unsigned int k = 0; k < y.size(); ++k) {
        y[k] = exp(y[k]);
        sum += y[k];
      }
      for(unsigned int k = 0; k < y.size(); ++k) {
        y[k] /= sum;
      }
    } else if(m_activation_type == "sigmoid") {
      for(unsigned int k = 0; k < y.size(); ++k) {
        y[k] = 1/(1+exp(-y[k]));
      }
    } else if(m_activation_type == "tanh") {
      for(unsigned int k = 0; k < y.size(); ++k) {
        y[k] = tanh(y[k]);
      }
    } else {
      keras::missing_activation_impl(m_activation_type); // exits the process
    }
    keras::DataChunk *out = new DataChunkFlat();
    out->set_data(y);
    return out;
  } else { throw "data dim not supported"; }
  // unreachable in practice: every path above returns, throws or exits
  return dc;
}
// with border mode = valid
std::vector< std::vector<float> > keras::conv_single_depth_valid(
std::vector< std::vector<float> > const & im,
std::vector< std::vector<float> > const & k)
{
size_t k1_size = k.size(), k2_size = k[0].size();
unsigned int st_x = (k1_size - 1) >> 1;
unsigned int st_y = (k2_size - 1) >> 1;
std::vector< std::vector<float> > y(im.size() - 2*st_x, vector<float>(im[0].size() - 2*st_y, 0));
for(unsigned int i = st_x; i < im.size()-st_x; ++i) {
for(unsigned int j = st_y; j < im[0].size()-st_y; ++j) {
float sum = 0;
for(unsigned int k1 = 0; k1 < k.size(); ++k1) {
//const float * k_data = k[k1_size-k1-1].data();
//const float * im_data = im[i-st_x+k1].data();
for(unsigned int k2 = 0; k2 < k[0].size(); ++k2) {
sum += k[k1_size-k1-1][k2_size-k2-1] * im[i-st_x+k1][j-st_y+k2];
}
}
y[i-st_x][j-st_y] = sum;
}
}
return y;
}
// with border mode = same
// Zero-padded single-plane 2D convolution (kernel flipped in both axes);
// the output has the same spatial size as the input.
// Fix vs. original: the bounds tests `i-st_x+k1 < 0` were computed on
// unsigned operands, so they were always false and out-of-range accesses
// were only avoided by accident (the wrapped-around huge value happened
// to fail the `> max` test).  Signed indices make the padding check
// explicit, correct and warning-free; results are unchanged.
std::vector< std::vector<float> > keras::conv_single_depth_same(
  std::vector< std::vector<float> > const & im,
  std::vector< std::vector<float> > const & k)
{
  const int k1_size = (int)k.size();
  const int k2_size = (int)k[0].size();
  // half-kernel offsets, same formula as the valid-mode variant
  const int st_x = (k1_size - 1) >> 1;
  const int st_y = (k2_size - 1) >> 1;
  const int rows = (int)im.size();
  const int cols = (int)im[0].size();
  std::vector< std::vector<float> > y(rows, std::vector<float>(cols, 0.0f));
  for(int i = 0; i < rows; ++i) {
    for(int j = 0; j < cols; ++j) {
      float sum = 0.0f;
      for(int k1 = 0; k1 < k1_size; ++k1) {
        const int ii = i - st_x + k1;       // signed: may fall outside the image
        if(ii < 0 || ii >= rows) continue;  // zero padding: skip out-of-range rows
        for(int k2 = 0; k2 < k2_size; ++k2) {
          const int jj = j - st_y + k2;
          if(jj < 0 || jj >= cols) continue; // zero padding: skip out-of-range cols
          // kernel flipped in both axes => true convolution
          sum += k[k1_size-k1-1][k2_size-k2-1] * im[ii][jj];
        }
      }
      y[i][j] = sum;
    }
  }
  return y;
}
// Convolutional forward pass.  Output depth equals the kernel count;
// each output plane is the sum over input depths of a single-depth 2D
// convolution, plus that kernel's bias.  Border mode "valid" shrinks
// the spatial size by (kernel-1) per axis; "same" preserves it.
// Returns a newly allocated chunk the caller must delete.
keras::DataChunk* keras::LayerConv2D::compute_output(keras::DataChunk* dc) {
  unsigned int st_x = (m_kernels[0][0].size()-1) >> 1;    // half kernel height
  unsigned int st_y = (m_kernels[0][0][0].size()-1) >> 1; // half kernel width
  vector< vector< vector<float> > > y_ret;
  auto const & im = dc->get_3d();
  size_t size_x = (m_border_mode == "valid")? im[0].size() - 2 * st_x : im[0].size();
  size_t size_y = (m_border_mode == "valid")? im[0][0].size() - 2 * st_y: im[0][0].size();
  // allocate the zero-initialized output: kernels x size_x x size_y
  for(unsigned int i = 0; i < m_kernels.size(); ++i) { // depth
    vector<vector<float> > tmp;
    tmp.reserve(size_x);
    for(unsigned int j = 0; j < size_x; ++j) { // rows
      tmp.emplace_back(vector<float>(size_y, 0.0));
    }
    y_ret.push_back(tmp);
  }
  for(unsigned int j = 0; j < m_kernels.size(); ++j) { // loop over kernels
    for(unsigned int m = 0; m < im.size(); ++m) { // loop over image depth
      vector<vector<float> > tmp_w = (m_border_mode == "valid")?
        keras::conv_single_depth_valid(im[m], m_kernels[j][m]) :
        keras::conv_single_depth_same(im[m], m_kernels[j][m]);
      // accumulate this input depth's contribution into the output plane
      for(unsigned int x = 0; x < tmp_w.size(); ++x) {
        for(unsigned int y = 0; y < tmp_w[0].size(); ++y) {
          y_ret[j][x][y] += tmp_w[x][y];
        }
      }
    }
    // add the per-kernel bias to every element of the output plane
    for(unsigned int x = 0; x < y_ret[0].size(); ++x) {
      for(unsigned int y = 0; y < y_ret[0][0].size(); ++y) {
        y_ret[j][x][y] += m_bias[j];
      }
    }
  }
  keras::DataChunk *out = new keras::DataChunk2D();
  out->set_data(y_ret);
  return out;
}
// Dense forward pass: out[n] = sum_j m_weights[j][n] * x[j] + m_bias[n].
// The inner loop over neurons is manually unrolled by 8 as a hand
// vectorization aid; the trailing while-loop finishes neuron counts that
// are not divisible by 8.  Returns a newly allocated chunk the caller
// must delete.
keras::DataChunk* keras::LayerDense::compute_output(keras::DataChunk* dc) {
  size_t size = m_weights[0].size();  // number of neurons
  size_t size8 = size >> 3;           // complete groups of 8 for the unrolled loop
  keras::DataChunkFlat *out = new DataChunkFlat(size, 0);
  float * y_ret = out->get_1d_rw().data();
  auto const & im = dc->get_1d();
  for (size_t j = 0; j < m_weights.size(); ++j) { // iter over input
    const float * w = m_weights[j].data();
    float p = im[j];
    size_t k = 0;
    for (size_t i = 0; i < size8; ++i) { // iter over neurons
      y_ret[k] += w[k] * p; // vectorize if you can
      y_ret[k+1] += w[k+1] * p;
      y_ret[k+2] += w[k+2] * p;
      y_ret[k+3] += w[k+3] * p;
      y_ret[k+4] += w[k+4] * p;
      y_ret[k+5] += w[k+5] * p;
      y_ret[k+6] += w[k+6] * p;
      y_ret[k+7] += w[k+7] * p;
      k += 8;
    }
    while (k < size) { y_ret[k] += w[k] * p; ++k; } // remainder neurons
  }
  for (size_t i = 0; i < size; ++i) { // add biases
    y_ret[i] += m_bias[i];
  }
  return out;
}
// Runs the input chunk through every layer in order and returns the
// final flat output vector.  Intermediate chunks are deleted as soon as
// they are consumed; the caller keeps ownership of `dc`.
// NOTE(review): assumes at least one layer and a flat (1D) final layer
// output; an empty layer list would dereference a null pointer here.
std::vector<float> keras::KerasModel::compute_output(keras::DataChunk *dc) {
  dc->show_name();
  keras::DataChunk *inp = dc;
  keras::DataChunk *out = 0;
  for(int l = 0; l < (int)m_layers.size(); ++l) {
    out = m_layers[l]->compute_output(inp);
    if(inp != dc) delete inp; // free intermediate chunks, never the caller's input
    inp = 0L;
    inp = out;
  }
  std::vector<float> flat_out = out->get_1d();
  out->show_values(); // always prints the final output to stdout
  delete out;
  return flat_out;
}
// Parses the dumped-network file: a "layers N" header, then for every
// layer a "layer <idx> <type>" line followed by that layer type's own
// weight format.  Dropout layers are skipped entirely since they are a
// no-op at prediction time.
void keras::KerasModel::load_weights(const string &input_fname) {
  if(m_verbose) cout << "Reading model from " << input_fname << endl;
  ifstream fin(input_fname.c_str());
  string layer_type = "";
  string tmp_str = "";
  int tmp_int = 0;
  fin >> tmp_str >> m_layers_cnt; // header: "layers <count>"
  if(m_verbose) cout << "Layers " << m_layers_cnt << endl;
  for(int layer = 0; layer < m_layers_cnt; ++layer) { // iterate over layers
    fin >> tmp_str >> tmp_int >> layer_type; // "layer <idx> <type>"
    if(m_verbose) cout << "Layer " << tmp_int << " " << layer_type << endl;
    Layer *l = 0L;
    if(layer_type == "Convolution2D") {
      l = new LayerConv2D();
    } else if(layer_type == "Activation") {
      l = new LayerActivation();
    } else if(layer_type == "MaxPooling2D") {
      l = new LayerMaxPooling();
    } else if(layer_type == "Flatten") {
      l = new LayerFlatten();
    } else if(layer_type == "Dense") {
      l = new LayerDense();
    } else if(layer_type == "Dropout") {
      continue; // we dont need dropout layer in prediction mode
    }
    if(l == 0L) {
      // Unknown layer type: stop loading; the model stays partially built.
      cout << "Layer is empty, maybe it is not defined? Cannot define network." << endl;
      return;
    }
    l->load_weights(fin);
    m_layers.push_back(l);
  }
  fin.close();
}
// Frees every layer owned by the model.
keras::KerasModel::~KerasModel() {
  for (Layer *layer : m_layers) {
    delete layer;
  }
}
// Returns the unit count of the last layer that reports a nonzero
// output size (Activation/Flatten/MaxPooling layers report 0).
int keras::KerasModel::get_output_length() const
{
  int idx = (int)m_layers.size() - 1;
  while (idx > 0 && m_layers[idx]->get_output_units() == 0) {
    --idx;
  }
  return m_layers[idx]->get_output_units();
}
================================================
FILE: keras_model.h
================================================
#ifndef KERAS_MODEL__H
#define KERAS_MODEL__H
#include <string>
#include <vector>
#include <fstream>
#include <iostream>
namespace keras
{
std::vector<float> read_1d_array(std::ifstream &fin, int cols);
void missing_activation_impl(const std::string &act);
std::vector< std::vector<float> > conv_single_depth_valid(std::vector< std::vector<float> > const & im, std::vector< std::vector<float> > const & k);
std::vector< std::vector<float> > conv_single_depth_same(std::vector< std::vector<float> > const & im, std::vector< std::vector<float> > const & k);
class DataChunk;
class DataChunk2D;
class DataChunkFlat;
class Layer;
class LayerFlatten;
class LayerMaxPooling;
class LayerActivation;
class LayerConv2D;
class LayerDense;
class KerasModel;
}
// Abstract interface for data flowing between layers.  Concrete chunks
// are either 3D image-like tensors (DataChunk2D) or flat 1D vectors
// (DataChunkFlat); get_data_dim() tells callers which accessor is valid.
class keras::DataChunk {
public:
  virtual ~DataChunk() {}
  // Dimensionality of the payload: 3 for image data, 1 for flat data.
  virtual size_t get_data_dim(void) const { return 0; }
  // Accessors throw (a const char*) unless overridden by the matching subclass.
  virtual std::vector<float> const & get_1d() const { throw "not implemented"; };
  virtual std::vector<std::vector<std::vector<float> > > const & get_3d() const { throw "not implemented"; };
  virtual void set_data(std::vector<std::vector<std::vector<float> > > const &) {};
  virtual void set_data(std::vector<float> const &) {};
  virtual void read_from_file(const std::string &fname) {};
  virtual void show_name() = 0;   // print a short type/size description
  virtual void show_values() = 0; // dump the full payload to stdout
};
// 3D (depth x rows x cols) data chunk used for image-like tensors.
class keras::DataChunk2D : public keras::DataChunk {
public:
  std::vector< std::vector< std::vector<float> > > const & get_3d() const { return data; };
  virtual void set_data(std::vector<std::vector<std::vector<float> > > const & d) { data = d; };
  size_t get_data_dim(void) const { return 3; }

  // Prints "DataChunk2D DxRxC"; assumes non-empty data.
  void show_name() {
    std::cout << "DataChunk2D " << data.size() << "x" << data[0].size() << "x" << data[0][0].size() << std::endl;
  }

  // Dumps every value, one row per line, grouped by depth slice.
  void show_values() {
    std::cout << "DataChunk2D values:" << std::endl;
    for(size_t i = 0; i < data.size(); ++i) {
      std::cout << "Kernel " << i << std::endl;
      for(size_t j = 0; j < data[0].size(); ++j) {
        for(size_t k = 0; k < data[0][0].size(); ++k) {
          std::cout << data[i][j][k] << " ";
        }
        std::cout << std::endl;
      }
    }
  }

  // Loads "depth rows cols" header plus bracketed rows (see keras_model.cc).
  void read_from_file(const std::string &fname);

  std::vector<std::vector<std::vector<float> > > data; // depth, rows, cols
  int m_depth; // dimensions as read from the input file header
  int m_rows;
  int m_cols;
};
// Flat 1D data chunk, produced by Flatten and consumed by Dense layers
// and 1D activations.
class keras::DataChunkFlat : public keras::DataChunk {
public:
  DataChunkFlat(size_t size) : f(size) { }
  DataChunkFlat(size_t size, float init) : f(size, init) { }
  DataChunkFlat(void) { }

  std::vector<float> f; // the payload
  std::vector<float> & get_1d_rw() { return f; } // writable access for layers
  std::vector<float> const & get_1d() const { return f; }
  void set_data(std::vector<float> const & d) { f = d; };
  size_t get_data_dim(void) const { return 1; }

  void show_name() {
    std::cout << "DataChunkFlat " << f.size() << std::endl;
  }
  void show_values() {
    std::cout << "DataChunkFlat values:" << std::endl;
    for(size_t i = 0; i < f.size(); ++i) std::cout << f[i] << " ";
    std::cout << std::endl;
  }
  void read_from_file(const std::string &fname) {}; // flat chunks are built in memory only
};
// Abstract base for all network layers.  Each layer parses its own
// weights from the dump stream and transforms a DataChunk on demand.
class keras::Layer {
public:
  // Parses this layer's parameters from the dumped-network stream.
  virtual void load_weights(std::ifstream &fin) = 0;
  // Transforms the input chunk; returns a newly allocated chunk the
  // caller must delete.
  virtual keras::DataChunk* compute_output(keras::DataChunk*) = 0;

  Layer(std::string name) : m_name(name) {}
  virtual ~Layer() {}

  // Introspection helpers; layers that cannot answer return 0.
  virtual unsigned int get_input_rows() const = 0;
  virtual unsigned int get_input_cols() const = 0;
  virtual unsigned int get_output_units() const = 0;

  std::string get_name() { return m_name; }
  std::string m_name; // layer type name, e.g. "Dense"
};
// Flattens a 3D chunk into a 1D vector; has no weights of its own.
class keras::LayerFlatten : public Layer {
public:
  LayerFlatten() : Layer("Flatten") {}
  void load_weights(std::ifstream &fin) {}; // nothing to load
  keras::DataChunk* compute_output(keras::DataChunk*);

  virtual unsigned int get_input_rows() const { return 0; } // look for the value in the preceding layer
  virtual unsigned int get_input_cols() const { return 0; } // same as for rows
  virtual unsigned int get_output_units() const { return 0; }
};
// 2D max pooling over non-overlapping m_pool_x x m_pool_y windows.
class keras::LayerMaxPooling : public Layer {
public:
  LayerMaxPooling() : Layer("MaxPooling2D") {};
  void load_weights(std::ifstream &fin);
  keras::DataChunk* compute_output(keras::DataChunk*);

  virtual unsigned int get_input_rows() const { return 0; } // look for the value in the preceding layer
  virtual unsigned int get_input_cols() const { return 0; } // same as for rows
  virtual unsigned int get_output_units() const { return 0; }

  int m_pool_x; // pool window height
  int m_pool_y; // pool window width
};
// Element-wise activation layer ("relu", "softmax", "sigmoid", "tanh").
class keras::LayerActivation : public Layer {
public:
  LayerActivation() : Layer("Activation") {}
  void load_weights(std::ifstream &fin);
  keras::DataChunk* compute_output(keras::DataChunk*);

  virtual unsigned int get_input_rows() const { return 0; } // look for the value in the preceding layer
  virtual unsigned int get_input_cols() const { return 0; } // same as for rows
  virtual unsigned int get_output_units() const { return 0; }

  std::string m_activation_type; // activation name as read from the dump
};
// 2D convolution layer; kernels are stored as [kernel][depth][row][col].
class keras::LayerConv2D : public Layer {
public:
  LayerConv2D() : Layer("Conv2D") {}
  void load_weights(std::ifstream &fin);
  keras::DataChunk* compute_output(keras::DataChunk*);

  std::vector<std::vector<std::vector<std::vector<float> > > > m_kernels; // kernel, depth, rows, cols
  std::vector<float> m_bias; // kernel

  virtual unsigned int get_input_rows() const { return m_rows; }
  virtual unsigned int get_input_cols() const { return m_cols; }
  virtual unsigned int get_output_units() const { return m_kernels_cnt; }

  std::string m_border_mode; // "valid" or "same"
  int m_kernels_cnt; // number of kernels (output depth)
  int m_depth;       // input depth
  // NOTE(review): load_weights reads these as kernel rows/cols, yet
  // get_input_rows/cols return them as input dimensions -- verify intent.
  int m_rows;
  int m_cols;
};
// Fully connected layer: m_weights[input][neuron] plus a per-neuron bias.
class keras::LayerDense : public Layer {
public:
  LayerDense() : Layer("Dense") {}
  void load_weights(std::ifstream &fin);
  keras::DataChunk* compute_output(keras::DataChunk*);

  std::vector<std::vector<float> > m_weights; //input, neuron
  std::vector<float> m_bias; // neuron

  virtual unsigned int get_input_rows() const { return 1; } // flat, just one row
  virtual unsigned int get_input_cols() const { return m_input_cnt; }
  virtual unsigned int get_output_units() const { return m_neurons; }

  int m_input_cnt; // number of inputs
  int m_neurons;   // number of neurons (outputs)
};
// Owns the ordered list of layers parsed from a dumped-network file and
// runs forward passes over DataChunk inputs.
class keras::KerasModel {
public:
  // Loads the model from the dump file; verbose enables progress logging.
  KerasModel(const std::string &input_fname, bool verbose);
  ~KerasModel(); // deletes all owned layers
  // Forward pass: returns the flat output vector of the final layer.
  std::vector<float> compute_output(keras::DataChunk *dc);

  unsigned int get_input_rows() const { return m_layers.front()->get_input_rows(); }
  unsigned int get_input_cols() const { return m_layers.front()->get_input_cols(); }
  int get_output_length() const;

private:
  void load_weights(const std::string &input_fname);
  int m_layers_cnt; // number of layers
  std::vector<Layer *> m_layers; // container with layers
  bool m_verbose;
};
#endif
================================================
FILE: test_compare.py
================================================
import numpy as np
np.random.seed(1336)
import json
import argparse
import sys
def get_numbers_from_file(fname):
    """Read all whitespace-separated floats from *fname* as a 1D array.

    Fix vs. original: the accumulator was rebound (``r = [...]``) on
    every line, so a multi-line file silently kept only the numbers from
    its last line.  Now every line's numbers are accumulated.
    """
    numbers = []
    with open(fname) as fin:
        for line in fin:
            numbers.extend(float(t) for t in line.split())
    return np.array(numbers)
# Allow full (untruncated) numpy array printing for diagnostics.
np.set_printoptions(threshold=np.inf)

parser = argparse.ArgumentParser(description='This is a simple script compare predictions from keras and keras2cpp.')
parser.add_argument('-k', '--keras_response', help="Response from Keras (test_run_cnn.py)", required=True)
parser.add_argument('-c', '--keras2cpp_response', help="Response from Keras2cpp (test_run_cnn.cc)", required=True)
args = parser.parse_args()

# Load both prediction vectors (each file holds one space-separated line).
keras_output = get_numbers_from_file(args.keras_response)
keras2cpp_output = get_numbers_from_file(args.keras2cpp_response)

# NOTE: Python 2 print statements below; run this script with python2.
# The exit status is consumed by test_run.sh.
if len(keras_output) != len(keras2cpp_output):
    print "Different output dimensions"
    sys.exit(1)

# Compare with an absolute L1 tolerance.
sub = np.sum(np.abs(keras_output - keras2cpp_output))
if sub < 1e-6:
    print 'Test: [DONE]'
    print 'Dump is working correctly.'
    sys.exit(0)
else:
    print 'Test: [ERROR]'
    print 'The output from Keras and Keras2cpp are different.'
    print 'Difference value:', sub
    sys.exit(1)
================================================
FILE: test_run.sh
================================================
#!/bin/bash
# End-to-end test: dump the example network, run the same random sample
# through Keras (python) and keras2cpp (C++), then compare the outputs.
# Fix vs. original: typo "ouput" -> "output" in the step-2 message.
echo 'Test for CNN dumping'

# Parameters
INPUT_ARCH="example/my_nn_arch.json"
INPUT_WEIGHTS="example/my_nn_weights.h5"
DUMPED_CNN="test_cnn.dumped"
DATA_SAMPLE="test_random_input.dat"
KERAS_OUTPUT="test_keras_output.dat"
KERAS2CPP_OUTPUT="test_keras2cpp_output.dat"
TEST_BIN="test_bin"

echo 'Test, step 1'
echo 'Dump network into plain text file' $DUMPED_CNN
python dump_to_simple_cpp.py -a $INPUT_ARCH -w $INPUT_WEIGHTS -o $DUMPED_CNN

echo 'Test, step 2'
echo 'Generate random input sample and save in' $DATA_SAMPLE
echo 'Compute output on generated sample with Keras and store predictions for comparison'
python test_run_cnn.py -a $INPUT_ARCH -w $INPUT_WEIGHTS -d $DATA_SAMPLE -o $KERAS_OUTPUT

echo 'Test, step 3'
echo 'Compile keras2cpp code'
g++ -std=c++11 test_run_cnn.cc keras_model.cc -o $TEST_BIN
echo 'Run predictions with dumped network and random data sample from step 2'
./$TEST_BIN $DUMPED_CNN $DATA_SAMPLE $KERAS2CPP_OUTPUT

echo 'Test, step 4'
echo 'Compare Keras and Keras2cpp outputs'
python test_compare.py --keras_response $KERAS_OUTPUT --keras2cpp_response $KERAS2CPP_OUTPUT

# Clean
echo 'Cleaning after test'
rm $DUMPED_CNN
rm $DATA_SAMPLE
rm $KERAS_OUTPUT
rm $KERAS2CPP_OUTPUT
rm $TEST_BIN
# used only if you log hidden layers output in test_run_cnn.py file
#rm test_layer_*.output
================================================
FILE: test_run_cnn.cc
================================================
#include "keras_model.h"
#include <iostream>
using namespace std;
using namespace keras;
int main(int argc, char *argv[]) {
if(argc != 4) {
cout << "Wrong input, going to exit." << endl;
cout << "There should be arguments: dumped_cnn_file input_sample output_file." << endl;
return -1;
}
string dumped_cnn = argv[1];
string input_data = argv[2];
string response_file = argv[3];
cout << "Testing network from " << dumped_cnn << " on data from " << input_data << endl;
// Input data sample
DataChunk *sample = new DataChunk2D();
sample->read_from_file(input_data);
// Construct network
KerasModel m(dumped_cnn, false);
std::vector<float> response = m.compute_output(sample);
// clean sample
delete sample;
// save response into file
ofstream fout(response_file);
for(unsigned int i = 0; i < response.size(); i++) {
fout << response[i] << " ";
}
fout.close();
return 0;
}
================================================
FILE: test_run_cnn.py
================================================
import numpy as np
np.random.seed(1336)
from keras.models import Sequential, model_from_json
import json
import argparse
from keras import backend as K
np.set_printoptions(threshold=np.inf)
parser = argparse.ArgumentParser(description='This is a simple script to run Keras model from saved architecture and weights.\
This script also creates a input data sample for c++ run_net.')
parser.add_argument('-a', '--architecture', help="JSON with model architecture", required=True)
parser.add_argument('-w', '--weights', help="Model weights in HDF5 format", required=True)
parser.add_argument('-d', '--data_sample', help="File where to write random data sample", required=True)
parser.add_argument('-o', '--output', help="File where to write network outpu", required=True)
parser.add_argument('-v', '--verbose', help="Verbose", required=False)
args = parser.parse_args()
print 'Verbose', args.verbose
print 'Read architecture from', args.architecture
print 'Read weights from', args.weights
arch = open(args.architecture).read()
model = model_from_json(arch)
model.load_weights(args.weights)
model.compile(loss='categorical_crossentropy', optimizer='adadelta')
arch = json.loads(arch)
print 'There are', str(len(model.layers)), 'layers in your network, is it good?'
print 'I think yes :)'
first_layer = arch["config"][0]["config"]
input_shape = first_layer["batch_input_shape"]
print "Input shape of your network", input_shape
print "Generate random input for testing purposes"
random_input = np.random.rand(1, input_shape[1], input_shape[2], input_shape[3])
print "Random input shape", random_input.shape
response = model.predict(random_input)[0]
if args.verbose:
print '-'*50
print 'Prediction from Keras'
print response
print '-'*50
# save response to the file
with open(args.output, "w") as fin:
fin.write(' '.join([str(r) for r in response]))
# store one sample in text file
# this code is working for input_shape[1] == 1
if input_shape[1] != 1:
print '-'*50
print 'Sorry but below code can be a buggy for image depth > 1 !!!'
print '-'*50
# save random data sample into file
with open(args.data_sample, "w") as fin:
print "Save to", args.data_sample, "sample shape", str(input_shape[1]) + " " + str(input_shape[2]) + " " + str(input_shape[3])
fin.write(str(input_shape[1]) + " " + str(input_shape[2]) + " " + str(input_shape[3]) + "\n")
a = random_input[0,0]
for b in a:
fin.write(str(b)+'\n')
# Get layers output (for debuging)
'''
for l in xrange(len(model.layers)):
with open('test_layer_' + str(l) + '.output', 'w') as fout:
get_layer_output = K.function([model.layers[0].input, K.learning_phase()],
[model.layers[l].output])
layer_output = get_layer_output([random_input, 0])
print 'Layer', l, layer_output[0].shape
if l > 10:
print layer_output
fout.write(str(layer_output[0].shape) + '\n')
fout.write(str(layer_output) + '\n')
'''
#print 'input?'
#get_layer_output = K.function([model.layers[0].input, K.learning_phase()],
# [model.layers[0].input])
#layer_output = get_layer_output([random_input, 0])
#print layer_output
gitextract_rvc94csq/ ├── LICENSE ├── README.md ├── dump_to_simple_cpp.py ├── example/ │ ├── dumped.nnet │ ├── mnist_cnn_one_iteration.py │ ├── my_nn_arch.json │ └── my_nn_weights.h5 ├── example_main.cc ├── keras_model.cc ├── keras_model.h ├── test_compare.py ├── test_run.sh ├── test_run_cnn.cc └── test_run_cnn.py
SYMBOL INDEX (21 symbols across 4 files)
FILE: example_main.cc
function main (line 17) | int main() {
FILE: keras_model.h
function namespace (line 9) | namespace keras
function class (line 30) | class keras::DataChunk {
function virtual (line 47) | virtual void set_data(std::vector<std::vector<std::vector<float> > > con...
function get_data_dim (line 48) | size_t get_data_dim(void) const { return 3; }
function show_name (line 50) | void show_name() {
function show_values (line 54) | void show_values() {
function set_data (line 87) | void set_data(std::vector<float> const & d) { f = d; }
function get_data_dim (line 88) | size_t get_data_dim(void) const { return 1; }
function show_name (line 90) | void show_name() {
function show_values (line 93) | void show_values() {
function read_from_file (line 98) | void read_from_file(const std::string &fname) {}
function virtual (line 108) | virtual ~Layer() {}
function get_input_rows (line 110) | virtual unsigned int get_input_rows() const = 0;
function class (line 131) | class keras::LayerMaxPooling : public Layer {
function class (line 147) | class keras::LayerActivation : public Layer {
function class (line 160) | class keras::LayerConv2D : public Layer {
function class (line 180) | class keras::LayerDense : public Layer {
function class (line 197) | class keras::KerasModel {
FILE: test_compare.py
function get_numbers_from_file (line 7) | def get_numbers_from_file(fname):
FILE: test_run_cnn.cc
function main (line 10) | int main(int argc, char *argv[]) {
Condensed preview — 14 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (104K chars).
[
{
"path": "LICENSE",
"chars": 1072,
"preview": "The MIT License (MIT)\n\nCopyright (c) 2016 Piotr\n\nPermission is hereby granted, free of charge, to any person obtaining a"
},
{
"path": "README.md",
"chars": 1697,
"preview": "# keras2cpp\n\nThis is a bunch of code to port Keras neural network model into pure C++. Neural network weights and archit"
},
{
"path": "dump_to_simple_cpp.py",
"chars": 3089,
"preview": "import numpy as np\nnp.random.seed(1337)\nfrom keras.models import Sequential, model_from_json\nimport json\nimport argparse"
},
{
"path": "example/dumped.nnet",
"chars": 61870,
"preview": "layers 12\nlayer 0 Convolution2D\n4 1 3 3 same\n[ 0.13240439 -0.39469701 -0.05886053]\n[-0.16273086 0.27228925 0.70811206]"
},
{
"path": "example/mnist_cnn_one_iteration.py",
"chars": 2683,
"preview": "'''\nSave CNN network and one sample of train data.\n\nRun one iteration of training of convnet on the MNIST dataset.\n'''\n\n"
},
{
"path": "example/my_nn_arch.json",
"chars": 2299,
"preview": "{\"class_name\": \"Sequential\", \"config\": [{\"class_name\": \"Convolution2D\", \"config\": {\"b_regularizer\": null, \"W_constraint\""
},
{
"path": "example_main.cc",
"chars": 820,
"preview": "#include \"keras_model.h\"\n\n#include <iostream>\n\nusing namespace std;\nusing namespace keras;\n\n// Step 1\n// Dump keras mode"
},
{
"path": "keras_model.cc",
"chars": 13914,
"preview": "#include \"keras_model.h\"\n\n#include <iostream>\n#include <fstream>\n#include <algorithm>\n#include <math.h>\nusing namespace "
},
{
"path": "keras_model.h",
"chars": 6868,
"preview": "#ifndef KERAS_MODEL__H\n#define KERAS_MODEL__H\n\n#include <string>\n#include <vector>\n#include <fstream>\n#include <iostream"
},
{
"path": "test_compare.py",
"chars": 1211,
"preview": "import numpy as np\nnp.random.seed(1336)\nimport json\nimport argparse\nimport sys\n\ndef get_numbers_from_file(fname):\n r "
},
{
"path": "test_run.sh",
"chars": 1322,
"preview": "#!/bin/bash\n\necho 'Test for CNN dumping'\n\n# Parameters\nINPUT_ARCH=\"example/my_nn_arch.json\"\nINPUT_WEIGHTS=\"example/my_nn"
},
{
"path": "test_run_cnn.cc",
"chars": 938,
"preview": "#include \"keras_model.h\"\n\n#include <iostream>\n\nusing namespace std;\nusing namespace keras;\n\n\n\nint main(int argc, char *a"
},
{
"path": "test_run_cnn.py",
"chars": 3274,
"preview": "import numpy as np\nnp.random.seed(1336)\nfrom keras.models import Sequential, model_from_json\nimport json\nimport argparse"
}
]
// ... and 1 more files (download for full content)
About this extraction
This page contains the full source code of the pplonski/keras2cpp GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 14 files (98.7 KB), approximately 45.0k tokens, and a symbol index with 21 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.