Showing preview only (1,032K chars total). Download the full file or copy to clipboard to get everything.
Repository: axelBarroso/Key.Net
Branch: master
Commit: 24808fb5bd73
Files: 29
Total size: 981.0 KB
Directory structure:
gitextract__rv4pmr5/
├── HSequences_bench/
│ ├── HPatches_images.txt
│ ├── splits.json
│ └── tools/
│ ├── HSequences_reader.py
│ ├── aux_tools.py
│ ├── geometry_tools.py
│ ├── matching_tools.py
│ ├── opencv_matcher.py
│ └── repeatability_tools.py
├── LICENSE
├── README.md
├── extract_multiscale_features.py
├── hsequeces_bench.py
├── keyNet/
│ ├── aux/
│ │ ├── desc_aux_function.py
│ │ └── tools.py
│ ├── datasets/
│ │ ├── dataset_utils.py
│ │ └── tf_dataset.py
│ ├── loss/
│ │ └── score_loss_function.py
│ ├── model/
│ │ ├── hardnet_pytorch.py
│ │ └── keynet_architecture.py
│ └── pretrained_nets/
│ ├── HardNet++.pth
│ └── KeyNet_default/
│ ├── checkpoint
│ ├── model--1651.data-00000-of-00001
│ ├── model--1651.index
│ └── model--1651.meta
├── test_im/
│ ├── KeyNet_default/
│ │ └── test_im/
│ │ ├── test_im.ppm.dsc.npy
│ │ └── test_im.ppm.kpt.npy
│ ├── image.txt
│ └── test_im.ppm
└── train_network.py
================================================
FILE CONTENTS
================================================
================================================
FILE: HSequences_bench/HPatches_images.txt
================================================
hpatches-sequences-release/v_churchill/1.ppm
hpatches-sequences-release/v_churchill/2.ppm
hpatches-sequences-release/v_churchill/3.ppm
hpatches-sequences-release/v_churchill/4.ppm
hpatches-sequences-release/v_churchill/5.ppm
hpatches-sequences-release/v_churchill/6.ppm
hpatches-sequences-release/v_dogman/1.ppm
hpatches-sequences-release/v_dogman/2.ppm
hpatches-sequences-release/v_dogman/3.ppm
hpatches-sequences-release/v_dogman/4.ppm
hpatches-sequences-release/v_dogman/5.ppm
hpatches-sequences-release/v_dogman/6.ppm
hpatches-sequences-release/v_maskedman/1.ppm
hpatches-sequences-release/v_maskedman/2.ppm
hpatches-sequences-release/v_maskedman/3.ppm
hpatches-sequences-release/v_maskedman/4.ppm
hpatches-sequences-release/v_maskedman/5.ppm
hpatches-sequences-release/v_maskedman/6.ppm
hpatches-sequences-release/v_wall/1.ppm
hpatches-sequences-release/v_wall/2.ppm
hpatches-sequences-release/v_wall/3.ppm
hpatches-sequences-release/v_wall/4.ppm
hpatches-sequences-release/v_wall/5.ppm
hpatches-sequences-release/v_wall/6.ppm
hpatches-sequences-release/v_pomegranate/1.ppm
hpatches-sequences-release/v_pomegranate/2.ppm
hpatches-sequences-release/v_pomegranate/3.ppm
hpatches-sequences-release/v_pomegranate/4.ppm
hpatches-sequences-release/v_pomegranate/5.ppm
hpatches-sequences-release/v_pomegranate/6.ppm
hpatches-sequences-release/v_samples/1.ppm
hpatches-sequences-release/v_samples/2.ppm
hpatches-sequences-release/v_samples/3.ppm
hpatches-sequences-release/v_samples/4.ppm
hpatches-sequences-release/v_samples/5.ppm
hpatches-sequences-release/v_samples/6.ppm
hpatches-sequences-release/v_beyus/1.ppm
hpatches-sequences-release/v_beyus/2.ppm
hpatches-sequences-release/v_beyus/3.ppm
hpatches-sequences-release/v_beyus/4.ppm
hpatches-sequences-release/v_beyus/5.ppm
hpatches-sequences-release/v_beyus/6.ppm
hpatches-sequences-release/v_wormhole/1.ppm
hpatches-sequences-release/v_wormhole/2.ppm
hpatches-sequences-release/v_wormhole/3.ppm
hpatches-sequences-release/v_wormhole/4.ppm
hpatches-sequences-release/v_wormhole/5.ppm
hpatches-sequences-release/v_wormhole/6.ppm
hpatches-sequences-release/v_bird/1.ppm
hpatches-sequences-release/v_bird/2.ppm
hpatches-sequences-release/v_bird/3.ppm
hpatches-sequences-release/v_bird/4.ppm
hpatches-sequences-release/v_bird/5.ppm
hpatches-sequences-release/v_bird/6.ppm
hpatches-sequences-release/v_weapons/1.ppm
hpatches-sequences-release/v_weapons/2.ppm
hpatches-sequences-release/v_weapons/3.ppm
hpatches-sequences-release/v_weapons/4.ppm
hpatches-sequences-release/v_weapons/5.ppm
hpatches-sequences-release/v_weapons/6.ppm
hpatches-sequences-release/v_vitro/1.ppm
hpatches-sequences-release/v_vitro/2.ppm
hpatches-sequences-release/v_vitro/3.ppm
hpatches-sequences-release/v_vitro/4.ppm
hpatches-sequences-release/v_vitro/5.ppm
hpatches-sequences-release/v_vitro/6.ppm
hpatches-sequences-release/v_posters/1.ppm
hpatches-sequences-release/v_posters/2.ppm
hpatches-sequences-release/v_posters/3.ppm
hpatches-sequences-release/v_posters/4.ppm
hpatches-sequences-release/v_posters/5.ppm
hpatches-sequences-release/v_posters/6.ppm
hpatches-sequences-release/v_cartooncity/1.ppm
hpatches-sequences-release/v_cartooncity/2.ppm
hpatches-sequences-release/v_cartooncity/3.ppm
hpatches-sequences-release/v_cartooncity/4.ppm
hpatches-sequences-release/v_cartooncity/5.ppm
hpatches-sequences-release/v_cartooncity/6.ppm
hpatches-sequences-release/v_birdwoman/1.ppm
hpatches-sequences-release/v_birdwoman/2.ppm
hpatches-sequences-release/v_birdwoman/3.ppm
hpatches-sequences-release/v_birdwoman/4.ppm
hpatches-sequences-release/v_birdwoman/5.ppm
hpatches-sequences-release/v_birdwoman/6.ppm
hpatches-sequences-release/v_bark/1.ppm
hpatches-sequences-release/v_bark/2.ppm
hpatches-sequences-release/v_bark/3.ppm
hpatches-sequences-release/v_bark/4.ppm
hpatches-sequences-release/v_bark/5.ppm
hpatches-sequences-release/v_bark/6.ppm
hpatches-sequences-release/v_colors/1.ppm
hpatches-sequences-release/v_colors/2.ppm
hpatches-sequences-release/v_colors/3.ppm
hpatches-sequences-release/v_colors/4.ppm
hpatches-sequences-release/v_colors/5.ppm
hpatches-sequences-release/v_colors/6.ppm
hpatches-sequences-release/v_strand/1.ppm
hpatches-sequences-release/v_strand/2.ppm
hpatches-sequences-release/v_strand/3.ppm
hpatches-sequences-release/v_strand/4.ppm
hpatches-sequences-release/v_strand/5.ppm
hpatches-sequences-release/v_strand/6.ppm
hpatches-sequences-release/v_eastsouth/1.ppm
hpatches-sequences-release/v_eastsouth/2.ppm
hpatches-sequences-release/v_eastsouth/3.ppm
hpatches-sequences-release/v_eastsouth/4.ppm
hpatches-sequences-release/v_eastsouth/5.ppm
hpatches-sequences-release/v_eastsouth/6.ppm
hpatches-sequences-release/v_coffeehouse/1.ppm
hpatches-sequences-release/v_coffeehouse/2.ppm
hpatches-sequences-release/v_coffeehouse/3.ppm
hpatches-sequences-release/v_coffeehouse/4.ppm
hpatches-sequences-release/v_coffeehouse/5.ppm
hpatches-sequences-release/v_coffeehouse/6.ppm
hpatches-sequences-release/v_abstract/1.ppm
hpatches-sequences-release/v_abstract/2.ppm
hpatches-sequences-release/v_abstract/3.ppm
hpatches-sequences-release/v_abstract/4.ppm
hpatches-sequences-release/v_abstract/5.ppm
hpatches-sequences-release/v_abstract/6.ppm
hpatches-sequences-release/v_blueprint/1.ppm
hpatches-sequences-release/v_blueprint/2.ppm
hpatches-sequences-release/v_blueprint/3.ppm
hpatches-sequences-release/v_blueprint/4.ppm
hpatches-sequences-release/v_blueprint/5.ppm
hpatches-sequences-release/v_blueprint/6.ppm
hpatches-sequences-release/v_artisans/1.ppm
hpatches-sequences-release/v_artisans/2.ppm
hpatches-sequences-release/v_artisans/3.ppm
hpatches-sequences-release/v_artisans/4.ppm
hpatches-sequences-release/v_artisans/5.ppm
hpatches-sequences-release/v_artisans/6.ppm
hpatches-sequences-release/v_dirtywall/1.ppm
hpatches-sequences-release/v_dirtywall/2.ppm
hpatches-sequences-release/v_dirtywall/3.ppm
hpatches-sequences-release/v_dirtywall/4.ppm
hpatches-sequences-release/v_dirtywall/5.ppm
hpatches-sequences-release/v_dirtywall/6.ppm
hpatches-sequences-release/v_yard/1.ppm
hpatches-sequences-release/v_yard/2.ppm
hpatches-sequences-release/v_yard/3.ppm
hpatches-sequences-release/v_yard/4.ppm
hpatches-sequences-release/v_yard/5.ppm
hpatches-sequences-release/v_yard/6.ppm
hpatches-sequences-release/v_there/1.ppm
hpatches-sequences-release/v_there/2.ppm
hpatches-sequences-release/v_there/3.ppm
hpatches-sequences-release/v_there/4.ppm
hpatches-sequences-release/v_there/5.ppm
hpatches-sequences-release/v_there/6.ppm
hpatches-sequences-release/v_tabletop/1.ppm
hpatches-sequences-release/v_tabletop/2.ppm
hpatches-sequences-release/v_tabletop/3.ppm
hpatches-sequences-release/v_tabletop/4.ppm
hpatches-sequences-release/v_tabletop/5.ppm
hpatches-sequences-release/v_tabletop/6.ppm
hpatches-sequences-release/v_busstop/1.ppm
hpatches-sequences-release/v_busstop/2.ppm
hpatches-sequences-release/v_busstop/3.ppm
hpatches-sequences-release/v_busstop/4.ppm
hpatches-sequences-release/v_busstop/5.ppm
hpatches-sequences-release/v_busstop/6.ppm
hpatches-sequences-release/v_adam/1.ppm
hpatches-sequences-release/v_adam/2.ppm
hpatches-sequences-release/v_adam/3.ppm
hpatches-sequences-release/v_adam/4.ppm
hpatches-sequences-release/v_adam/5.ppm
hpatches-sequences-release/v_adam/6.ppm
hpatches-sequences-release/v_fest/1.ppm
hpatches-sequences-release/v_fest/2.ppm
hpatches-sequences-release/v_fest/3.ppm
hpatches-sequences-release/v_fest/4.ppm
hpatches-sequences-release/v_fest/5.ppm
hpatches-sequences-release/v_fest/6.ppm
hpatches-sequences-release/v_azzola/1.ppm
hpatches-sequences-release/v_azzola/2.ppm
hpatches-sequences-release/v_azzola/3.ppm
hpatches-sequences-release/v_azzola/4.ppm
hpatches-sequences-release/v_azzola/5.ppm
hpatches-sequences-release/v_azzola/6.ppm
hpatches-sequences-release/v_boat/1.ppm
hpatches-sequences-release/v_boat/2.ppm
hpatches-sequences-release/v_boat/3.ppm
hpatches-sequences-release/v_boat/4.ppm
hpatches-sequences-release/v_boat/5.ppm
hpatches-sequences-release/v_boat/6.ppm
hpatches-sequences-release/v_war/1.ppm
hpatches-sequences-release/v_war/2.ppm
hpatches-sequences-release/v_war/3.ppm
hpatches-sequences-release/v_war/4.ppm
hpatches-sequences-release/v_war/5.ppm
hpatches-sequences-release/v_war/6.ppm
hpatches-sequences-release/v_talent/1.ppm
hpatches-sequences-release/v_talent/2.ppm
hpatches-sequences-release/v_talent/3.ppm
hpatches-sequences-release/v_talent/4.ppm
hpatches-sequences-release/v_talent/5.ppm
hpatches-sequences-release/v_talent/6.ppm
hpatches-sequences-release/v_charing/1.ppm
hpatches-sequences-release/v_charing/2.ppm
hpatches-sequences-release/v_charing/3.ppm
hpatches-sequences-release/v_charing/4.ppm
hpatches-sequences-release/v_charing/5.ppm
hpatches-sequences-release/v_charing/6.ppm
hpatches-sequences-release/v_man/1.ppm
hpatches-sequences-release/v_man/2.ppm
hpatches-sequences-release/v_man/3.ppm
hpatches-sequences-release/v_man/4.ppm
hpatches-sequences-release/v_man/5.ppm
hpatches-sequences-release/v_man/6.ppm
hpatches-sequences-release/v_laptop/1.ppm
hpatches-sequences-release/v_laptop/2.ppm
hpatches-sequences-release/v_laptop/3.ppm
hpatches-sequences-release/v_laptop/4.ppm
hpatches-sequences-release/v_laptop/5.ppm
hpatches-sequences-release/v_laptop/6.ppm
hpatches-sequences-release/v_bees/1.ppm
hpatches-sequences-release/v_bees/2.ppm
hpatches-sequences-release/v_bees/3.ppm
hpatches-sequences-release/v_bees/4.ppm
hpatches-sequences-release/v_bees/5.ppm
hpatches-sequences-release/v_bees/6.ppm
hpatches-sequences-release/v_grace/1.ppm
hpatches-sequences-release/v_grace/2.ppm
hpatches-sequences-release/v_grace/3.ppm
hpatches-sequences-release/v_grace/4.ppm
hpatches-sequences-release/v_grace/5.ppm
hpatches-sequences-release/v_grace/6.ppm
hpatches-sequences-release/v_graffiti/1.ppm
hpatches-sequences-release/v_graffiti/2.ppm
hpatches-sequences-release/v_graffiti/3.ppm
hpatches-sequences-release/v_graffiti/4.ppm
hpatches-sequences-release/v_graffiti/5.ppm
hpatches-sequences-release/v_graffiti/6.ppm
hpatches-sequences-release/v_wounded/1.ppm
hpatches-sequences-release/v_wounded/2.ppm
hpatches-sequences-release/v_wounded/3.ppm
hpatches-sequences-release/v_wounded/4.ppm
hpatches-sequences-release/v_wounded/5.ppm
hpatches-sequences-release/v_wounded/6.ppm
hpatches-sequences-release/v_feast/1.ppm
hpatches-sequences-release/v_feast/2.ppm
hpatches-sequences-release/v_feast/3.ppm
hpatches-sequences-release/v_feast/4.ppm
hpatches-sequences-release/v_feast/5.ppm
hpatches-sequences-release/v_feast/6.ppm
hpatches-sequences-release/v_apprentices/1.ppm
hpatches-sequences-release/v_apprentices/2.ppm
hpatches-sequences-release/v_apprentices/3.ppm
hpatches-sequences-release/v_apprentices/4.ppm
hpatches-sequences-release/v_apprentices/5.ppm
hpatches-sequences-release/v_apprentices/6.ppm
hpatches-sequences-release/v_home/1.ppm
hpatches-sequences-release/v_home/2.ppm
hpatches-sequences-release/v_home/3.ppm
hpatches-sequences-release/v_home/4.ppm
hpatches-sequences-release/v_home/5.ppm
hpatches-sequences-release/v_home/6.ppm
hpatches-sequences-release/v_astronautis/1.ppm
hpatches-sequences-release/v_astronautis/2.ppm
hpatches-sequences-release/v_astronautis/3.ppm
hpatches-sequences-release/v_astronautis/4.ppm
hpatches-sequences-release/v_astronautis/5.ppm
hpatches-sequences-release/v_astronautis/6.ppm
hpatches-sequences-release/v_soldiers/1.ppm
hpatches-sequences-release/v_soldiers/2.ppm
hpatches-sequences-release/v_soldiers/3.ppm
hpatches-sequences-release/v_soldiers/4.ppm
hpatches-sequences-release/v_soldiers/5.ppm
hpatches-sequences-release/v_soldiers/6.ppm
hpatches-sequences-release/v_machines/1.ppm
hpatches-sequences-release/v_machines/2.ppm
hpatches-sequences-release/v_machines/3.ppm
hpatches-sequences-release/v_machines/4.ppm
hpatches-sequences-release/v_machines/5.ppm
hpatches-sequences-release/v_machines/6.ppm
hpatches-sequences-release/v_gardens/1.ppm
hpatches-sequences-release/v_gardens/2.ppm
hpatches-sequences-release/v_gardens/3.ppm
hpatches-sequences-release/v_gardens/4.ppm
hpatches-sequences-release/v_gardens/5.ppm
hpatches-sequences-release/v_gardens/6.ppm
hpatches-sequences-release/v_bricks/1.ppm
hpatches-sequences-release/v_bricks/2.ppm
hpatches-sequences-release/v_bricks/3.ppm
hpatches-sequences-release/v_bricks/4.ppm
hpatches-sequences-release/v_bricks/5.ppm
hpatches-sequences-release/v_bricks/6.ppm
hpatches-sequences-release/v_tempera/1.ppm
hpatches-sequences-release/v_tempera/2.ppm
hpatches-sequences-release/v_tempera/3.ppm
hpatches-sequences-release/v_tempera/4.ppm
hpatches-sequences-release/v_tempera/5.ppm
hpatches-sequences-release/v_tempera/6.ppm
hpatches-sequences-release/v_bip/1.ppm
hpatches-sequences-release/v_bip/2.ppm
hpatches-sequences-release/v_bip/3.ppm
hpatches-sequences-release/v_bip/4.ppm
hpatches-sequences-release/v_bip/5.ppm
hpatches-sequences-release/v_bip/6.ppm
hpatches-sequences-release/v_courses/1.ppm
hpatches-sequences-release/v_courses/2.ppm
hpatches-sequences-release/v_courses/3.ppm
hpatches-sequences-release/v_courses/4.ppm
hpatches-sequences-release/v_courses/5.ppm
hpatches-sequences-release/v_courses/6.ppm
hpatches-sequences-release/v_sunseason/1.ppm
hpatches-sequences-release/v_sunseason/2.ppm
hpatches-sequences-release/v_sunseason/3.ppm
hpatches-sequences-release/v_sunseason/4.ppm
hpatches-sequences-release/v_sunseason/5.ppm
hpatches-sequences-release/v_sunseason/6.ppm
hpatches-sequences-release/v_yuri/1.ppm
hpatches-sequences-release/v_yuri/2.ppm
hpatches-sequences-release/v_yuri/3.ppm
hpatches-sequences-release/v_yuri/4.ppm
hpatches-sequences-release/v_yuri/5.ppm
hpatches-sequences-release/v_yuri/6.ppm
hpatches-sequences-release/v_calder/1.ppm
hpatches-sequences-release/v_calder/2.ppm
hpatches-sequences-release/v_calder/3.ppm
hpatches-sequences-release/v_calder/4.ppm
hpatches-sequences-release/v_calder/5.ppm
hpatches-sequences-release/v_calder/6.ppm
hpatches-sequences-release/v_woman/1.ppm
hpatches-sequences-release/v_woman/2.ppm
hpatches-sequences-release/v_woman/3.ppm
hpatches-sequences-release/v_woman/4.ppm
hpatches-sequences-release/v_woman/5.ppm
hpatches-sequences-release/v_woman/6.ppm
hpatches-sequences-release/v_london/1.ppm
hpatches-sequences-release/v_london/2.ppm
hpatches-sequences-release/v_london/3.ppm
hpatches-sequences-release/v_london/4.ppm
hpatches-sequences-release/v_london/5.ppm
hpatches-sequences-release/v_london/6.ppm
hpatches-sequences-release/v_wapping/1.ppm
hpatches-sequences-release/v_wapping/2.ppm
hpatches-sequences-release/v_wapping/3.ppm
hpatches-sequences-release/v_wapping/4.ppm
hpatches-sequences-release/v_wapping/5.ppm
hpatches-sequences-release/v_wapping/6.ppm
hpatches-sequences-release/v_underground/1.ppm
hpatches-sequences-release/v_underground/2.ppm
hpatches-sequences-release/v_underground/3.ppm
hpatches-sequences-release/v_underground/4.ppm
hpatches-sequences-release/v_underground/5.ppm
hpatches-sequences-release/v_underground/6.ppm
hpatches-sequences-release/v_circus/1.ppm
hpatches-sequences-release/v_circus/2.ppm
hpatches-sequences-release/v_circus/3.ppm
hpatches-sequences-release/v_circus/4.ppm
hpatches-sequences-release/v_circus/5.ppm
hpatches-sequences-release/v_circus/6.ppm
hpatches-sequences-release/i_ajuntament/1.ppm
hpatches-sequences-release/i_ajuntament/2.ppm
hpatches-sequences-release/i_ajuntament/3.ppm
hpatches-sequences-release/i_ajuntament/4.ppm
hpatches-sequences-release/i_ajuntament/5.ppm
hpatches-sequences-release/i_ajuntament/6.ppm
hpatches-sequences-release/i_parking/1.ppm
hpatches-sequences-release/i_parking/2.ppm
hpatches-sequences-release/i_parking/3.ppm
hpatches-sequences-release/i_parking/4.ppm
hpatches-sequences-release/i_parking/5.ppm
hpatches-sequences-release/i_parking/6.ppm
hpatches-sequences-release/i_kions/1.ppm
hpatches-sequences-release/i_kions/2.ppm
hpatches-sequences-release/i_kions/3.ppm
hpatches-sequences-release/i_kions/4.ppm
hpatches-sequences-release/i_kions/5.ppm
hpatches-sequences-release/i_kions/6.ppm
hpatches-sequences-release/i_fog/1.ppm
hpatches-sequences-release/i_fog/2.ppm
hpatches-sequences-release/i_fog/3.ppm
hpatches-sequences-release/i_fog/4.ppm
hpatches-sequences-release/i_fog/5.ppm
hpatches-sequences-release/i_fog/6.ppm
hpatches-sequences-release/i_brooklyn/1.ppm
hpatches-sequences-release/i_brooklyn/2.ppm
hpatches-sequences-release/i_brooklyn/3.ppm
hpatches-sequences-release/i_brooklyn/4.ppm
hpatches-sequences-release/i_brooklyn/5.ppm
hpatches-sequences-release/i_brooklyn/6.ppm
hpatches-sequences-release/i_ski/1.ppm
hpatches-sequences-release/i_ski/2.ppm
hpatches-sequences-release/i_ski/3.ppm
hpatches-sequences-release/i_ski/4.ppm
hpatches-sequences-release/i_ski/5.ppm
hpatches-sequences-release/i_ski/6.ppm
hpatches-sequences-release/i_gonnenberg/1.ppm
hpatches-sequences-release/i_gonnenberg/2.ppm
hpatches-sequences-release/i_gonnenberg/3.ppm
hpatches-sequences-release/i_gonnenberg/4.ppm
hpatches-sequences-release/i_gonnenberg/5.ppm
hpatches-sequences-release/i_gonnenberg/6.ppm
hpatches-sequences-release/i_yellowtent/1.ppm
hpatches-sequences-release/i_yellowtent/2.ppm
hpatches-sequences-release/i_yellowtent/3.ppm
hpatches-sequences-release/i_yellowtent/4.ppm
hpatches-sequences-release/i_yellowtent/5.ppm
hpatches-sequences-release/i_yellowtent/6.ppm
hpatches-sequences-release/i_lionday/1.ppm
hpatches-sequences-release/i_lionday/2.ppm
hpatches-sequences-release/i_lionday/3.ppm
hpatches-sequences-release/i_lionday/4.ppm
hpatches-sequences-release/i_lionday/5.ppm
hpatches-sequences-release/i_lionday/6.ppm
hpatches-sequences-release/i_tools/1.ppm
hpatches-sequences-release/i_tools/2.ppm
hpatches-sequences-release/i_tools/3.ppm
hpatches-sequences-release/i_tools/4.ppm
hpatches-sequences-release/i_tools/5.ppm
hpatches-sequences-release/i_tools/6.ppm
hpatches-sequences-release/i_books/1.ppm
hpatches-sequences-release/i_books/2.ppm
hpatches-sequences-release/i_books/3.ppm
hpatches-sequences-release/i_books/4.ppm
hpatches-sequences-release/i_books/5.ppm
hpatches-sequences-release/i_books/6.ppm
hpatches-sequences-release/i_salon/1.ppm
hpatches-sequences-release/i_salon/2.ppm
hpatches-sequences-release/i_salon/3.ppm
hpatches-sequences-release/i_salon/4.ppm
hpatches-sequences-release/i_salon/5.ppm
hpatches-sequences-release/i_salon/6.ppm
hpatches-sequences-release/i_leuven/1.ppm
hpatches-sequences-release/i_leuven/2.ppm
hpatches-sequences-release/i_leuven/3.ppm
hpatches-sequences-release/i_leuven/4.ppm
hpatches-sequences-release/i_leuven/5.ppm
hpatches-sequences-release/i_leuven/6.ppm
hpatches-sequences-release/i_contruction/1.ppm
hpatches-sequences-release/i_contruction/2.ppm
hpatches-sequences-release/i_contruction/3.ppm
hpatches-sequences-release/i_contruction/4.ppm
hpatches-sequences-release/i_contruction/5.ppm
hpatches-sequences-release/i_contruction/6.ppm
hpatches-sequences-release/i_santuario/1.ppm
hpatches-sequences-release/i_santuario/2.ppm
hpatches-sequences-release/i_santuario/3.ppm
hpatches-sequences-release/i_santuario/4.ppm
hpatches-sequences-release/i_santuario/5.ppm
hpatches-sequences-release/i_santuario/6.ppm
hpatches-sequences-release/i_zion/1.ppm
hpatches-sequences-release/i_zion/2.ppm
hpatches-sequences-release/i_zion/3.ppm
hpatches-sequences-release/i_zion/4.ppm
hpatches-sequences-release/i_zion/5.ppm
hpatches-sequences-release/i_zion/6.ppm
hpatches-sequences-release/i_nijmegen/1.ppm
hpatches-sequences-release/i_nijmegen/2.ppm
hpatches-sequences-release/i_nijmegen/3.ppm
hpatches-sequences-release/i_nijmegen/4.ppm
hpatches-sequences-release/i_nijmegen/5.ppm
hpatches-sequences-release/i_nijmegen/6.ppm
hpatches-sequences-release/i_autannes/1.ppm
hpatches-sequences-release/i_autannes/2.ppm
hpatches-sequences-release/i_autannes/3.ppm
hpatches-sequences-release/i_autannes/4.ppm
hpatches-sequences-release/i_autannes/5.ppm
hpatches-sequences-release/i_autannes/6.ppm
hpatches-sequences-release/i_nuts/1.ppm
hpatches-sequences-release/i_nuts/2.ppm
hpatches-sequences-release/i_nuts/3.ppm
hpatches-sequences-release/i_nuts/4.ppm
hpatches-sequences-release/i_nuts/5.ppm
hpatches-sequences-release/i_nuts/6.ppm
hpatches-sequences-release/i_crownday/1.ppm
hpatches-sequences-release/i_crownday/2.ppm
hpatches-sequences-release/i_crownday/3.ppm
hpatches-sequences-release/i_crownday/4.ppm
hpatches-sequences-release/i_crownday/5.ppm
hpatches-sequences-release/i_crownday/6.ppm
hpatches-sequences-release/i_pinard/1.ppm
hpatches-sequences-release/i_pinard/2.ppm
hpatches-sequences-release/i_pinard/3.ppm
hpatches-sequences-release/i_pinard/4.ppm
hpatches-sequences-release/i_pinard/5.ppm
hpatches-sequences-release/i_pinard/6.ppm
hpatches-sequences-release/i_londonbridge/1.ppm
hpatches-sequences-release/i_londonbridge/2.ppm
hpatches-sequences-release/i_londonbridge/3.ppm
hpatches-sequences-release/i_londonbridge/4.ppm
hpatches-sequences-release/i_londonbridge/5.ppm
hpatches-sequences-release/i_londonbridge/6.ppm
hpatches-sequences-release/i_miniature/1.ppm
hpatches-sequences-release/i_miniature/2.ppm
hpatches-sequences-release/i_miniature/3.ppm
hpatches-sequences-release/i_miniature/4.ppm
hpatches-sequences-release/i_miniature/5.ppm
hpatches-sequences-release/i_miniature/6.ppm
hpatches-sequences-release/i_resort/1.ppm
hpatches-sequences-release/i_resort/2.ppm
hpatches-sequences-release/i_resort/3.ppm
hpatches-sequences-release/i_resort/4.ppm
hpatches-sequences-release/i_resort/5.ppm
hpatches-sequences-release/i_resort/6.ppm
hpatches-sequences-release/i_fenis/1.ppm
hpatches-sequences-release/i_fenis/2.ppm
hpatches-sequences-release/i_fenis/3.ppm
hpatches-sequences-release/i_fenis/4.ppm
hpatches-sequences-release/i_fenis/5.ppm
hpatches-sequences-release/i_fenis/6.ppm
hpatches-sequences-release/i_dome/1.ppm
hpatches-sequences-release/i_dome/2.ppm
hpatches-sequences-release/i_dome/3.ppm
hpatches-sequences-release/i_dome/4.ppm
hpatches-sequences-release/i_dome/5.ppm
hpatches-sequences-release/i_dome/6.ppm
hpatches-sequences-release/i_kurhaus/1.ppm
hpatches-sequences-release/i_kurhaus/2.ppm
hpatches-sequences-release/i_kurhaus/3.ppm
hpatches-sequences-release/i_kurhaus/4.ppm
hpatches-sequences-release/i_kurhaus/5.ppm
hpatches-sequences-release/i_kurhaus/6.ppm
hpatches-sequences-release/i_toy/1.ppm
hpatches-sequences-release/i_toy/2.ppm
hpatches-sequences-release/i_toy/3.ppm
hpatches-sequences-release/i_toy/4.ppm
hpatches-sequences-release/i_toy/5.ppm
hpatches-sequences-release/i_toy/6.ppm
hpatches-sequences-release/i_veggies/1.ppm
hpatches-sequences-release/i_veggies/2.ppm
hpatches-sequences-release/i_veggies/3.ppm
hpatches-sequences-release/i_veggies/4.ppm
hpatches-sequences-release/i_veggies/5.ppm
hpatches-sequences-release/i_veggies/6.ppm
hpatches-sequences-release/i_objects/1.ppm
hpatches-sequences-release/i_objects/2.ppm
hpatches-sequences-release/i_objects/3.ppm
hpatches-sequences-release/i_objects/4.ppm
hpatches-sequences-release/i_objects/5.ppm
hpatches-sequences-release/i_objects/6.ppm
hpatches-sequences-release/i_village/1.ppm
hpatches-sequences-release/i_village/2.ppm
hpatches-sequences-release/i_village/3.ppm
hpatches-sequences-release/i_village/4.ppm
hpatches-sequences-release/i_village/5.ppm
hpatches-sequences-release/i_village/6.ppm
hpatches-sequences-release/i_crownnight/1.ppm
hpatches-sequences-release/i_crownnight/2.ppm
hpatches-sequences-release/i_crownnight/3.ppm
hpatches-sequences-release/i_crownnight/4.ppm
hpatches-sequences-release/i_crownnight/5.ppm
hpatches-sequences-release/i_crownnight/6.ppm
hpatches-sequences-release/i_fruits/1.ppm
hpatches-sequences-release/i_fruits/2.ppm
hpatches-sequences-release/i_fruits/3.ppm
hpatches-sequences-release/i_fruits/4.ppm
hpatches-sequences-release/i_fruits/5.ppm
hpatches-sequences-release/i_fruits/6.ppm
hpatches-sequences-release/i_castle/1.ppm
hpatches-sequences-release/i_castle/2.ppm
hpatches-sequences-release/i_castle/3.ppm
hpatches-sequences-release/i_castle/4.ppm
hpatches-sequences-release/i_castle/5.ppm
hpatches-sequences-release/i_castle/6.ppm
hpatches-sequences-release/i_partyfood/1.ppm
hpatches-sequences-release/i_partyfood/2.ppm
hpatches-sequences-release/i_partyfood/3.ppm
hpatches-sequences-release/i_partyfood/4.ppm
hpatches-sequences-release/i_partyfood/5.ppm
hpatches-sequences-release/i_partyfood/6.ppm
hpatches-sequences-release/i_steps/1.ppm
hpatches-sequences-release/i_steps/2.ppm
hpatches-sequences-release/i_steps/3.ppm
hpatches-sequences-release/i_steps/4.ppm
hpatches-sequences-release/i_steps/5.ppm
hpatches-sequences-release/i_steps/6.ppm
hpatches-sequences-release/i_school/1.ppm
hpatches-sequences-release/i_school/2.ppm
hpatches-sequences-release/i_school/3.ppm
hpatches-sequences-release/i_school/4.ppm
hpatches-sequences-release/i_school/5.ppm
hpatches-sequences-release/i_school/6.ppm
hpatches-sequences-release/i_ktirio/1.ppm
hpatches-sequences-release/i_ktirio/2.ppm
hpatches-sequences-release/i_ktirio/3.ppm
hpatches-sequences-release/i_ktirio/4.ppm
hpatches-sequences-release/i_ktirio/5.ppm
hpatches-sequences-release/i_ktirio/6.ppm
hpatches-sequences-release/i_chestnuts/1.ppm
hpatches-sequences-release/i_chestnuts/2.ppm
hpatches-sequences-release/i_chestnuts/3.ppm
hpatches-sequences-release/i_chestnuts/4.ppm
hpatches-sequences-release/i_chestnuts/5.ppm
hpatches-sequences-release/i_chestnuts/6.ppm
hpatches-sequences-release/i_indiana/1.ppm
hpatches-sequences-release/i_indiana/2.ppm
hpatches-sequences-release/i_indiana/3.ppm
hpatches-sequences-release/i_indiana/4.ppm
hpatches-sequences-release/i_indiana/5.ppm
hpatches-sequences-release/i_indiana/6.ppm
hpatches-sequences-release/i_nescafe/1.ppm
hpatches-sequences-release/i_nescafe/2.ppm
hpatches-sequences-release/i_nescafe/3.ppm
hpatches-sequences-release/i_nescafe/4.ppm
hpatches-sequences-release/i_nescafe/5.ppm
hpatches-sequences-release/i_nescafe/6.ppm
hpatches-sequences-release/i_porta/1.ppm
hpatches-sequences-release/i_porta/2.ppm
hpatches-sequences-release/i_porta/3.ppm
hpatches-sequences-release/i_porta/4.ppm
hpatches-sequences-release/i_porta/5.ppm
hpatches-sequences-release/i_porta/6.ppm
hpatches-sequences-release/i_duda/1.ppm
hpatches-sequences-release/i_duda/2.ppm
hpatches-sequences-release/i_duda/3.ppm
hpatches-sequences-release/i_duda/4.ppm
hpatches-sequences-release/i_duda/5.ppm
hpatches-sequences-release/i_duda/6.ppm
hpatches-sequences-release/i_lionnight/1.ppm
hpatches-sequences-release/i_lionnight/2.ppm
hpatches-sequences-release/i_lionnight/3.ppm
hpatches-sequences-release/i_lionnight/4.ppm
hpatches-sequences-release/i_lionnight/5.ppm
hpatches-sequences-release/i_lionnight/6.ppm
hpatches-sequences-release/i_melon/1.ppm
hpatches-sequences-release/i_melon/2.ppm
hpatches-sequences-release/i_melon/3.ppm
hpatches-sequences-release/i_melon/4.ppm
hpatches-sequences-release/i_melon/5.ppm
hpatches-sequences-release/i_melon/6.ppm
hpatches-sequences-release/i_bridger/1.ppm
hpatches-sequences-release/i_bridger/2.ppm
hpatches-sequences-release/i_bridger/3.ppm
hpatches-sequences-release/i_bridger/4.ppm
hpatches-sequences-release/i_bridger/5.ppm
hpatches-sequences-release/i_bridger/6.ppm
hpatches-sequences-release/i_greentea/1.ppm
hpatches-sequences-release/i_greentea/2.ppm
hpatches-sequences-release/i_greentea/3.ppm
hpatches-sequences-release/i_greentea/4.ppm
hpatches-sequences-release/i_greentea/5.ppm
hpatches-sequences-release/i_greentea/6.ppm
hpatches-sequences-release/i_greenhouse/1.ppm
hpatches-sequences-release/i_greenhouse/2.ppm
hpatches-sequences-release/i_greenhouse/3.ppm
hpatches-sequences-release/i_greenhouse/4.ppm
hpatches-sequences-release/i_greenhouse/5.ppm
hpatches-sequences-release/i_greenhouse/6.ppm
hpatches-sequences-release/i_pencils/1.ppm
hpatches-sequences-release/i_pencils/2.ppm
hpatches-sequences-release/i_pencils/3.ppm
hpatches-sequences-release/i_pencils/4.ppm
hpatches-sequences-release/i_pencils/5.ppm
hpatches-sequences-release/i_pencils/6.ppm
hpatches-sequences-release/i_pool/1.ppm
hpatches-sequences-release/i_pool/2.ppm
hpatches-sequences-release/i_pool/3.ppm
hpatches-sequences-release/i_pool/4.ppm
hpatches-sequences-release/i_pool/5.ppm
hpatches-sequences-release/i_pool/6.ppm
hpatches-sequences-release/i_bologna/1.ppm
hpatches-sequences-release/i_bologna/2.ppm
hpatches-sequences-release/i_bologna/3.ppm
hpatches-sequences-release/i_bologna/4.ppm
hpatches-sequences-release/i_bologna/5.ppm
hpatches-sequences-release/i_bologna/6.ppm
hpatches-sequences-release/i_table/1.ppm
hpatches-sequences-release/i_table/2.ppm
hpatches-sequences-release/i_table/3.ppm
hpatches-sequences-release/i_table/4.ppm
hpatches-sequences-release/i_table/5.ppm
hpatches-sequences-release/i_table/6.ppm
hpatches-sequences-release/i_smurf/1.ppm
hpatches-sequences-release/i_smurf/2.ppm
hpatches-sequences-release/i_smurf/3.ppm
hpatches-sequences-release/i_smurf/4.ppm
hpatches-sequences-release/i_smurf/5.ppm
hpatches-sequences-release/i_smurf/6.ppm
hpatches-sequences-release/i_troulos/1.ppm
hpatches-sequences-release/i_troulos/2.ppm
hpatches-sequences-release/i_troulos/3.ppm
hpatches-sequences-release/i_troulos/4.ppm
hpatches-sequences-release/i_troulos/5.ppm
hpatches-sequences-release/i_troulos/6.ppm
hpatches-sequences-release/i_boutique/1.ppm
hpatches-sequences-release/i_boutique/2.ppm
hpatches-sequences-release/i_boutique/3.ppm
hpatches-sequences-release/i_boutique/4.ppm
hpatches-sequences-release/i_boutique/5.ppm
hpatches-sequences-release/i_boutique/6.ppm
hpatches-sequences-release/i_dc/1.ppm
hpatches-sequences-release/i_dc/2.ppm
hpatches-sequences-release/i_dc/3.ppm
hpatches-sequences-release/i_dc/4.ppm
hpatches-sequences-release/i_dc/5.ppm
hpatches-sequences-release/i_dc/6.ppm
hpatches-sequences-release/i_whitebuilding/1.ppm
hpatches-sequences-release/i_whitebuilding/2.ppm
hpatches-sequences-release/i_whitebuilding/3.ppm
hpatches-sequences-release/i_whitebuilding/4.ppm
hpatches-sequences-release/i_whitebuilding/5.ppm
hpatches-sequences-release/i_whitebuilding/6.ppm
================================================
FILE: HSequences_bench/splits.json
================================================
{"a": {"test": ["i_ajuntament", "i_resort", "i_table", "i_troulos", "i_bologna", "i_lionnight", "i_porta", "i_zion", "i_brooklyn", "i_fruits", "i_books", "i_bridger", "i_whitebuilding", "i_kurhaus", "i_salon", "i_autannes", "i_tools", "i_santuario", "i_fog", "i_nijmegen", "v_courses", "v_coffeehouse", "v_abstract", "v_feast", "v_woman", "v_talent", "v_tabletop", "v_bees", "v_strand", "v_fest", "v_yard", "v_underground", "v_azzola", "v_eastsouth", "v_yuri", "v_soldiers", "v_man", "v_pomegranate", "v_birdwoman", "v_busstop"], "train": ["v_there", "i_yellowtent", "i_boutique", "v_wapping", "i_leuven", "i_school", "i_crownnight", "v_artisans", "v_colors", "i_ski", "v_circus", "v_tempera", "v_london", "v_war", "i_parking", "v_bark", "v_charing", "i_indiana", "v_weapons", "v_wormhole", "v_maskedman", "v_dirtywall", "v_wall", "v_vitro", "i_nuts", "i_londonbridge", "i_pool", "i_pinard", "i_greentea", "v_calder", "i_lionday", "i_crownday", "i_kions", "v_posters", "i_dome", "v_machines", "v_laptop", "v_boat", "v_churchill", "i_pencils", "v_beyus", "v_sunseason", "v_samples", "v_cartooncity", "v_gardens", "v_bip", "v_home", "i_veggies", "i_nescafe", "v_wounded", "i_toy", "v_dogman", "i_duda", "i_contruction", "v_graffiti", "i_gonnenberg", "v_astronautis", "i_ktirio", "i_castle", "i_greenhouse", "i_fenis", "i_partyfood", "v_adam", "v_apprentices", "v_blueprint", "i_smurf", "i_objects", "v_bird", "i_melon", "v_grace", "i_miniature", "v_bricks", "i_chestnuts", "i_village", "i_steps", "i_dc"], "name": "a"}, "c": {"test": ["i_ski", "i_table", "i_troulos", "i_melon", "i_tools", "i_kions", "i_londonbridge", "i_nijmegen", "i_boutique", "i_parking", "i_steps", "i_fog", "i_leuven", "i_dc", "i_partyfood", "i_pool", "i_castle", "i_bologna", "i_smurf", "i_crownnight", "v_azzola", "v_tempera", "v_machines", "v_coffeehouse", "v_graffiti", "v_artisans", "v_maskedman", "v_talent", "v_bees", "v_dirtywall", "v_blueprint", "v_war", "v_adam", "v_pomegranate", "v_busstop", "v_weapons", 
"v_gardens", "v_feast", "v_man", "v_wounded"], "train": ["v_there", "i_yellowtent", "i_whitebuilding", "v_wapping", "v_laptop", "i_school", "v_calder", "i_duda", "v_circus", "i_porta", "v_home", "i_lionnight", "i_chestnuts", "v_abstract", "v_soldiers", "i_contruction", "v_charing", "i_indiana", "v_strand", "v_fest", "v_yuri", "v_wormhole", "v_eastsouth", "i_autannes", "v_colors", "v_wall", "v_vitro", "i_nuts", "i_pinard", "v_tabletop", "i_brooklyn", "i_lionday", "i_crownday", "v_bip", "v_posters", "v_underground", "i_dome", "v_grace", "i_ajuntament", "v_cartooncity", "v_boat", "v_churchill", "i_pencils", "v_beyus", "v_sunseason", "v_samples", "i_kurhaus", "i_santuario", "i_resort", "i_zion", "i_veggies", "i_nescafe", "i_toy", "v_dogman", "i_books", "v_courses", "v_birdwoman", "v_yard", "i_salon", "i_gonnenberg", "v_astronautis", "i_ktirio", "i_bridger", "i_greenhouse", "i_fenis", "v_woman", "v_bricks", "v_apprentices", "i_greentea", "i_objects", "v_bird", "v_london", "i_fruits", "i_miniature", "i_village", "v_bark"], "name": "c"}, "b": {"test": ["i_fruits", "i_melon", "i_castle", "i_resort", "i_chestnuts", "i_kions", "i_kurhaus", "i_autannes", "i_duda", "i_partyfood", "i_ski", "i_dome", "i_greenhouse", "i_pencils", "i_porta", "i_lionday", "i_school", "i_bridger", "i_village", "i_fog", "v_astronautis", "v_bip", "v_charing", "v_woman", "v_feast", "v_yard", "v_churchill", "v_graffiti", "v_london", "v_sunseason", "v_posters", "v_bees", "v_apprentices", "v_birdwoman", "v_colors", "v_laptop", "v_there", "v_adam", "v_underground", "v_war"], "train": ["v_wormhole", "i_yellowtent", "i_boutique", "v_wapping", "i_leuven", "i_pinard", "i_crownnight", "v_artisans", "i_toy", "v_circus", "v_tempera", "i_lionnight", "i_parking", "v_soldiers", "i_contruction", "i_whitebuilding", "i_indiana", "v_azzola", "v_weapons", "v_fest", "v_yuri", "v_dirtywall", "v_eastsouth", "v_man", "v_wall", "v_vitro", "i_nuts", "i_londonbridge", "i_pool", "v_tabletop", "i_greentea", "i_brooklyn", 
"i_smurf", "v_cartooncity", "v_wounded", "v_calder", "v_coffeehouse", "v_grace", "v_machines", "i_tools", "v_boat", "v_beyus", "v_strand", "i_santuario", "i_crownday", "v_bark", "i_veggies", "i_nescafe", "v_maskedman", "v_abstract", "v_talent", "i_books", "i_table", "v_courses", "i_nijmegen", "i_salon", "i_gonnenberg", "v_samples", "i_ktirio", "v_gardens", "i_zion", "v_pomegranate", "i_fenis", "v_home", "i_ajuntament", "i_objects", "v_bird", "v_dogman", "i_troulos", "i_miniature", "v_bricks", "i_bologna", "v_busstop", "v_blueprint", "i_steps", "i_dc"], "name": "b"}, "illum_test": {"test": ["i_crownnight", "i_table", "i_objects", "i_nescafe", "i_nijmegen", "i_whitebuilding", "i_porta", "i_santuario", "i_dc", "i_castle", "i_steps", "i_contruction", "i_melon", "i_miniature", "i_troulos", "i_veggies", "i_zion", "i_gonnenberg", "i_autannes", "i_boutique", "i_fruits", "i_pool", "i_fog", "i_fenis", "i_ajuntament", "i_partyfood", "i_kurhaus", "i_school", "i_chestnuts", "i_smurf", "i_indiana", "i_pinard", "i_lionnight", "i_kions", "i_ski", "i_greenhouse", "i_ktirio", "i_tools", "i_toy", "i_bridger", "i_lionday", "i_brooklyn", "i_londonbridge", "i_greentea", "i_leuven", "i_nuts", "i_resort", "i_bologna", "i_duda", "i_dome", "i_pencils", "i_books", "i_parking", "i_salon"], "name": "illum_test"}, "illum": {"test": ["i_crownnight", "i_table", "i_objects", "i_nescafe", "i_nijmegen", "i_whitebuilding", "i_porta", "i_santuario", "i_dc", "i_castle", "i_steps", "i_contruction", "i_melon", "i_yellowtent", "i_miniature", "i_troulos", "i_veggies", "i_zion", "i_gonnenberg", "i_autannes", "i_boutique", "i_fruits", "i_pool", "i_fog", "i_fenis", "i_village", "i_ajuntament", "i_partyfood", "i_kurhaus", "i_school", "i_chestnuts", "i_smurf", "i_indiana", "i_pinard", "i_lionnight", "i_kions", "i_ski", "i_greenhouse", "i_ktirio", "i_tools", "i_toy", "i_bridger", "i_lionday", "i_brooklyn", "i_crownday", "i_londonbridge", "i_greentea", "i_leuven", "i_nuts", "i_resort", "i_bologna", "i_duda", 
"i_dome", "i_pencils", "i_books", "i_parking", "i_salon"], "name": "illum"}, "full": {"test": ["i_crownnight", "i_table", "i_objects", "i_nescafe", "i_nijmegen", "i_whitebuilding", "i_porta", "i_santuario", "i_dc", "i_castle", "i_steps", "i_contruction", "i_melon", "i_yellowtent", "i_miniature", "i_troulos", "i_veggies", "i_zion", "i_gonnenberg", "i_autannes", "i_boutique", "i_fruits", "i_pool", "i_fog", "i_fenis", "i_village", "i_ajuntament", "i_partyfood", "i_kurhaus", "i_school", "i_chestnuts", "i_smurf", "i_indiana", "i_pinard", "i_lionnight", "i_kions", "i_ski", "i_greenhouse", "i_ktirio", "i_tools", "i_toy", "i_bridger", "i_lionday", "i_brooklyn", "i_crownday", "i_londonbridge", "i_greentea", "i_leuven", "i_nuts", "i_resort", "i_bologna", "i_duda", "i_dome", "i_pencils", "i_books", "i_parking", "i_salon", "v_circus", "v_charing", "v_colors", "v_astronautis", "v_maskedman", "v_talent", "v_london", "v_underground", "v_coffeehouse", "v_calder", "v_grace", "v_yard", "v_dogman", "v_laptop", "v_eastsouth", "v_boat", "v_strand", "v_busstop", "v_artisans", "v_machines", "v_soldiers", "v_home", "v_wapping", "v_wounded", "v_weapons", "v_adam", "v_there", "v_vitro", "v_cartooncity", "v_abstract", "v_dirtywall", "v_beyus", "v_apprentices", "v_sunseason", "v_wall", "v_war", "v_bricks", "v_fest", "v_churchill", "v_blueprint", "v_tempera", "v_samples", "v_man", "v_bees", "v_pomegranate", "v_bip", "v_feast", "v_azzola", "v_woman", "v_yuri", "v_posters", "v_bird", "v_graffiti", "v_bark", "v_wormhole", "v_tabletop", "v_courses", "v_birdwoman", "v_gardens"], "name": "full"}, "view_test": {"test": ["v_circus", "v_charing", "v_colors", "v_astronautis", "v_maskedman", "v_talent", "v_london", "v_underground", "v_coffeehouse", "v_calder", "v_grace", "v_yard", "v_dogman", "v_laptop", "v_boat", "v_strand", "v_busstop", "v_machines", "v_soldiers", "v_home", "v_wapping", "v_wounded", "v_weapons", "v_adam", "v_there", "v_vitro", "v_cartooncity", "v_abstract", "v_dirtywall", "v_beyus", 
"v_apprentices", "v_sunseason", "v_wall", "v_war", "v_bricks", "v_fest", "v_churchill", "v_blueprint", "v_tempera", "v_samples", "v_man", "v_bees", "v_pomegranate", "v_bip", "v_feast", "v_azzola", "v_woman", "v_yuri", "v_posters", "v_bird", "v_graffiti", "v_bark", "v_wormhole", "v_tabletop", "v_courses", "v_gardens"], "name": "view_test"}, "view": {"test": ["v_circus", "v_charing", "v_colors", "v_astronautis", "v_maskedman", "v_talent", "v_london", "v_underground", "v_coffeehouse", "v_calder", "v_grace", "v_yard", "v_dogman", "v_laptop", "v_eastsouth", "v_boat", "v_strand", "v_busstop", "v_artisans", "v_machines", "v_soldiers", "v_home", "v_wapping", "v_wounded", "v_weapons", "v_adam", "v_there", "v_vitro", "v_cartooncity", "v_abstract", "v_dirtywall", "v_beyus", "v_apprentices", "v_sunseason", "v_wall", "v_war", "v_bricks", "v_fest", "v_churchill", "v_blueprint", "v_tempera", "v_samples", "v_man", "v_bees", "v_pomegranate", "v_bip", "v_feast", "v_azzola", "v_woman", "v_yuri", "v_posters", "v_bird", "v_graffiti", "v_bark", "v_wormhole", "v_tabletop", "v_courses", "v_birdwoman", "v_gardens"], "name": "view"}, "debug_view": {"test": ["v_wormhole", "v_tabletop", "v_courses", "v_birdwoman", "v_gardens"], "name": "debug_view"}, "debug_illum": {"test": ["i_ajuntament", "i_resort", "i_table", "i_troulos", "i_bologna"], "name": "debug_illum"}}
================================================
FILE: HSequences_bench/tools/HSequences_reader.py
================================================
import os
import json
import numpy as np
from skimage import io
class HSequences_dataset(object):
    """Reader for the HPatches full-sequences (HSequences) benchmark.

    Loads the list of test sequences for a given split and yields, per
    sequence, the reference image, the five target images and the
    homographies relating them.
    """

    def __init__(self, dataset_path, split, split_path):
        """
        dataset_path: root directory containing the HPatches sequences.
        split: split name, must be a key of the JSON file at split_path.
        split_path: path to the splits JSON (e.g. splits.json).
        """
        self.dataset_path = dataset_path
        self.split = split
        # Close the split file deterministically (the original leaked the handle).
        with open(split_path) as split_file:
            self.splits = json.load(split_file)
        self.sequences = self.splits[self.split]['test']
        self.count = 0

    def read_image(self, path):
        """Read an image as grayscale and return it with shape (H, W, 1)."""
        im = io.imread(path, as_gray=True)
        return im.reshape(im.shape[0], im.shape[1], 1)

    def read_homography(self, h_name):
        """Parse a 3x3 homography from a whitespace-separated text file.

        Returns (h, inv_h) where inv_h is the inverse normalized so that
        inv_h[2, 2] == 1.
        """
        h = np.zeros((3, 3))
        # Close the homography file deterministically (the original leaked it).
        with open(h_name, 'r') as h_file:
            for j in range(3):
                values = h_file.readline().split()
                for i in range(3):
                    h[j, i] = float(values[i])
        inv_h = np.linalg.inv(h)
        inv_h = inv_h / inv_h[2, 2]
        return h, inv_h

    def get_sequence(self, folder_id):
        """Load one sequence: reference image 1.ppm, targets 2..6.ppm and
        the homographies H_1_2 .. H_1_6 (both directions)."""
        images_dst = []
        h_src_2_dst = []
        h_dst_2_src = []
        sequence_path = os.path.join(self.dataset_path, self.sequences[folder_id])
        name_image_src_path = sequence_path + '/1.ppm'
        im_src = self.read_image(name_image_src_path)
        # Normalize intensities to [0, 1] by the image maximum.
        im_src = im_src.astype(float) / im_src.max()
        for i in range(5):
            name_image_dst_path = sequence_path + '/' + str(i + 2) + '.ppm'
            dst = self.read_image(name_image_dst_path)
            dst = dst.astype(float) / dst.max()
            images_dst.append(dst)
            homography_path = sequence_path + '/H_1_' + str(i + 2)
            src_2_dst, dst_2_src = self.read_homography(homography_path)
            h_src_2_dst.append(src_2_dst)
            h_dst_2_src.append(dst_2_src)
        return {'im_src': im_src,
                'images_dst': np.asarray(images_dst),
                'h_src_2_dst': np.asarray(h_src_2_dst),
                'h_dst_2_src': np.asarray(h_dst_2_src),
                'sequence_name': self.sequences[folder_id]}

    def extract_hsequences(self):
        """Generator over all sequences of the selected split."""
        for idx_sequence in range(len(self.sequences)):
            yield self.get_sequence(idx_sequence)
================================================
FILE: HSequences_bench/tools/aux_tools.py
================================================
from os import path, mkdir
import numpy as np
import cv2
def convert_opencv_matches_to_numpy(matches):
    """Returns a np.ndarray array with points indices correspondences
    with the shape of Nx2 which each N feature is a vector containing
    the keypoints id [id_ref, id_dst].
    """
    assert isinstance(matches, list), type(matches)
    # Validate every entry first, then build the index pairs in one pass.
    for m in matches:
        assert isinstance(m, cv2.DMatch), type(m)
    return np.asarray([[m.queryIdx, m.trainIdx] for m in matches])
def create_results():
    """Return a fresh accumulator dict for benchmark metrics.

    Each metric maps to an empty list that is appended to per sequence pair.
    Note: the original literal defined 'num_matches' twice; the duplicate
    entry has been removed (a dict keeps only one anyway).
    """
    return {
        'num_features': [],
        'rep_single_scale': [],
        'rep_multi_scale': [],
        'num_points_single_scale': [],
        'num_points_multi_scale': [],
        'error_overlap_single_scale': [],
        'error_overlap_multi_scale': [],
        'mma': [],
        'mma_corr': [],
        'num_matches': [],
        'num_mutual_corresp': [],
        'avg_mma': [],
    }
def create_overlapping_results(detector_name, overlap):
    """Return a metrics dict tagged with the detector name and overlap threshold."""
    tagged = create_results()
    tagged.update({'detector': detector_name, 'overlap': overlap})
    return tagged
def check_directory(dir):
    """Create directory *dir* if it does not already exist."""
    if path.isdir(dir):
        return
    mkdir(dir)
def convert_openCV_to_np(pts, dsc, order_coord):
    """Convert OpenCV keypoints and descriptors to numpy arrays.

    pts: iterable of cv2.KeyPoint-like objects (needs .pt, .size, .response, .angle).
    dsc: per-keypoint descriptors, same length as pts.
    order_coord: 'xysr' stores rows as (x, y, size, response, angle);
        any other value stores (y, x, size, response, angle).

    Returns (keypoints, descriptors). NOTE: for a single keypoint the arrays
    are 1-D — that legacy behaviour of the original is preserved; for n > 1
    they are (n, 5) and (n, d).

    Fixes: the original grew the arrays with repeated np.vstack (O(n^2)) and
    raised UnboundLocalError for empty input.
    """
    kps = []
    dscs = []
    for kp, desc in zip(pts, dsc):
        if order_coord == 'xysr':
            kps.append(np.asarray([kp.pt[0], kp.pt[1], kp.size, kp.response, kp.angle]))
        else:
            kps.append(np.asarray([kp.pt[1], kp.pt[0], kp.size, kp.response, kp.angle]))
        dscs.append(np.asarray(desc, np.uint8).flatten())
    if not kps:
        # Robustness: return empty arrays instead of crashing on no keypoints.
        return np.empty((0, 5)), np.empty((0,), dtype=np.uint8)
    if len(kps) == 1:
        # Preserve the original 1-D shape for a single keypoint.
        return kps[0], dscs[0]
    return np.vstack(kps), np.vstack(dscs)
================================================
FILE: HSequences_bench/tools/geometry_tools.py
================================================
import numpy as np
from cv2 import warpPerspective as applyH
def remove_borders(image, borders):
    """Zero out a frame of width *borders* around the image.

    Supports 2-D (H, W), 3-D (H, W, C) and 4-D (B, H, W, C) arrays; the
    interior is copied unchanged into a fresh zero array.
    """
    out = np.zeros_like(image)
    rank = len(image.shape)
    if rank == 4:
        h, w = image.shape[1], image.shape[2]
        out[:, borders:h - borders, borders:w - borders, :] = \
            image[:, borders:h - borders, borders:w - borders, :]
    elif rank == 3:
        h, w = image.shape[0], image.shape[1]
        out[borders:h - borders, borders:w - borders, :] = \
            image[borders:h - borders, borders:w - borders, :]
    else:
        h, w = image.shape[0], image.shape[1]
        out[borders:h - borders, borders:w - borders] = \
            image[borders:h - borders, borders:w - borders]
    return out
def create_common_region_masks(h_dst_2_src, shape_src, shape_dst):
    # Create mask. Only take into account pixels in the two images
    inv_h = np.linalg.inv(h_dst_2_src)
    inv_h = inv_h / inv_h[2, 2]

    def _warped_mask(shape_from, homography, shape_to):
        # Warp an all-ones (border-trimmed) image and binarize the result;
        # where the warp leaves no support, no counterpart exists.
        ones = remove_borders(np.ones((shape_from[0], shape_from[1])), borders=15)
        warped = applyH(ones, homography, (shape_to[1], shape_to[0]))
        warped = np.where(warped >= 0.75, 1.0, 0.0)
        return remove_borders(warped, borders=15)

    # Mask in source coords: pixels that map from destination into source.
    mask_src = _warped_mask(shape_dst, h_dst_2_src, shape_src)
    # Mask in destination coords: pixels that map from source into destination.
    mask_dst = _warped_mask(shape_src, inv_h, shape_dst)
    return mask_src, mask_dst
def prepare_homography(hom):
    """Build a 3x3 homography from a flat 8(+)-element vector.

    The first eight entries fill the matrix row-major; the bottom-right
    element is fixed to 1.
    """
    h = np.ones((3, 3))
    for flat_idx in range(8):
        h[flat_idx // 3, flat_idx % 3] = hom[flat_idx]
    return h
def apply_homography_to_points(points, h):
    """Warp keypoints with homography *h*, rescaling each point's radius.

    points: iterable of rows [x, y, radius, score] — assumed ordering;
        TODO confirm against get_point_coordinates (order_coord='xysr').
    h: 3x3 homography matrix.
    Returns an (N, 4) array of rows [x', y', radius', score].
    """
    new_points = []
    for point in points:
        # Homogeneous transform of the point position.
        new_point = h.dot([point[0], point[1], 1.0])
        # Second-moment matrix of a circle of radius point[2]; eps avoids 1/0.
        tmp = point[2]**2+np.finfo(np.float32).eps
        Mi1 = [[1/tmp, 0], [0, 1/tmp]]
        Mi1_inv = np.linalg.inv(Mi1)
        # Local affine (first-order) approximation of h at the point.
        Aff = getAff(point[0], point[1], h)
        # Propagate the shape matrix through the affine map: inv(A · inv(M) · Aᵀ).
        BMB = np.linalg.inv(np.dot(Aff, np.dot(Mi1_inv, np.matrix.transpose(Aff))))
        [e, _] = np.linalg.eig(BMB)
        # Geometric mean of the warped ellipse axes -> equivalent circle radius.
        new_radious = 1/((e[0] * e[1])**0.5)**0.5
        # De-homogenize the position; keep the original score.
        new_point = [new_point[0] / new_point[2], new_point[1] / new_point[2], new_radious, point[3]]
        new_points.append(new_point)
    return np.asarray(new_points)
def getAff(x, y, H):
    """First-order (Jacobian) approximation of homography H at point (x, y).

    Returns the 2x2 affine matrix [[dfx/dx, dfx/dy], [dfy/dx, dfy/dy]]
    of the projective map f(x, y) = (h1·p / h3·p, h2·p / h3·p).
    """
    h11, h12, h13 = H[0, 0], H[0, 1], H[0, 2]
    h21, h22, h23 = H[1, 0], H[1, 1], H[1, 2]
    h31, h32, h33 = H[2, 0], H[2, 1], H[2, 2]
    # Quotient-rule partial derivatives (expressions kept verbatim so the
    # floating-point results match the previous implementation exactly).
    fxdx = h11 / (h31 * x + h32 * y + h33) - (h11 * x + h12 * y + h13) * h31 / (h31 * x + h32 * y + h33) ** 2
    fxdy = h12 / (h31 * x + h32 * y + h33) - (h11 * x + h12 * y + h13) * h32 / (h31 * x + h32 * y + h33) ** 2
    fydx = h21 / (h31 * x + h32 * y + h33) - (h21 * x + h22 * y + h23) * h31 / (h31 * x + h32 * y + h33) ** 2
    fydy = h22 / (h31 * x + h32 * y + h33) - (h21 * x + h22 * y + h23) * h32 / (h31 * x + h32 * y + h33) ** 2
    return np.asarray([[fxdx, fxdy], [fydx, fydy]])
def find_index_higher_scores(map, num_points = 1000, threshold = -1):
    """Return indices (np.argwhere rows) of the highest-scoring entries of *map*.

    threshold == -1 (default): the cut-off is chosen as the num_points-th
    largest value, falling back to the smallest positive value when fewer
    than num_points entries are positive. Any other threshold is used as-is.
    At most num_points indices are returned, in argwhere (row-major) order —
    not sorted by score.
    """
    # Best n points
    if threshold == -1:
        flatten = map.flatten()
        order_array = np.sort(flatten)
        order_array = np.flip(order_array, axis=0)
        threshold = order_array[num_points-1]
        if threshold <= 0.0:
            # Fewer than num_points positive scores: use the smallest positive
            # value as the cut-off instead (0.0 when there are none at all).
            indexes = np.argwhere(order_array > 0.0)
            if len(indexes) == 0:
                threshold = 0.0
            else:
                # NOTE(review): this is a 1-element array, not a scalar; the
                # `>=` comparison below still broadcasts correctly.
                threshold = order_array[indexes[len(indexes)-1]]
    # elif threshold == 0.0:
    #     threshold = order_array[np.nonzero(order_array)].min()
    indexes = np.argwhere(map >= threshold)
    # Truncation follows argwhere order, so ties beyond num_points are dropped.
    return indexes[:num_points]
def get_point_coordinates(map, scale_value=1., num_points=1000, threshold=-1, order_coord='xysr'):
    """Turn a 2-D score map into keypoint rows.

    Rows are [x, y, scale, score] for order_coord='xysr' or
    [y, x, scale, score] for 'yxsr'; *scale_value* fills the scale column.
    """
    selected = find_index_higher_scores(map, num_points=num_points, threshold=threshold)
    keypoints = []
    for row, col in selected:
        score = map[row, col]
        if order_coord == 'xysr':
            keypoints.append([col, row, scale_value, score])
        elif order_coord == 'yxsr':
            keypoints.append([row, col, scale_value, score])
    return np.asarray(keypoints)
def get_point_coordinates3D(map, scale_factor=1., up_levels=0, num_points=1000, threshold=-1, order_coord='xysr'):
    """Turn a 3-D (H, W, levels) score volume into keypoint rows.

    Each selected entry becomes [x, y, scale, score] (or [y, x, ...] for
    'yxsr'), where scale = scale_factor ** (level - up_levels).
    """
    selected = find_index_higher_scores(map, num_points=num_points, threshold=threshold)
    keypoints = []
    for row, col, level in selected:
        scale_value = (scale_factor ** (level - up_levels))
        score = map[row, col, level]
        if order_coord == 'xysr':
            keypoints.append([col, row, scale_value, score])
        elif order_coord == 'yxsr':
            keypoints.append([row, col, scale_value, score])
    return np.asarray(keypoints)
================================================
FILE: HSequences_bench/tools/matching_tools.py
================================================
import numpy as np
def create_precision_recall_results():
    """Return a fresh results dict for precision/recall accumulation."""
    results = {}
    results['recall'] = 0.0
    results['precision'] = 0.0
    results['correct_matches'] = []
    results['false_matches'] = []
    return results
# retrieve the true correspondences
def compute_matching_based_distance(points_src, points_dst, matches, num_points, pixel_threshold, possible_matches):
    """Count matches whose matched points lie within *pixel_threshold* pixels.

    Returns (match_score, match_score_corr, num_correct) where match_score is
    normalized by num_points and match_score_corr by possible_matches.
    """
    src_xy = points_src[matches[:, 0], :2]
    dst_xy = points_dst[matches[:, 1], :2]
    dist = np.sqrt(np.sum((src_xy - dst_xy) ** 2, axis=1))
    num_correct = np.sum(np.where(dist < pixel_threshold, 1.0, 0.0))
    match_score = num_correct / num_points
    match_score_corr = num_correct / (possible_matches + 1e-6)
    return match_score, match_score_corr, num_correct
def compute_precision_recall(matches, true_matches, num_points, eps=1e-6):
    """Compute precision/recall of *matches* against *true_matches*.

    matches: iterable of [src_idx, dst_idx] pairs (proposed matches).
    true_matches: (N, 2) array of ground-truth [src_idx, dst_idx] pairs.
    num_points: denominator for the 'recall_total' statistic.
    Returns the dict from create_precision_recall_results() extended with
    'recall_total' (unless true_matches is empty, in which case the bare
    zeroed dict is returned, as before).
    """
    results = create_precision_recall_results()
    if len(true_matches) == 0:
        return results
    num_correct_matches, num_false_matches = 0.0, 0.0
    for match in matches:
        # Bug fix: the original compared two np.where(...) tuples, which does
        # not test pair membership (and can raise for multi-element results).
        # A match is correct iff the same (src, dst) pair occurs in true_matches.
        found_match = bool(np.any((true_matches[:, 0] == match[0]) &
                                  (true_matches[:, 1] == match[1])))
        if found_match:
            num_correct_matches += 1
            results['correct_matches'].append(match)
        else:
            num_false_matches += 1
            results['false_matches'].append(match)
    # stack matches
    results['correct_matches'] = np.array(results['correct_matches'])
    results['false_matches'] = np.array(results['false_matches'])
    # compute the actual statistics (eps guards against division by zero)
    num_correspondences = true_matches.shape[0] + eps
    sum_matches = num_correct_matches + num_false_matches + eps
    # return a dictionary with all the results
    results['recall'] = num_correct_matches / num_correspondences
    results['recall_total'] = num_correct_matches / num_points
    results['precision'] = 1. - num_false_matches / sum_matches
    return results
# find matches
def find_matches(dsc_src, dsc_dst):
    """Nearest-neighbour matching by squared L2 distance.

    dsc_src: (N, d) descriptors; dsc_dst: (M, d) descriptors. The descriptor
    dimension is taken from the input (the original hard-coded 128).
    Returns [src_indices (0..N-1), dst_indices] where dst_indices[i] is the
    closest destination descriptor to source descriptor i (first on ties).

    Uses broadcasting instead of the original np.repeat, avoiding the
    materialization of two (N, M, d) arrays.
    """
    dim = dsc_src.shape[-1]
    src = np.reshape(dsc_src, (dsc_src.shape[0], 1, dim))
    dst = np.reshape(dsc_dst, (1, dsc_dst.shape[0], dim))
    l2_matrix = np.sum((src - dst) ** 2, axis=-1)
    matches = l2_matrix.argmin(axis=1)
    return [np.arange(len(dsc_src)), matches]
================================================
FILE: HSequences_bench/tools/opencv_matcher.py
================================================
import cv2
import numpy as np
class OpencvBruteForceMatcher(object):
    """Thin wrapper around OpenCV's cv2.BFMatcher for descriptor matching."""
    name = 'opencv_brute_force_matcher'
    # Supported norms, keyed by human-readable distance name.
    distances = {}
    distances['l2'] = cv2.NORM_L2
    distances['hamming'] = cv2.NORM_HAMMING
    def __init__(self, distance='l2'):
        # distance: 'l2' for float descriptors, 'hamming' for binary ones.
        self._matcher = cv2.BFMatcher(self.distances[distance])
    def match(self, descs1, descs2):
        """Compute brute force matches between two sets of descriptors.

        descs1, descs2: 2-D numpy arrays (one descriptor per row).
        Returns a list of cv2.DMatch, one nearest neighbour per query.
        """
        assert isinstance(descs1, np.ndarray), type(descs1)
        assert isinstance(descs2, np.ndarray), type(descs2)
        assert len(descs1.shape) == 2, descs1.shape
        assert len(descs2.shape) == 2, descs2.shape
        matches = self._matcher.match(descs1, descs2)
        return matches
    def match_putative(self, descs1, descs2, knn=2, threshold_ratio=0.7):
        """Compute putative matches between two sets of descriptors
        using k-NN search followed by Lowe's ratio test.
        """
        assert isinstance(descs1, np.ndarray), type(descs1)
        assert isinstance(descs2, np.ndarray), type(descs2)
        assert len(descs1.shape) == 2, descs1.shape
        assert len(descs2.shape) == 2, descs2.shape
        matches = self._matcher.knnMatch(descs1, descs2, k=knn)
        # apply Lowe's ratio test: keep the best neighbour only if it is
        # sufficiently closer than the second best.
        good = []
        for m, n in matches:
            if m.distance < threshold_ratio * n.distance:
                good.append(m)
        return good
    def convert_opencv_matches_to_numpy(self, matches):
        """Returns a np.ndarray array with points indices correspondences
        with the shape of Nx2 which each N feature is a vector containing
        the keypoints id [id_ref, id_dst].
        """
        assert isinstance(matches, list), type(matches)
        correspondences = []
        for match in matches:
            assert isinstance(match, cv2.DMatch), type(match)
            correspondences.append([match.queryIdx, match.trainIdx])
        return np.asarray(correspondences)
================================================
FILE: HSequences_bench/tools/repeatability_tools.py
================================================
import numpy as np
from scipy.ndimage.filters import maximum_filter
def check_common_points(kpts, mask):
    """Indices of keypoints whose rounded (row, col) position lies inside the mask."""
    valid = [idx for idx, kpt in enumerate(kpts)
             if mask[int(round(kpt[0])), int(round(kpt[1]))]]
    return np.asarray(valid)
def select_top_k(kpts, k=1000):
    """Return indices of the k keypoints with the highest score (column 3)."""
    negated_scores = -1 * kpts[:, 3]
    ranked = np.argsort(negated_scores)
    return ranked[:k]
def apply_nms(score_map, size):
    """Non-maximum suppression: keep only local maxima in a size x size window."""
    local_max = maximum_filter(score_map, footprint=np.ones((size, size)))
    return score_map * (score_map == local_max)
def intersection_area(R, r, d = 0):
    """Return the area of intersection of two circles.

    The circles have radii R and r, and their centres are separated by d.
    """
    if d >= r + R:
        # Disjoint circles: no overlap.
        return 0
    if d <= abs(R - r):
        # The smaller circle lies completely inside the larger one.
        return np.pi * min(R, r) ** 2
    # Lens area via the two circular-segment half-angles.
    r_sq, R_sq, d_sq = r ** 2, R ** 2, d ** 2
    alpha = np.arccos((d_sq + r_sq - R_sq) / (2 * d * r))
    beta = np.arccos((d_sq + R_sq - r_sq) / (2 * d * R))
    lens_area = (r_sq * alpha + R_sq * beta
                 - 0.5 * (r_sq * np.sin(2 * alpha) + R_sq * np.sin(2 * beta)))
    return lens_area
def union_area(r, R, intersection):
    """Area of the union of two circles, given their precomputed intersection area."""
    total = np.pi * (r ** 2) + np.pi * (R ** 2)
    return total - intersection
def compute_repeatability(src_indexes, dst_indexes, overlap_err=0.4, eps=1e-6, dist_match_thresh=3, radious_size=30.):
    """Greedy one-to-one repeatability between two keypoint sets.

    src_indexes / dst_indexes: arrays whose rows hold two coordinates and a
        radius at columns 0-2 (coordinate ordering assumed consistent between
        both sets — TODO confirm against get_point_coordinates).
    overlap_err: a pair is accepted while its IoU >= 1 - overlap_err.
    dist_match_thresh: pixel distance under which a dst point counts as a
        "possible match" for a src point (matching-score denominator).
    radious_size: fixed radius for the single-scale IoU, and the value the
        larger radius is normalized to for the multi-scale IoU.

    Returns a dict with single-/multi-scale repeatability percentages,
    matched counts, mean overlap errors and the greedy correspondences.
    """
    error_overlap_s = 0.
    error_overlap_m = 0.
    found_points_s = 0
    found_points_m = 0
    possible_matches = 0
    correspondences = []
    correspondences_m = []
    dst_indexes_num = len(dst_indexes)
    src_indexes_num = len(src_indexes)
    # Pairwise IoU tables: radius-aware (multi-scale) and fixed-radius (single-scale).
    matrix_overlaps = np.zeros((len(src_indexes), len(dst_indexes)))
    matrix_overlaps_single_scale = np.zeros((len(src_indexes), len(dst_indexes)))
    # Pairs farther apart than this cannot meaningfully overlap; skip them.
    max_distance = 4 * radious_size
    for idx_ref, point_ref in enumerate(src_indexes):
        radious_ref = point_ref[2]
        found_possible_match = False
        for idx_dst, point_dst in enumerate(dst_indexes):
            radious_dst = point_dst[2]
            distance = (((point_ref[0] - point_dst[0]) ** 2) + ((point_ref[1] - point_dst[1]) ** 2)) ** 0.5
            # Count each src point at most once as having a nearby dst candidate.
            if distance <= dist_match_thresh and not found_possible_match:
                found_possible_match = True
                possible_matches += 1
            if distance > max_distance:
                continue
            # Multi-scale IoU: rescale both radii so the larger equals radious_size.
            factor_scale = radious_size / (max(radious_ref, radious_dst) + np.finfo(float).eps)
            I = intersection_area(factor_scale*radious_ref, factor_scale*radious_dst, distance)
            U = union_area(factor_scale*radious_ref, factor_scale*radious_dst, I) + eps
            matrix_overlaps[idx_ref, idx_dst] = I/U
            # Single-scale IoU: both circles use the fixed reference radius.
            I = intersection_area(radious_size, radious_size, distance)
            U = union_area(radious_size, radious_size, I) + eps
            matrix_overlaps_single_scale[idx_ref, idx_dst] = I/U
    # Greedy one-to-one assignment on the single-scale table, best IoU first.
    y_visited = np.zeros(src_indexes.shape[0], dtype=np.uint8)
    x_visited = np.zeros(dst_indexes.shape[0], dtype=np.uint8)
    # Multiply matrix to get descendent order
    for index in (-1 * matrix_overlaps_single_scale).flatten().argsort():
        # Recover the 2-D cell from the flattened index.
        y_pos = index // dst_indexes.shape[0]
        x_pos = index % dst_indexes.shape[0]
        if x_visited[x_pos] or y_visited[y_pos]:
            continue
        max_overlap = matrix_overlaps_single_scale[y_pos, x_pos]
        # Remaining IoUs only get smaller; stop once below the acceptance bound.
        if max_overlap < (1 - overlap_err):
            break
        found_points_s += 1
        error_overlap_s += (1 - max_overlap)
        correspondences.append([x_pos, y_pos])
        # update visited cells
        x_visited[x_pos] = 1
        y_visited[y_pos] = 1
    # Release the (possibly large) table before the second assignment pass.
    matrix_overlaps_single_scale = 0
    del matrix_overlaps_single_scale
    # Same greedy assignment, now on the multi-scale table.
    y_visited = np.zeros(src_indexes.shape[0], dtype=np.uint8)
    x_visited = np.zeros(dst_indexes.shape[0], dtype=np.uint8)
    # Multiply matrix to get descendent order
    for index in (-1 * matrix_overlaps).flatten().argsort():
        y_pos = index // dst_indexes.shape[0]
        x_pos = index % dst_indexes.shape[0]
        if x_visited[x_pos] or y_visited[y_pos]:
            continue
        max_overlap = matrix_overlaps[y_pos, x_pos]
        if max_overlap < (1 - overlap_err):
            break
        found_points_m += 1
        error_overlap_m += (1 - max_overlap)
        correspondences_m.append([x_pos, y_pos])
        # update visited cells
        x_visited[x_pos] = 1
        y_visited[y_pos] = 1
    matrix_overlaps = 0
    del matrix_overlaps
    # Repeatability is normalized by the smaller of the two keypoint counts.
    points = dst_indexes_num
    if src_indexes_num < points:
        points = src_indexes_num
    rep_s = (found_points_s / np.asarray(points, float)) * 100.0
    rep_m = (found_points_m / np.asarray(points, float)) * 100.0
    # Mean overlap error over the accepted pairs (0 when none were found).
    if found_points_m == 0:
        error_overlap_m = 0.0
    else:
        error_overlap_m = error_overlap_m / float(found_points_m+np.finfo(float).eps)
    if found_points_s == 0:
        error_overlap_s = 0.0
    else:
        error_overlap_s = error_overlap_s / float(found_points_s+np.finfo(float).eps)
    return {'rep_single_scale': rep_s, 'rep_multi_scale': rep_m, 'num_points_single_scale': found_points_s,
            'num_points_multi_scale': found_points_m, 'error_overlap_single_scale': error_overlap_s,
            'error_overlap_multi_scale': error_overlap_m, 'total_num_points': points,
            'correspondences': np.asarray(correspondences), 'possible_matches': possible_matches,
            'correspondences_m': np.asarray(correspondences_m)}
================================================
FILE: LICENSE
================================================
The Clear BSD License
Copyright (c) 2019 Axel Barroso-Laguna
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the disclaimer
below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the names of the copyright holders nor the names of the
contributors nor the names of their institutions may be used to endorse
or promote products derived from this software without specific prior
written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
================================================
FILE: README.md
================================================
# Key.Net: Keypoint Detection by Handcrafted and Learned CNN Filters
Code for the ICCV19 paper:
```text
"Key.Net: Keypoint Detection by Handcrafted and Learned CNN Filters".
Axel Barroso-Laguna, Edgar Riba, Daniel Ponsa, Krystian Mikolajczyk. ICCV 2019.
```
[[Paper on arxiv](https://arxiv.org/abs/1904.00889)]
## Update on December 8 2021
We have created a repository with a Key.Net version implemented in PyTorch. Refer to [our new repo](https://github.com/axelBarroso/Key.Net-Pytorch) for more details.
## Update on March 20 2020
We have updated the descriptor part. Before, we were using a TensorFlow implementation of the HardNet descriptor, which we switched to the official [HardNet model in Pytorch](https://github.com/DagnyT/hardnet).
This change provides better results on the matching step, and thus, all that follows.
## Prerequisite
Python 3.7 is required for running Key.Net code. Use Conda to install the dependencies:
```bash
conda create --name keyNet_environment tensorflow-gpu=1.13.1
conda activate keyNet_environment
conda install -c conda-forge opencv tqdm
conda install -c conda-forge scikit-image
conda install pytorch==1.2.0 -c pytorch
```
## Feature Extraction
`extract_multiscale_features.py` can be used to extract Key.Net features for a given list of images. The list of images must contain the full path to each image; if an image does not exist, an error will be raised.
The script generates two numpy files, one '.kpt' for keypoints, and a '.dsc' for descriptors. The descriptor used together with Key.Net is [HardNet](https://github.com/DagnyT/hardnet). The output format of the keypoints is as follows:
- `keypoints` [`N x 4`] array containing the positions of keypoints `x, y`, scales `s` and their scores `sc`.
Arguments:
* list-images: File containing the image paths for extracting features.
* results-dir: The output path to save the extracted features.
* checkpoint-det-dir: The path to the checkpoint file to load the detector weights. Default: Pretrained Key.Net.
* checkpoint-desc-dir: The path to the checkpoint file to load the HardNet descriptor weights.
* num-points: The number of desired features to extract. Default: 1500.
* extract-MS: Set to True if you want to extract multi-scale features. Default: True.
Run the following script to generate the keypoint and descriptor numpy files from the image allocated in `test_im` directory.
```bash
python extract_multiscale_features.py --list-images test_im/image.txt --results-dir test_im/
```
## HSequences Benchmark
We also provide the benchmark to compute [HSequences](https://github.com/hpatches/hpatches-dataset) repeatability (single- and multi-scale), and MMA metrics. To do so, first download full images (HSequences) from [HPatches repository](http://icvl.ee.ic.ac.uk/vbalnt/hpatches/hpatches-sequences-release.tar.gz). Once downloaded, place it on the root directory of the project. We provide a file `HSequences_bench/HPatches_images.txt` containing the list of images inside HSequences.
Run the next script to compute the features from HSequences:
```bash
python extract_multiscale_features.py --list-images HSequences_bench/HPatches_images.txt --results-dir extracted_features
```
Once all features have been extracted, to compute repeatability and MMA metrics run:
```bash
python hsequeces_bench.py --results-dir extracted_features --results-bench-dir HSequences_bench/results --split full
```
Use arguments to set different options:
* results-bench-dir: The output path to save the results in a pickle file.
* results-dir: The output path to load the extracted features.
* split: The name of the HPatches (HSequences) split. Use full, view or illum.
* top-k-points: The number of top points to use for evaluation. Set to None to use all points.
* pixel-threshold: The distance of pixels for a matching correspondence to be considered correct.
* overlap: The overlap threshold for a correspondence to be considered correct.
* detector-name: Set the name of the detector for which you desire to compute the benchmark (and features have been already extracted).
## Training Key.Net
Before training Key.Net a synthetic dataset must be generated. In our paper, we downloaded ImageNet and used it to generate synthetic pairs of images; however, any other dataset would work if it is big enough. Therefore, the first time you run the `train_network.py` script, two tfrecord files will be generated, one for training and another for validation. This is only done when the code cannot find them; thus, subsequent runs of the script will skip this part.
```bash
python train_network.py --data-dir /path/to/ImageNet --network-version KeyNet_default
```
Check the arguments to customize your training, some parameters you might want to change are:
* Dataset parameters:
* max-angle: The max angle value for generating a synthetic view to train Key.Net.
* max-scale: The max scale value for generating a synthetic view to train Key.Net.
* max-shearing: The max shearing value for generating a synthetic view to train Key.Net.
* Network Architecture:
* num-filters: The number of filters in each learnable block.
* num-learnable-blocks: The number of learnable blocks after handcrafted block.
* num-levels-within-net: The number of pyramid levels inside the architecture.
* factor-scaling-pyramid: The scale factor between the multi-scale pyramid levels in the architecture.
* conv-kernel-size: The size of the convolutional filters in each of the learnable blocks.
## BibTeX
If you use this code in your research, please cite our paper:
```bibtex
@InProceedings{Barroso-Laguna2019ICCV,
author = {Barroso-Laguna, Axel and Riba, Edgar and Ponsa, Daniel and Mikolajczyk, Krystian},
title = {{Key.Net: Keypoint Detection by Handcrafted and Learned CNN Filters}},
booktitle = {Proceedings of the 2019 IEEE/CVF International Conference on Computer Vision},
year = {2019},
}
```
================================================
FILE: extract_multiscale_features.py
================================================
import os, sys, cv2
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from os import path, mkdir
import argparse
import keyNet.aux.tools as aux
from skimage.transform import pyramid_gaussian
import HSequences_bench.tools.geometry_tools as geo_tools
import HSequences_bench.tools.repeatability_tools as rep_tools
from keyNet.model.keynet_architecture import *
import keyNet.aux.desc_aux_function as loss_desc
from keyNet.model.hardnet_pytorch import *
from keyNet.datasets.dataset_utils import read_bw_image
import torch
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
def check_directory(dir):
    """Create `dir` if it does not already exist as a directory."""
    if path.isdir(dir):
        return
    mkdir(dir)
def create_result_dir(path):
    """Create, one level at a time, every directory component of `path`
    except the last one (which is the output file name)."""
    components = path.split('/')
    partial = ''
    for idx, component in enumerate(components):
        partial += component + '/'
        # Skip the final component: it is the file, not a folder.
        if idx < len(components) - 1:
            check_directory(partial)
def extract_multiscale_features():
    """Extract multi-scale Key.Net keypoints and HardNet descriptors.

    Reads the image list given by --list-images, builds the Key.Net TF1
    detector graph plus a PyTorch HardNet descriptor, and for every image
    saves `<image>.kpt.npy` (points as [x, y, scale, score]) and
    `<image>.dsc.npy` (128-D descriptors) under --results-dir.
    Requires a CUDA device (HardNet tensors are moved with `.cuda()`).
    """
    parser = argparse.ArgumentParser(description='HSequences Extract Features')

    parser.add_argument('--list-images', type=str, help='File containing the image paths for extracting features.',
                        required=True)

    parser.add_argument('--results-dir', type=str, default='extracted_features/',
                        help='The output path to save the extracted keypoint.')

    parser.add_argument('--network-version', type=str, default='KeyNet_default',
                        help='The Key.Net network version name')

    parser.add_argument('--checkpoint-det-dir', type=str, default='keyNet/pretrained_nets/KeyNet_default',
                        help='The path to the checkpoint file to load the detector weights.')

    parser.add_argument('--pytorch-hardnet-dir', type=str, default='keyNet/pretrained_nets/HardNet++.pth',
                        help='The path to the checkpoint file to load the HardNet descriptor weights.')

    # Detector Settings
    parser.add_argument('--num-filters', type=int, default=8,
                        help='The number of filters in each learnable block.')

    parser.add_argument('--num-learnable-blocks', type=int, default=3,
                        help='The number of learnable blocks after handcrafted block.')

    parser.add_argument('--num-levels-within-net', type=int, default=3,
                        help='The number of pyramid levels inside the architecture.')

    parser.add_argument('--factor-scaling-pyramid', type=float, default=1.2,
                        help='The scale factor between the multi-scale pyramid levels in the architecture.')

    parser.add_argument('--conv-kernel-size', type=int, default=5,
                        help='The size of the convolutional filters in each of the learnable blocks.')

    # Multi-Scale Extractor Settings
    # NOTE(review): argparse's type=bool treats any non-empty string
    # (including "False") as True — passing --extract-MS False still
    # enables multi-scale extraction.
    parser.add_argument('--extract-MS', type=bool, default=True,
                        help='Set to True if you want to extract multi-scale features.')

    parser.add_argument('--num-points', type=int, default=1500,
                        help='The number of desired features to extract.')

    parser.add_argument('--nms-size', type=int, default=15,
                        help='The NMS size for computing the validation repeatability.')

    parser.add_argument('--border-size', type=int, default=15,
                        help='The number of pixels to remove from the borders to compute the repeatability.')

    parser.add_argument('--order-coord', type=str, default='xysr',
                        help='The coordinate order that follows the extracted points. Use yxsr or xysr.')

    parser.add_argument('--random-seed', type=int, default=12345,
                        help='The random seed value for TensorFlow and Numpy.')

    parser.add_argument('--pyramid_levels', type=int, default=5,
                        help='The number of downsample levels in the pyramid.')

    parser.add_argument('--upsampled-levels', type=int, default=1,
                        help='The number of upsample levels in the pyramid.')

    parser.add_argument('--scale-factor-levels', type=float, default=np.sqrt(2),
                        help='The scale factor between the pyramid levels.')

    parser.add_argument('--scale-factor', type=float, default=2.,
                        help='The scale factor to extract patches before descriptor.')

    # GPU Settings
    parser.add_argument('--gpu-memory-fraction', type=float, default=0.9,
                        help='The fraction of GPU used by the script.')

    parser.add_argument('--gpu-visible-devices', type=str, default="0",
                        help='Set CUDA_VISIBLE_DEVICES variable.')

    args = parser.parse_known_args()[0]

    # remove verbose bits from tf
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
    tf.logging.set_verbosity(tf.logging.ERROR)

    # Set CUDA GPU environment
    os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu_visible_devices

    version_network_name = args.network_version

    # Single-scale mode: collapse the external pyramid to the original image only.
    if not args.extract_MS:
        args.pyramid_levels = 0
        args.upsampled_levels = 0

    print('Extract features for : ' + version_network_name)

    aux.check_directory(args.results_dir)
    aux.check_directory(os.path.join(args.results_dir, version_network_name))

    def extract_features(image):
        """Run the detector over an image pyramid and describe the kept points.

        Uses closure variables (`sess`, `maps`, `input_patches`, `model`,
        `point_level`, `levels`, and the placeholders) that are created in
        the graph/session block below, so it must be called inside the
        session context.
        """
        # Downscaled levels of the external pyramid.
        pyramid = pyramid_gaussian(image, max_layer=args.pyramid_levels, downscale=args.scale_factor_levels)

        score_maps = {}
        for (j, resized) in enumerate(pyramid):
            im = resized.reshape(1, resized.shape[0], resized.shape[1], 1)

            feed_dict = {
                input_network: im,
                phase_train: False,
                dimension_image: np.array([1, im.shape[1], im.shape[2]], dtype=np.int32),
            }

            im_scores = sess.run(maps, feed_dict=feed_dict)

            im_scores = geo_tools.remove_borders(im_scores, borders=args.border_size)
            # Maps 1..upsampled_levels are reserved for the upsampled images below.
            score_maps['map_' + str(j + 1 + args.upsampled_levels)] = im_scores[0, :, :, 0]

        if args.upsampled_levels:
            for j in range(args.upsampled_levels):
                # Upsampled levels above the original resolution.
                factor = args.scale_factor_levels ** (args.upsampled_levels - j)
                up_image = cv2.resize(image, (0, 0), fx=factor, fy=factor)

                im = np.reshape(up_image, (1, up_image.shape[0], up_image.shape[1], 1))

                feed_dict = {
                    input_network: im,
                    phase_train: False,
                    dimension_image: np.array([1, im.shape[1], im.shape[2]], dtype=np.int32),
                }

                im_scores = sess.run(maps, feed_dict=feed_dict)

                im_scores = geo_tools.remove_borders(im_scores, borders=args.border_size)
                score_maps['map_' + str(j + 1)] = im_scores[0, :, :, 0]

        im_pts = []
        for idx_level in range(levels):

            # Homography mapping points of this level back to original-image coordinates.
            scale_value = (args.scale_factor_levels ** (idx_level - args.upsampled_levels))
            scale_factor = 1. / scale_value

            h_scale = np.asarray([[scale_factor, 0., 0.], [0., scale_factor, 0.], [0., 0., 1.]])
            h_scale_inv = np.linalg.inv(h_scale)
            h_scale_inv = h_scale_inv / h_scale_inv[2, 2]

            # Budget of points per level; later levels absorb any shortfall
            # from earlier ones.
            num_points_level = point_level[idx_level]
            if idx_level > 0:
                res_points = int(np.asarray([point_level[a] for a in range(0, idx_level + 1)]).sum() - len(im_pts))
                num_points_level = res_points

            im_scores = rep_tools.apply_nms(score_maps['map_' + str(idx_level + 1)], args.nms_size)
            im_pts_tmp = geo_tools.get_point_coordinates(im_scores, num_points=num_points_level, order_coord='xysr')

            im_pts_tmp = geo_tools.apply_homography_to_points(im_pts_tmp, h_scale_inv)

            if not idx_level:
                im_pts = im_pts_tmp
            else:
                im_pts = np.concatenate((im_pts, im_pts_tmp), axis=0)

        if args.order_coord == 'yxsr':
            im_pts = np.asarray(list(map(lambda x: [x[1], x[0], x[2], x[3]], im_pts)))

        # Keep the top `num_points` responses (column 3 is the score).
        im_pts = im_pts[(-1 * im_pts[:, 3]).argsort()]
        im_pts = im_pts[:args.num_points]

        # Extract descriptor from features
        descriptors = []
        im = image.reshape(1, image.shape[0], image.shape[1], 1)
        # Describe the points in batches of 250 patches.
        for idx_desc_batch in range(int(len(im_pts) / 250 + 1)):
            points_batch = im_pts[idx_desc_batch * 250: (idx_desc_batch + 1) * 250]

            if not len(points_batch):
                break

            feed_dict = {
                input_network: im,
                phase_train: False,
                kpts_coord: points_batch[:, :2],
                kpts_scale: args.scale_factor * points_batch[:, 2],
                kpts_batch: np.zeros(len(points_batch)),
                dimension_image: np.array([1, im.shape[1], im.shape[2]], dtype=np.int32),
            }

            # 32x32 patches cropped in-graph, then described by HardNet (torch).
            patch_batch = sess.run(input_patches, feed_dict=feed_dict)

            patch_batch = np.reshape(patch_batch, (patch_batch.shape[0], 1, 32, 32))
            data_a = torch.from_numpy(patch_batch)
            data_a = data_a.cuda()
            data_a = Variable(data_a)
            with torch.no_grad():
                out_a = model(data_a)
            desc_batch = out_a.data.cpu().numpy().reshape(-1, 128)
            if idx_desc_batch == 0:
                descriptors = desc_batch
            else:
                descriptors = np.concatenate([descriptors, desc_batch], axis=0)

        return im_pts, descriptors

    with tf.Graph().as_default():

        tf.set_random_seed(args.random_seed)

        with tf.name_scope('inputs'):

            # Define the input tensor shape
            tensor_input_shape = (None, None, None, 1)

            input_network = tf.placeholder(dtype=tf.float32, shape=tensor_input_shape, name='input_network')
            dimension_image = tf.placeholder(dtype=tf.int32, shape=(3,), name='dimension_image')
            kpts_coord = tf.placeholder(dtype=tf.float32, shape=(None, 2), name='kpts_coord')
            kpts_batch = tf.placeholder(dtype=tf.int32, shape=(None,), name='kpts_batch')
            kpts_scale = tf.placeholder(dtype=tf.float32, name='kpts_scale')
            phase_train = tf.placeholder(tf.bool, name='phase_train')

        with tf.name_scope('model_deep_detector'):

            deep_architecture = keynet(args)
            output_network = deep_architecture.model(input_network, phase_train, dimension_image, reuse=False)
            maps = tf.nn.relu(output_network['output'])

        # Extract Patches from inputs:
        input_patches = loss_desc.build_patch_extraction(kpts_coord, kpts_batch, input_network, kpts_scale=kpts_scale)

        # Define Pytorch HardNet
        model = HardNet()
        checkpoint = torch.load(args.pytorch_hardnet_dir)
        model.load_state_dict(checkpoint['state_dict'])
        model.eval()
        model.cuda()

        # Define variables
        detect_var = [v for v in tf.trainable_variables(scope='model_deep_detector')]

        # Restore the detector weights only when a checkpoint directory is non-empty.
        if os.listdir(args.checkpoint_det_dir):
            init_assign_op_det, init_feed_dict_det = tf.contrib.framework.assign_from_checkpoint(
                tf.train.latest_checkpoint(args.checkpoint_det_dir), detect_var)

        # Distribute the per-level point budget: coarser levels get
        # proportionally fewer points (area shrinks by scale_factor_levels^2).
        point_level = []
        tmp = 0.0
        factor_points = (args.scale_factor_levels ** 2)
        levels = args.pyramid_levels + args.upsampled_levels + 1
        for idx_level in range(levels):
            tmp += factor_points ** (-1 * (idx_level - args.upsampled_levels))
            point_level.append(args.num_points * factor_points ** (-1 * (idx_level - args.upsampled_levels)))

        point_level = np.asarray(list(map(lambda x: int(x / tmp), point_level)))

        # GPU Usage
        config = tf.ConfigProto()
        config.gpu_options.per_process_gpu_memory_fraction = args.gpu_memory_fraction
        config.gpu_options.allow_growth = True

        with tf.Session(config=config) as sess:

            sess.run(tf.global_variables_initializer())

            if os.listdir(args.checkpoint_det_dir):
                sess.run(init_assign_op_det, init_feed_dict_det)

            # read image and extract keypoints and descriptors
            f = open(args.list_images, "r")
            for path_to_image in f:
                path = path_to_image.split('\n')[0]

                if not os.path.exists(path):
                    print('[ERROR]: File {0} not found!'.format(path))
                    return

                create_result_dir(os.path.join(args.results_dir, version_network_name, path))

                im = read_bw_image(path)

                # Normalize intensities to [0, 1].
                im = im.astype(float) / im.max()

                im_pts, descriptors = extract_features(im)

                file_name = os.path.join(args.results_dir, version_network_name, path)+'.kpt'
                np.save(file_name, im_pts)

                file_name = os.path.join(args.results_dir, version_network_name, path)+'.dsc'
                np.save(file_name, descriptors)
# Run the feature extraction when invoked as a script.
if __name__ == '__main__':
    extract_multiscale_features()
================================================
FILE: hsequeces_bench.py
================================================
import os
import argparse
import numpy as np
import pickle
from tqdm import tqdm
import HSequences_bench.tools.aux_tools as aux
import HSequences_bench.tools.geometry_tools as geo_tools
import HSequences_bench.tools.repeatability_tools as rep_tools
import HSequences_bench.tools.matching_tools as match_tools
from HSequences_bench.tools.HSequences_reader import HSequences_dataset
from HSequences_bench.tools.opencv_matcher import OpencvBruteForceMatcher
def hsequences_metrics():
    """Benchmark a detector's extracted features on HSequences (HPatches).

    For every image pair of the selected split it loads the pre-extracted
    `.kpt.npy` / `.dsc.npy` files, computes repeatability and mutual
    nearest-neighbour matching metrics, prints the dataset averages and
    pickles the per-pair results under --results-bench-dir.
    Returns False (after printing the path) when a feature file is missing.
    """
    parser = argparse.ArgumentParser(description='HSequences Compute Repeatability')

    parser.add_argument('--data-dir', type=str, default='hpatches-sequences-release/',
                        help='The root path to HSequences dataset.')

    parser.add_argument('--results-bench-dir', type=str, default='HSequences_bench/results/',
                        help='The output path to save the results.')

    parser.add_argument('--detector-name', type=str, default='KeyNet_default',
                        help='The name of the detector to compute metrics.')

    parser.add_argument('--results-dir', type=str, default='extracted_features/',
                        help='The path to the extracted points.')

    parser.add_argument('--split', type=str, default='view',
                        help='The name of the HPatches (HSequences) split. Use full, debug_view, debug_illum, view or illum.')

    parser.add_argument('--split-path', type=str, default='HSequences_bench/splits.json',
                        help='The path to the split json file.')

    parser.add_argument('--top-k-points', type=int, default=1000,
                        help='The number of top points to use for evaluation. Set to None to use all points')

    parser.add_argument('--overlap', type=float, default=0.6,
                        help='The overlap threshold for a correspondence to be considered correct.')

    parser.add_argument('--pixel-threshold', type=int, default=5,
                        help='The distance of pixels for a matching correspondence to be considered correct.')

    # NOTE(review): argparse's type=bool treats any non-empty string
    # (including "False") as True; kept as-is to preserve the CLI interface.
    parser.add_argument('--dst-to-src-evaluation', type=bool, default=True,
                        help='Order to apply homography to points. Use True for dst to src, False otherwise.')

    parser.add_argument('--order-coord', type=str, default='xysr',
                        help='The coordinate order that follows the extracted points. Use either xysr or yxsr.')

    args = parser.parse_args()

    print(args.detector_name + ': ' + args.split)

    aux.check_directory(args.results_bench_dir)

    # create the dataloader
    data_loader = HSequences_dataset(args.data_dir, args.split, args.split_path)

    results = aux.create_overlapping_results(args.detector_name, args.overlap)

    # matching method
    matcher = OpencvBruteForceMatcher('l2')

    count_seq = 0

    # load data and compute the keypoints
    for sample_id, sample_data in enumerate(data_loader.extract_hsequences()):

        sequence = sample_data['sequence_name']

        count_seq += 1
        image_src = sample_data['im_src']
        images_dst = sample_data['images_dst']
        h_src_2_dst = sample_data['h_src_2_dst']
        h_dst_2_src = sample_data['h_dst_2_src']

        print('\nComputing ' + sequence + ' sequence {0} / {1} \n'.format(count_seq, len(data_loader.sequences)))

        for idx_im in tqdm(range(len(images_dst))):

            # create the mask to filter out the points outside of the common areas
            mask_src, mask_dst = geo_tools.create_common_region_masks(h_dst_2_src[idx_im], image_src.shape, images_dst[idx_im].shape)

            # compute the files paths (source is always image 1; targets are 2..6)
            src_pts_filename = os.path.join(args.results_dir, args.detector_name,
                                            'hpatches-sequences-release', '{}/1.ppm.kpt.npy'.format(sample_data['sequence_name']))
            src_dsc_filename = os.path.join(args.results_dir, args.detector_name,
                                            'hpatches-sequences-release', '{}/1.ppm.dsc.npy'.format(sample_data['sequence_name']))
            dst_pts_filename = os.path.join(args.results_dir, args.detector_name,
                                            'hpatches-sequences-release', '{}/{}.ppm.kpt.npy'.format(sample_data['sequence_name'], idx_im+2))
            dst_dsc_filename = os.path.join(args.results_dir, args.detector_name,
                                            'hpatches-sequences-release', '{}/{}.ppm.dsc.npy'.format(sample_data['sequence_name'], idx_im+2))

            # abort (returning False) when any feature file is missing
            if not os.path.isfile(src_pts_filename):
                print("Could not find the file: " + src_pts_filename)
                return False
            if not os.path.isfile(src_dsc_filename):
                print("Could not find the file: " + src_dsc_filename)
                return False
            if not os.path.isfile(dst_pts_filename):
                print("Could not find the file: " + dst_pts_filename)
                return False
            if not os.path.isfile(dst_dsc_filename):
                print("Could not find the file: " + dst_dsc_filename)
                return False

            # load the points
            src_pts = np.load(src_pts_filename)
            src_dsc = np.load(src_dsc_filename)
            dst_pts = np.load(dst_pts_filename)
            dst_dsc = np.load(dst_dsc_filename)

            # Swap x/y so the mask/top-k tools index correctly; swapped back below.
            if args.order_coord == 'xysr':
                src_pts = np.asarray(list(map(lambda x: [x[1], x[0], x[2], x[3]], src_pts)))
                dst_pts = np.asarray(list(map(lambda x: [x[1], x[0], x[2], x[3]], dst_pts)))

            # Check Common Points
            src_idx = rep_tools.check_common_points(src_pts, mask_src)
            src_pts = src_pts[src_idx]
            src_dsc = src_dsc[src_idx]

            dst_idx = rep_tools.check_common_points(dst_pts, mask_dst)
            dst_pts = dst_pts[dst_idx]
            dst_dsc = dst_dsc[dst_idx]

            # Select top K points
            if args.top_k_points:
                src_idx = rep_tools.select_top_k(src_pts, args.top_k_points)
                src_pts = src_pts[src_idx]
                src_dsc = src_dsc[src_idx]
                dst_idx = rep_tools.select_top_k(dst_pts, args.top_k_points)
                dst_pts = dst_pts[dst_idx]
                dst_dsc = dst_dsc[dst_idx]

            # Undo the earlier coordinate swap before applying homographies.
            src_pts = np.asarray(list(map(lambda x: [x[1], x[0], x[2], x[3]], src_pts)))
            dst_pts = np.asarray(list(map(lambda x: [x[1], x[0], x[2], x[3]], dst_pts)))

            src_to_dst_pts = geo_tools.apply_homography_to_points(
                src_pts, h_src_2_dst[idx_im])
            dst_to_src_pts = geo_tools.apply_homography_to_points(
                dst_pts, h_dst_2_src[idx_im])

            # Evaluate either in the source frame or the destination frame.
            if args.dst_to_src_evaluation:
                points_src = src_pts
                points_dst = dst_to_src_pts
            else:
                points_src = src_to_dst_pts
                points_dst = dst_pts

            # compute repeatability
            repeatability_results = rep_tools.compute_repeatability(points_src, points_dst, overlap_err=1-args.overlap,
                                                                    dist_match_thresh=args.pixel_threshold)

            # match descriptors in both directions and keep mutual matches only
            matches = matcher.match(src_dsc, dst_dsc)
            matches_np = aux.convert_opencv_matches_to_numpy(matches)
            matches_inv = matcher.match(dst_dsc, src_dsc)
            matches_inv_np = aux.convert_opencv_matches_to_numpy(matches_inv)
            mask = matches_np[:, 0] == matches_inv_np[matches_np[:, 1], 1]
            matches_np = matches_np[mask]

            match_score, match_score_corr, num_matches = {}, {}, {}

            # compute matching based on pixel distance, for thresholds 1..10 px
            for th_i in range(1, 11):
                match_score_i, match_score_corr_i, num_matches_i = match_tools.compute_matching_based_distance(points_src, points_dst, matches_np,
                                                                                                              repeatability_results['total_num_points'],
                                                                                                              pixel_threshold=th_i,
                                                                                                              possible_matches=repeatability_results['possible_matches'])
                match_score[str(th_i)] = match_score_i
                match_score_corr[str(th_i)] = match_score_corr_i
                num_matches[str(th_i)] = num_matches_i

            # mean matching accuracy over the 1..10 px thresholds
            mma = np.mean([match_score[str(idx)] for idx in match_score])

            results['rep_single_scale'].append(
                repeatability_results['rep_single_scale'])
            results['rep_multi_scale'].append(
                repeatability_results['rep_multi_scale'])
            results['num_points_single_scale'].append(
                repeatability_results['num_points_single_scale'])
            results['num_points_multi_scale'].append(
                repeatability_results['num_points_multi_scale'])
            results['error_overlap_single_scale'].append(
                repeatability_results['error_overlap_single_scale'])
            results['error_overlap_multi_scale'].append(
                repeatability_results['error_overlap_multi_scale'])
            results['mma'].append(match_score[str(args.pixel_threshold)])
            results['mma_corr'].append(match_score_corr[str(args.pixel_threshold)])
            results['num_matches'].append(num_matches[str(args.pixel_threshold)])
            results['num_mutual_corresp'].append(len(matches_np))
            results['avg_mma'].append(mma)
            results['num_features'].append(repeatability_results['total_num_points'])

    # average the results
    rep_single = np.array(results['rep_single_scale']).mean()
    rep_multi = np.array(results['rep_multi_scale']).mean()
    error_overlap_s = np.array(results['error_overlap_single_scale']).mean()
    error_overlap_m = np.array(results['error_overlap_multi_scale']).mean()
    mma = np.array(results['mma']).mean()
    mma_corr = np.array(results['mma_corr']).mean()
    num_matches = np.array(results['num_matches']).mean()
    num_mutual_corresp = np.array(results['num_mutual_corresp']).mean()
    avg_mma = np.array(results['avg_mma']).mean()
    num_features = np.array(results['num_features']).mean()

    # Matching Score: Matching Score taking into account all features that have been
    # detected in any of the two images.
    # Matching Score (possible matches): Matching Score only taking into account those features that have been
    # detected in both images.
    # MMA Score is computed based on the Matching Score (all detected features)

    # Fix: the multi/single overlap errors were previously passed in swapped
    # order ({3}, labelled "Overlap Multi", received error_overlap_s and
    # {4}, labelled "Overlap Single", received error_overlap_m).
    print('\n## Overlap @{0}:\n'
          '#### Rep. Multi: {1:.4f}\n'
          '#### Rep. Single: {2:.4f}\n'
          '#### Overlap Multi: {3:.4f}\n'
          '#### Overlap Single: {4:.4f}\n'
          '#### MMA: {5:.4f}\n'
          '#### MMA (possible matches): {6:.4f}\n'
          '#### Num matches: {7:.4f}\n'
          '#### Num Mutual Correspondences: {8:.4f}\n'
          '#### Avg. over Threshold MMA: {9:.4f}\n'
          '#### Num Feats: {10:.4f}'.format(
              args.overlap, rep_multi, rep_single, error_overlap_m, error_overlap_s, mma,
              mma_corr, num_matches, num_mutual_corresp, avg_mma, num_features))

    # Store data (serialize)
    output_file_path = os.path.join(args.results_bench_dir, '{0}_{1}.pickle'
                                    .format(args.detector_name, args.split))
    with open(output_file_path, 'wb') as handle:
        pickle.dump(results, handle, protocol=pickle.HIGHEST_PROTOCOL)
# Run the benchmark when invoked as a script.
if __name__ == '__main__':
    hsequences_metrics()
================================================
FILE: keyNet/aux/desc_aux_function.py
================================================
import tensorflow as tf
def _meshgrid(height, width):
    """Build a 3 x (height*width) grid of homogeneous sampling coordinates
    whose x and y components span [-1, 1]."""
    with tf.name_scope('meshgrid'):
        # This should be equivalent to:
        #  x_t, y_t = np.meshgrid(np.linspace(-1, 1, width),
        #                         np.linspace(-1, 1, height))
        #  ones = np.ones(np.prod(x_t.shape))
        #  grid = np.vstack([x_t.flatten(), y_t.flatten(), ones])
        # Row of x coordinates replicated down `height` rows.
        x_coords = tf.matmul(tf.ones(shape=tf.stack([height, 1])),
                             tf.transpose(tf.expand_dims(tf.linspace(-1.0, 1.0, width), 1), [1, 0]))
        # Column of y coordinates replicated across `width` columns.
        y_coords = tf.matmul(tf.expand_dims(tf.linspace(-1.0, 1.0, height), 1),
                             tf.ones(shape=tf.stack([1, width])))

        flat_x = tf.reshape(x_coords, (1, -1))
        flat_y = tf.reshape(y_coords, (1, -1))
        homogeneous = tf.ones_like(flat_x)
        return tf.concat(axis=0, values=[flat_x, flat_y, homogeneous])
def transformer_crop(images, out_size, batch_inds, kpts_xy, kpts_scale=None, kpts_ori=None, thetas=None,
                     name='SpatialTransformCropper'):
    """Differentiable patch cropper: bilinearly samples one out_size patch
    per keypoint from `images` under a per-keypoint affine transform
    (scale from `kpts_scale`, rotation from `kpts_ori`, or `thetas`
    directly). Returns a [num_kp, out_height, out_width, C] tensor.
    """
    # images : [B,H,W,C]
    # out_size : (out_width, out_height)
    # batch_inds : [B*K,] tf.int32 [0,B)
    # kpts_xy : [B*K,2] tf.float32 or whatever
    # kpts_scale : [B*K,] tf.float32
    # kpts_ori : [B*K,2] tf.float32 (cos,sin)

    if isinstance(out_size, int):
        out_width = out_height = out_size
    else:
        out_width, out_height = out_size
    # NOTE(review): hoW/hoH are computed but never used below.
    hoW = out_width // 2
    hoH = out_height // 2

    with tf.name_scope(name):
        num_batch = tf.shape(images)[0]
        height = tf.shape(images)[1]
        width = tf.shape(images)[2]
        C = tf.shape(images)[3]
        num_kp = tf.shape(kpts_xy)[0]  # B*K
        zero = tf.zeros([], dtype=tf.int32)
        max_y = tf.cast(tf.shape(images)[1] - 1, tf.int32)
        max_x = tf.cast(tf.shape(images)[2] - 1, tf.int32)

        # One normalized sampling grid, replicated per keypoint.
        grid = _meshgrid(out_height, out_width)  # normalized -1~1
        grid = tf.expand_dims(grid, 0)
        grid = tf.reshape(grid, [-1])
        grid = tf.tile(grid, tf.stack([num_kp]))
        grid = tf.reshape(grid, tf.stack([num_kp, 3, -1]))

        # create 6D affine from scale and orientation
        # [s, 0, 0]   [cos, -sin, 0]
        # [0, s, 0] * [sin,  cos, 0]
        # [0, 0, 1]   [0,    0,   1]
        if thetas is None:
            thetas = tf.eye(2, 3, dtype=tf.float32)
            thetas = tf.tile(thetas[None], [num_kp, 1, 1])
            if kpts_scale is not None:
                thetas = thetas * kpts_scale[:, None, None]
        # Append the homogeneous row to get full 3x3 transforms.
        ones = tf.tile(tf.constant([[[0, 0, 1]]], tf.float32), [num_kp, 1, 1])
        thetas = tf.concat([thetas, ones], axis=1)  # [num_kp, 3,3]

        if kpts_ori is not None:
            cos = tf.slice(kpts_ori, [0, 0], [-1, 1])  # [num_kp, 1]
            sin = tf.slice(kpts_ori, [0, 1], [-1, 1])
            zeros = tf.zeros_like(cos)
            ones = tf.ones_like(cos)
            R = tf.concat([cos, -sin, zeros, sin, cos, zeros, zeros, zeros, ones], axis=-1)
            R = tf.reshape(R, [-1, 3, 3])
            thetas = tf.matmul(thetas, R)

        # Apply transformation to regular grid
        T_g = tf.matmul(thetas, grid)  # [num_kp,3,3] * [num_kp,3,H*W]
        x = tf.slice(T_g, [0, 0, 0], [-1, 1, -1])  # [num_kp,1,H*W]
        y = tf.slice(T_g, [0, 1, 0], [-1, 1, -1])

        # unnormalization [-1,1] --> [-out_size/2,out_size/2]
        x = x * out_width / 2.0
        y = y * out_height / 2.0

        if kpts_xy.dtype != tf.float32:
            kpts_xy = tf.cast(kpts_xy, tf.float32)

        kp_x_ofst = tf.expand_dims(tf.slice(kpts_xy, [0, 0], [-1, 1]), axis=1)  # [B*K,1,1]
        kp_y_ofst = tf.expand_dims(tf.slice(kpts_xy, [0, 1], [-1, 1]), axis=1)  # [B*K,1,1]

        # centerize on keypoints
        x = x + kp_x_ofst
        y = y + kp_y_ofst
        x = tf.reshape(x, [-1])  # num_kp*out_height*out_width
        y = tf.reshape(y, [-1])

        # interpolation: the four integer neighbours of each sample position
        x0 = tf.cast(tf.floor(x), tf.int32)
        x1 = x0 + 1
        y0 = tf.cast(tf.floor(y), tf.int32)
        y1 = y0 + 1

        # Clamp to the image so border samples stay in range.
        x0 = tf.clip_by_value(x0, zero, max_x)
        x1 = tf.clip_by_value(x1, zero, max_x)
        y0 = tf.clip_by_value(y0, zero, max_y)
        y1 = tf.clip_by_value(y1, zero, max_y)

        # Flat indices into the [B*H*W, C] image buffer; `base` selects each
        # keypoint's source image via batch_inds.
        dim2 = width
        dim1 = width * height
        base = tf.tile(batch_inds[:, None], [1, out_height * out_width])  # [B*K,out_height*out_width]
        base = tf.reshape(base, [-1]) * dim1

        base_y0 = base + y0 * dim2
        base_y1 = base + y1 * dim2
        idx_a = base_y0 + x0
        idx_b = base_y1 + x0
        idx_c = base_y0 + x1
        idx_d = base_y1 + x1

        im_flat = tf.reshape(images, tf.stack([-1, C]))  # [B*height*width,C]
        im_flat = tf.cast(im_flat, tf.float32)

        Ia = tf.gather(im_flat, idx_a)
        Ib = tf.gather(im_flat, idx_b)
        Ic = tf.gather(im_flat, idx_c)
        Id = tf.gather(im_flat, idx_d)

        x0_f = tf.cast(x0, tf.float32)
        x1_f = tf.cast(x1, tf.float32)
        y0_f = tf.cast(y0, tf.float32)
        y1_f = tf.cast(y1, tf.float32)

        # Bilinear weights of the four neighbouring pixels.
        wa = tf.expand_dims(((x1_f - x) * (y1_f - y)), 1)
        wb = tf.expand_dims(((x1_f - x) * (y - y0_f)), 1)
        wc = tf.expand_dims(((x - x0_f) * (y1_f - y)), 1)
        wd = tf.expand_dims(((x - x0_f) * (y - y0_f)), 1)

        output = tf.add_n([wa * Ia, wb * Ib, wc * Ic, wd * Id])
        output = tf.reshape(output, tf.stack([num_kp, out_height, out_width, C]))
        output.set_shape([batch_inds.shape[0], out_height, out_width, images.shape[-1]])
        return output
def build_patch_extraction(kpts, batch_inds, images, kpts_scale, name='PatchExtract', patch_size=32):
    """Crop one square `patch_size` patch per keypoint from `images`,
    scaled by `kpts_scale`, via the spatial-transformer cropper."""
    with tf.name_scope(name):
        return transformer_crop(images, patch_size, batch_inds, kpts, kpts_scale=kpts_scale)
================================================
FILE: keyNet/aux/tools.py
================================================
import os
def remove_borders(images, borders=3):
    """Zero out a frame of `borders` pixels around each image, in place.

    Supports batched 4-D (B, H, W, C), single 3-D (H, W, C) and 2-D (H, W)
    arrays. Returns the same (mutated) array.
    """
    shape = images.shape

    if len(shape) == 4:
        for batch_id in range(shape[0]):
            images[batch_id, 0:borders, :, :] = 0
            images[batch_id, :, 0:borders, :] = 0
            images[batch_id, shape[1] - borders:shape[1], :, :] = 0
            images[batch_id, :, shape[2] - borders:shape[2], :] = 0
    elif len(shape) == 3:
        images[0:borders, :, :] = 0
        images[:, 0:borders, :] = 0
        # Fix: bottom/right margins previously indexed with shape[1]/shape[2]
        # (W and C, copied from the 4-D branch), which never zeroed the
        # correct rows/columns of an (H, W, C) image. Rows span shape[0] (H)
        # and columns span shape[1] (W).
        images[shape[0] - borders:shape[0], :, :] = 0
        images[:, shape[1] - borders:shape[1], :] = 0
    else:
        images[0:borders, :] = 0
        images[:, 0:borders] = 0
        images[shape[0] - borders:shape[0], :] = 0
        images[:, shape[1] - borders:shape[1]] = 0

    return images
def check_directory(file_path):
    """Create the folder `file_path` when nothing exists at that location."""
    if os.path.exists(file_path):
        return
    os.mkdir(file_path)
def check_tensorboard_directory(version_network_name):
    """Ensure the tensorboard log folders for this network version exist."""
    for folder in ('keyNet/logs_network', 'keyNet/logs_network/' + version_network_name):
        check_directory(folder)
================================================
FILE: keyNet/datasets/dataset_utils.py
================================================
import cv2
import numpy as np
from cv2 import warpPerspective as applyH
perms = ((0, 1, 2), (0, 2, 1),
(1, 0, 2), (1, 2, 0),
(2, 0, 1), (2, 1, 0))
def read_bw_image(path):
    """Load the image at `path` and return it as a single-channel greyscale array."""
    return to_black_and_white(read_color_image(path))
def read_color_image(path):
    """Read `path` with OpenCV and return it with an explicit (H, W, 3) shape."""
    loaded = cv2.imread(path)
    return loaded.reshape(loaded.shape[0], loaded.shape[1], 3)
def apply_h_2_source_image(source_im, h):
    """Warp `source_im` by homography `h`, keeping the original H x W and
    returning the result shaped (H, W, 1)."""
    height, width = source_im.shape[0], source_im.shape[1]
    warped = applyH(source_im, h, (width, height))
    return np.reshape(warped, (height, width, 1))
def generate_composed_homography(max_angle=45, max_scaling=2.0, max_shearing=0.8):
# random sample
scale = np.random.uniform(0.5, max_scaling)
angle = np.random.uniform(-max_angle, max_angle)
shear = np.random.uniform(-max_shearing, max_shearing)
# scale transform
scale_mat = np.eye(3)
scale_mat[0, 0] = scale
scale_mat[1, 1] = scale
# rotation transform
angle = np.deg2rad(angle)
rotation_mat = np.eye(3)
rotation_mat[0, 0] = np.cos(angle)
rotation_mat[0, 1] = -np.sin(angle)
rotation_mat[1, 0] = np.sin(angle)
rotation_mat[1, 1] = np.cos(angle)
# shear transform
shear_mat = np.eye(3)
shear_mat[0, 1] = shear
# compose transforms
h = np.matmul(shear_mat, np.matmul(scale_mat, rotation_mat))
return h
def color_distorsion(im_c):
    """Photometrically distort a BGR image, then return it as greyscale (H, W, 1)."""
    distorted = colorDistorsion(im_c)
    grey = cv2.cvtColor(distorted, cv2.COLOR_BGR2GRAY)
    return grey.reshape(grey.shape[0], grey.shape[1], 1)
def to_black_and_white(img):
    """Convert a BGR image to greyscale, keeping an explicit channel axis (H, W, 1)."""
    grey = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    height, width = grey.shape[0], grey.shape[1]
    return grey.reshape(height, width, 1)
def colorDistorsion(image, lower=0.5, upper=1.5, delta=18.0, delta_brigtness=36):
    """Apply a randomized photometric distortion (brightness, contrast,
    saturation, hue, channel shuffle) to a BGR uint8 image.

    Each step fires with probability 1/2. The exact order of the random
    draws is load-bearing for reproducibility with a fixed numpy seed, so
    do not reorder statements. Returns a uint8 image of the same shape.
    """
    image = image.astype(float)

    # Random brightness shift in [-delta_brigtness, delta_brigtness]
    # (note: overwrites the `delta` parameter when it fires).
    if np.random.randint(2):
        delta = np.random.uniform(-delta_brigtness, delta_brigtness)
        image += delta
        image = check_margins(image)

    # Contrast jitter; the same `contrast` draw also gates a second
    # application after the HSV round-trip below.
    contrast = np.random.randint(2)
    if contrast:
        alpha = np.random.uniform(lower, upper)
        image *= alpha
        image = check_margins(image)

    image = image.astype(np.uint8)
    image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
    image = image.astype(float)

    # Random saturation scaling (HSV channel 1).
    if np.random.randint(2):
        image[:, :, 1] *= np.random.uniform(lower, upper)
        image = check_margins(image, axis=1)

    # Random hue shift (HSV channel 0), wrapped into [0, 360).
    # NOTE(review): OpenCV stores 8-bit hue in [0, 180); the 360-degree wrap
    # looks inherited from a float-hue implementation — confirm intended range.
    if np.random.randint(2):
        image[:, :, 0] += np.random.uniform(-delta, delta)
        image[:, :, 0][image[:, :, 0] > 360.0] -= 360.0
        image[:, :, 0][image[:, :, 0] < 0.0] += 360.0

    image = image.astype(np.uint8)
    image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
    image = image.astype(float)

    # Second contrast jitter (fires together with the first one).
    if contrast:
        alpha = np.random.uniform(lower, upper)
        image *= alpha
        image = check_margins(image)

    # Random channel permutation.
    if np.random.randint(2):
        swap = perms[np.random.randint(len(perms))]
        image = swap_channels(image, swap)  # shuffle channels
    return image.astype(np.uint8)
def check_margins(img, axis=-1):
    """Clamp pixel values into [0, 255] in place, over the whole image
    (axis == -1) or a single channel (axis >= 0). Returns `img`."""
    target = img if axis == -1 else img[:, :, axis]
    target[target > 255.0] = 255.0
    target[target < 0.0] = 0.0
    return img
def swap_channels(image, swaps):
    """Return `image` with its channels reordered according to `swaps`."""
    reordered = image[:, :, swaps]
    return reordered
================================================
FILE: keyNet/datasets/tf_dataset.py
================================================
import os
import cv2
import numpy as np
import tensorflow as tf
import keyNet.datasets.dataset_utils as tools
from tqdm import tqdm
class tf_dataset(object):
    def __init__(self, dataset_root, tfrecord_root, size_patches, batch_size, max_angle, max_scaling, max_shearing, random_seed, is_debugging=False):
        """Synthetic-pair dataset backed by tfrecords.

        On first use (when no train tfrecord is found under
        `tfrecord_root`) it walks `dataset_root`, generates
        homography-warped image pairs and writes the train and validation
        tfrecords; later constructions reuse the existing files.
        """
        self.size_patches = size_patches
        self.batch_size = batch_size
        self.dataset_root = dataset_root
        self.num_examples = 0
        self.num_val_examples = 0
        self.max_angle = max_angle
        self.max_scaling = max_scaling
        self.max_shearing = max_shearing
        self.is_debugging = is_debugging

        # Seed both TF and numpy so the generated pairs are reproducible.
        tf.set_random_seed(random_seed)
        np.random.seed(random_seed)

        self.tfrecord_root = tfrecord_root
        if is_debugging:
            self.tfrecord_path = self.tfrecord_root + '/train_dataset_debug.tfrecord'
            self.tfrecord_val_path = self.tfrecord_root + '/val_dataset_debug.tfrecord'
        else:
            self.tfrecord_path = self.tfrecord_root + '/train_dataset.tfrecord'
            self.tfrecord_val_path = self.tfrecord_root + '/val_dataset.tfrecord'

        # Only the train file is checked: train and val are always written together.
        tfrecord_exists = os.path.isfile(self.tfrecord_path)

        if not tfrecord_exists:
            self.data_path = self._find_data_path(self.dataset_root)
            self.images_info = self._load_data_names(self.data_path)
            self._create_tfrecords(False)
            self._create_tfrecords(True)

        self._compute_num_examples()

        # Serialized feature layout of each tfrecord example.
        self.feature_description = {
            'im_src_patch': tf.FixedLenFeature([], tf.string),
            'im_dst_patch': tf.FixedLenFeature([], tf.string),
            'homography_src_2_dst': tf.FixedLenFeature([], tf.string),
            'homography_dst_2_src': tf.FixedLenFeature([], tf.string),
        }
def get_num_patches(self, is_val=False):
if is_val:
return self.num_val_examples
else:
return self.num_examples
def create_dataset_object(self, is_val=False):
self.is_val = is_val
if self.is_val:
dataset = tf.data.TFRecordDataset([self.tfrecord_val_path])
batch_size = 1
else:
dataset = tf.data.TFRecordDataset([self.tfrecord_path])
batch_size = self.batch_size
dataset = dataset.map(self._prepare_data)
dataset = dataset.shuffle(buffer_size=500)
dataset = dataset.batch(batch_size)
return dataset.repeat()
def _compute_num_examples(self):
self.num_examples = 0
for _ in tf.python_io.tf_record_iterator(self.tfrecord_path):
self.num_examples += 1
self.num_val_examples = 0
for _ in tf.python_io.tf_record_iterator(self.tfrecord_val_path):
self.num_val_examples += 1
    def _parse_function(self, sample_pair):
        """Deserialize one serialized example using the dataset's feature spec."""
        return tf.parse_single_example(sample_pair, self.feature_description)
    def _prepare_data(self, sample_pair):
        """Decode one serialized example into
        (im_src, im_dst, homography_src_2_dst, homography_dst_2_src) tensors."""
        # Validation examples are stored at double the train patch size.
        if self.is_val:
            patch_size = 2 * self.size_patches
        else:
            patch_size = self.size_patches
        features = tf.parse_single_example(sample_pair, self.feature_description)

        # Image patches are stored as raw float64 bytes.
        im_src_patch = tf.decode_raw(features['im_src_patch'], tf.float64)
        im_src_patch = tf.reshape(im_src_patch, [patch_size, patch_size, 1])
        im_dst_patch = tf.decode_raw(features['im_dst_patch'], tf.float64)
        im_dst_patch = tf.reshape(im_dst_patch, [patch_size, patch_size, 1])
        # Homographies are stored as 8 float32 values — presumably the 3x3
        # matrix minus the fixed h33 entry; confirm against the writer code.
        homography_src_2_dst = tf.decode_raw(features['homography_src_2_dst'], tf.float32)
        homography_src_2_dst = tf.reshape(homography_src_2_dst, [8])
        homography_dst_2_src = tf.decode_raw(features['homography_dst_2_src'], tf.float32)
        homography_dst_2_src = tf.reshape(homography_dst_2_src, [8])
        return im_src_patch, im_dst_patch, homography_src_2_dst, homography_dst_2_src
def _find_data_path(self, data_path):
assert os.path.isdir(data_path), \
"Invalid directory: {}".format(data_path)
return data_path
def _load_data_names(self, data_path):
count = 0
images_info = []
for r, d, f in os.walk(data_path):
for file_name in f:
if file_name.endswith(".JPEG") or file_name.endswith(".jpg") or file_name.endswith(".png"):
images_info.append(os.path.join(data_path, r, file_name))
count += 1
src_idx = np.random.permutation(len(np.asarray(images_info)))
images_info = np.asarray(images_info)[src_idx]
return images_info
    def _bytes_feature(self, value):
        """Wrap a raw byte string in a tf.train.Feature for TFRecord writing."""
        return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
    def _create_tfrecords(self, is_val):
        """Generate the TFRecord file for the requested split (train or val)."""
        self._create_pair_images(is_val)
    def _create_pair_images(self, is_val):
        """Synthesize homography-related patch pairs and write them to TFRecords.

        For each source image: sample a random homography, warp a
        color-distorted copy, cut a centered patch from the source and the
        corresponding patch from the warped image, and store both patches
        plus the two flattened 8-parameter homographies. A patch pair is
        rejected (up to 10 retries per image) when the destination patch
        falls outside the image or when either patch is nearly textureless
        (peak Sobel response below 0.25).

        :param is_val: write the validation split (doubled patch size,
            capped at 1500 pairs instead of 4000).
        """
        # More stable repeatability when using bigger size patches
        if is_val:
            size_patches = 2 * self.size_patches
            # Offset into the shuffled image list so the validation pass does
            # not start on the exact images the training pass consumed.
            # NOTE(review): assumes self.counter was set by a prior training
            # pass (or in __init__) — confirm call order.
            self.counter += 1
        else:
            size_patches = self.size_patches
            self.counter = 0
        counter_patches = 0
        print('Writing TFrecords . . .')
        if is_val:
            writer = tf.python_io.TFRecordWriter(self.tfrecord_val_path)
        else:
            writer = tf.python_io.TFRecordWriter(self.tfrecord_path)
        for path_image_idx in tqdm(range(len(self.images_info))):
            name_image_path = self.images_info[(self.counter+path_image_idx)%len(self.images_info)]
            correct_patch = False
            counter = -1
            # Up to 10 attempts to extract a valid pair from this image.
            while counter < 10:
                counter += 1
                incorrect_h = True
                # Re-sample homographies until the warped image is non-empty.
                while incorrect_h:
                    scr_c = tools.read_color_image(name_image_path)
                    source_shape = scr_c.shape
                    h = tools.generate_composed_homography(self.max_angle, self.max_scaling, self.max_shearing)
                    inv_h = np.linalg.inv(h)
                    inv_h = inv_h / inv_h[2, 2]  # normalize so last element is 1
                    scr = tools.to_black_and_white(scr_c)
                    # Photometric distortion applied to the destination only.
                    dst = tools.color_distorsion(scr_c)
                    dst = tools.apply_h_2_source_image(dst, inv_h)
                    if dst.max() > 0.0:
                        incorrect_h = False
                # Sobel x-derivative magnitude, normalized to [0, 1]; used
                # below as a texture score to reject flat patches.
                scr_sobelx = cv2.Sobel(scr, cv2.CV_64F, 1, 0, ksize=3)
                scr_sobelx = abs(scr_sobelx.reshape((scr.shape[0], scr.shape[1], 1)))
                scr_sobelx = scr_sobelx.astype(float) / scr_sobelx.max()
                dst_sobelx = cv2.Sobel(dst, cv2.CV_64F, 1, 0, ksize=3)
                dst_sobelx = abs(dst_sobelx.reshape((dst.shape[0], dst.shape[1], 1)))
                dst_sobelx = dst_sobelx.astype(float) / dst_sobelx.max()
                scr = scr.astype(float) / scr.max()
                dst = dst.astype(float) / dst.max()
                # Image too small to hold a centered patch: give up on it.
                if size_patches/2 >= scr.shape[0]-size_patches/2 or size_patches/2 >= scr.shape[1]-size_patches/2:
                    break
                window_point = [scr.shape[0]/2, scr.shape[1]/2]
                # Define points
                point_src = [window_point[0], window_point[1], 1.0]
                im_src_patch = scr[int(point_src[0] - size_patches / 2): int(point_src[0] + size_patches / 2),
                               int(point_src[1] - size_patches / 2): int(point_src[1] + size_patches / 2)]
                # Map the source center into the warped image. Note the x/y
                # swap: the homography acts on (x, y), arrays on (row, col).
                point_dst = inv_h.dot([point_src[1], point_src[0], 1.0])
                point_dst = [point_dst[1] / point_dst[2], point_dst[0] / point_dst[2]]
                # Destination patch must lie fully inside the image bounds.
                if (point_dst[0] - size_patches / 2) < 0 or (point_dst[1] - size_patches / 2) < 0:
                    continue
                if (point_dst[0] + size_patches / 2) > source_shape[0] or (point_dst[1] + size_patches / 2) > \
                        source_shape[1]:
                    continue
                # Translations between patch-local and image coordinates,
                # composed with h below to give patch-to-patch homographies.
                h_src_translation = np.asanyarray([[1., 0., -(int(point_src[1]) - size_patches / 2)],
                                                   [0., 1., -(int(point_src[0]) - size_patches / 2)], [0., 0., 1.]])
                h_dst_translation = np.asanyarray(
                    [[1., 0., int(point_dst[1] - size_patches / 2)], [0., 1., int(point_dst[0] - size_patches / 2)],
                     [0., 0., 1.]])
                im_dst_patch = dst[int(point_dst[0] - size_patches / 2): int(point_dst[0] + size_patches / 2),
                               int(point_dst[1] - size_patches / 2): int(point_dst[1] + size_patches / 2)]
                label_dst_patch = dst_sobelx[
                                  int(point_dst[0] - size_patches / 2): int(point_dst[0] + size_patches / 2),
                                  int(point_dst[1] - size_patches / 2): int(point_dst[1] + size_patches / 2)]
                label_scr_patch = scr_sobelx[
                                  int(point_src[0] - size_patches / 2): int(point_src[0] + size_patches / 2),
                                  int(point_src[1] - size_patches / 2): int(point_src[1] + size_patches / 2)]
                if im_src_patch.shape[0] != size_patches or im_src_patch.shape[1] != size_patches:
                    continue
                # Reject textureless patches (weak Sobel response).
                if label_dst_patch.max() < 0.25:
                    continue
                if label_scr_patch.max() < 0.25:
                    continue
                correct_patch = True
                break
            if correct_patch:
                im_src_patch = im_src_patch.reshape((1, im_src_patch.shape[0], im_src_patch.shape[1], 1))
                im_dst_patch = im_dst_patch.reshape((1, im_dst_patch.shape[0], im_dst_patch.shape[1], 1))
                homography = np.dot(h_src_translation, np.dot(h, h_dst_translation))
                # Store each 3x3 homography as 8 floats, normalized so the
                # (implicit) ninth element equals 1.
                homography_dst_2_src = homography.astype('float32')
                homography_dst_2_src = homography_dst_2_src.flatten()
                homography_dst_2_src = homography_dst_2_src / homography_dst_2_src[8]
                homography_dst_2_src = homography_dst_2_src[:8]
                homography_src_2_dst = np.linalg.inv(homography)
                homography_src_2_dst = homography_src_2_dst.astype('float32')
                homography_src_2_dst = homography_src_2_dst.flatten()
                homography_src_2_dst = homography_src_2_dst / homography_src_2_dst[8]
                homography_src_2_dst = homography_src_2_dst[:8]
                homography_src_2_dst = homography_src_2_dst.reshape((1, homography_src_2_dst.shape[0]))
                homography_dst_2_src = homography_dst_2_src.reshape((1, homography_dst_2_src.shape[0]))
                sample = tf.train.Example(
                    features=tf.train.Features(
                        feature={
                            'im_src_patch': self._bytes_feature(im_src_patch.tostring()),
                            'im_dst_patch': self._bytes_feature(im_dst_patch.tostring()),
                            'homography_src_2_dst': self._bytes_feature(homography_src_2_dst.tostring()),
                            'homography_dst_2_src': self._bytes_feature(homography_dst_2_src.tostring())
                        }))
                writer.write(sample.SerializeToString())
                counter_patches += 1
                # Caps: 1500 validation pairs, 4000 training pairs (smaller
                # caps when running in debug mode).
                if is_val and counter_patches > 1500:
                    break
                elif counter_patches > 4000:
                    break
                if is_val and self.is_debugging and counter_patches > 100:
                    break
                elif not is_val and self.is_debugging and counter_patches > 400:
                    break
        writer.close()
        self.counter = counter_patches
================================================
FILE: keyNet/loss/score_loss_function.py
================================================
import tensorflow as tf
import numpy as np
# Index Proposal Layer
def ip_layer(scores, window_size, kernels):
    """Index Proposal layer: soft-argmax keypoint coordinate per window.

    Splits the score map into non-overlapping window_size x window_size
    cells and returns, for every cell, the expected (soft) coordinate of
    its maximum plus the per-window max scores (raw and globally
    normalized). Gradients flow through the exponential weighting only;
    the max-pool normalizer is gradient-stopped.
    """
    exponential_value = np.e
    shape_scores = tf.shape(scores)
    # Per-window maximum score (no gradient: used only as a normalizer).
    weights = tf.nn.max_pool(tf.stop_gradient(scores), [1, window_size, window_size, 1], strides=[1, window_size, window_size, 1], padding='VALID')
    # Broadcast each window's max back over the window's pixels.
    max_pool_unpool = tf.nn.conv2d_transpose(weights, kernels['upsample_filter_np_'+str(window_size)],
                                             output_shape=[shape_scores[0], shape_scores[1], shape_scores[2], 1],
                                             strides=[1, window_size, window_size, 1])
    # e^(score/window_max) - 1: sharpened, zero-based pixel weighting.
    exp_map_1 = tf.add(tf.pow(exponential_value, tf.div(scores, max_pool_unpool+1e-6)), -1*(1.-1e-6))
    # Window sums of weight and weight*index give the expected index.
    sum_exp_map_1 = tf.nn.conv2d(exp_map_1, kernels['ones_kernel_'+str(window_size)], [1, window_size, window_size, 1], padding='VALID')
    indexes_map = tf.nn.conv2d(exp_map_1, kernels['indexes_kernel_' + str(window_size)], [1, window_size, window_size, 1], padding='VALID')
    indexes_map = tf.divide(indexes_map, tf.add(sum_exp_map_1, 1e-6))
    # Normalize the window scores by the global maximum for loss weighting.
    max_weights = tf.reduce_max(weights, axis=[1, 2, 3], keepdims=True)
    norm_weights = tf.divide(weights, max_weights + 1e-6)
    return indexes_map, [weights, norm_weights]
def ip_softscores(scores, window_size, kernels):
    """Soft per-window score: exponentially-weighted mean score per cell.

    Same weighting scheme as ip_layer, but averages the scores themselves
    instead of their grid indexes. Note the max-pool here is NOT
    gradient-stopped (the caller wraps the result in tf.stop_gradient).
    """
    exponential_value = np.e
    shape_scores = tf.shape(scores)
    weights = tf.nn.max_pool(scores, [1, window_size, window_size, 1], strides=[1, window_size, window_size, 1], padding='VALID')
    # Broadcast each window's max back over the window's pixels.
    max_pool_unpool = tf.nn.conv2d_transpose(weights, kernels['upsample_filter_np_'+str(window_size)],
                                             output_shape=[shape_scores[0], shape_scores[1], shape_scores[2], 1],
                                             strides=[1, window_size, window_size, 1])
    # e^(score/window_max) - 1: sharpened, zero-based pixel weighting.
    exp_map_1 = tf.add(tf.pow(exponential_value, tf.div(scores, tf.add(max_pool_unpool, 1e-6))), -1*(1. - 1e-6))
    sum_exp_map_1 = tf.nn.conv2d(exp_map_1, kernels['ones_kernel_'+str(window_size)], [1, window_size, window_size, 1], padding='VALID')
    sum_scores_map_1 = tf.nn.conv2d(exp_map_1*scores, kernels['ones_kernel_'+str(window_size)], [1, window_size, window_size, 1], padding='VALID')
    soft_scores = tf.divide(sum_scores_map_1, tf.add(sum_exp_map_1, 1e-6))
    return soft_scores
def unpool(pool, ind, ksize=[1, 2, 2, 1], scope='unpool'):
    """Inverse of tf.nn.max_pool_with_argmax.

    Scatters the pooled values back to their argmax positions in a tensor
    of the pre-pooling size; every other position is zero.

    :param pool: pooled tensor, NHWC.
    :param ind: flattened argmax indices from max_pool_with_argmax.
    :param ksize: the pooling kernel that was used, NHWC.
    :param scope: variable scope name.
    :return: unpooled tensor of shape [B, H*k, W*k, C].
    """
    with tf.variable_scope(scope):
        input_shape = tf.shape(pool)
        output_shape = [input_shape[0], input_shape[1] * ksize[1], input_shape[2] * ksize[2], input_shape[3]]
        flat_input_size = tf.reduce_prod(input_shape)
        flat_output_shape = [output_shape[0], output_shape[1] * output_shape[2] * output_shape[3]]
        pool_ = tf.reshape(pool, [flat_input_size])
        # Argmax indices are flattened per-image, so pair each one with its
        # batch id before scattering into the (batch, flat) output.
        batch_range = tf.reshape(tf.range(tf.cast(output_shape[0], tf.int64), dtype=ind.dtype),
                                 shape=[input_shape[0], 1, 1, 1])
        b = tf.ones_like(ind) * batch_range
        b1 = tf.reshape(b, [flat_input_size, 1])
        ind_ = tf.reshape(ind, [flat_input_size, 1])
        ind_ = tf.concat([b1, ind_], 1)
        ret = tf.scatter_nd(ind_, pool_, shape=tf.cast(flat_output_shape, tf.int64))
        ret = tf.reshape(ret, output_shape)
        # Restore the static shape where known so downstream ops can use it.
        set_input_shape = pool.get_shape()
        set_output_shape = [set_input_shape[0], set_input_shape[1] * ksize[1], set_input_shape[2] * ksize[2],
                            set_input_shape[3]]
        ret.set_shape(set_output_shape)
        return ret
def grid_indexes_nms_conv(scores, kernels, window_size):
    """Hard NMS per window: grid coordinate of each window's maximum score.

    :return: (indexes_label, weights, score_map) where indexes_label holds
        the (row, col) index of the max inside every window_size x
        window_size cell, weights are the max scores, and score_map is the
        binary map of winning positions. Windows whose maximum is zero get
        random target indexes so they do not bias training toward (0, 0).
    """
    weights, indexes = tf.nn.max_pool_with_argmax(scores, ksize=[1, window_size, window_size, 1],
                                                  strides=[1, window_size, window_size, 1], padding='VALID')
    # score/(score+eps): ~1 where the window max is positive, 0 elsewhere.
    weights_norm = tf.divide(weights, tf.add(weights, np.finfo(float).eps))
    # Scatter the 0/1 winners back to full resolution, then convolve with
    # the index kernel to read off each winner's in-window coordinate.
    score_map = unpool(weights_norm, indexes, ksize=[1, window_size, window_size, 1], scope='unpool')
    indexes_label = tf.nn.conv2d(score_map, kernels['indexes_kernel_'+str(window_size)], [1, window_size, window_size, 1], padding='VALID')
    # Replace empty windows (all-zero scores) with random indexes.
    ind_rand = tf.cast(tf.random_uniform(tf.shape(indexes_label), minval=0, maxval=window_size, dtype=tf.int32), tf.float32)
    indexes_label = tf.where(tf.equal(indexes_label, tf.zeros_like(indexes_label)), ind_rand, indexes_label)
    return indexes_label, weights, score_map
def loss_ln_indexes_norm(src_indexes, label_indexes, weights_indexes, window_size, n=2):
    """Weighted mean L_n distance between predicted and target grid indexes.

    The coordinate error is normalized by the window size, raised to the
    n-th power, summed over the coordinate axis, weighted per window, and
    scaled by 1000 before averaging over the batch.
    """
    normalized_error = (src_indexes - label_indexes) / window_size
    per_window_norm = tf.reduce_sum(normalized_error ** n, axis=-1, keepdims=True)
    weighted_norm = 1000 * (tf.multiply(weights_indexes, per_window_norm))
    return tf.reduce_mean(weighted_norm)
def msip_loss_function(src_im, src_score_maps, dst_score_maps, window_size, kernels, h_src_2_dst, h_dst_2_src,
                       coordinate_weighting, patch_size, mask_borders):
    """Multi-Scale Index Proposal (MSIP) loss at one window size.

    Warps each score map into the other view with the ground-truth
    homography, extracts hard NMS coordinates from the warped (gradient-
    stopped) maps as targets, soft coordinates from the unwarped maps as
    predictions, and penalizes their weighted distance symmetrically in
    both directions.

    :param src_im: source image batch (returned only for visualization).
    :param src_score_maps, dst_score_maps: dicts holding the networks'
        'output' score maps.
    :param window_size: size of the MSIP grid cells.
    :param kernels: constant kernels built by keynet.create_kernels.
    :param h_src_2_dst, h_dst_2_src: flattened 8-parameter homographies.
    :param coordinate_weighting: if True, weight cells by a softmax over
        their soft scores instead of visibility alone.
    :param patch_size: spatial size of the score maps.
    :param mask_borders: border mask to suppress warp artifacts.
    :return: (loss, dict of intermediate tensors for debugging/plots).
    """
    tf.set_random_seed(12345)
    np.random.seed(12345)
    src_maps = tf.nn.relu(src_score_maps['output'])
    dst_maps = tf.nn.relu(dst_score_maps['output'])
    # Check if patch size is divisible by the window size
    if patch_size % window_size > 0:
        batch_shape = tf.shape(src_maps)
        new_size = patch_size - (patch_size % window_size)
        src_maps = tf.slice(src_maps, [0, 0, 0, 0], [batch_shape[0], new_size, new_size, batch_shape[3]])
        dst_maps = tf.slice(dst_maps, [0, 0, 0, 0], [batch_shape[0], new_size, new_size, batch_shape[3]])
        mask_borders = tf.slice(mask_borders, [0, 0, 0, 0], [batch_shape[0], new_size, new_size, batch_shape[3]])
    # Tensorflow inverts homography
    src_maps_warped = tf.contrib.image.transform(src_maps * mask_borders, h_dst_2_src, interpolation='BILINEAR')
    src_im_warped = tf.contrib.image.transform(src_im, h_dst_2_src, interpolation='BILINEAR')
    dst_maps_warped = tf.contrib.image.transform(dst_maps * mask_borders, h_src_2_dst, interpolation='BILINEAR')
    visible_src_mask = tf.contrib.image.transform(mask_borders, h_src_2_dst, interpolation='BILINEAR')
    visible_dst_mask = tf.contrib.image.transform(mask_borders, h_dst_2_src, interpolation='BILINEAR')
    # Remove borders and stop gradients to only backpropagate on the unwarped maps
    src_maps_warped = tf.stop_gradient(src_maps_warped)
    dst_maps_warped = tf.stop_gradient(dst_maps_warped)
    # Keep only the region visible in BOTH views.
    visible_src_mask = visible_src_mask * mask_borders
    visible_dst_mask = visible_dst_mask * mask_borders
    src_maps *= visible_src_mask
    dst_maps *= visible_dst_mask
    src_maps_warped *= visible_dst_mask
    dst_maps_warped *= visible_src_mask
    # Compute visible coordinates to discard uncommon regions
    _, weights_visible_src, map_nms = grid_indexes_nms_conv(visible_src_mask, kernels, window_size)
    _, weights_visible_dst, _ = grid_indexes_nms_conv(visible_dst_mask, kernels, window_size)
    # Extract NMS coordinates from warped maps
    src_indexes_nms_warped, weights_src_warped, _ = grid_indexes_nms_conv(src_maps_warped, kernels, window_size)
    dst_indexes_nms_warped, weights_dst_warped, _ = grid_indexes_nms_conv(dst_maps_warped, kernels, window_size)
    # Use IP Layer to extract soft coordinates
    src_indexes, _ = ip_layer(src_maps, window_size, kernels)
    dst_indexes, _ = ip_layer(dst_maps, window_size, kernels)
    # Compute soft weights
    weights_src = tf.stop_gradient(ip_softscores(src_maps, window_size, kernels))
    weights_dst = tf.stop_gradient(ip_softscores(dst_maps, window_size, kernels))
    if coordinate_weighting:
        # Softmax over all cells of each sample, masked by visibility.
        shape = tf.shape(weights_src)
        weights_src = tf.layers.flatten(weights_src)
        weights_dst = tf.layers.flatten(weights_dst)
        weights_src = tf.nn.softmax(weights_src)
        weights_dst = tf.nn.softmax(weights_dst)
        weights_src = 100 * weights_visible_src * tf.reshape(weights_src, shape)
        weights_dst = 100 * weights_visible_dst * tf.reshape(weights_dst, shape)
    else:
        weights_src = weights_visible_src
        weights_dst = weights_visible_dst
    # Symmetric loss: soft predictions chase the warped hard NMS targets.
    loss_src = loss_ln_indexes_norm(src_indexes, dst_indexes_nms_warped, weights_src, window_size, n=2)
    loss_dst = loss_ln_indexes_norm(dst_indexes, src_indexes_nms_warped, weights_dst, window_size, n=2)
    loss_indexes = (loss_src + loss_dst) / 2.
    # Expose intermediates for debugging and visualization.
    loss_elements = {}
    loss_elements['src_im'] = src_im
    loss_elements['src_im_warped'] = src_im_warped
    loss_elements['map_nms'] = map_nms
    loss_elements['src_maps'] = src_maps
    loss_elements['dst_maps'] = dst_maps
    loss_elements['src_maps_warped'] = src_maps_warped
    loss_elements['dst_maps_warped'] = dst_maps_warped
    loss_elements['weights_src'] = weights_src
    loss_elements['weights_src_warped'] = weights_src_warped
    loss_elements['weights_visible_src'] = weights_visible_src
    loss_elements['weights_dst'] = weights_dst
    loss_elements['weights_visible_dst'] = weights_visible_dst
    loss_elements['weights_dst_warped'] = weights_dst_warped
    loss_elements['src_indexes'] = src_indexes
    loss_elements['dst_indexes'] = dst_indexes
    loss_elements['dst_indexes_nms_warped'] = dst_indexes_nms_warped
    return loss_indexes, loss_elements
================================================
FILE: keyNet/model/hardnet_pytorch.py
================================================
#!/usr/bin/python2 -utt
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
from torch.autograd import Variable
class L2Norm(nn.Module):
    """Normalize each row of a 2-D tensor to unit Euclidean length."""

    def __init__(self):
        super(L2Norm, self).__init__()
        # Small constant to keep the division safe for all-zero rows.
        self.eps = 1e-10

    def forward(self, x):
        squared_sum = torch.sum(x * x, dim=1)
        row_norm = torch.sqrt(squared_sum + self.eps)
        return x / row_norm.unsqueeze(-1).expand_as(x)
class L1Norm(nn.Module):
    """Normalize each row of a 2-D tensor to unit L1 mass.

    Bug fix: the original called ``norm.expand_as(x)`` on the 1-D ``(B,)``
    norm, which cannot expand to the ``(B, D)`` input and raised a
    RuntimeError on any 2-D batch; the norm needs a trailing singleton
    axis first, exactly as the sibling ``L2Norm`` does.
    """

    def __init__(self):
        super(L1Norm, self).__init__()
        # Small constant to keep the division safe for all-zero rows.
        self.eps = 1e-10

    def forward(self, x):
        norm = torch.sum(torch.abs(x), dim=1) + self.eps
        # Insert a trailing axis so (B,) expands to (B, D).
        x = x / norm.unsqueeze(-1).expand_as(x)
        return x
class HardNet(nn.Module):
    """HardNet descriptor: 32x32 grayscale patch -> L2-normalized 128-D vector."""

    def __init__(self):
        super(HardNet, self).__init__()

        def _conv_bn_relu(in_ch, out_ch, stride=1):
            # 3x3 conv -> batchnorm without affine params -> ReLU
            return [nn.Conv2d(in_ch, out_ch, kernel_size=3, stride=stride,
                              padding=1, bias=False),
                    nn.BatchNorm2d(out_ch, affine=False),
                    nn.ReLU()]

        layers = []
        layers += _conv_bn_relu(1, 32)
        layers += _conv_bn_relu(32, 32)
        layers += _conv_bn_relu(32, 64, stride=2)
        layers += _conv_bn_relu(64, 64)
        layers += _conv_bn_relu(64, 128, stride=2)
        layers += _conv_bn_relu(128, 128)
        # Final 8x8 conv collapses the spatial dims into the descriptor.
        layers += [nn.Dropout(0.1),
                   nn.Conv2d(128, 128, kernel_size=8, bias=False),
                   nn.BatchNorm2d(128, affine=False)]
        self.features = nn.Sequential(*layers)

    def input_norm(self, x):
        """Standardize every patch by its own mean and std (no gradient
        through the statistics)."""
        flat = x.view(x.size(0), -1)
        mean = torch.mean(flat, dim=1).detach()
        std = (torch.std(flat, dim=1) + 1e-7).detach()
        mean = mean.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1).expand_as(x)
        std = std.unsqueeze(-1).unsqueeze(-1).unsqueeze(-1).expand_as(x)
        return (x - mean) / std

    def forward(self, input):
        x_features = self.features(self.input_norm(input))
        flattened = x_features.view(x_features.size(0), -1)
        return L2Norm()(flattened)
================================================
FILE: keyNet/model/keynet_architecture.py
================================================
import math
import numpy as np
import tensorflow as tf
def gaussian_multiple_channels(num_channels, sigma):
    """Depthwise Gaussian smoothing kernel of shape (k, k, C, C).

    Only the diagonal (i, i) channel slices carry the Gaussian, so each
    channel is smoothed independently; off-diagonal slices stay zero.
    The kernel radius is 2*sigma (size = ceil(4*sigma + 1)).
    """
    radius = 2 * sigma
    size = int(math.ceil(2 * radius + 1))
    xs = np.arange(0, size, 1, float)
    ys = xs[:, np.newaxis]
    dist_sq = (xs - radius) ** 2 + (ys - radius) ** 2
    gaussian = np.float32(np.exp(-dist_sq / (2 * (sigma ** 2))) / ((2 * math.pi * (sigma ** 2)) ** 0.5))
    weights = np.zeros((size, size, num_channels, num_channels), dtype=np.float32)
    for channel in range(num_channels):
        weights[:, :, channel, channel] = gaussian
    return weights
def ones_multiple_channels(size, num_channels):
    """Kernel of shape (size, size, C, C) with ones on the channel diagonal.

    Convolving with it sums each channel over a size x size window
    independently (no channel mixing).
    """
    weights = np.zeros((size, size, num_channels, num_channels), dtype=np.float32)
    for channel in range(num_channels):
        weights[:, :, channel, channel] = 1.0
    return weights
def grid_indexes(size):
    """Kernel of shape (size, size, 1, 2) holding 1-based grid coordinates.

    Output channel 0 stores the row coordinate (constant along each row),
    channel 1 the column coordinate (constant along each column), both
    counted from 1. Convolving a one-hot window with it reads off the
    location of the hot pixel.
    """
    weights = np.zeros((size, size, 1, 2), dtype=np.float32)
    coords = np.arange(1, size + 1, dtype=np.float32)
    weights[:, :, 0, 0] = coords[:, np.newaxis]  # row index, broadcast over columns
    weights[:, :, 0, 1] = coords[np.newaxis, :]  # column index, broadcast over rows
    return weights
def get_kernel_size(factor):
    """
    Find the kernel size given the desired factor of upsampling.
    """
    # Even factors use 2f; odd factors drop one to keep the kernel centered.
    if factor % 2 == 0:
        return 2 * factor
    return 2 * factor - 1
def linear_upsample_weights(half_factor, number_of_classes):
    """
    Transposed-convolution weights that broadcast each input value over its
    output window: an all-ones (box) filter placed on the channel diagonal,
    of shape (k, k, C, C) with k = get_kernel_size(half_factor).
    """
    filter_size = get_kernel_size(half_factor)
    shape = (filter_size, filter_size, number_of_classes, number_of_classes)
    weights = np.zeros(shape, dtype=np.float32)
    box = np.ones((filter_size, filter_size))
    for channel in range(number_of_classes):
        weights[:, :, channel, channel] = box
    return weights
def create_derivatives_kernel():
    """Return the 3x3 Sobel x- and y-derivative kernels, shaped (3, 3, 1, 1)
    for use as single-channel conv filters."""
    # Sobel derivative 3x3 X; the Y kernel is its transpose.
    sobel_x = np.asarray([[-1, 0, 1],
                          [-2, 0, 2],
                          [-1, 0, 1]], dtype=np.float32)
    sobel_y = np.ascontiguousarray(sobel_x.T)
    kernel_filter_dx_3 = sobel_x.reshape(3, 3, 1, 1)
    kernel_filter_dy_3 = sobel_y.reshape(3, 3, 1, 1)
    return kernel_filter_dx_3, kernel_filter_dy_3
class keynet(object):
    """Key.Net detector (TF1 graph): handcrafted multi-scale derivative
    features feeding a small stack of learnable conv blocks that outputs a
    single-channel keypoint score map.

    ``MSIP_sizes`` lists the window sizes used by the multi-scale index
    proposal loss; constant kernels for each size are prebuilt here.
    """

    def __init__(self, args, MSIP_sizes=[]):
        # NOTE(review): mutable default argument; harmless here since the
        # list is only read, but a tuple default would be safer.
        # Start Key.Net architecture
        self.pyramid_levels = args.num_levels_within_net
        self.factor_scaling = args.factor_scaling_pyramid
        self.num_blocks = args.num_learnable_blocks
        self.num_filters = args.num_filters
        self.conv_kernel_size = args.conv_kernel_size
        self.ksize = args.nms_size
        self.batch_size = 16
        self.patch_size = 32
        tf.set_random_seed(args.random_seed)
        np.random.seed(args.random_seed)
        name_scope = tf.contrib.framework.get_name_scope()
        # Smooth Gausian Filter
        gaussian_avg = gaussian_multiple_channels(1, 1.5)
        self.gaussian_avg = tf.constant(gaussian_avg, name=name_scope + '_Gaussian_avg')
        # Sobel derivatives
        kernel_x, kernel_y = create_derivatives_kernel()
        self.kernel_filter_dx = tf.constant(kernel_x, name=name_scope + '_kernel_filter_dx')
        self.kernel_filter_dy = tf.constant(kernel_y, name=name_scope + '_kernel_filter_dy')
        # create_kernels
        self.kernels = {}
        if MSIP_sizes != []:
            self.create_kernels(MSIP_sizes, name_scope)
        # Size 8 is always needed (default NMS/loss window).
        if 8 not in MSIP_sizes:
            self.create_kernels([8], name_scope)

    def create_kernels(self, MSIP_sizes, name_scope):
        """Build the constant conv kernels (ones, grid-index, upsample,
        zero/one fillers) used by the MSIP loss for each window size."""
        # Grid Indexes for MSIP
        for ksize in MSIP_sizes:
            ones_kernel = ones_multiple_channels(ksize, 1)
            indexes_kernel = grid_indexes(ksize)
            upsample_filter_np = linear_upsample_weights(int(ksize / 2), 1)
            self.ones_kernel = tf.constant(ones_kernel, name=name_scope +'_Ones_kernel_'+str(ksize))
            self.kernels['ones_kernel_'+str(ksize)] = self.ones_kernel
            self.upsample_filter_np = tf.constant(upsample_filter_np, name=name_scope+'_upsample_filter_np_'+str(ksize))
            self.kernels['upsample_filter_np_'+str(ksize)] = self.upsample_filter_np
            self.indexes_kernel = tf.constant(indexes_kernel, name=name_scope +'_indexes_kernel_'+str(ksize))
            self.kernels['indexes_kernel_'+str(ksize)] = self.indexes_kernel
            # Fixed-shape zero/one tensors matching the per-window index grid.
            index_size = int(self.patch_size/ksize)
            zeros = np.zeros((self.batch_size, index_size, index_size, 2))
            zeros = tf.constant(zeros, name=name_scope +'zeros_ind_kernel_'+str(ksize), dtype=tf.float32)
            self.kernels['zeros_ind_kernel_'+str(ksize)] = zeros
            ones = np.ones((self.batch_size, index_size, index_size, 2))
            ones = tf.constant(ones, name=name_scope +'ones_ind_kernel_'+str(ksize), dtype=tf.float32)
            self.kernels['ones_ind_kernel_'+str(ksize)] = ones

    def get_kernels(self):
        """Return the dict of prebuilt constant kernels."""
        return self.kernels

    def model(self, input_data, is_training, dim, reuse=False, train_score=True, H_vector=[], apply_homography = False):
        """Build the full score network for one input batch.

        :param input_data: input image tensor, NHWC.
        :param is_training: batch-norm training flag.
        :param dim: dynamic input shape tensor (used for resizing).
        :param reuse: reuse variables (for the second image of a pair).
        :param train_score: unused here. NOTE(review): dead parameter?
        :param H_vector: homography applied when apply_homography is True.
        :return: dict of named tensors; the score map is under 'output'.
        """
        features, network = self.compute_features(input_data, dim, reuse, is_training)
        features = tf.layers.batch_normalization(inputs=features, scale=True, training=is_training,
                                                 name=tf.contrib.framework.get_name_scope() + '_batch_final', reuse=reuse)
        # Final 1-filter conv (no batchnorm/ReLU) produces the raw score map.
        output = self.conv_block(features, 'last_layer', reuse, is_training, num_filters=1, size_kernel=self.conv_kernel_size, batchnorm=False, activation_function=False)
        if apply_homography:
            # NOTE(review): transform_map is defined outside this view.
            output = self.transform_map(output, H_vector)
        network['input_data'] = input_data
        network['features'] = features
        network['output'] = output
        return network

    def compute_handcrafted_features(self, image, network, idx, name_scope):
        """First/second-order Sobel derivative combinations, concatenated
        into a 10-channel handcrafted feature tensor."""
        # Sobel_conv_derivativeX
        dx = tf.nn.conv2d(image, self.kernel_filter_dx, strides=[1, 1, 1, 1], padding='SAME')
        dxx = tf.nn.conv2d(dx, self.kernel_filter_dx, strides=[1, 1, 1, 1], padding='SAME')
        dx2 = tf.multiply(dx, dx)
        # Sobel_conv_derivativeY
        dy = tf.nn.conv2d(image, self.kernel_filter_dy, strides=[1, 1, 1, 1], padding='SAME')
        dyy = tf.nn.conv2d(dy, self.kernel_filter_dy, strides=[1, 1, 1, 1], padding='SAME')
        dy2 = tf.multiply(dy, dy)
        dxy = tf.nn.conv2d(dx, self.kernel_filter_dy, strides=[1, 1, 1, 1], padding='SAME')
        dxdy = tf.multiply(dx, dy)
        dxxdyy = tf.multiply(dxx, dyy)
        dxy2 = tf.multiply(dxy, dxy)
        # Concatenate Handcrafted Features
        features_t = tf.concat([dx, dx2, dxx, dy, dy2, dyy, dxdy, dxxdyy, dxy, dxy2], axis=3)
        network['dx_' + str(idx + 1)] = dx
        network['dx2_' + str(idx + 1)] = dx2
        network['dy_' + str(idx + 1)] = dy
        network['dy2_' + str(idx + 1)] = dy2
        network['dxdy_' + str(idx + 1)] = dxdy
        network['dxxdyy_' + str(idx + 1)] = dxxdyy
        network['dxy_' + str(idx + 1)] = dxy
        network['dxy2_' + str(idx + 1)] = dxy2
        network['dx2dy2_' + str(idx + 1)] = dx2+dy2
        return features_t, network

    def local_norm_image(self, x, k_size=65, eps=1e-10):
        """Local contrast normalization: subtract the k_size x k_size local
        mean and divide by (1 + local std), with reflect padding so the
        output keeps the input size."""
        pad = int(k_size / 2)
        x_pad = tf.pad(x, [[0, 0], [pad, pad], [pad, pad], [0, 0]], 'REFLECT')
        x_mean = tf.nn.avg_pool(x_pad, ksize=[1, k_size, k_size, 1], strides=[1, 1, 1, 1], padding='VALID')
        x2_mean = tf.nn.avg_pool(tf.pow(x_pad, 2.0), ksize=[1, k_size, k_size, 1], strides=[1, 1, 1, 1],
                                 padding='VALID')
        # std = sqrt(E[x^2] - E[x]^2); abs guards tiny negative round-off.
        x_std = (tf.sqrt(tf.abs(x2_mean - x_mean * x_mean)) + eps)
        x_norm = (x - x_mean) / (1.+x_std)
        return x_norm

    def compute_features(self, input_data, dim, reuse, is_training):
        """Run the handcrafted + learnable blocks at every pyramid level,
        resize each level's features back to full resolution, and
        concatenate them along the channel axis."""
        dim_float = tf.cast(dim, tf.float32)
        features = []
        network = {}
        for idx_level in range(self.pyramid_levels):
            if idx_level == 0:
                input_data_smooth = input_data
            else:
                # Gaussian smoothing before downsampling (anti-aliasing).
                input_data_smooth = tf.nn.conv2d(input_data, self.gaussian_avg, strides=[1, 1, 1, 1], padding='SAME')
            input_data_resized = tf.image.resize_images(input_data_smooth, size=tf.cast(
                (dim_float[1] / (self.factor_scaling ** idx_level), dim_float[2] / (self.factor_scaling ** idx_level)),
                tf.int32), align_corners=True, method=0)
            input_data_resized = self.local_norm_image(input_data_resized)
            features_t, network = self.compute_handcrafted_features(input_data_resized, network, idx_level,
                                                                    tf.contrib.framework.get_name_scope())
            # Learnable conv blocks share weights across pyramid levels
            # (reuse is forced on for every level after the first).
            for idx_layer in range(self.num_blocks):
                features_t = self.conv_block(features_t, str(idx_layer + 1), reuse or idx_level > 0, is_training,
                                             num_filters=self.num_filters, size_kernel=self.conv_kernel_size)
            features_t = tf.image.resize_images(features_t, size=tf.cast((dim_float[1], dim_float[2]), tf.int32),
                                                align_corners=True, method=0)
            if not idx_level:
                features = features_t
            else:
                features = tf.concat([features, features_t], axis=3)
        return features, network

    def conv_block(self, features, name, reuse, is_training, num_filters, size_kernel, batchnorm=True, activation_function=True):
        """One conv layer with optional batchnorm and ReLU; L2-regularized,
        variance-scaling initialized."""
        features = tf.layers.conv2d(inputs=features, filters=num_filters,
                                    kernel_size=size_kernel,
                                    strides=1, padding='SAME', use_bias=True,
                                    kernel_initializer=tf.contrib.layers.variance_scaling_initializer(),
                                    kernel_regularizer=tf.contrib.layers.l2_regularizer(scale=0.1),
                                    data_format='channels_last',
                                    name=tf.contrib.framework.get_name_scope() + '_conv_'+name, reuse=reuse)
        if batchnorm:
            features = tf.layers.batch_normalization(inputs=features, scale=True, training=is_training,
                                                     name=tf.contrib.framework.get_name_scope() + '_batch_'+name, reuse=reuse)
        if activation_function:
            features = tf.nn.relu(features)
        return features

    def non_maximum_supression(self, map, thresh=0.):
        """Zero out every score that is not the maximum of its ksize x ksize
        neighborhood or does not exceed *thresh*.

        NOTE(review): the parameter name shadows the builtin ``map``.
        """
        pooled = tf.nn.max_pool(map, ksize=[1, self.ksize, self.ksize, 1], strides=[1, 1, 1, 1], padding='SAME')
        mask_scores = tf.where(tf.equal(map, pooled), tf.ones_like(map), tf.zeros_like(map))
        mask_th = tf.where(tf.math.greater(map, thresh * tf.ones_like(map)), tf.ones_like(map), tf.zeros_like(map))
        scores_nms = tf.multiply(mask_scores, mask_th)
        scores_nms = tf.multiply(map, scores_nms)
        return scores_nms
================================================
FILE: keyNet/pretrained_nets/KeyNet_default/checkpoint
================================================
model_checkpoint_path: "model--1651"
all_model_checkpoint_paths: "model--635"
all_model_checkpoint_paths: "model--1143"
all_model_checkpoint_paths: "model--1651"
================================================
FILE: test_im/image.txt
================================================
test_im/test_im.ppm
================================================
FILE: test_im/test_im.ppm
================================================
P6
800 640
255
ϻŸѽjf>dC(Q8'<-$."8('5!#5""7&8(7%9&(3!#3!4'5&6% :%(:");"&89 ;'@+9"+:"p$0/@3B/=2EYt̻ͽpy`lV{mRta@^Y:TX;SX=UR9SU=[uaҼʲεͳϳˮɫͮίѲʬϲдδԽԽѶϰϫկֱӱնӱԱѮͫѯձƠgjeOZIEfLWnPaest~zwwnv`gCP5V.G4F1C-@(4;?.>&=,?,<-9.:+8*<,I,G1C6=15/>'C5B/5/?.E+C0@07172C4C:G<J:M6P0O1O3M9L>J9?BEDFDIIQGOOWWaKSNQ{wǭîc^[]dkut۰аӴȩ}кЧͣȽfXBJ:J]f֬ĠmoIZMY˳Ūն|BA@KOYw|իѳٳܴظҶҵԴǫʪѭodsILAL7F1B0C.C*B.D4G:H6D4C9J7K5J3I4L2A4A/G3O3F=E@D4<1B<L8D7C8G8L*A5G7D,:3C1A5E.<=K8G6P5V9W<R2G5OJbM[w16P',=*2<2=3)09-3:*5R=JjO^qTc\?NH,;<"19!.5!+0#),&(&$#/+)1)'1%'7&13$-/$'D:90%%0"&1!(7($;-*1!'<*0B1/?-(>,)=--6,+0!$;%-I*6S+9X*7a/:f6:h7;e18b0:T*1M./A.)<0*<1.:0.8/.D;<B5:P>Guakuj\eTEMNAGC:?C:?H>AJBBC=8B?6;:/9;0A?5;2+?2.C33</03*.6,0@35:-/;,,A*+L(*],1h*10<.;0?4E/C0F,E;L;F7K*?-;09:C/?5H5B7@7D6J5K;K<J6I3F@P7J5L5Cm)'A%)5"&3')("%)'+OMR9<0&* &'/%!-3('3./'#)'*'#("" '& ! )'$&#%""$% %(!#"& (&-,&1,*,%*%&-& )"%! #!;28ŰϹʲǪɦÛŘÖS@J-'..+28-5`V^ȧàŦz19)!& ##$" $#))$EFAbfbx~}}zLXI&# "$'*.*)-0,0($&-+,*+*"*''10*13)-3*,.,,(33&?B4Zcbbjpfmrgqofvn^qnZoudtyftwhuwlwykwwnxypz{oywmxsl|wl|k}m}tx~y~}y~u~wto|rx}u|;?J !#!!$" " $%& !"#!!"#& %(%)%$+#$$#!#&###!*#%! )%#-#%$*"""'% .+#A91`YXb_dWYaV^dSbbTe_`eedijkmg|rauXsStXo[omfZ_f;@P%$2##%$"%$*!#'*&$(% )%$"%"# %!&!!""%#ïкujPE0a6'N1*>'(=&+9$*8#)6#%;)"7&6% 6#%3 #5""211 <&+0/05#F.,C#'=%)D"(q 1*B/E4D=O^t̽˺Ƴλu˾įɲ̱ѵѵαˮ˭ʭͮѱ̬ήָշҴѳӴֶճԱղ˩ЮհѩԪҨ̦{|`qbWfQPcFPcBSfFWfr}yp|`jGR<R1@,8$6"<)D$9,:/;2;<E6>2;/9*=*N*L-D8@690:,A5A6?4E.E+C(</=3@1C1B7G;K8L8O6P8X6U8S;P;K>M>M>KBN9H?R;OCQnnǯҶ϶VUOXKYYa䫡η˲ƴhPZGȌrЮ۷Ŧ\O;A;GCG\Yngsk\_P[ij座аίɫ{gWBFAPO[gq˳Դұqδδ۸^H@v6?@S8L3F2I5O1L*D.H.E2?8E8F8I2D7K4I1A/A-J-K0F4D2A2E3H<O:L:K3F3J+G2M2K3L3L5N,C3H6K<S<S:M:D;AHUGa:OZ$,N,-;*-0)32,;:/<A1:hT_x{{cplUaE/=='57#/6$/1#-1(01-34/4:,07+-6-,7/,;207,+8*+6*33(53)22)'7.$3("1#,3%40#-4&13#/7%1?,5:%*:$&='#@(&@%'A%+;$*6%(:0/@47:(3@0:SENaTZ|mqzzyjmfRZP?DJ=?H?=>96>96943878633@65E87<.07-181::47;2-90,90,?30D00D),H',O-,Q*)[+,j.1x.159>C;BDP9M1F7CBH?F9F<J4<9>3<9H8H@J>C9C3B:N>V8Nv&2D""5)'/&&/&*'%JAI}åW\P&#)!&(#,%!- /&$/++(%)/'0.'. 
,+)"!))(()($%%!!((%,*&*$)(!-'#,&%&" ( )# %# )%#vkkȲȮ¢ƗƒƏËdq=/97170/2=><˪ȩQXH&+"&)$ ! $"')%." %'&']^\txt~{\eU.5+" !))-&-/%),"#'%"%(#&1*,6/1053/97(12$,.+10*0*7<1QXLYcbakqaipbnnctmavp]tt`ruetwhvwguvhuumyynyylvuqywt{rzpyt|z|x}}vy{xu{V\_$$0 +%,%!& !$$$+%$'"!#!$$#+$&)!##""# #%#.)+(&. (")"&"$" #'#=:)`^SZ[W\^`]_dadjcekedfadfjlkvobrWwS{VvYlob\eh=FV!#4!(!"&''*),%&*%(+$!'&#'#"#"!$&#(""$ %!'!*# ȻιŒ{paRK<5e5-U:5E+/A&-7#(/ $2 #8&!5$4# 1#.!5#%1#3%-8#(17"$7""4"<("9;!J#)}&7*D)@,=7IYjǺŮͳ̮вгαϲ˰̲̱ϴϳѴΰҳаմүԳвϱҲӱհҭկ֭Ԫt~s`teYlYYnV\lMYiGVmMYhpz}|ujtMV;L6A/81B&B&F&B(6)0+4-80>1A0A*A'I.N/F9B8:2:5I5C2>3D7L1J-G.E.C-@7J;O7M0F6K9L?V9O6H6D:E7C>K<L9M3K2N5PUc䣠ʵ̵δtSU?O<NKV⟘Ϻӻɰq]NF}pѷв¨g]BK>N8CBH?DEK8FAO|zůѱѱ{UKAI>Kahӌ}ĬѲմzi[J̸˳βַo^Od8::J6K6M7Q5S2N0F):.@1C4?8B:F8F7H;N6I8H2D5N4N3G1B.C1L5H<M6F2B/@1F-D+D8R7O-E2F2D8G3A1C6M:OJVY\SWr4?J%-<))6-(2/.309:2AM=Js`h~o`iY@MN.?F'8;'63+65178-.4)+6.06028111(&1%#2'16+<8-;6,13*%-$/"(+)0&)2*.5.4-&.+&+&$%,*)0*$/'"6+,6*/;-6;.8E8AM=GYFQo_huvc]XRNIC>=C8@B4A;6@=8>@67B735+(4/010655875/41+61,81,:1-:.+?1.?5-;1*9.,=,.F+/J%'U&'^%'{6>4D4G2?5?6D5I?J>B7:2<0B2D9C29:C;H>S7Mn%4H'.8&)0+&4-,3)/5(2{nz|/5-*,(%'& #!#).&$21.'''.)./,1" %!" 
&+)-0--!$!!'%,)&&&!'"""&#"("%$'&!)#1(&)%"))&NJIǯ̷ͯØÅŃÀ{cpA3:-*0<5<=5;_baäʫȯ{q'*# !!"!((%,*+"#%#$TTP{}w}{pxgEH=('"(#$( $-%*+%*)&+(&++*.,,.'%'3-/5.0,,,+0/&+*""&+( % 593\c]`llfpxdnycptbtnayn]xq^srctvgwxfuvhwwkyyn{{n{yiusn{tvysuvyx~xz|}w|z~z~jrv)+.#!$&'#)+,0&',"%,"'$'$-.!('&#%/%*+"'$#$ &&###!#)%) # '( &##*#%+%DC3^aS]eY\g^_he^bcffjiedbbedgmnkjn`qXrSrSno[Z`b<AU#!9* %%#),''&%& !%$ '&%##$# * $)'!'' '##ǻǷbePSR@@3.d2+O61@*'?(&5(&5++8&*<)'>,'5""5!&7$+<*.3#(2"%5%$7%#15!7&5$4!B&(?$'P$+~$4.D*?,?9MSes˽ʳӵаϮаͱ϶Ͷ͵ѹӻԻϴ̱ҶαԷӰӲԵֹոӲԱղԱկʤ{erhziepa]lYYjPTlMTqSZjnxdlIS;P9J8E4D0G)@/D%4+8,;,=-@)?)@6O-H,E2E;H/99F@S8D4@/@/D2I-F,F1I1E8L6L>S0D>P:K3P3P8N8J<Q3K:R;R0F2K5OAVĹи̶йh\GM6F7H=G҃}ηӵӶ{PMplůٺc\>M7N7I=L5F:M0G=N{yƳдӵtG?BGW\ٖ侴ȱç̨ϫxp]İϵӶֶtkDAw1=5H.ED^RmPnF`;J18047=.7-8.;3C1E.C3J5C2<5I4J3D3A:M<T9F5A/<3A/>3B:J;J<L2D4D0==F:@:B6E6OCaBY8CX+,A+):,*7-)2*%-&(>3=iZgzufxrVeSDNF;B>.4=072*2/*0+&+9026++2'+=0=1$51#2/#(6+*5)(/&#)&-+&($#-(-+'.$!''%).((5+,8+1?/9Q?KiUbu_kwtopbT`TASG=HC:>@53;1*3+$2.+++.016123211=:8=72=4-<2*H<4F0-B00;025,0:.5<,3?,1A+0G.5I+5U0:_06l36t15~3:/616/5+7,?.B9E7E<K<G)7p"/G%*7+,4#'.((@3;l^jũFK?##!%#% &! 
,"",$$+))#!!)%''),!%*'"("%*&'%!(#!%#+# "'#!)#$&!(&#&'$$("*'#&%$vosҺĠÀxvq|aj]@E/*,,14407UMVŦ35,%%%%$%#$",-'%%#";:/vui{sznKSG).'&%$"+"&3'*8-.*!".&)-),,+-.12,//2222010./,**))'%&")+&*,*QTR`gebqqbqwcr|arwaupaynazpbvtfwyjz{kyzn{|n{|o||o~}j}zh{srxvzswrzs}~yz|{~|BIO"$#*&)("!#"''"'*'+0"'+!%&%)'"%$#!&*#)( &*(+!%&"#!!#$!$ #!$!%"#' JI@SXLWcURbTTdYZf`_gdfe``cd[aigioxmkj^kWoUnhRZ[]BAY+"@+5*.+%-,$%*&'!%'#,& ) "" ($)&"#"!"#̾ķĶ˻͵ØttS\KLMES::20x15W11<$5#2&".$%6&'7%':$(5$6&2%6$+1"!0"1$6(6&6#"3$5#:""6&%;'(Z,20<4C/>0D2K@WZgy{Ķű̬˫Ь˧ͲнѼѴΫ̨ԱԱӳԴҲҴҰԲֶնӶѵӸձӫϪɩygvingihy`auW\tU[z\`on}imP]=Q4J.D*<(4.8-9#6%<+@-A3E/A+>0D0E4E2D0H0L2G4=01-2&2'5(2,309+8,:2?.93=-65>/;-F/B7:794@1E7H7E8J/G<T`oɺεƨoaST?M5F4A>GvvɰկղMKhfܴĤc[<OD\@R1B5N1O<VFQކ}ɲӵټzyTFpgϣòδħȨ֯Ԯq{jƱϳҵÝslXX/?6PAS<S^s厘ۄ]jHVp6B~'18?3:-6/=-@1H+F/L7C172F-I&>,><M8J0?6H.C/F/D4F9F;E9G2F2I6K?P6F1DAUH`:Oa/:H-.4##8(-9/.7.+=/6N<Js`rv|ix~dr^NXPEMD8?B29I5>9*4@4@=1;8+52&/2&.*'2%-9,2;*45!/9"2A*:D0=R@J`OZp\j}hwt{whmgX[OAAA32?01>/36'/;1:419/,241330.=94>:2=5/A&,C.1>41C:4D638)*<259/6?3;5+.9/2C5>C0<C-2A(%H+*?$$G')S#(b%|.3BB:@1;t.-[-(F,+<.21)*0)'/)22*3^T]ug,,$$$" (%%(%$+'&.'*&$+!)*#'*&&*'+'#**%.'!*# &"$&&"!*$"! '%(+#*($0+7#1.()%&! '$#PHLDZϺȴěăyjykv_flCC=+.5.7.3::EFv~~\aP,)*)&"*)")*&"$%/11ba\zuUcS\qW{zyQ]P)7$$*+)*$!( !%"!)$+%%-(+$*"&.'&.+'.-*--211832643//-*,(+/()1(2;3Xa\bolast^ru`uzYos\or`pogtqhxwfvwhuwkvynw|py~my}kz}k~}cuul}~rm|zs|x}|}}~Yeb*43"((#1!)* ((#,*)2#&-%( &%"'" $# )()%-+$,$% #!$&!%%%!#!#&! *($LLG\]Wad]]`\\aaZ_ebgo_gd_je_mjdolhkevmcm_mZ~lZ]\[BIX*+A' 3"$''&2)'0)(&##$$&'(,!"%#$%((+'$)!$$$'""%! 
"!"[QjdȐѳƼƺúüƲqrSZ@G6@99@8J6C/9{'0[&+A!%9$'4#', ,!1"#7"(3#7&?$-?',6%4#6"%=&/>&1=(.<+(B.,F-3?+/I-1e06>D:=::1>/G9S?ST[mnļŷĴ·ı˯ջϮα˳ϳծիԬҪԮկΫԱղԲԴԵӶӷؽضӮ̪βyktmpnloglbi}_f{]cwymrKS;F4B0?0=0:3;2<*=(?.F/G+D,F.I+D0?7B0=/D5O2G6A(.$-'4.8--.)&!.+0/-*51-(+&)$.+.1039<@E=G6D1C/B3E4FBP~ŰǭǶrXMCH9H3F:KBKjcɬկԱ˯ZYbeǎzV[:R7Q6L&:0H:V>VDL~s˰ٷڸ¡ιʰ̱ӴǧήӮmxԶЯƿeYM]5M:TX[~y차;㾳vtqEPy5J0G6I9@6>5A+:/A0E1G<K7D1I0L0I1E0H*F/F/I,H'C)C,A+<6F<N:O4L7Q5O?XG]KWr0?U)6D-14)'8..:.7;-4A26`QYw}s}||rwsgnrcls`lu`ou]o~ctl|i|jfytt}|~uv|fkfLUR5CI5>2+,2+.6.25-172254034-;**G98B:89-/>+3E1<A3>>29A33=3/<53:3:5,89/73'(3*&5/,/*)1"&5!B#)?#%C$%D !=!;'8)(6*/5*+0%%2(4YO\<C.*$#, , &"%! ("#*"%'"%&$&--///-)%$$")!)&(!&##)$&)#%$!!!%$$&"$*#&+"&-#''##!!5-1vǩͮŦ{j|^oXeEMK533.01.7:<EmqtçƩws%,%%'!&&,*)%",(4%#1PPT~IIC &(%5-CWDYhWyv}vYaT170!%(&)!"$$ # "!!$)((3)',-+0)).--1/-.72/81*62./.*))$')!(-#DJCahaipp`hpepxjv~`nu_kseowjrzgx{dxz`qsdsvivxjxyhxxk}|nj|~oussx|}nyq)1,#$(&#*!&%*.%+'!&'%+$'+%*- (&'!%''$!*$ +&!)"($ ($ %+"'* &' &&"'"!# ()UODa]Teb^babZ[aX[e\_jchbbkbZf^\faacbkab{fhc^cVdYOISR-?I)6!$''".("-*(%$&!%$'*01088320.($& !! ! #!##'57DFQPncǍ{Ѫ·ŵ̯×{gm^WO;771y0*h%$r'.0?3?-0e(*E%*7(/0#%4%"3"2 6';.A&2C*2C,/8%%7"%;!*@".G+6>(.8();+-='/>&0L%1j%5,<4?8?2B-F-I4HCMINV]lq~ɵɷжʮέѮҳѶгխӪҭϪӰԲЯӲѱճӲյԵϲӷԹյճѳĬxxu{|zzwxqsprop{y¥¡xvKM7@,:.>/>/=,:/@,:+9+<,@&=&?-F/H0H4H6J4O5R9P2='.*31=7?@?A;1-525284@:821*,&)"/-$,/297;AG<L8N9HFQPSlcřįϿbS=949<GAR9P:OT[יֵ¦ƤX[YdhrlmbbIW<U>U@R:L4J6P5L9BcZᆭвշаԱӰүέԴɽϯԲؽ}̷Ѱ̳ɾpZfits˘ὡзγɯtj@D6H<O9G<H8F0?,>3G5K0F,<.=7L2H0C2B)@1O6N8P2L/G2G0?3=6E<N;J5EDVM_IX:DR.6@'17)1-%&1*(9+0<&8eN^s~txek]KHK69A*4=(4:-57557:4<;37:2684924G7=E5:A9:@;::32840541-,12/9/+0820;/0<36/*.5+33 )0#1$$*"%,$'(." 
7(-6'/3(*0&#G;Aw~ZfM&&/"(0,%#&%$,())$$% )%"(%%(*,$),$%%,''*"'+)((''%%##&&#&'"'("&' ,+"'") "^PYΩҦŐmz^oVj?LQ)-.&#-,/95Afbmª7<.*(%30+/.%()%+.4KMS}ztt/(,"#$!$>G;bm]|wq~sQ]O%/#!&$##+)&-,)(-((*#++&3*'2""&"%%(,1+,6.)/1-30/5,,1//1/.,-+&%%$&)*-1.'-'4<5Zd__jg`jkfjqgmscnphvv`mphrzir~ds|ev}iw|juzoy|myyixunzlzoquvsz|z|IOO!$& #!& '"#*(((%&%$&'"'(#(*"$%""!'$&(",'$1!!0#%4( '##%&%$)()'&*"!&!%#%' $!$$TND_YTb__abf]cg^fjXbdXa\\haPaXWhb]gdecfobjw`cf_u]RZPM38E!6!!** (%!&'(!)*(&')()"&"& %%& #%!*'*.>5C4?=AECZTuoēϮڿп̺ƴ˾÷Ļ̿˽Ÿƥwsb`TUHK<1*))g*-N*.B$+J#.o)61;-2i""Q*,B(,<)&1 47!&8$,>+38%*=*):&!8$(<%/@'/@%,C)1C-4=*1:%-D(1J*5a,>1I&C*D.A0B0F1E5G:G6A6ECO^`jjqpzx|z¼ĽîſƬϰֵѯϮϯѰزְӱҰԲԳӳֶҲӳӲַӶϴҸ̲ͯѱԹDz~|{{¨pnJP5F1D5F):0@0C/E(<.?0@-=-?-B-D1L/L0K5O5P4P3K5G,5+20569621)1'1(52,*y%#w" +*5430-(/04>/:2>6E9M=SJWYYRHp`~npeYF>66+-).3<4C;P>RU\xsy{oNR<IGQRXNTAN9I@N8D'9/F1C?OLWEGЇyɭзԺвάʩʬ̴|çӳ̴{qysʻٻеճɪʵ˳ɧǣ̾γp`FG=K4B,5/?-B'@)@-B+A-G(?2E+?4F6F6H0H*H3M7M+?5G4E,<5C:J5F&3?HKPBFV-/?+-;-66(37)37)1@/8P;Jy_w}}zyucgdPXM9D@0;=09?=@3864:6483>=<>:;;79926:12830753/01779-*+0++1)03,2$$,$,1'10).++(,%+/&)2&+/ (-()#D:<~wxyr.4)&#-*0,( !+$)*"(%$'"'!#&&+&(,+(0*$.%!('#(,(,,'.(!*#&#%&#%#!!''")*$!"+$"(4**zȣΤѡŅk|YkL`:Lj-7:()))%013USZ`fY%",)$))(,**'&$),(&/+`fcvpy) +$"( $( $*&'%%"+0(FOEepc}|}iqd<H:$."&#*#),."!**%1$'1+4DAMA>M1/>,,5+.0$(('*+/-2/.2./1*,,/10133**+')+),/&,-)(:GEYihVhearpjxvetr^nkgxvarqevvhwyjwykwyny{lwznz|kxziwyl{{kzyp~p~tzx~{|~~}^gf%', $ *%))%*! %"#( ## '&'&!'''(("+#&"#%%+$(1 *)#!'$&)$%%++#%&&#%*()%&'('#LG@[YW\]c\cj`knZfdTa[P]ZUebVhbWibZgadifa^^jb_xh]m]qXM?37! 
,$)((&()%&*("((#('"!%%!&"('!)'*!$!% &&);/?3B/>7E<GKQSLi[|p̕ҫָƿý¾οʾǾż»Ĺ¾ʶζ~fiQ`PLM>P:K??45q$(R"(B&*9%$?*&j44285;98b((O!$G*'3":%'<*,1#$6((7&$7!;!="':!*<#,<&-:&,<)0?-4J3:K)1\*7{+>-E-E2C0:1?.>2B0@/=8F:H:J5F<JNVLQNTvOWpQZ{ipx}̬ЯҰѰԲϬϫЬѮЭӰЭЭүصԱӱϮѱҴϱ״׳ίŭïįlrFU7F.@.D/G+A(:-<*=(;2D3E9J3H4L8R3K3K2J1I1I2I4L+A,?4E8F.9/41,.%x*%1.//,./1-/11,#( ;>7F6P/I5H=K3>-,5(B0Q>ZH_NF:z4+92923.66:A2A9L8DUUh]aRE;<:;9:<:>>C;@9?6?2D0E1;:C=N:MU[̃x˰طݶҫԿ|tc}lɮʩƺsfoNIѵЯѲػд̰ίʨίziB?3@-@1?4<-?)C-I4J6C1=9K/G0G4I7J/C-E.H)H3P>U9K2E:O3K8Q=U:O:FHLr52O1,5,*,+,60:3)3;-7C1<lWev}{}~wsmr~nycM\UGT?5==8;43265775;1,9?6B8.3?8;9673205303/-2,+8.6;/56+/6)4,!10*7(')&$#*$%1*--$(/$*-"(:.6ujrĩ?L<)+%-#''!&"* &*%(-'.(!,' -)"/(!+1*2*#()$/'&1"$)" &!&*)#"'!&"$-',-'*+&( ' '%(&%'YMLƨǠ˞p}[mLa>Rs&3E#(0,,./0>?B¢|r784( #+(+./0%$%%"/)-QRSvv}& ,,#+* ""% (&'()(&*&2:0OXKr~i}u~~yzo|n\g[CMA18- % #(&&&%&$$&"#')(.F@I_Vb~|oox@@M-,9//6,-/.--,&.-'.-*---,(*(043055.3-*0'& !+)P\[[ji]mkZkgdwqewtarq_pp_po`qofwrgyufxthzvfwriyudsojzujyvl{yozqzryz~~n{4A?%'(%#$'"")"#)"%% &(#+'")#"' "&&&!%&***.').%()$&%#$#$'!#( "(' )# ,'(-/$$$&(#"!#!(*$NGAYUU[\c^bmbhobhi[c^Ta`UeeYihWgcXd_Za\`b^iibqh\hUx^IWJ;11-#"'&($$$!!%$$$$%"#%"")&%-(*,$)%!"!.=+8,8.;.<4A=FCG@@BCKKfdqjvȖ̩Ҷɹθǥzoy`p^bSWILBCC8G3P3P9D9Av&,X),>&%9+(=/*Y+*066=/2v'*a#+M');)%D10<*(9'&1 7##:"$9";"*A)1B+3@,19&*5$&:*+B+.Y,7u(9+@+?+:&-*,'5#6);*:/<7C1;2A.E,A0B8GCODM<ERYwINwJLWVa]khkh{wÿºȳ̨˧ϬѮӳϯѲմճճӱճԱְӮӭҬկհҬѧѨ˨ǪƪĜƕȕlrBP6>3@/D.H*C*<.;-:*8+<.@2F2F,A2G1G2G-C7L3I1F4K4O9T3I+>/?9D5:43~/),)..+-+-,,{,+.%)%8B4K2P3N-B0?-31-7,9-<3=6A:93}1'9+?,7'1,8B1C5C5<<9C7C4B2|<,~5-.*0-0-618501)03=CG;A:J4L3FENi\oƗ|pubQJNTwv嵟˰cZzDFkgްҹոΰӳƪǬkbRJF<F0D*?5B>F5G,E+D2D9B9?7C3F0D9L6G2C*>*A%@-I0G6H5H7K4JG]H^7Hp,2Y20D.*;-*-*()+,4.77-6aU]tzxwtƦxǤwġxġzyxƠxĝoh`]Ydkuugp`QXE4<D5>F;E?8D;9B+*01/5504@8:80/7.,<217*1;03?562'/),:/;3*.:003)+/%'1'+3*/6,2aW]ƬW`V*0++,&-(!*!*!.&-#'*(.,(0+'0/(3,#,0&--#'$!'#")%$)##$ !!!)))-,-.&+,$('"&"$"(*',%)>48wxĝÌq}]iL\FX4@A!.'''(*/04styå~PTG01/,&*)'*$')$$&1)/B:?~|vv60=)),#%&!&" '$$('&%&"+.&&, 
#4!3F4J]Mewjv~{p|{}~zv|qsyqkqkae`gjd`dZaeVVZGIO=>D519*(2&(.''*%/,--&+,%/*%/-+377>^]c||QR[42?.,60-47150%-1'.4-3304./0).,$+'$*%+&096U^_\hjanodrqjurepobnodqrfuueusizui}ye{vg|wgzthzrhxokzpn~tj{sluowryz~{S\\ $$)!(!%& '& '"#"$#&&&((%&($%("%*#''#'%$'$%(! # "(!)!$%%')'&( !'!#!"%*&DC:TSP^\a_^fdemilqafiYdjVdiUehP_aUccWaa]dcV\]jfc}eX}\IdO?<62&#++&4#%& $)'#"&'')$%'!'-'#)$"'#%"# +:/;*4+6&3(4-73?.=1A/@4A?DJGUKfYzlzŚˬ̲йzshqXbMVFRIKG=D8C=F<D3@4F3K-D+95>05[')B&)6%)4&+M'3)<,?0<4A,;\$,D)'<*&<+%=,&>/)9,*8,/;07</6B4:=.3:),6$$A--@+*E"&m'6"6%:):,9*7&6)B)D'?*?->2@3?2@->*<'62>7>?@KIDJ<DAH=D7@@K~ANPR}XQciw±ļɴθӵϱήѰԱӯֱճӲԴմѱӲطԴήЯ˨հ֯ٲذկѰŪţʮɹȶɢƗnmFQ3B/:6>7>2>.B0K%?'A,F(E1J0D->3B3F5H.A3F6I8K4H+K/N1C-61649+2-,~/#/%-%-','*$v'2$0)4?2H2J0C0A-;)1))|,%4*2*/'816.7+:*7"2!,',6.<6<440-2187;7~=594304/90;05*+#*)22)$2-14095=6;7D=HHMOPKMAG?KT\xqv~tSZ<NBRfi۽һϸáƒ|n`GC7<3<4@7C6B2>5D3E1H4P9O8G>L5I7M7I7H:H2B+=5J8Q0L1H?Q8J>OCSKY5@P"';'%6-*B/2E+31#(-(,ZESgtxoĠjdahjoĨ~}|lb[SxL}Rbkzu~guaP]E<E;8>7696155,1;065,14.27368.;707736,).-)0;486-'7,*1$-'$0%.A6Ad[f|p{o4;4&(#/-,-()+%()"$/**.))5-33+3-&.'!)+&,*%)+&&*((% $*")%#*)'*,%2+%0)&* $) +! 
,"!* !* $%!(jZaʠǣÕ|_kQY@N*9O$)&$%/-/,2ZX\\bL.1'/01&(% (""-&'.(:75njhieq0$5,**$,&)(#')$+-(.&"%$"!%$ %)(% #/%#1%+9-4>68>;8A<<H?0<33=46>55;3<@87:288.66.//)/0++-&%',-!)(.-$(,"%+# %#" .$%5(-5/3IGJdfh{ggn99D22=35=4494-10)//)01,32.3+**%$!-('2,/+)-JLRdms^mogzycuspvunstkrtirueqretriytlzm{eytezucwth|yj|nnpqsww~||v{z<=A!!#(,""("#"%!'"$)(-'')#$%%((%)('#*#(%"%""$$#$# !% #*&-&+*((&!!'#""%(#:D6MSKVYWWX\[^eahoZckWalXdoO^gN_eScgTaeWaeVbi_agoVN]M~YLW??:(84 9(!'#!"&!") !-## "&&$!-F.F*>-@+?+B+D1D.=)</E/D0B4?8@=DDKUYXYd`ldsg}prs~ǤǤšɬȮ̵ɯγҼ˼ǶȽǽĺǾʼʶȱӹҴɨwjvbhW`SZPUNTQNQFP<L<O3F3D4D6E2B<N;Q9O:J6D5=}+*\+)L%+U%7n)<*D1R1I3:343=a)6G(*E*'@' @+#B1-:+.3'.5**@--:!%1"#0*&7)#Q)(p&,.9.8-518.6*6,=/F,A0?3>4?2@,?2B2;4>,7.9$./6065>9G3D5D7@9B<Ky;G{EGxFBwK@vSCz_Rmkİį¬žIJεϲԲϰȪͩիЦկճӱӱֵӱִӳϰϲѲͫԲԱάӴԱֱӱʭƸòĭů͵;ŷ˫ˡÖnm<K6Q9P2C6D7E0C.E)C(B*C%<,C1G3I-F+J2M-B,=,>(>1K4Q1L4I0>-3,-*'0,,'~1$}/",()*}# |/!3!+&(1/A5L1G.6(%0+*&/(+)-*/(.(.(0(,$)#(&(,092;26302)6.102725++.+7/5)8..(-(2%~/}.+ )',1170<;F7B3C7M4K-D<ESM_Y\]LW?W>]?^KLaZi_`XOJOP<B1:2?0A/B6I9L2D5F9E4D7K;O2F4G3F8K7I7H8H4F8M=V9W0L6P3K<SEX=En3-B&3%"1))6.29+4D.;K4Ap[g{tlpplnn lÜkɠqǥsƭ{vuw~oktq`jdOWLSWFXU=\P2dR0cU2bY;b`K]bXcliiuukwzy}vyqckXAQ@->=1@83=349.22+0,7:688<63>-)3/'+6,*9-(1$(3%33"1C2Aqcqp/4/'*,""$'$),',(!&)#$-)('$"'"%/*,.'))!#+$&-&*1+0,-, #!**((%%&!&,$/,#3( /&"',$)'!/"'-!%) #(!$C98Ƙ͞k{QcAO9D_%-5$,,/&,.@DFkpY63*.+***)(&)$$*%,'"*MKN}s|/"+,%%$""'%$((&(,(++%*'&-"..#2%"%(,#*+",&%+$' +(*'$&((&)*&''$(%&-%+,$*$ )#%&# '+ (.$&)%%$"&%%&)"&, !&%#2+3QFSldl|yxxQLXD=N92=70292./+0+(4-,6))+,+)0,+6,23*2/,0Y\\^hebpldspcqphtugrvequdptfstivsm|uovo{jy{kw|oz{p~zn~rzyztsqzuyvz{ZX`.$8!*" ) "&#+)+&("!(''!!!##$ $()!#'$+$'&#"##++'&%#%!"'!#$% ' +"%'#$ "#$**497NQJ]]Wdbc__f_di[efYkk[jnV_i^bo]_lZ`g[dg[dkU[gWW]k][z\WqLJS5:<)3%""$!#% %'!!# $ "& #!" $(#(%#1H/D3E*;*<)>*A.B.?*>-C*A*?0@2@2F/C9L9H=H;?BC@=LJKMQVWY^Y`SdPiSiPmSnTnUv[b_fmpposʚw~lvh|is[iQiXg^_WWNVKPFICGEFFHGBC?D=FFS=N9L8F7@5>4=5B2C0E4L7L.@2?164:+:*B2G/E/J4E9<251A)=u(1] (L )C'/>*18$)?),>3-9*$9 8! 
?*(M**i*2.7/5%)#'!&")#.%2&2%/-2,.043<0>*9,80<1;3=(2-8+7)8(< 7,<289<;B9A?D|?Cu;?h59h<@fBDhIKlTU{hh{y¯Ϲ˳ˮЭִ֮ԶӱԪЦ׳ϭѮүЭͫѯаϯԳشձӰճϰѯӰӲΰɾʻ̹DzǰζͿ¶æˣonCQ8U0L1I/D5I.D+C0A/=+;);)?)D.N(I'D,D3G6G.?,A-F.G*A2E2>.2'$~)"8.1%~4!{31)%(*.640+./)3&81G/@89),%~(+.%/*3+4&}.),$& *$40)*1;:J3<453/;7}43355612104-/$3'5-3,2(-y+/#*%//36093;2<1B0F-B4B<?72MITRHL>HK\EZ9G=HAJ<D29;C3=3=0>1D+B1I/G.C1B9?:D8G<N3I9O8N3F6E3?:F9F9I8L7L5F5J3K6K0<Y&+H')9&(5).5)09,6B3>dS]owyqg\\]ƠeȣlţmånoqçvĪ{qrYj[G]I8P;+?.'2) -$'3,&/(+.&2-$/%/&5.!42)**&01177:85::?@CQOMXYouzvy}ozjWfK8H7)66/70.21/,-*/51>41:0....'45.4.05)2>/:hWcJM;)*$%$("%,&-.$-/$,2'/&$-)-*).)(*''$$$'' +(#/,(.0.&+(!&##%$""%'$,'"/*"0+!(+ ')&,#)& %""+(,wdiőȎÊv\kCV0Dm!.A$+!!&%+2//8xu} zj;=/+%$.*+()%&&!&"&-(3A=Fyz{y}1)-+#&)#'($'-)+*&'-*+'#'/+1&!*(!*$$"%)#"-$+'*&$-&&(&$#)*&&'"(($&#")#&2*0&'' &!"' -(#1!"1 ), -.$5*!0$ %#$ %?D@lnsomwTN^JCO=8:9650.531>/.7//.11+1.+905-%,656^fbcslbwr_vrdvxgpxgpyhs{erviywlvjqjqoxo~}n{{tyrttzy}w{rqt}|zlus9<B$".(!!&##*$'-#*+*) )+$,+&,!"$& $%%+*!" !#!"&&#&'!%#'! & ! 
$ ( #-$'( #& ",),686QTJYZO^_\cdibfkemmdop`gl`bm[[hX[dXabXg`WgcVbh[ajc]bp[YvSOeBAT7<8/,&"$$'$!!$$"$($#'"&!!0E2E4E-<-=+>*?*>/B,C,E)C(?/B0B/E2H6K4G9H5A5=4;6C4I0J/H8G@BA>6BELGFHCFBFEMKJFPIOERHMHQOUP_VZORTGRFQCJ=D;F?J@IAFDF@B;=;@8@4=8B:E7D5E=O=O0A.=3@2>7C5E)=0D9J4C6B3?2@/C-G0I6J:H/8/8)6(;(;+8&4k&5Z*8O*7I'0K)/G*(D%"J'%M%$U&(^&-f#/k(1a*'Y$#T!"T%(S'*S*.N'*H!$L#%Q#%\)*b(*l,1m'/s#-z&!'02112428-8/?'?%?6I5>063=~.9r*3k&2l-;p6Eg2=c28`23g8@c9>hFEY@8`LBsc\sc_upk}{νĿ©˭ұհСϩҴӵԯӬղճղԱղѯ˩˫ήԳӯӭխӬֲִӳնϱϳ˰ɳdzϾġʳθʿˮȣplCM:P3H2F-?8K1G1I8H4?2?/@+A)E-O,K-?->/>+;0A+>.B+@,>5@14)%(!-&}.%}/ 3|24$-+"*&20>4?3<+=;P4>4-+0(z,x)|,!/)3)7':(5,66%&'"|-&~*&,54E?G(,,2y.4x21y0%x1'4.-)0+}.%~."."3,0/*(&!*",#5,2.+-692:-<1D3?-.'!6.C<<5;6=;45=A5?2?(5.<3A7D7B:E5D,@%<-F,D1F1B>F8D2B9L6K1F,@6H3B'37A9C6A8F9J5D<N<Lu'1H%(;-/4%,1!+6%2>,:^JZq{`HCHQňaƓjǟnŢlâmãqtyb{pIb]1FA%6/&0)"( $'*' /&!)!2)#.#!1#$4"(1&3$)1&),#$#.&&3))0#%"$+%,/2;>CY^bv{kXdK8G7&59.22(15-9427/2-(/%.700/2D9B}pyLOF23,0/(++)$"%,',(&,")* )2*22,3+(0&%)&)'!&&)"&%/)&++* $% $'*,2'(0 "* (+%.2!+)&&#)"$(')-#JDJÖȑƆx_rMa;Mq -K%-"'$*.5=]]eEQ=+,',%*)$()*&&) #$$1/6fei>>?*&,0+--(%($"$ &#$" !*'((&&+*,%)#$(,('2'2%*"%+""$(*% !&#$)('&%%(&,#%# #*$7(%6&/* 2/#81$6)"-$$'$)%AIDt{xddqGHRGIL@FD25:/.7659653.-**(*+#*' &688^jf`rn\tqYsrauxox~irzkv}ky|hxxh{ujtk}tk{xl{|j{vnrxwr~tsxuzrzuztw}Yde&.1#),)--%!"!"'#)*"((&&)&!*"&%#)#$'#&'$((!"" $"!!#$ ((#%#"$# &#$#!"%"#)$&( #'!& 35,U[JaiWai_^ee_ehgklffjcci_^j^_lY]hXcfSb^Ob]Sbd_dp`]kc[_gVSfGFa:>F2.1"&#$$" "$&%&$##$"*A+A-A*=*>'=)B'>*@)B)F)F&@.C-A+D,G.H3K3F2B%1)<+E,E,C/F/G2M4P*E9L?I;B8B3D1G2I6E9=:@<M9M;F=;;64<9N5L6I6G2F3G;M8F;E7A7A3?3C4F4H8L8N4K1J3N2R6T5O2G->*92B6G5H3D=N1E/H0L0N3M7I5@3@';*A-?/@4L3G3B/<(9~'>w'Bl$2f%,b()_&%VU#P"*L(-A$%B#'F&.A )=#D!&@ 
<"$C)-B%+I(/U28R,1\48Z25W.._.)l0%}2'>755-3'4(>1J/D2B-=0G~+Cw,=u1;i-0k65c//b-2e.7j,?g-;b/4]0-W/*W0.iADmNN[GB\LF]TN`^Xinhr}w}ŹǴĭ̱ӵҰОլҵԸֳԮүӰӰճضںֵٵְӫҩҪɣٵնϳϲոӶδ͵˳̹®˳ų˲˧hc>B4A1=4>/:5A,=*>/B+>4F1C3G3I1J5K6A1<5A'63C0A5F,=5B59.+&,%($,*+#*,8#5-/5):)?4A9@,=&<&.){%z,w*w(z(|'%|&#,$.)(.8I5D12>692,/,68:460;+7{.*8#<&0!.%.(~1*z/'}-$.*(,&-*,3-6)2$,!*#41-02>5E3<408-H;L@=4;4B>?>;;@<?>6;2=1@5F9K5C/<5F3H7N6L8K<M6H4H2G7K7I9G0:0?1D4E6D4@4A+;2F-J4Gj+3E!#B)*>..%" .&)?,7nXfzt^}Jw;~@ІBЍJΒY̘g͡qȣmms|zsOgS1K<*0*..*563)*'#&#$(%&+(*++*#)-&''!' *#*$!%.$0!#-# & -)!%!)" )# "&!)$'" %%&+18;OYXuwn|qTeL=E8+59-85.20-'+,$12/528~uCL?(,-----+!"'*0(),&))%+)(! !"#&$(-#,$$%%'')))'++"+.,$&().$&-%'1"$0!*"&*&%+3&0$&"&*#+."*$,udwOh<X$8D6! -%''#*:7>}~YeT(2'325-%0-(-./**-''MIJ\a`"!(!!!!$$%%$#&"#% )! -'$ !%#",.**$'/#..'/%!) !&"#$%# &("+# &$#'!$#"$$ $$ !( -$&(((-)-1*/-+, !$>C>PU_S[]LXN2:789<CAB3//0-11.:*&.*'*GJIdnlettcrucry[jpdspdsqcqsguxdqvivyo||p|kw}izj}zh{ntvu{vq~n{ozxy~{l{|?IP!'/!#('+*#*$&+)"! &!("')#%%($$(##&#&&%+*+(%(&"&( '/%,("$##!*#* $,'()'(((((%&.)*'#!,.!SZD_kT\i[clk`ficgfacb`be\_j_bsadw^ct[`nVciQaf[_sWYpU]iTXYZKJdBFX=:C.5&'!*&0(&)"" !$/5/?.F+D*?%>*K(C*>*C&B)C%8,707/71=6H3K.J.I)D&D'F/I0D8H6E.<5F8O4G8E7A6C7K0K1T3O2?,7-B7S4J8F5D2E*A,F1J0F0B083;7F3A4=3:0?0E0D6D195:8?3A/D/F7L6J9J@O:I@N;I:G9F9H8J2H2K1L0D2B9I=O2E7K7L.K(Q+L.G/D,>.@#3.~+c &W*,H"'C'B!+G'3H(1C((>($:$%;#(@$'E'%B&&@%'; ":"#>&'>')G23R96L.'I+*J(0M$/X#-h'+t()%006B;=17/77,5~0@:Jz48k+*c')c+0Z$(X$$W&X#/X-7S12M/$O0"U2)T-2W1AQ0=T9AK69F45O><_NJXJ@aUSypwȯɰͱд̰̮ܽڻطӳϭֳܺӳЯԯԩըմҲѱӳӳҲҲϯұѯ˨ͩ˩ԴγìƳ˾ɭʱȵȺȰȖd]D@9:/6-:+<2F'>"=*:-9.>*>,B.B5G3G-H2D0<2@1F1K.I.K6K390+,$*$|%%/3(+# .&4*5/0.13071A/J,D+5*((!y',u"-~*|(.&)"&!9?*99C74*.$82408:3:/8.30,3*2,.*z(%-*.*/'8.$24+6+<4B.3(#,$-'53@A=E.B+@:=L@|?3>53/66553295>2;,;,=75<1F1O)A&5/;3>:G6G/E2N2O7M6H0E+E5M2C8@7>:E9K.E9O5E;C<9d/-F()8%,@-;8#3A,=XBNxbkwZw@u:u4y0̆8̍EɓM˙YўgʙidnxjtMg\)80./%3(2%*-,/"0*%70+*''&"*"'3#-1#+1$*."&0!$0!6"%,,.$%)$#+*)('&%#"*!!1"#,!%%!) 
&(!$*&&/65DPJdti~zxcWWD7;F;A8354523:3fihwBE6))'$*''$!&)%',*'%*$!+# +% %%$# &() ,)%)"'+!-(,($+$$%"!(+%/.$+,%&$&"#"'(--'12%20,)&)&)+)(aMPňćo|[fJR{18F$+ 1"'3&-4*3dajyy191594./-+*++),/+/+*&984ywx+,%%!'"% $&$ "!"(%%,(&(#& $,$/' '&!!'! '"$+#0''&(& "#'%#$ &%&') %&"($#+%%)&&%'&),*&'%%(%),)$(%7=9w~z«edbkinacsBBN5270++3/-(&)((.,+0,,*MRIbkbjuuhsvoyxisqfotamq`prgwzetvn{|r}muowjwfvzm}|o|zr|{t}{syuwqtr{v{~gkp/08&'*#'""")#(! "$$",*+("( &%#*)$($*!$& %#"!$))$00,'&%$""% & !)%'% &""#!!&%!"!'&!QOEb`Yedfdfnfmr_jhYcf\dgcigdkdbifXbdXdl\hmXa_]b`^^^_[`ZU_XSdVOcXBNP;D>.51'.&!(%! '+4+<*>);.=,?,I'>.;,>(=-E.C.A0A(@(=,;2?,;1E3L0I+C&?#:0E0B2A2C,I.H/C2B*8+:-?+D.G(;4B5D1@,71<0F/I.H-G.H/I0I3F4I,F-D0?0:2@1G/G6I1=3<6>2=3B/?3C3C1A9K2F7L3H3E3D7G1@3A7E,;*:(9-;3>5A4B2D,E/Q1L/B0>*4#)p`$$L!C K((N(,D %>':(?%)>%&<#)<"-<!)C))B-'B.(D/)<$!>$$A%'B%(C##F"C!"C$*E&.A!&D !Q)&`%)m-.w.,41659<{,0z19{0978}:8m02l3:b/1c51b.3Y'2V*4X35K-$F(F& G&)F(/G+1F.1H34K76M66W?=`LGaMRfSaƲ¿˪ӯΪЬֲЬֱϪЩΧҭӯղѯӯӰԴյյӳֶҲѱִױЧЧϪѮίɮ˻ưƬêȵǻçɭëȺDZȘgfAD4607-8-;3C,?*?(>'=+A+@*<,;1=1A(C*;,249>G2=.9+819(&-&/**+,306+*,$1%0'{# ++89+2#3*>0=-*%*})%+- )* * ) 0+8<5A175/7,2-75-)336=7D+6%)-(0&~1){-)0-4/@65'-#%&)51B4>,*+",!'$5745032@5B=8=/;1A8?4:1;25.:50'~0"5&5+996A6H+B'@.B0A4C1C,@/E:M8E4<1=2E0C,8/92>5B5F,A2H2D)4_6-C-#:/*=14;'1N6?u_ezyz~a}Fz5{3y4|8̃9ύCʏNʙXdժtǟpywzbv]HUG574.'-,").$'*! 0$'-"(/%.,&(53,.-%-++3)08&-2#)4',7).4#(0#5%4!3!!2%&-$&,&)*%).'.0%+7'-3$,,'+()%' ""$$*-$05,ELCfmfpblUFRC7B>7>QNQala2:/') 0,&+$!,"#.%*'"$,+*&($'*$''%)$', (/"+)&',(+)$+#%)$*'#%*(&'$)( (/"'*! 
%#"$$&%).$/-)-#"%"$$!C49r~s[nJX3;P#6 &2$./$.8/9JEO~<?<.20'+(*-*((%*)&0+)30/gcf>7@%()$'&!%*$)*$('!"(!.($%"#'#,#-&"++)'(& *)%("$(!%'%)&%*$ (#''!, (#&$##!+((,()'%'#%&"()%#&/&+2*/3/3+*,?ABåxyz}{yxDBJ0,1/*,0*,%#(#)/+.,,(`e[cnfeqsfsydqsetrdwubus[omcuugvwmvzu{qzjxqk}|pp~n~o}~txvzyhfm)#.-$,(""*%$##" "#%'" # "*#))!(-%+*!&+"&(#'!%+%*)%)'%%+'*$%$$%$% $!$##$&!&-)4'$/%$("$BD3ceTde]eggiondmgakicjiglkgkjcgj_foYco\ep\_gY[c\[bWW]TV\OT[PU\PHSQGNJ@CD::<23-&)% '!")8*;);,75;1=*B*?-;(8*<0C0D0D4G*?-B+@*?(;/=/;8C/<.@,C.F.B3B2?+D1H/E1E6G4D5D6C-?(A.G-?/;/=&;!:'?-B-B,@,C-E/C2H.I1I,=2?2D.E'=+>+<)9*:2C0C-@,?2D:L7I1F1F+B5M/F0E2E2E2D/C+E*C.@-:1;0?/C+B0G-?0<-6(-#!f R"J'$C)$?%"=!!?#%:%&3 $1"8!":!!C).<",>',B-*C.(=("<$!?'';!%D)08&9#C")A")A%-B(.>'(:%!;& K$)W*/[(*y57.014y,-y47x69x44u/2s0;p1@c.5a20c35c4=W,7O(-J)$O2*M0.G)*K+-J..E.,K74I52O:7O97K68YBOoVmŲƱɭƦέԴӲճԲղشٷճձҬΧŠЭЯҲҲӳϯյյҲұϬԮׯϨǣЭӴΰǹŰǯưŴïŮǰ«˻ɳŘ^\DJ6B(8';,@3F/C+B&@+D3I,>+9*8)7(:4.80/-*"#,/)-(1/5)(+)3202~*/x$')$8)2 4'*&'%,&#"#)'/&(*((( (%" -)/*,&-&z(!/'&$*.(%+!0'126;~+':78=4D/D#/$$6(6+|-(|,+1/w) }0/ $#'4/?+0.$4&%2/7731331=1?994/}+,4.|4(9+<04.-+.//)4+92;;:A:G0D(?.B3D7H1E2H1J6C8?48094F/B.;-</A4B2>0<5C4AO%=*'0'%.(*<07P;EqvwZFx1|*,Ѐ.~5̃@ˊEϔPϚaФi̩mŤmvx{lO[D!(0/)5)-8'1/*)')$)3('2('3./-/)).#-/(2/4*!).#%,"$2)+1(++ %. '/&3 ',#+ &+%)-+-%#%+(+0',0*3#/0!-*(,"(1*+,&#*#'(.')415IKN\cdp|wy{pepbZbvpuCND./1)&(,&&*" +! /&&$+(+++-+**-*'.*',&'.%).$*-'*-),'%(&&(/-1,(++%)$#('$(+"$)" )''&$)-$1+.%%'!'& ""9)-WcyzwezZm;DW!$8 $1)01&/;3;@=DjkpabU<7611/+/,+0-,.+*'&,&&bZ[cff-).1(,,")*"*.&-*$(%! (%""#!"###*&&((*#*,!(*"''$%!#)"&$ % %!''*%# #"'#('$)'&+().*,1)(*2.-3//,)()'&CBAáǧīz{~yx{PPS)+-*-./01-*-/*.*%'31-bf\alebpsfw`rvdvufys`vocytdyunsr{lxzivto{xt{s}o~lovy~}~~WT[&))((#-$''!&"!"# (%',%+* #.%(-'+'%)#"&,,-*)( $%%)),&#)"#) ((#&#!""% $% %)%+85?-)6$$+#(8<)giUjj_hhfknoeji`ficfkghoffmbck[aiXahU`fXbiV]fUXcUWcTXaS[`S]aLScHLYJGMRGIRDD>/39,5&&" # ! 
!&9+=,:+4,3+7&;*@+>,;/?->+>4E;J4@1E-J)J$@*;,2050;1C/F,D0D2@5@2B0B,A,E,F)A/B4?/?,J1R0I,>+?)A+?*<(:+</@0A->1=0=0A2C3A0?.B,C(8&6.@8K2H-E'B*C+D1H3F4C8G:L.E-K.J1L/I0I)D+G/N/M1L2F5B0>/E+D*=*:+7!) #_#!H'!C"#A"%F,/?'*:"'3 #) !,#$0<&&8 ;!&<#)D00=+%E.*G.-Q8;I07;"*7'6&3#4%5&;!*:!&:"#9#<%I(R)2L&*[/1e,.n+-r*,u14s47q26m*5r+?h#9e*9^.3Q%(W-4Y29O),J($K.(M52L10R10I-*C,'E4-G92N@=J<<N<@[GU}־ϷDzdzḭ̆ήвϱӵԶѲҲͰѶԴղӭӭϫЮӳϮѱѰϰϱԸѴϰϯҰӲҲѳԶίǺıìɴ˻ȴȶȶIJϿ˸a\BH/A)?+B+@0D+@'?'A*C'<2B-=,=+>+?):+3%%$"*+!%*0)5+6'*'&=;45{+-y%%+#?0~5"z. /()!'*0++++('.%.)$$)*x ')}()w%$52|,'9.4,1.~)"1&-&'*4995+&355F5L)8&(, 0%,'20+)-#1z./,.:,:,-u/"}1!0&527798:@-=);-2*-18|-,~6/|6+6-1.|'))23734:<8@9F5C0?0C2H3J9O6L1J4M8G8D6?/:/?/B/B,C/E6D2:1:w-7S,32 %9(07&1@0=eS_y~}lQ}7))̂,ч0҉9ΉBΏOΖVў_РkɢmŦrwk|bER;+3 1/#.#/%#5,--$()"(($*++/7/*4,*310,.*$) (*$0,11+3-((-)&2.-0,.2,23,53*3/%/$$!!$!#&&%#$#&(&.)-4'31"0-,))'%)!&+%&,#1%"1$%0$+*!*+'112:BHJ[eblwvs~||?KB#+'+$+.#*2&,+ #/&'*&%''%,*,*'*+*()'$+)(,)***+''',&$($!&$!%#$&#'%&''#!&&%(-(+#%#$%$(#*$)"%!%'_BJkyy|}q{Xk@MS"$7# /)./*5717A?BgjjĬƱs::-.*!//'14.+.+*+*+')IAE}z754' %#&!('$,#"*"$(%*#'!$%(""%*-!%')+&&%#'!!'%&&% %&( &)!""##""%#'(%.,(3+(2+*,+-%(+"'(#++'NPF}àţRWV+12#*+*/0$%''$*'"#B@;fh`muoivwm}ew{dvufvobumdzsawsdywom~jxvlxrvzwzu|n}zl}}qx}z{z{z~|<=F '#%*$%*$$)%)#!"'&#! " %"%$$'!!-)(%$% #&$*+$*(#'"(,&%+(%)('&))#%&# &&% #+(&><<[Y^B?I")##"&)TWElldihjjmocjh_eldgrhfsd`lb_i\^cZbbT`_Wce\ckXZiVUgXYiOT_MU^GSgGPaNPYUPSUJLP@HN=J;0<5.8-&1")# +! 
(-E0?/9*8)=)A.B-@'8)5+9)?'?*>0>.C)@*B-F*D,F(B+B+A,D,E/H4J2C0A0D3F-E)K'O'J+A4G/F,H)H*G)@"1-63=(7)>%=,B-=,60=0=2>,8/=.?/D)@+A)</@3A3A:K6I4F1E/F2C/90:1E,I*H1K5J2D/A,?/D2D2J0P4M3?4<7E2H.B(:"0w&^$R#&N%*D#&A#&C%*?$(;"'7!%6"&7"$824%9,8 )9"%6!<)'?-,I56D02=')E/0M54B*)P8:J279");%,=)-9'&<)&E&(J*-A%'F)+D"&N#+]+5o.7t*0n.5f'2c-e/a&1[/4S'/P*/I)'Q.+L'#A#:'!>,'>%&;$&A-+;-#A6,A66C8AF6:]MXۻȭʳdzĮɮϱұѯұѰմմմյҴҴԶշٻַά̩Ƣ̩ҲԸҺҸҴѳбѱаϱԸƯñȲλ°ůȮȱ̿Ǡ^\:B/:(2+3.5/8-8.<*B*F+D6L*=-@.@+=+=(7$z "00+/>F1>+4+-500)0.})/~'++#~+"z*"x( |'%() *%()588;77*#/$1'|$+./5(,77.(0(/(3,1,,)++3411400+226B1B-8%'*%0'z+"7.0))|+x+)',3(/+(w.#)+'+2-70=.@+<+4-*5043|-+41}1.~1-1,/+4:-4.67C*;.>3?6A1E4T.S3Q4H6F5G3G4J3G6C7?/=*D-N.I4D5?b'4B)57*1/#$6*5K7Jq\n}]B5x.}.̀)Є03̃7ϊDʌMʔX͞eϦnΨtʩ}zL[H-8,()$0%'6#*4!'-"%,'),#'+ '$%"0/.403.)1,'+-*'.+&/,-1.31./41+2.&1++3/7-,5,-111/(%"' ,#!*!"'"%"$*'/2,4('+ *1%0**( *'#+#$"##)(#($ -%%/$)3&.1$-3,2BGJS__hqqz~}esb,7+$+#!#($#+"&,'+&,#)*))&,&+++0./&+$"% .(/0&2+',%&$($"(#"+%&+#'* '+!)2'05//)&'% * %% &/).' +#)'#%)#%C.6_mluxwzbk;LT+/!'.(+,31+9--4MPUƩ@F;*0%#),0')*%.-,/*,701unnDA>&#&")%!! &##/!,#$!!##)($" (%"'#!"!%"*#"( $$#&$!#(!#) #$$'" &")%%'#-($3)'2/'22*(+&(%'&&)OWF{ƚˣdhl22;#"(*')'"(,$1"!#:=7dhcrurpuspxwjsshrsfvsdvscxtlh}|k}}jz{lzzn}zpyowr{oyn{uy~w|v{x}}~qo)':"&-#*$()'%&&+" $% !( !( "(#&(&)$$# &&(''(&&$!"$& )*((&(,'*"!! 
*!*#!/-+OKNmerVJb9)F+1&% ;?.pqfrqpglihumcoscisokxj_npftgdmafk^eh`bgdan\Ti[RjZVkRUcNX`LQ^LR\KSXHNPIKMLHMPFNH@HDAG:8A+)6.,:* **>4@5;0:-@(>,@+B)A-=(8"9%B,E*?*C,G,H'D(E-I/J2F/>3F,C.G-H2K0G1G4G0C+F,L/L.@/>0C.D*B*C(>,@,=+9,<,?#8)=#4*8&8(=,>+:*:+=)>+C)B.D.?)6*5&20=1:1>+A*A.?(7%8&>/F-B-A1B->0?0>+>.D-D,A.@->0B.>*6!(gT"#N&(E!%H&,D'&A'#>%!6=%#9#"48C%(C$*<&<'=!(7 9 !6!&9&'7$9'7"6": -:"04)?+4A.6=+0=*.J79C00:)%5%!4&#;+,>(-A ,L#3X%/g+.l.3k*0m*2g$,h'/g(1]&1Z-4Q,.Q+,X02V13E)+K14F*1C(2=%*@+'J7/I86@.4A0:]L^ӳéîñ®Ʈ̰ЯЮЮѯӱԲճҰԳطԳѰұյԳѮݸڳծЫЯѲаʩӲԲմ״ճӳеʼĭĮȭƭůɽ̽ɠ^X;=16,2+5/<,;*<-@-C*B.D/B-?.?-=/A':*:&-44---5,<*@2G0>(/*,,()$(%.-0/++))&()-&+#&-2,49C5<,-,%.%,%|&#-/).$)24*),3&0)()38192<6>:?=?.40@.C5F-5&#*#u%~.(-'-$7*~0&|&''*(',#.!-#2/)5.<-<-<2?-4%(--98|0,{2*8/6,4-4/42.18?4@2@5C1?5B3E2N/N+C.@3E7M/C0G,E4F4@1@8P5R4M6F_*1;%'2+-3)06)2PCQpd>0)́'&Ѓ+*Ɂ2·?ьHՖXЙb͟n˥vʬǴ|YbM6>-/4*/-+/%+4".,$-#("&(&*+$++!))#(+.,32-7233-43-251.8411+0.*1..-0/'1/$2/+203/13/3/1.++%!4.)4.)2+(1*+*"(&!&($&& #-#&3)+-%&% !!)(-)'($ (!+! -!#/ "."#*+)*0.?DBHJGdlc{{VeO#1&$(*(+&//&01$-2#+2&*-(&./)))*,-/!)#!$"1(2//0&/,'))%&*&(*$)(!(+#,-#/3*6%(&"&),--1*)))%'',,$3%""9#(xHVcxh~j{mx`j{6FS!36!*$)&&1*,*,5+5JDSxvİưRUL.0/*,,#%##%%--1;9?539hekhff&+!!$!&"%#$#'"" !!"! ""#% )& , )(2*#3(%0'$,%!&' #(")')#"$"$#"#!+'$2/)50&4.%'3.&201))+ZbQʣǛΦkvu5;@,-2($(3*3-!3+-1;FB[gcfromwxmswjlsgjpnwynz|cruhxzky~vqzqy{w}y}w|w}ozo}vyz{}~v{|a_l'$3"&*(-)'$")$))%"*$)"!*&#$" "! 
")',)'+'&&&%!')"'+"%&!'#$& $#!$!"$"""*#,#!G?@{nu}i{pVsE,T-C" *&)bdM}{lhhggmxhr`gtpq}geokgqjjuekubksY_hWYg][m]Zl[YiYYeUXcSYeMT^KPYMPXLMTHGLNLQTOVPJSF?KC;I@7G7-<6,74*30@8@8;2:/?+?/@+?%7$-'0*;%@'E,F*@*@)@*A$;)?1F7C5<5B2G.I,J/O.M5M3D/<1C.F1F/<+<(?)>,@3E(90?#2'9#5(:+=);'9+=(A'B$;%:#5';$:-B5H/>,9,80<'6!1.42>)E'I$C(A'>2J2I2J1J2J5J1C.</D3F2>0?.G+F*?'2!&gDF('E%(>!"8&!6"9!<$!;#!@('@'(8!: "8 :">$&8!7!6!6 !5"#9&(4#3"7!$4";"(:%-5'2- ', 6(&2#6'#9'%=),8(+7*,5*-4&+8#,?".G#,T$+b%.k*2p5:f15f-4i)6Z-Z,5W58P/1L(.N*2R3=G+/O24Q2<M.9E)-A&'Q6;Y>KG4Fz˭ƭdzҹض۸۹ֳܹصԱάճЮִԲԳβѳӰͩϬԲҴϮ̪ǥЭӯЯδϿůɯ̴ϽǮȯǰʿͿÝcU?;.5,8(8$7(;/?0>.@*>3F?P3C+9'6*;)?->(2/2''.1091B+>(;,<*6(-22:;8A08-4$-%3,@%<(:,=$9'<$5,6&'*(.*" )(*+&)+0',,)%$,4.;,<2@2=6A2=/64</@&=(=1>',(*$$)(*(+$2%y-!1*F<:,z(-$'(,4-72<-817,-&&+-}'+21~4-9,:*</6.62004;8H8K4F,<0B7F3C+B*B6J9I/D-G*?*E/M-E4E/?-C/C0?T$3;'/3''3"#K4?gPcrI~*{z&|-~( ʄ'Ʉ0Ɍ<ΕI˕O˛]Χpʯ}̹z^nQ-5&*+$-*',$%0#),(,(-!(.%()(,007+%/-#+.((.0)31(/('/'-/'+:33=66@8@B:G2,4;420)$1()0*/0-1-,+7-/1&(0(('#"/),5+22%1+",#!%)$(*#%' ,'$)'#**$%&*'&+&$&'#$' /$'6*/0$')#"+,+(+,,+.747>@?Yc_mvxëì}yCQ>#-!& (%('#*&!+$$,%+!,#+!&("(.',*'%(+",-&2)-3%//'+)'$**'+*((&&$ $("(,%.,%.'$)$"(&#*'$%)& +'#' ''",+%#g?HRhYscwgs\cq<=H&*4#.$ +$',-.0849D;F~tnq_23,/00104'()''+().--4UV[71<""#&"&%)#"("!$$$##!!(%*$*!.$.*"0%!-&&/!$)#%*((+&#(+'("!%%&)'$$-&"1-%22&9.$,-(!-++*++bkZȡ{EJO,.4))-30:*#6.*0HKFfpictmavpatqfvuhtugrrkywiyxm~~hxyn{}r}us~xuysvw{syt|wv{}}rvtTPV(&(&)"*-%,((,$,! ''%)#*"#%# $"$)&()$()%*% '##*%(+'%&&#$)%%'&! ""!# !! !"$"$ 0-)]XQyxzvkUw)C#:#A?2|we|yvmo|hr^hpfnqhnmcihdkmdkrejvddu\]m]_m[_kW[fTYdOU_OXaSYeRTbPP^RR]NPVNSURX[KPVNRZJKVEEODBJEBGD@B):&1+2.7,:.?->%6%3)4&2(9*A/I+D/>0B.C-A->/@0E2@2;3>4D-A.F/J1M2L2D/=2>0B3H1F0I+E,C/A0@&6*9'7*>.B+=)8'5+7&2*>(@$;%:%;"9!9!9,@,?(;):):7H,>1?1C(D-M1O7P+@1H-J4R0N4O9O4D5@5A1>3A1D%=+A+9%-q#)RC I*,C%$@(!8';&$A**D++B((>##?"%;";%:'8$223 :%*4&9$$4! . 
, 1!$<))E.,5" .##)0$1$2& 5+%9.+=#,>&/:(.6*,1&&4%&4 !A%)L$+]+2c,3\(,Y+-[-0Y+.R&.Q,3P05P16F'.E&0H-9N1<D&-F./H12I/4M0:O4=N6=Q;UۿĦŻαյӲӱά˦ѫШ̧ЭѯϮմմͬЯӸպҵбͬϮҳմѰԲѯִִΫʩϵíDzȰ˵Ͼīƫìν˻̷ęjaEG->-?,>.@'; 7*D0H5I:H9D4>.:,=';(= 7.D*:+74A7H.?+;4B,8-706.329/;5B5A'3&4+<.A-A'?$>,H(?*:2:+0*/065;<C5<.5*2-6/>2E+B(>,=0<6?4@.=.@.B&<.F-D,<+5#(&'*'*#4(|1&~-%0&z*-/%142@:G-:,8-4.+8/=25)y/%~5.;5}5-@862:=8;;G5H3I4G4D)83B7J0E,@0B1C.D2L0F.C/B5E,;1A3Cw+7L$-<#.6'/8&,L4;dsq\yDt%vvz(1̓1Ɓ+ˊ8̋BϕL˜RşX˭nīxxr@R:'.#*-&*,'*% ",&(0&+/&*,')((*)(..$.8(29.40-.,('*$'5/7/+10*.5.02)-8192+54)./&%.,+%()0036.44+./&()!"#* %*%,&0"--$,1(.)"%$ !# !)!&-($".(.*'+" "*""/#*0&-$!#'&&&&&.+..(.3-7@>HBLRXdeĥt1=*$# " & #&'% ((!'0&)6&*0$4'0'$.0.2,+''(!'&!-*'-*)%'#!( "%"$#!*%$(!")!&.)/,*1!$#"0")1%'.)$##&&"#""@22OY\p\sauKZ\*5<"!."!*"')#+4-56/5OJRvr|ũȯëʴ~8;3(''(%+*%-$!$0.-))&HHIxx{d_d#$"#"" "!+$+-%/$!&"#"%$"+&')!(("."(&#*$$("$&"&& &&"() #&'$''#%.(+3.4/*4,*3)).+%//(/+)'*-('/$am[âæ|U^a',4&&/)(1!*,),OKKlooiusbtq]tnbype|sgzvgzwewuk}|m}}m{|q}s}o~xqzo}wt{s}xv}yy~q|v~}~z~w~zKHL+$&(#'%+(#)&''')($&(!%*$(+%,)$,('+&%*%#+%!,&!*+%(-'$*''"#, !) !$ !"?:/une}z}zk@0W."I"2+&%liZ}}pu~eksfmrpxwovqinmben`_n`^n[\n]asU\lPWdTZdTYaR_gMYaNW_RXaPS^UWbTUaNS^KT]JS\JS^DLWHOZINXHKS-<+8.8-7,8/=0A-=&4'6(9'8*<)<0A.:,@1I.C/?/A+D.B0=2A2D1D/D-C-E.J6M3E0>,;/D0I4I1B/A.@+?';';*@,G$?(A 6,?!2&64+B1I/F'=+@+?&:(= 6"6&8$5):(8-?0E/G0K/J1K4K8M7M6M0I-F5K7H1?-7,<+D1I0A2:&*qQE;518!>$ <%'?),>('9#9!6;!< &7)7 (7&'4%$++"-%.12 6!#:%(:%)6#'2"%7),9),. 5%&2%$4+'3,'E55@004)'4,(9/+<.)9$"; $D$.T.9X,6Y)1\.2W..R-*L./H+-G(-G(/E&.G'1D#/K*7G*0@-'B0*H04H+:J1>J7@|b̽аĦǿдճ״ٷӰձհ˫ӺؿԸֶӱشָշָطִٶ۷ЮʪѱҲӳаӳҳαǹɴ;¦ȽƶdzůĦƓicGJ2B-<'4/<+>)B*H(B)<,9,51<*9+A(?);.B6I5C+4/97G.D-A+8.422304136,81=4?(2093;08+8.';+>(:$0-2130226287>.508.70>0C+?(?)?3E4C8@39,;3H4E+:.B.I1F1A)2).++-*&#%"(&)*%!*!,%,1+=-=*<1E1@'+3,5&r)s'x-*}34633-548>:@?J;J1A3>2927+73F5H1D-A-B-A.D3D5C4?5@+8(4X!)=#'9*.:&-?&0X<GjwnW~>x*~+~-z(z#}&Ɂ+ā/Ą9ɋHϔVǔX`n³zXiR*=,##! 
+ #.$'5-/#)#%0+-.(*,%'.()0..'%&0&,0#*5.3,.1(&,-)043:..6++01/17425020)13'/.$)**+"&'%%)4)2+"#+#!-$#* !.#%1$)/"'/!)2 -5"./!*+(.&$,*!/*)+"."*&'%#-'$/'$0%(-"*#"%"+%(-',.(00(5/(75;B9ADMVVqzx~MU?#% %!%($!$*"&"($,#")"#- '7#-4%/&!*$$)&%)$!%*$).*/)(++-/%(**)+(#$*!"*!)"$)'+$(,$&*)"(. ')"-')$%$&%$2')jEMWaXgI\h;N;&3-',%(+'(+&(1+.GBF`Z`|ªFKD569'%.("//(51,230,LKEzyx3.:$,#*#'"$#"$!$%#$"&#*%')&*"+%)(!*($(&"#'%$%%##"$##&!!%#',$0,"01)6.,4&*-'&(*&)+,.'-'1=,snȯưȯz}doq36B+%4*$.#$'.1-KMJfkifnnfrr^nm\qm`urbvycvygx{ky|kxyoz{oyzkwtl{xm~}mon||jwuwsk{vt{w|owvC@J))0#&& %$! &$'$!%#$#$(!),%-"$%#+'$/'#1&&#(0-,+(*$".&!!!%!!(#^VK|ygY~1#M1'E.*-EG4os_x~~rs{fjucjpdkkilpfesbap]`lZ_oX^qV]pTZj\`k`djQ[cOYaPZ`T[aLR[NR^QScQTbNT^IQ[NXaLV_JS]JQ\JNY+51:19,40;+9&8(9%4)8*9,:-;-:1>*8#8)B-C0@/@'@*>1@1C1F4K1F0C4F/H+A1E3F/B0E0F4A3=1>+>)>+@*<.C+I"?&B&@+C%<(=,D-G,D.D+?,=1?+<);'8*8$1(7%7)?+=+=.E-F.K1N.H4I<H8H1D,A/F.D1F4H)@%?*?)3%'}]L(!A$7 /4!;$"=#"?(+=(+6"!8% 8&15"4 !8"(:'+4&&1&%0&%* !*!,/!2!;")<"+8"+6#+8)32#01+/%8#*3 "2#!7*%:0-<402*'8.,8,*7''?+,>'/;"1F)7M,9U0:Y3;[59T/1N--E'(E(*D),L-3U1;K"/D %E+&E/+C+,J+2F%3K0EI5OϽбȬéɿɳҹծկ˧άԴܼϰĿϺԺѴұҮҭЭѯӱ̨ѬΨذܸظյӳֶյϯͭʬξ˳ͼ˼̷ɮäĠƏkaLI6:.6*6+;.A&:+>+>$6*;&6'8.A*>'8,48A/5*),&2-,-&42@'.''.'/%'("*'%"#!$#+*+*4512(,-4(0'/6<8:**--8835.3*3-:*:'7*:5G6J,?.?,;(004)7(:'5(5(:-E)>.?$0)148791424,0/86?.3,0,8#91>1@0B/>*1+)2(0&},'|,-|.-1*4+1/;CJVIW;G)/+,42,,,12=/?*@,C,A-?3B5@*71@*8e%1E#*0#'&%'.+.K9B|]j|yV}={+y}y|*}-~'~)dž7ƉAǐNQĔ[jwoTXE08* (%(&!*)#)"".#(3'.0),0.-,(',"'+),+-." 
".(*/-.',+-(,1,21.4.04),../-01+3.-9.55*21'0.#,-"*,#**$)+"".$#*!.$%)!"+#&% 4)15"24010'-#!++&1- ,,#".$))*-"+1'(,#!&,&'*%&+"()'1$1(('&*&*,)164<;;BMQUdjjz}wuP^R#/&!"'!''"+#)!%'!(!#%!($$*&&+0)04&.,!&*&()$),#,(!*&)+%0& -1+8*$0.).+%()#",'%)&$%%$""$!%''+!)$$&$' 0#)Z8BHZFU}6EY"3A!31&1&')$(((+,,.0OPSȦ[^M551,)/-*2+(02.9,(4766zzvGBJ*,*,()& &%%##'&#&#$%#"')-'0*%+/+--)'*&#+&$&!!*$((!,($--*2%#+)(0+)1'%.,)++((%%(&+(8B6ztª˯py{>?I+"0+#,'&&'.$GPBlwjiukhxpfvsbttex{dw{`swduxlz|lyzlxxmxxkxtiytcws`vth}k}m{o{l{q|zwnqnB>H/'2) '% ###'#")+),(&))&*'"("#,&-+$,.)0)%,$('$.(%.&%+%$(#!#" " "!*%sl^zMAo0&P)%5*+EG/mpZ}jowelvdlrdkpafqciu`it\fqYaoX`n]fqU_fV`fPXaY\hXYfZZhTVbMT^JU]PW`WZeRWaKS[NW]NUZOTYNPV.215/2*/.8.=':%;#:!7#8+=*:-<-;*9*A%@)@+</@0H/?1:4C2E2F0@1:493?3B7I6L2K.I-I4E4?4D1E,@.>.8(1*<)<):+</?2A1?8J4H8L1E.B(;/B,B&>#6)7,80@.E/J0E,?.B.C/D2D/>6C.>-?/C0D:M5E3@8F2E1C-:%)_ J#FE$#9 02 6!@) >%!A($7 442/ /6"4"-)#&1!"6"5"$4*)0,*1,+4'*:$,9#,<,4<)1@*1?',;#&B,,9%#7'(B25A/4D08@,58%08%06%,;+-<*-B,2D)0M.6S09V/9U,/W12Q23F+-G+/I(/P*4M,-K2-F-3M/:X3;Y5>I-AeSqϸʷIJžȿħˬϦ̣ͦΧծٱŝͬѱұүֲԮҮԯԮЩϥҨѦЪήаںֶааέϿǰ˲ҿ;ȶ˰ɧƟǜȏlfOKB@;B.?)C#>4&9+@%=-D%;&7)619/3))#%(),),'--,2{$$-'/&4&1#* ,%0$1~,*,+#&!!$(#" !""%'*!" %$*','1+1/+-+0'2$3*:'80A!1)8'.<=+4$1&1'0*70E,A):1>0848;=4600*.+70@)7'2-<*>1;/;*;-9794-*)!.1~+/,+3+90=>BPPaFT39*)2+6/0...5:7E/F+E+A.=5?>L6G&7I(4'..&+;'.6!+G7AzfrzmL}0w"v#v%z*|,},~*DŽ,Ņ2ƋAÎMV]gĤ{dzunJK?41)''!#!! (!#+%.- ')$(,,0150+.1'(1#(0#0$!++,4+'/5*23'0/&-2$*3%,3)0.(.1/197553-3,(5,*('%.,.-!(2!'1*)!' '(#,*&,(%' !&+ $0%).$*-#-1-3 1,-(!*.&,-!#.,%,)+( -/%20(,1,),,'//',)+!#0 +4"/1$*-'!*(5*29.:1*676?DIKWa\{~NZP1=2'2()0+ "#" &"*%!*)!+0!-(0#,$!'/"%*$'&&(*(*(#"+)"#()'#-%+($)%%+#$,&$0*&5($1((-(()%$",*%&"("-$$,%(.(+(#$+)*&)&% B,/jBKm;FZ38A'+3!,3'7/+5%))$#%,(-HCIèĤelR02&,))% %/,-*+&--196EhhkuvjW^ycnO>C,#!(%&!!##)$%'(""*&)"&'&)#%()+)+-(*)$#"'#"1+--!*)"(&('&"# '')(")/)*)(&*+.-01MRKzīǩá~n{yKOW/,6,).(+)!,"CPBsqrrqus{k~zl|j~zeyugxwl{}lx}ku|oym}vltkvivezqk{rruuvrvtz~|t}gddZ:78("()%&'%&##)&%-'%('%()%*% & & '*#+((*%'&##"%$$" !*&(("&#&" ! 
& %!:5,|vf}rzwue5$\%;0&-7/%OI:nnW{szktgqyhq{fr|_nw[hpZclYbj\fmZhmTdiR^fY^j\[kd`r[XgVYcU^aQW`YYjWZhPVaQX`UZaQTZNOT0;+<*?);)4,6(5(6+5'0'6*C&D%@)?%?'A(A(?';*;->,A)@1I.F0F5I/A2B6E5G3H3J3J2E/?19/5+9*?(?(<*9)5*9+:#0'5'8(=+B4H/@4B1>,9):*>#=#B*@(2-0*.-80A/G+D(>1D,>,>'9+?,C*D.J-H'A+@,?1D0B3?-5 &o%U"F"<&=!,6'1!5 !9!>$:"= &:"5/6%0$3"96#/#."-. *-116##3 #4!'6!*7#)?.-@20E876((.K7:B,17$)9(*9)+6&(;,0:,3>0:;*0;$':#*:$.:"-A%-N/2N-,W3.\94X78S2=K)<K*;P06J,.C&0D%6G(5U8;R9;T>PuʿʿĽſ»ɯгбҲӳԲֲײְ˨ͩӭЦҧժԫҬЮάЯԳֵͬήԴͬ˨ΫӰѰӵԷͰʶ§ãȩϴʴ̫ĢȘɓǑurKO9E6F,>-B'= 5&:(?&>0E(=#6%001*&'&+-')+,++..)(+108,1-/x''z*+x**{,&}+++/)*+z(#x*v%y'.&)#)&&% (%-"( ($('$#)'/)1'-&--$&+-59,5%(5,0*0.++,,/0.2*//4-2496801-+9=3?2B1A+;/?$5&8(90A,:+6&,(,0358/5%.(3'3/77=>C:P6K!/,0;63+-'-+275?6G1E2E2C0@':f)J",:'.6(0G6=L9>_FPiw|gyFu1v$x{|!|.|3{)+LJ?ɍJȓRŗUXƣm˫ŷamY7=424+:61/)-$'# '#!!*&!,+%(,&#%#1*,3'+/&)/)+)'+'&,+'0.&1/$/4'13&..%&/&(3(-3(0B9?856,-)*.").##)%4$14%3+"+((***)-*'.*($* #, %)"/(7'37(42%0/$.4(27)34#-7'15(//$*.%,,%..&/(!%(%#*&(=.>8 960:#.0!#,"!5.35/2/**0.*5916B7RcWiwo]ii"+-*-1,,/2/2/+.-'+)!(* (,#)(!&%'#(&*) +-!,'&)"'+#0#/#'+)+()'('''%-'"1".#"++.-),*+(2(&/(',''('(%$& #&%'!#%#$$((&0'#-1(0A36B028&+2&()$%.,.*(+)')'$%(!(E=Eu}f+2!,*$+#'-$**&%#&540icfF@F!+'$+%%-)')&$$!%!& (#$" %(!$')()($$*%".*&+(%)#')$)&&*!#'"'(+1/'-(+0#(+')"$& PUKåǩǪ~|t}yX^a149'*.(,,(0'@L?bqej}qjvmzj~ui~viyh}si|okzno}up}|l{}i|yj|vkxtpzvkvpetkj~qtz{wffc2.3!$#))0$',&),&'' &&$*##) $'%*"*-$,(#'#!'&!$&%$&)$% !)#'' VK@ymrnjo{h[1R5"D4,1*,EJ.ciKpqzmufmuovxmuv`npWfk[em]alY_iS^eR_cU^gW\kWYlTSfVVbUV\XVdWRjSPdXWfWWbUU_XXbWVa-8*=,E&:.9,31=&9!6$4(:*E#C$A)?0D/C+@.B,A-B0E1I0K3L.D0E.C*A.E5I8M1H0G3H6H0A.=-=,A,E0G&9)7*4(5,9'2)3&3&7$8+>)9$2-:-:*8+<"4)9):$9-C0D(9+;*B0K,E/D&;'=)B(B$<'?.G1G(;-:,6*9,8.2"${` M @7 7#7&8 (9 &666!*3!+4!'5#%21..13#$3%'8#'466&"2#!3!#2":!*7(>(3:&15%':,"4&5($2$$0$<(1@*6<(06%(0 1#.!*1%$5((1#$2$)8(0=)1@(-I,-L+*P+0R--L*%Q//P09F%3H)3E*0J2:X?IG-5E/2D38oas¶ķɹĽĽɮг̭̬ήԲϬӯӯɨ̭ѭШթԨիΨѯέϯ̭ͭͯгѲ̪ձѭӯѯѳϳʯ˷ĨʧѲɮɥţǦʖ̒ǒwvLT4A4C,<2C3F*>(<'?+B*=,<-:28+*#% /,-,/.{*'~-'/(~%(-3-224/+707,y, 
})2%-#,*)*|)!|.4$~/$.%)")%)&%$+)+()(*,!$''/*0'y*+"&!&!-(z%|$)%,$'*"(!%,),**%/**#*#%*$942/&*).05,2)22;-7*6*6/90:".,;,>+99C)6+:-='4+5/61C6G,543:1|2&/&),+9,<0A.@8G9D(2\".H%.F3:=08?.9G/=tYg|~sWv=p+o's%z$|!ys!{,21ŊFɒUʙ_ʢhĥoŬŰūYgS'2%,/*-.'%$#""$ '(&$.(%-*'&'$)('2(+1%)-&)'&)!#&$$*'%-&"+*%-.(..),,+(5322-0I@GI?G7/3/)(1/**,%*(%,%)1'.0'/.(-'%$&& $#)'%-)+.(-)"*%&/%38.B:2E81D7.AE7LH6LQ;TO5QP:NK:HF8I?2I<.E9*<5)41)4) 51'?/%53(1, ',)*(176=1/2&$#,,)1307;8AGDdmjeenEAM5.;4*83)5,%0+'/5+42'/1&.*$*(#)'"'+$*&#)#$+().%)+$5(3*%0&!$($'&""*&$++(+.,.22,*.$"&*(,-,.&&' !""##"#"!!"#!'"'+$//'/4,1-*+)-,-+-3+/7-1-'*+,,$,+99>zï}<D2'+)'### %!!++'\ZY}pns2-6#&"'(%($ !!(%%"!$ % $''+%"&% #)$%)%$&'##% ('#'%%%$&''+$&*$&$$#)(. &*"),%loeƥŧuzv_ad428*'-.+/&+%HSIgumatmdxrg{unxl~ueysi{rk{pkymo|sn|ykz{n|{mxvippkpokqokwrs|y{Z``## '$'& )"%%#(""'()'*$",""(%%"(&$'$$%### " !%))(*'"" "!!!$&!)$%!0%'eXVspilitrz~\Iy2E'/&'%..&6:%[eEwqwytvqq|jou[joWho]hp_em[ciUbcUf\Wf_TbdOYbQYfV\iNS_PUbTYhTYeW]dW]aRX\UZ`W[c$.);)?1D+5-3*4$;!> 3+<.F)F%?)?.A-@*>0D1E.B+A-G1K1G5G9J:O-G.L0J7O4K1F.C-B+B-G/K2N-G*@#6(8'5&8$4&3&1'3&5#3+:)7'7$5$5*9*8.9+2,9,C'C,F%:+;/D/F.C+>.C,D*D+F&9';)=.A*8,6,3&4'2.0! nV!#F%#;"8! 6 8#;(7%5"/)1#(2$':*-2"#6$"7$ 1 ,#*"4#!46 "5$%,"!1%'2!)7#.7!.9$.3 &3$!;.#8(!7%"A,/='.?)48#/9%-4!%19)%9+'7($6(%6+*1'(2'(9-.<,,>*(A&&H**A%+B)&F-#B'!I,0J+4J+2H.3B-/K74bNLfRY[K[ĽſĽȭϲӴѱձ̨կҮӲʭëѴѯЪլШͦԱӲҲԴб˭ӶӴӱֲ׳ճӵгѵʸ¦ĞŢǧͩʫʍŐswNZ6>5>5@.;'8(<'<&>%;%7.734%#'$#|(y'0)71z,${+!-!w#)$|'$y%#}*%y)}0}.}+ /#}%}"!&(+$7#~0w(~)+$&!~#*$|&!|%"~%$*+&')'( )~+#2*z*!~)!|"y -!3*+(%|#(0(+(}&&-+$)4(%("%$*') ,$*$'#('.-,-$*'+"$)*.3,5,8-95?+5-8,55=7?/73>9@762)1'1+.1)3+8,93A5C9Dq$.N#0%'6+.A15D29SBKkyyz}ctAx3v&w&x)x'y(z*|)}'~+
<ANȖ^ȜiţqDzż`jY,6.)/.***$% !"!%'%.'#(*$!2('/)*)((+)*3*..$))%)&'+%$*#"*'%/&$-,*0+),,+*-+'*'&,*,1.3,&,/'+2((-$&*#(,)***()+'$%")&'"! /)-/%/-+:'4M7BsR`rϙԚٚۘԔלҚƎskUtkEbJ0A3$+!!" )'"4/+:/-61/3*()*%&( ".(+?>>`bbz}vkapTHYD8I:/@+#30%11%.2)/,'*+'($ -'%*! +# $# ),*,*-) (1(1&")$"+&"*/*+4/)4/'*(#-.0*,0+**'%%)')*'*$!%&#((#)# '&"(-&+.%)-$(.*.)*/%)''"&(0+02'.3,2--0=DEsv}īêdzGVD&, ()"&&!-*)(%&*&'`]_~z<3=##! # (&+'&)$#$"!!$## #"&##&''((&''#$'"$&#$#$#%'%+**+)**((***&%'%$((&-## ! ('%330{|tʦ˪tyummqA>E1+24-2+.+CNIbqlcspevsiysp~un{rkztn{vo{uozunzvkywk|{lxvqzxgonjrqvt~txx}HRT $&%#%& #)!%)!&*#*%#)!"%$'#!-##+"##!"!!"!#'*$),$#!&##& %)("(% $+ #("(#:01oc_ifz[_frnzz}w^Ik8*G%!6# ,&&(&*HP3x]s|v~rqyenu_ls_gq_dl^fjUbaRb\L\WL[ZP]aQ^eS_hWbmR^hP]fS`fUadV`cU]bU\dQW`)0,;&<.@*4',-5"5'<.9.70A$=)D$=)E$?%>(>-A/@)9&<-E4G4B7G4I.L2S-K/I0E+?+A)A'C*F)E.H-E+A,C-D)B#=#;+@%7 1%6#7*:+:*<+?';(9,8,8&6&5%2,8(42@0@.>.=.;1=1@);)?#8".#2!3,B0F0D3E+@-=*0 dM!'=$'02 5!7#:&5!46"!8# 46""6"&2$1!.3#3$"&&. !4'0*/#0.)%3-)1%)3#,8%.8&(2!3&7)%:(&8"$8#20. 4 &0 4!#6$&2 $<+2:)2<*1<',9&(<+*;*&<'#>'#>&"3-!3-!6,$9'+<$0H+3G)#N3/M65@) :"E-:r]v³ǰžDzDZѶβͮҰӮЪͦҭȨɶ϶׳֯֯ӭѰӲаԴҳΰԷϰЮ̩ӯҮЮͯǫ¯Ɵɣ̨̩ɫɥϖБΖzUb<@7<5=/;.>-@&:,A(<*9&)%w"*'z!}(}+3(5+-#/%1%3"8*1)|(&,-3/~-%|,{*~+|$&$-1$"'|,.!1')!& }$y&y'*, {'*$*$|%~&,"/#~1${&"% ,${)"}$#z"}%~%*"**.3//z%w$,/#|)"{&&z&#."x$,#*%+(+*))(,%&$(0$,"( /-$("%#$)(""##!&*./)$-&515=(=,9569<3: *l'K+;)6%'%1%%F,.iPR|~wI{;z3v(x&z'y'vz&3ǂ4ƅ0Ç8NJPȏ^ɛaoʤtƫzƻȮƯZdI,3&'+,*+1%!%! $%#)*-&$+,')4-(."#)!$)')#")!%0'.,*/('--$0) -1(45-85/51,-2-+3((1&),&)*',,+0-+-4/.0&)0$*,(&%+ $.")/*"' -1)1/$-:'7V:O~ZoجѤpwlTXC5?;+@7)</'33041/./,(+$!9*-7&,A:=QSUs|{{rgt^Rb@4D</<7+35+0(##,)&.+&)%.&!,$"'('!((%(,&$'&!"%$ )'*2-54.6-#.2+4+&,.-/$%%('+('+'$))$(&!$("$( #(!+'*'"*,#!)'("(- &-(&,%%*015/15:8=ZTZ}JQI#("*+&$$ ''##" VTV_S^)("!(&&'&'%$#$$!%&!&&""# %%%#&$!&"%'%(%&.(*.'+)#(*&+'%.+(/,+-&%#'&$($$'#''),#'945{{uţʨx}v{~RRX1-4+&+*-->IGesrdtsdrqkvslwpoxqmxroyvs|{t~~mxyguuhyxhusnyvr}ql}}ppsx}9>C!"%#"! 
## "%"&" !#!!!+#-!))!* "%!')$&#% %%"*'!/++"+#$ ' &"# @:/tkYna~_^`kjwlr}{vm\|E=]-(>'$9&$0)+ ?B [^3d{wztsxos{mmyjguhhu]cnZ`rU[lU[iU^gU_dS]`R^`TagQ^hVajXajW]gUXeWXgVWi.!)&)3)>#:(:,6%7*@-:/;.B";&=#6+A*F*G,C(8&4$:)9/;/@(>1K0J0H0H*F0B/<->0G.H+A,@)=,<,9(5'7)<*<(.%;%I=&:'9,D/A/;'50?"1.'5'8,E(E'B(9-8-<(?2F3E,@'<$8"3+8&.%(".%<+H5P->/;'9.?(.zY B!B)'6$2$7"?('-/$%)+.04#2$"2#+2#(7(4&/#//"5-;#29!+8!%4 2 /$0"(3&*7('5$!;'';(+6#&4!$4!";)(:('4" 3&)5'*2"'7".;$69&15&$9)#3 7$&3"$:*,9+*6*&9/*60'7*"A.*<)/<+7B/8A((E.'>+&G.4R4FV:Pů̳ϳвɩЯ̩ӱԳѱũȼ³̲ӶѱаӳӵʮαѳԵұѲͲӺӴΪƣάɧЭԳϰѿīĠĚ̢Ϊåɠțřu{S\==59,75E/?)5.5*80>9@.,,$w(~,"~*z*}1:$w/}4&5-2/|.24:24320+3+/%1'( }&(*~)|&z$+)}"!|#"w" })#~*y#)#$#&{"%y$$|'#w &z's!{($~)&{#~&}#|"~$!v* }%()+&%#~%s
2"0'}% (+ )(()!)!*#&"(()+'&%".(*"(&$!(&!#!!(})#& +$4.3/21*+3<1B7E0=\%,J+.@107..0 /F4?u_f{jP~7˂.y!s#w/x)~!x&z39ǀ<ʈHƍSĒ^ƙd̤lɧ|enX58.-(*- ,.+,%+$4&,,!%8/1&)%"$%!&&%,%)/&&0)!,+#'+,)+4+&00,01.+1/(0-(-)*/*0-')/&'2(*0%)2)-)&)+,.*'&0&!&"-/'"$"!",$-0!1E-?jJZ|ԧվֺȠ{~QtZ7W='=8/;5152-,3+(6-.2+2217/497CC\kg~{q~\MaM=S>2C3-85376322-)3-&4,%(!"$ )('-**+$##.(/)$:+)@0.:,)22+7.#23(21'.'#0%$2$$/%'' #%#%#**-#/""$"$-( 0($-('*%*3)7.+6=?FHFLxpyȣ[b[.*(/%5(,5)/) ",&%PMJzzx~r{?)@/3/&3'!&#*%)&#%'%!('/)'."()$.$+/%-,$,/)0-$,-&1'-)'+%(+#'*%%*'")*(+&*"$CA>ÜȠxw~c]l3+90'13)+@?;_jfbsrfrvckrfqvlvyuxxqzro}sm}ylxpwvx}wv}tv~nv{rk}nusx>FF&$,%)!" - . )$*!"%&!*$*'!/'),&)(#%$$#$+'(+$&+"%.%'+#" #$$##$ !#TG;weXc_^_\_ddsowr{}iF@c1)M0!@.03%')$37KS-tz\lxtjulnqisniw^\kbep]_kXZf]_i[`iW`gN[aKZZS_^VZcZWj]XoYYlV_jN^b%'*3"3!7 5#2&.#4'>2C-?'@#@*B!3&;$B"C$A#7,>#9*>+:-93?.=,D(G.Q-D/<.8.>(B$D)F+?*9,9+9*8%4%6'7%/"2#9#9'9)8&5)8)9$5(:#6,?*:%5#4'=&<'79C2>*<.@/A/C+B)@$9%7&5&4"4#:)C.G2H->0B-:"'tV"%;?'$9$3!:!!<!%*2$&5&(6!#.--,6'*2$#9*#4$0#6)%1$(+&2#,8',>,-9&*1#7$*;(.<).6"%3!4!%3%,0#(6)+E98?4/5*"3( 5''5&%7%#@,/8$-5$)3(!3(1 !8&,5#-3!-7'0:*/7)+;20:*&;&$=*-;,5;+1A+*D/*<)(E/8H0CmWmۺDZƮū̯ˬѱЮ̨ϫѮִίƾŵǮ̯ҳӳֵַˬǢ˥ϩʦбʹκԻϲ¤ŤŢְ̧̦ĩɲĢˡ̡ΧģĒ|ĂnwJY;E2A.C&?,B,;%-%+'*""%!/*y'#|&%&$+"0#/v({-/ 2&x( |,#,#/$/&*#}("|')''&'$}""~"%"#~ ~$#}&$~(%(${"(!))+#/(*$x#,%w%v#v" +*,*&!)!+#x}$}$'|$)*!x"|(|+/~+$.-++*'*&}"}"#"$#$$}"#%(+.+,/.0.0+/(*($'((-*.).)%" #!+(83=15'909;,9.;j!)S*-A//7-.5)+<*/R8Gfp{`|B9ȃ5Ɓ,|#&}${ } ~/˂<̆@ʊDʑM̙Z̟dΨlɯsȳŶ]iP+2#+-',()+"(*&,%,$3',+!$-$'+$&)#$&& &%#)$)7033.+&%!,-0(&-3)-3**3*(1(&3)+/$,.#-)&'-+(30.1,,3-/,')0+.1)-0%*#.'(&!+&?-:rYkֱֿԿҹĜsUfR<I;*53'/0'-5,-4,,2,/42945>59CMR\qxsm^qJ<MD8D<09>2:6(0;(22 02#52)2-*)0/+212,,4/.3-.*&%"*'+1,5,%,+$',"),#0!(#%' "*%,-,-()"#&%)&,-*/,,(*+++15.;TS]mpv§fva*3'21,3,,-*0(%+'%)LIL|z~XJW0%3.*0$"! 
# '%(&'* %'#$&%$$#!#'#&,&+0'.,"*/$-4'0/%-)%*"!'&'-)(/*'/"%%!%'&%RUP}|ȩĤnnqhhp54=((.((&;D=Yoeazrfythtoivndskgsojyolskyl|o|~q}zn|wn}yo{s}r{{{~MLO"+)!0"&!!"+$"/$'%%%$+(,"!&&%,!"+%'*"$% !"&&(*)+%"$%""'%#&$$&$',)/$ )(!.,".aK>y\N\YW^Xa_bhfnjzy~yri\w<3N#5&"0)$&,%/*0/B@&XW9hgJsq_roiigkfiifhhdffdff^cdYbbVbaZ`bXW^QT^T\jU_oW_pTZiQUc*1#/"3 44&8(7%6 2)8'9(B+F,?1:'6)C(H(F*>#2(7*9-=$7(>#;*C)A(@)B*:(5*;)D$C&A+>)9':(>$:1'9/#8#2(3"23(<$/*8&;#8#8!6&:0B,<(5&<'B/E*94B7J1A1B,A%=%?&@%=#9%8"3%4&6!1(9(;(6(/"$jJ!%8 $8 <";$$<#8"-)/5//+ ))2$3# ;&'<&$6$$- #,!%0#(3#&8%'9)14#,4 '3!:!&<#):");(-6)*2&&9-,2'&2'%5*'0$!4#(6##6$>+*5#&-"0'!-$1$2#&3!-3 15"08%.7%)6',?,.@)(<()-"6)+<)&A-)=*.C1=L;Nǽʮ̯̭ȦϫײϨѬЭάæϿ̰ҳέѯȨƧ˦ϩӬЪϭʭƬ̰ع˫ά˩ǢͨϫҵϻǚˡɤǛĐǏǐʇxPZB>576B/A+;$.#!!)&u!.*|(&z""yy |& 0%7)9,9.1)/'1'/${({&|&{% }(}-y(|)y$~&}"$""&+#y!|#!#%&%|'* .%.%y#|)$/+}.#}+!|)!s|$ .',$z z!' |#.('()y#%}($*z$(#,0.3--$ +((&+*--$%$%(*')~'&~'%~($+${( {$"(*|($3(3(,%(( $}#~,-}24}0&34&58|/:`(/P*1<*/-%*6.4D4>V?Io{hQy6z-}*}%}){'~$}z#|1ʂ<͈B͎GΘR̛[Ȝcϥtѯ~Ĺü¥YiO!,&*#+*(&"!'#!% )!!,#$,$&% ,$'0',+#',)!)'"*&++'..,.&%'((.'$,6-/8.08.02(+6+04)/0%+0)(1.)32-./+./,&%$/,,,&+-#,/#*3$)=).qX^ٸվԻ־Ը|bHVJ6BD6@9.45*-2&-8,64)52*50,4HIMYe`|zvgt\N[VJV@4A>1A7(<7)=6+51**/.--.4//6....0&24*332--1((**'$2)(3(0%(''.#'. &2!,/!-'"*#&+"&*,)/2)1719VR\{uênxk,3$-.!,&$1&0)*/*,/>;>qlp}xy;7:%%$! "(! 
,#$& ##"'#*#'*)+($$" *()/-/(%)3/30')/'(/**# !&$&'')((*+*-*),$$#^`\íy|xqrtIIM))-#)$5E9Ysfc~thyumvtjqmlwrixuk{qm~rm~vkzyixvl|rh~sjwewoixoq|tz||~PMS&!/%,!#($&'"()",''( &!# #"& %""&!"% #!!%"#'*+.&(+#%(#&'&&(,(,,$*,#*)")3*0aH<uSJ|RUWc~O\X[zWP]Wehrszyzzz|y}r}q^sSAZ>/D0%/,#","$+$)&))==*MK>SQGSWHX[MZ\PUWMVZPT\ST`VW^[[Z_Z[`Y]dT[cMV_P[eWcl',$.&5&6&6&:!;&=%5+;,>-D)@,:+2)2*<)A(@*9%/#,'3&8&B(K$G'B*:)6)D,B*;*<%;$<%;'<'>$=%@'A%;*< 4!="3,8&79'?!.)5);&;%;.A'7*;*:+;'=+F-F%9,?)>-B-B%;":%?$<#<!8"7'7'3!*,5&3$4*6-2bC"9"&<#$< :%<(7&1$+6!#: 01,#* ''/4"7 3!5"'2"(7',8&)5#%1#)5(.;*-=()A)+=%(8!'6$'2%#5&":)'6$%<+.<-02%'7(+9('6"!:$%>).6&*3)'3*!/$,7(.;*5;'2>*1C04=/.D20E.-@,,<+-?/2A.1<),C2:@0?sdyȼȲαϰǧǥЫѫҫֱղʪŪïɽƷŰͲҲصҰʩžȨΫͩͨʥǥʪͪʦѰѳԷӴаЭΫʶΞ̠ǟɛ˖ΖƑőĈu}YcLJ8<3A%9)=#4- )v %y&'|,(|-'z*#x&v"z($~/)y+#~0'6/~/+22+*}&"{& |)!~*!)"'#|$}/y+y(~,#sz&x$$$!(|#}$'''*{#%|$#)%*$.'|*#/(3%/ -)* &( &'#$| ~#&$& &.')+!).&'&~##*(1+,&.)%"(%)%&"(%|"*%+#}(}+!-$+&/-u$x+|/!|,"x"y! .(13:B897,1$t/+X..G/0A)-@+29(0F5>gU]|e~Gv/v'}.w'tt!u&v$z%}3~C˃DˉF͒OО^ͣj̦vʥϴ¨â_kR'1#"# )'(.*)%$""#!*'%*'&3.00(--$+/($/(&& &*&.'$*'%*&$,,'15,46.24,-5..0)*+%$4.*4,)0)&4/+1/+()%,.+02/002,%.:+2fLQطؾּγgsbJU>.9++*(9+47*/2)*,(#12)09/JWMaphhcjHBMD:I>2BB6B7/662:42@1,;/*2-+)-.(*+)*+,*++0/-4+,1'.%'%%-&'0''3(*."*-%.&#,41;SJVscq}443+("4.%/$$2$1()+=?>ljk[ZW(%',%/-$** #' *#+$",%'+)+*./.(''! 
"%&( #%$'()%$+$"0)'*# %'$""!($(-(-321loj«¦u{uuxwNNS65;%,(.>3Vl_dxoivulpuknsjptlxyivpjxomztmzyo}{l|vh|ti{xjwylvwt|xy|{~LKM(%.&!)& "#)")&& $&!#!!*'&&"")$%(''#$(#!"$$%(%#&$&*,"&" $+%)("$% )#7.(fLBsPKwKQ|JVLVzKJ{PDTKZ_deifplxuxxrof]~sSrL3K=,;4$/,%,!+(#,#%%%-+$+'(+,.--78*57,=C7?G;CNDKVNMQMOONTUUV^]J]YE`Y(4'9&9'3-1-8$@*E%8(=-D+C&=*@(>*:*;);(9%4)3-3)7#7)<-?):0B(9&:'>0E,<-;+9'9'=!5':%9%:-B&8-;$2%9'7'3%6 7!7'5&.)7'>)A);+8%63!6*>*=,?,@"7*@&=+B.C)=(;%8 2$7&=%9!2$4&8,4,6.2 h"$J :7<!;(3&:%27%0."3/04 /1%((*+-+.!4(8#+1!$/$#4)&, 3&$-7&':)*:')7',/"(4#"?((>$*C,4>.0=3,60$.%/<%(@'.7#(1"$2%$3%$3%%6')4#(6#,8$09&/91 ;0%:*+7$):%(:'+9&18(5A2A]K`ӵíʭȭǪɫȦѯղүϭŧĽĽįð¸Ŵ˱άϩҮԳͯɫέΫͩͪǦͮɥʥԴԺҹгѭΥ̣кɕ˚ʜƜțϝȓɈp{ZiBL6B1=/:'/'0*4|&1x"+}+-1-z-$y/$z2(w/'v-&z1(v+{.!u&2+2.' $'y%|' {"~ } }'#(&|%#+(w#{)x'x%(%%}(-!&'"!|'!+"/$$++-",$+&"$ %!&$#$%"'%% %&//.'~-+,+ (!&$)-$))/*/*,)'|"'#)".#~.!v)w) u& v&!z("},&|*%v% |,&}-'u$4-z"''/1~56c+(Y.'B-$:.*A*,>$+L8>ugjZ;~,}0{6}5y.y0{.{)|(~.̀8τBωLΎQЗYТdͪrǮıø¦¨`qY//'+##%& &"$(#%$ #!!#" )'$'$ #0+,-&,/&/1&+2').$&5-0503,)-+)-+&.2'18/.4-&0*%.(')$"3/(1-).('0)(,$$5.00./-.0(%*I8BfmȦַҳնԸؿ¥yYlB(=2 .4+0;9710,31-*%"1-*&.(3F>Sf`p{y~oykZhZI[OASG<L<5C@9G4-:/,2)**0321151-9/(71*41*41+3&"(*'-'%**(,9+6TBPn`m{FJB!!+$&,$#( ",%+667ghi~E>F&.% #$!$ !(!+*#/#)(+%(,"$%#(%''(&#!)&,+%++#%+"!+#!)##&"#&%*$'371o{sz{wwntuUX]46>*161<8`k`iujitrdopfqqgrqdmqbmneqpgurkzxm|}m|k{wn|}s|t{uz{|{|_^['!&(#-#"(!)#$) &,$####"#%##!&!"& ",/.0#//,$#%604(!& !$"G90qQH{RLvGDyEFIJ{JExMC}WN|XWe^m`m^uh}uvkwkode~bdrRYkFUQ0?9"/*)'"0'"&,$&'!".,"-*"*(# $$!)*&%& (.$-5+%(".-*11-7;47C4<N;-C3F/>1<09/9/</;.9,;,?+A)A$=#:-A+@+@,@-=+5+0%3!5'7*7+9);4!9"5(8-9,80='8)?'5,6'6%:(@)D!<+B/?2<*1'5&<"8'6)5(; >=#?!:">"A 6(=+@(>,D.I&C"4'4%6*>$9%;%9&81$4&5&7"5#;$>)0*'bN#?!> >@+9&3"5$7%5 7 31)1#,.35!!-/.1 3"#1!!4%$3"A/-9&&5!%3%8#,9$0:(35'-,5#!:&*7$+7(*2&!/% /#"0 #4 '6#+2"(0#&1$&/ $1"'+!0'4!+8$.9&.4,!<3/B5:=.39,*<1,<32</>bOkʾȬȵδʰͲʮѳѱЯάѭѭƣŤǧʫʬȼíϵյղѭɦƤŧ˪ӱձ̩ǦȩǠ͢ʣȤЯҰձΧ˩ѺʣŎËËʎȌŃpwYgAV4I);/:-4*2|$/z#*~)*y('s%!q%u( y+!x*"y/&6+z- 5(w',$~*%~% '.#z-3%3(.&,){&"{'$|*'})${$|#| 
& $"($'"+#*$%$&', .#,#.**(+"+'',)$*%%%!%$-#(&$( ' .$,,+,$/.'1!-"0#1#-&-),)&1+%}$z"{"~$ -%}(, |)+&|%$.1-21*/$0"|, j)!R&$J/3<+.:&*H+5_@N}fru[{Ay-x&z0v4s-w/v9~7~)ʀ#˃)˄5͇DАOҙVӡ^ϪjջãŨU`M!)'#!/&*("&%#+&,,'-)$+% %.&&%+"","#4'.('+,.%&.)$0*&&")'#(&#--*-),/&.-&&1+'-''0+.1.-20(/)'5/3.*0'%-/-5,(0-'/^IQӱٻ۹ٺپϷZrZ8ID29;45502,&*/%,'$*&+--849A;MOKmpm}yn}ncqe]fWOWQHPPDOSCRM9OS<XN8SI9G=.;A3>D7@QFN\QZqgn{ŴGTD(0)#%#'&")& ,('406^_`fhh+,3'('(+ "!""'!!&$&+%'-'',)')'$"&&'* %* %),-.2001+*)"" "#""#%!#-'%GI@xz{~x}|ruq\Za1,8)&/-0/[dYerfdsp_oocrqfurkxyjtvjsukstnvvgrqkyvo~yqzuwwy}}z|bda "&!!% &")$),!*'&&!("!'$$(#"&" % "!$%!(*$#$() ((+,%$$#.)+! >0%Y7.b81sC=u@:xB<{JAwJ>zQHwSL~[O_L~`LhVpc|twltchxX\vRVsKRoDNj@I\8>P4=B2<&!*$' +(,#"$%$"*),%%)%%*$%)+-.#&%$('&'''&%&$!$%"%#**B+:+4-:,@+>2;060<+;,?+@,@.@.=,9&6)<)>+?(9-<'<#7)9+7,:*="?$D'="4)9)9*>%?%D":"6'>(@$=%<4G+<(3%)(+'3&:$:,;+1(1&:$<*>*=,C'@(8$40A-A*A,G'E.D2G*D= =&?(;%6!8'<1 0!2+?';'.'%ZC!4!. 6 =&;"<!B$'> %8!4"0.-4":"(7%1!5#11)/"."/*7$$7$#6%%4#$. 0"#. "2$)7(.8*)8+(7*,=.43$&2$6!5 ;&)?+/:'*5$$4# 2#!0!#8*+2"$4$&9'):')9'(:/#8+'6&,5$)?10>41?87:,>sάżȭ̴жѷԸԷֶַԴѰɨӱصشձҴ˱ͱӵά˧ɢ¹äѰͪͩ˨˪ίͨͣ͢ΧѭѰͮΰαóɪȟʙ̕ĉ}}w}{ĀuxT[>G3;0444}56z*1~!0&(})$z+&z/*y-(z+%+#-%w(%|.)3,0(~+%{&$~'(05446/9+w/ 7--(.(-}.!u'q x "$*+.2,/#$&**6&<%:*3.3+,()(,1;.;.8,7*8->&7*7!+!$%"$%'+.5%-#)$',+2001',#*#)+.00-,+-',$+&"'(*/-*$""$% "!*%)$+#615505*13.6/w+&`&#M$$E(*?*.<+.E14^CJozj{IyEx>|3Ђ8y7t7s-s'u.|68}6|5Ł:ˊFѕYӞeҧm̮wŴƩȧǤgs[ %"$-$$'!"% ,'*-)+'#&#%)!*,$++#'+!%5*/;/72&//%&1(&,#!,#"* * !3(+7-26/61+-0++-)/0-410/00%-)#-+*&&+$#-*#0<*6rT`ҷֻظֵֵֶڼ۽ڽھԺپҷԻҼҼҵٽѺѾͫ{ZgO4@E4?2+6/0;)*/',*/3.9614-+FDF`kn~wìƮLVJ"+# #!" *&,':41SLS|{[Zb+),## $&$/$"*'#*("*($*!# ! 
)*)!%+()/**.&$&&&&+,.!$'% '-%)YXSz~y~}xywnlxC>S+"1-()QWMdtg^rm_trbvsauoc{vdwviuvirtfpodskf{nkzvm{vowrrx~gnc&)& !!# '+")*$(&%% &$#-$#+""!#%,-$(&'%!'*&'+*)"!% !## #!"?1+]92a5,o=1n8+w@3M@RFRKURVOzPCzSC[Nc[plqyjt^jzS^rHRj?Gg>CbB?WA7H617,,,(+"$$ #$'"&&%""$%%* !&'*-$()+//%('"#%!"# ##""#"%1G*<'7)>)C(A*<+:*;-?1B.@*;)9(8'9'7(7);,B-B.>,:,7.403*/-8%6(>'<#9%<$<$=)C'B$D&H,K)E&<*</=-90;*6#1$9&>"9%8*4/7.;-=,<);%;(?%3'4*6)7$4+?)@-A*?&>$@#<*=.9'5< <0#5#4':)=*6)/X=2!,1":!99="#8!!9$#9&'3'-#*&*2 $4%2")+'*+3 -,1,2##,!+"( $*&.()1,)3((7&)6$&3! ;"?&!<'&7%$7$%;'&:$ 8&5( 4'#8,+8,-8+-:+,6%$7(-9*+=*3?)6=+9C9H`SjɮǸƭʰʴ̳дԴԴԲѮȦȦɧϭհҫҲβͯӱΧЩѩßêçɫɪ˫ʪ˦˥ΩϮϰ̮Ȫͮг˸ɮşǘϙɎɌLJ|ǁ}{{wwzwtoa[B=;695|31w..~05-7/0u'$v''x)*ox$!|( v#s%$u'%/,2./,8754*+}*&{+ z-"}/(20x'&v"}&~, y+x(} $$(,&2(3)/1313-71E0C(2*6'5'7#4(9(8/?.?':(:#4&7!4 1%1$,!(+2",$4$7'31:.8&2#2,;.<,5/102+/&+$'(*##,+*&' )"("&$ ""*,%'+.*1&/&109/1{*)[()A!%B(.<");%(E-0rW[}hK|:ǁE}?x/z2z>x?v0y'w&w/z<GʆFʌIИTԠcթr̭}dzƹÿɮɫȩ\kR(. & *"(-%"(!% *&)*')#!"'"'**-#,/&+,#&4+,+##) !4)-5%.+$2%-)"))%+*$,,$+,$)1)./(-2-51-5.-0,+**&$,((/*,,#(@/7~am۹ؽܾۻԷԶۻظٻֹԸбЭڵ״ճӳԳӳгεսѴǟ|lI]C/C3,<-34).+-+'90-2%(1+114>JXZhyu{LVC$,""$#"'$ %,$(0(*WQVzu}XVY,'1)#4+$8(#1$"'+$(.%+%"%"# "!#&$)%"'(&()'(&%%(*+%(*%!$+((\c]v||ƨäw|tlksOI^.$7'"'DJ?evfdzn`upewtatl_tkbuqfutcqljzok}oj~ro~yu}t{q}qw{|v,//&")$& !&#&&'!%(%)!# !%#&%!)%#)$###"&%!)%($%(#&"!# ''"&$ &$%%%%"!50%Q;._</oC5m=-l;,o@2o>4wCAyGI{JLvEFwFF~MMRQZYe[d`]dU]{NRhEBZ@:C5,1,!(#1*).'+)&*!!%"%'$'$ #!"$'')0$'/*-3!$(*))%%% $)#((!&-&/,#5'7"5)<*<(;)<*=$9"7!5,=2B3B1B/A+?'@$7.<,=.C,@,8-<0B,=#2%5(;1F.E%9"87!;*C*A(=&?)D'A*A)?-B&;%9,@&<'@&?%<%8(7#1%5(9+>-B/E-D,C.>3A+7(4(5#4+@+:+8)9%7"4"0)1)7"B&D#=$:&:(<)?*@,<!'U>89%7#6368!"0/1!9",5+,!%*(-. (*!(*25%, 4411/+(%()$,%%-! 
6#$4!"6''=,&8(#/ - 1#%7'+5#&6#'6%(1$#2(!/'2*%5)+:+1>1191.82.9-.=)3H3FQC\ά³̵ɫɬ˱ҵұӰԱѯѮЮˬɬѴαϯϬѲϳˬͩϨծױǦƲɵDZɲȮ̰ΰѱѰ̭ˮӵԳҭЪȧūĩșΗÇĆȈ̈˅zzxumeRI8/7/4/v)$w*&}3/w0+x1-t*'v((w&'../+z( v&z)%2.610+1,0)~("841)<.2&y&"0.3,y'x#w%x'('").#0 4%6/:6:04-43@2>58*2/</@*;)83@2>,;&9&:#5(8"4$5*8&2#.+6(6,</A*8)5'2'2-9,9*:1?(24>0:/9&1&1.9-:/8).02+,(+&,.5,239,5$2"10>'2v%.Z&+C&*;(.='0A(/G.2vW]v}c}By2x1}:x3|1y2u6q3s+u#v#v.ЁAʁ@ɈBƍIΛ\ΠjȧwżîïīɫĥSbJ%/$%-$%0 '&'!+#%0(-,#+* &) $.%'+#",$$,%&0)-/)-5/42.3+%,.#-1#..%.#&+%,0').0,1*"#0)*2+-0+-2.1.*./,1.(/3)08*/O=AxħаֻֽѳַٺػԴѰӯѭѯԴҲҭխժ֩ڭԨ֬ԯҮճնҶеԿ ofD]>.=:0:.'-0*-,'*2-20'0044@MFZhc|ƫvf>F1$+!& #$!!$'#'+&*KEJyvzWRZ1(8,"0)#("&.''%$!#%"'$!&&'!""#$$$"""((&('%#76.itjx~}y}~[Zh4.?!%2:/ZjWdyjaunduqcqkdqierofsscskk|mj|mj{skyssyv|rzu~xx{tq',1#'$ (#$$ ""$!#$'("""!%#('$,%$('%(!" &!%! " !# $& #$ $!"$!$'V*Q7-b80o;2k5*j8+k8-u=6}DB}IKt@EzFLyEKLPOOMJ}PFRT|IXxI[b>JT>D1)/('+""#$#$+')'"&'$('&*)*-(++#% #(!($)2+19&,4(-4&(4.-@96H?<INMYLM`?@\-?)9(4*4)5"1!3)>&<&:+=.>-;0>/@(A':*9*;*?)=,8-;(<)?+B&?)B.H*B&;&=";&@%?$<(<+=-=,<+</B.C!7":$:'=(?+@-?,9)2#.%4$7(?&>)@/D':+=/?+9"00@(<0G*9(4(4&3"0 /#0'9$>%A'@!8&<); 2$:&;&Z?9:<!: <#45 A*-<&)15"7$5&3%3%9!*3"3"5!&1%%/%"-20 )1..21.0#4'4&1 ((+.--"",! *+1$&*"0#+0#,2-8"29(+2(2,2, 1)&3',7)65,4.+(4-):(,U@PֽíƩ˪ϭͪձүѮάбũƮſİѼʹ̯ұЯϮǧɩѰҰӰɨżŭƮȯ¨ǭ̯Ͱȫɭ˭ЮѩЦʨæʰȚɓ}}{zyŁzwqmge`SMA84,6/z,#x-#v0$l)q) v*$u)!u*!s'y,"u&{+%0,402.}'"/*)#|#-+-&7*0$.(~-(}-!{-x)x%*!'"%''2/#2$2+5+3&/+6,;%5/=.9'0092<-<1B4B)6)7".(3)5/$6#2!-)2-5-6-8,9(8-;-:$/,81=+8-;/<1>-:,:.>$7';'?,@&5,6.4*1&0*4$-/7-6-9/<2<y"'J$):'=&1;)5<*6N7?agywYAy2y1{4w/w0v3v4r.q)~4z.{+p(=ΉEДM̙Wϡjɦ̸ýħ_jQ)/''*& .(&,$%(#"5111+.3+2.$+,!%."$0&%,)#0-)*('%#'*(/+)3,+6)+/.+..&+*%)%()#))++--(++&'-)'2.*4/+2-+*$'.'/1&27)6N;F̲ٿտپҴѯӯկѬҪХΟРզѤΠѠН՞ԞףӢѣФޱحҪӮִۺظԹٷǼ{Qj`>U?);3(51+5,&-+"(>586333;:I[Ycwuä]dP',$'"#'$'$#A;;qll`XZ3)-$%$'"!+(&+)& &#,(#*$!!"#%!!$""!(&!*(%"GG>wu{}}}}jtpbhn;7F%!'-3)UcQcugevofurgrnmvsluxisxftqi{og{ph|wiyulyso|rrux{{{yt~r*08!$%&%"$#!"!"! 
#""&""&!!&#!("%$&!!"''&'(*'*(& !%(#%5,&R.,a++l-,n/*p6-l2*r4-{=:v?={EDv@As?@yFDuD>qB7rH9uOH^=?U<<>0*73," '',$"'!"!$$%'"#)*.-/2.268>BBJOGPWDOWJVaNWjQWoORgSTbY\fUYhSXn)A+>,9,6.:):'>)B%=$:(;+<*91=-:$<&9'5(8-B(<.:08(2*7$5)=0B/=+9,?(?*C*D#<'>)=*<*;-<*8-;/?)9#3-#3'8*;'6(6+7$.*6(<'A&C/G3G*<(<(9-=*9,>)>&=)9)4'1%.'3)8+=+@%=(@%?+D%;)9'4%2(3$[:1/7 :!= $4337%2 /00 -!5*?'2<%.4$4#.! 2)$0#7&$/!!$&-+,,*,/!7!-/$*+%$(.3%%6-,0%&/"#.24 $;+29+55$,='*=*,6))3*)4.*3.(:1-J04E17<17A4?M8Jyվ¥ɼűȭ¤ġ̣ЩЭֶϳ̳ŮĽȴѻپַشӮҭѰгϴѳӱȥƷ¦ʯȧǣ˩˭Ȫ̭ЭЭȪ¨ȭ̞̕Ɖɂɀ}~vzlwgl^g[TMC=x/*|1+y+%}0)z/&u,!w)!z,"u+z5#;*;.}0'~/+y))01{**x&%0/2020/(-|)1!1+301)|,#})(+).,32-/&,#,&.(2)4+6+81A,<#7'@'8(015/62B3I1@'22;)/266>)8$6+:*6,4',&+(..70A*:)6%0-8*51=*;)@2F.?,;+:&8)<"3.<,6.5,2&/*6-<+:+8-:-90:x*/G4*#7&&>"-C+9J9Ds^h}yYD}=~B}>~4x*x4w>|CЂ>z/y1|7~.~3ǃ<ΒIԢXϧeѯzζú£ȣTbH&-'%(+"#'&"*((1,/*',&$)*(*))(&&!'%!*%$0*+("%,'--(0+&1/,6*-/.--4+-.'))'''%%5,.9/52.5,),3/-/*()"$,#,.#2*.O<N{ͱٽԾ־ӹԷάЩը͝ɗ˖̓ˎϏАˌˎӖғБԙԜΚΜӢРԦӧԪիӯѲؿǙx{OjT2LH2H6.=23=74=-&0/.8.3<6@GN\^lzxĭĩĦ\cQ$(*+#! &$((#!"&&(514pknZMQ0!,,- #'#(+%+-%*)$&+ #) "#! '&+%%)(('$'%)(+KPKw}~~t|wfikIBO4.5%) >K9fwibqkhtqhrolsslrzhqyfssbtl\ri[qnfvtrxn|pts~{}~y==I -!! $&""#$%! 
$# '#!(#'$$& &&%$!'%+ (,*(&'#" "& 0$G*(S'(^('f-)e/%k3)m.'u60n3.o52q96o:6l92qA5oC4W=4O>:@431-&&&''(& +)) !##&&*#+,&/*&1/-8A@GVW[VZ\U^^WbcYgl\ksYlpVilWgoZfmYbd^ggZciY`p,A+4+,/0(2$5%7-</=):':%:(:)9*8+C&@(D"?'D-I,D/=*4,;,@(@.E-A!6 8&:/>+;*="=#F'H&A&9-:.8'3*;#5#2#4%7*8)4$5"=#<%:%:#6&8+;/=.?#>$=%;+?&;'@$A%D*E': 2"7)=(7-:*=.=+9-A*A*?'6*3+3!)]!8).35 4 12!"0$/$1&/02 -,.!5&'7$'8!)1#$1&"0!46.*+++/1 +''',"%- $6',2#)2#*)&&,'$-!5#8&$6((2),0)*.&$,%&-&*5-1<11<-*@/*H32=(%D,0O4IhIJճĽȳˮʩϮ̬ʦϤΤӯӵ̬ɧİɰұϭͮѲֵͯѳظزحϡýƦʭȬʮ¤̬ɨǦɪ˭ʭȬββɱαܲИŊƆy~yxtmjf`g_WPAF{40x2%w0#~4+w.#x0 ~/$/(y,%w.&u,$y,%x) x( ~0+{+'0++$.&0(z+#x)v!&#.1-3~.3+-)-,80>*9'6*8&3'1,),,'2,:-7-61>.?,;*6,6=F7A8D7E1>0>+9(6*:+<+?)?+@*<)5(/-3(-'0';,A2C-<,;-=(9$9&?%?/A.5010218$1$8!8+9/0&%08(<)@.@=I17v&)X)+G./>.+9& >)#Q:;v`hUA7~5ǁ;727ł8Ā2ʄ7̓;}9x4̀9͈:ψ>ϋFՙ[ҥrȭǺN\E$)#"'."%% $('-0'-+",#$% )'"#%$((-"))'+%)&%/ -0$-+%&/-)++&/+*4/0+%))&(**(./)/+)1&,1(-0),*$((&-"-1"1L4F}ũͲҾּոصөѥΠɕϔҎ΄}~|x|}Ђσ҆щҏҕϔАҖןӞКћӢЦԬձԼέ˝yYnM6F82>0+6* +.%1,&256?<FM>QUTkkz«ŰůèVeM#-!&#&" '$% # $$"[_^}vss9.91 2%) $&%$#&%)$&'!&#%&&""'%)%%,"#+'(,('%*'1/(jlh~|}~wzipvUTa;5?.(-354]kcdypgzrgsqjnrjotnvzjsugqrgoqhnrjrrmytgyoaykowyvxq{sx||~~~}{ts2;=%$'' #! " "!"!-+/*)+)$!#!"!!%" $!""" !"& 6()S16Z")d(+d2-]2)[.%i5,m7+h6%p?-r>0k92Z20J//=/-620*-,$%(###$ !!(%)1*2=7@JHNSV[Xcddnpfnsagmbkpdqs_po`mndko_lmZlkZmkXhgV`e]dn1A/6.0,2%6'?'?+:/7*6*:)<%:&;,@,B-F-H$A(E-G(?*>)<)>,A)?,A*<&9#6)9+8-80>)?&B*E*?-;,6,3&0,8%4(6(<#7 /'1&4!8!<:(@,A+<-8+3)3#8(="6(<(>-E&@B$E&>'@*F!=#7#4%:/=*6(8-C0D+8#4)</f",F((5%$285.18#'5%2#4$+0 2 #0#1#4#/7 9"".!.$6&"?'(=$(/+#*$0&+/ 4"%,(!%'*3)<%29!/6*.8-,>0/?.-4&$+" ,(%2*)<./4(+7-05,-5++@54:-+>-,K68I1;iOeƮɸ̲Ȩ˫ԶұԪѨϬвǨàIJʹͪӮаάԭͩЭ˩ͩͨɣĻīɱʽǹĤģʨͪϫӮֳԴǭаȜʓĆ}wxztsmmdn`iTUD=>81w1"|4({0)u*!z/ {,{*"z*$}/+2.}-(~.'/%4&~/#}*!y$})%|,'w($|,&0.*.)//2|**|! 
!'-.7/:)5*6*5*4',*6+A)=-:.6-7.;2?4C-<0>2?.94>+2-6'2+:->&8!36#>'>%6)6*5,7",)21<9D/=+<)=(>*;.:$2+60423271<)7#3"4'335--16.>-D,<29}-.[)(E-+6,+?278''K85{eg}zbO?6Ł4|7z35ƃ>ǁ=ˆ:Ȁ5<|@|>~:͉>D̐Nџe̩{®ĩLYC&1-1'%*!*$* &-((-,&,$!,!!/%$'",)&('$/0,//00+53*2+#0#$/#%.'+,).,)*0+-*'*/-2)(,)(+)(*.-.*)*$$%(),,).-&-1$-N<FtȦԶԼؿּԹйкԿپѱѫФϝѝɑƇʃ}xrook{m~qn~oppt}уЈΉӋюѓ֚Β͐њΝԥ֬ҭԴֻҹչۼݼǨmQbI8E6-65-51+20-4+-1(.1+68;KL`ooƬɰHOA*-'%&#!!$ "& "(!"/'&RNNyxze\i6&;5*;()0!##'(%")#)'% ($ "$!%('-'&/&&,%%'%#!C?>yvw{z~z{`jhZ]_=;;4/./0*HVMbwni{ujvtlqtjqtgqrfqrjtupxzsy{lvrcondqvWdnYejlup}rzkwozs|vz~}p~p.88!#% !%(#(%##!% &)#%#%!# ""& &#)&!&"#!"#$% %)##2O%&\&$]$c*"o4+r2,o-'h-%g3)],%T)'; 0#!3)!(&"& #"%#2/3FAI\Yadejahjdpjfrncmlhqsgrscqn`qkaloejvcms[kl]olYig[hgakl+:-5/4,8'<*A+=,;$0'4/<.<0B)?+A,?+@+A(@'?'<(;,A+A)=(;(:,>,@%:#7-?)9(6+<)=(>1C.=.;/<(5%2+:(6(3 2 7.@&/#-)8!7 8'=(<'8)6'3,5+5)6*;,?+A/D'=%D$B&=(?(F*I(@+@&<-<.:0?$8 0.7)=*@+e#H'$5"4<:!9"5 5"8%?$+?"+5$%6))1!$4!'0!9 &; $733&)/:"'>$,9#*4",7&24$-0&1#7#'7 #2 *. 7$(7")3'=$0>%1>-1:*-3$&;,.9,+;1.<4-*8#&4#$2&%+" 4+*<137+.:))9%/[F\̳ǩƥɪˮȭͭШԬЯβƨƺɯҭҩΪӯШˤȣȥʩ¤ƸͽñŰűн¦ƧǦ˧ɤƠšΰǝƓ~yoqirn|xvroij]_JM:y5.;0~7+{0)w)(x)$y)w(z+|*#{'%.,-+|+'|.$~23%-'x#!.-},(z-&}-*3434::2-+'*(&**4*4*43<,509.5/:,?(D-I+>(6.>1C/?$8+A/E5H.</:)7':%=.J+J%C!>$?&?"7"3,:/;.;#169,/.7&44D/@*:(4)1)1/7)1"+%0*90'6#-%/.;$24>0<2C2<{-0T%#<" 7'(6)->-7Q=Cwbd}{\OJ<32{6z6ˁ:ɀ?{:}4|4|<}CˀD˂AЎLΓTСgɧuįOWL*1((.!+*#(!#''+$+)$%+(#/('2),1),)#%*&'0-.524-'.1#36(3.!&0%&+"$+#)-'03-1/--//.*-,#$%)(,/(1617/30'+,#%)(%,0'-I:>wyƦٻּڿٽغ۽շЫУԡΔLJʄvsrlyhvm|etcsfv_p^o^p^qbvg{mk}k|u{؈҈ӍՑהՑϑӛ՟Пҥٱձض۶ݽػͪsdTbM@LC9B:3:5/41+.513*133>>P]\jxt~ĭ|s<D2&#&,%.(#('"#/*+-((*$%.(']WRtpoWKU=1A,'7'(3**1$#($)($ (&!%$#*$+,'.+(/,-2.04WWZ|{}yy~|~zknlAB@/-*$' 5B9\mecrmisrhrrjvudrqgvulzyjvukuujvqhsshoxdfu`bkagcozgodifnltuvx|}zz=EG!$"!!!"$#&&''#!!"( ))(""#!# !"'#%% &#"&#%!/AV$g0'c/#^)]%"P#!C <6/$!##((#$ +*$$&!:<=a^emjteemdjocnphqiitmanjesrbqq_nmcqoepudlwakrXegXgeYhfZhgVcc':)6+4,7)7.9#')5(? 
2#0+7(6-=)=+:.@+?*@/D0C/A+=*:-;+9.?-A/I&C":&>'?(>(>$:%:,<-<*;,@/D+@+@&6 +%8&= 3)4+1'0'4'6"2(9(92E)=!2)/+3-;.@%9(;,>.E)>*:&8'@)C(>-@)A)9&2+:"3&2-2*51>/4o%"I%:$>#$<!433 5!#2!6$5"3 "=,.3"&2%4&0/40-, 6$(5"6 6!!<,,1""/%$5)(8()5#C(.<(8);,;"-8!)7"'5#%3!"2%%-!#- #2%(8((;)&?,&8$"9&)7)(<1-9/+3'&1"(?.6E35[IXӫóŭʮɨţɪ˰εԵӭЩʪ̱rɻʱѬժծִѰͩѪϨ̧ƧĴϻ̶εеðdzDZɱʹҵŧȨҮ˫д}sbq`tfwmohofj^bRK:<1y4't+ t&"x').+w$z*~."+#)'---.85{-&6)/'-,.0.,0&|/2*1-.)0*}/(,*06+9&<)=1B/=.86<.258=J5N:V0I.F(C,D4E.C3M*E,C/>-537)1,:4G3G/@*7'4)8+7",/8-8+7'4/5073>7E3@-4./28(8.;:G,:$30?2C'D#6"+,;)B"<(89D2<),Y##D&&;&)@&-D#/V9Dms~yj`TM|?À7Ł56Ɂ61}9τBɀ5͆:ʁ=̄D˄CҍL֖]ҝhЫ}ɳȿƩO[L &#)-( #!!$ %!!/*--&..%1,$/*#-,%-&&,&+,(),)'/'';)08'2+,',) ,*$(.('2,),*&-0(&/%).'1/.1'.3)/0--/,/)$,$%A4;}kmɯ۽ؼշҳίѲ߿ۻؼԹٺկѢ͔ˋɂwsct[m]o]n]m]l^mUdVfVhQcMbUiUjRgI]Uh`ql{rw|}҆ӵ́҈֒Ӗҗ֟٧٪ԪШֲͨ״ۿ̲Ĺ}iYjOCP@6@=3<90631/-1-/83<IDR`\p~{±cvd-"$!)/)3+$+'"0'+/$+*&F>Auvm}|h\f4*?)%6)%2+!,, *+!,%")#$*$(,%+0(0%")%&.5:Bhpqy}|}y{~wx~TU_=:B,-0,72`phhwpjtrgsphyuewsexsl|yjyvhwskxtlwrrxsvytpvniwkgoiql|jrpwvxv|KQT ! !#" !#!! #!'),##" #"!" !"#%$(C%G&;%6#9$4&") "'#&!'%"&*!*!$# && &%==AU]Zilnllrppygksakodqskrnjsphtshxzjz~hvzitxmv{hqugprblm^jj]jlZim\kq+A%@'D+?-8,30<,D)J*J+E-=*:)C$D+;*=*?);
gitextract__rv4pmr5/ ├── HSequences_bench/ │ ├── HPatches_images.txt │ ├── splits.json │ └── tools/ │ ├── HSequences_reader.py │ ├── aux_tools.py │ ├── geometry_tools.py │ ├── matching_tools.py │ ├── opencv_matcher.py │ └── repeatability_tools.py ├── LICENSE ├── README.md ├── extract_multiscale_features.py ├── hsequeces_bench.py ├── keyNet/ │ ├── aux/ │ │ ├── desc_aux_function.py │ │ └── tools.py │ ├── datasets/ │ │ ├── dataset_utils.py │ │ └── tf_dataset.py │ ├── loss/ │ │ └── score_loss_function.py │ ├── model/ │ │ ├── hardnet_pytorch.py │ │ └── keynet_architecture.py │ └── pretrained_nets/ │ ├── HardNet++.pth │ └── KeyNet_default/ │ ├── checkpoint │ ├── model--1651.data-00000-of-00001 │ ├── model--1651.index │ └── model--1651.meta ├── test_im/ │ ├── KeyNet_default/ │ │ └── test_im/ │ │ ├── test_im.ppm.dsc.npy │ │ └── test_im.ppm.kpt.npy │ ├── image.txt │ └── test_im.ppm └── train_network.py
SYMBOL INDEX (100 symbols across 16 files)
FILE: HSequences_bench/tools/HSequences_reader.py
class HSequences_dataset (line 7) | class HSequences_dataset(object):
method __init__ (line 9) | def __init__(self, dataset_path, split, split_path):
method read_image (line 19) | def read_image(self, path):
method read_homography (line 23) | def read_homography(self, h_name):
method get_sequence (line 40) | def get_sequence(self, folder_id):
method extract_hsequences (line 73) | def extract_hsequences(self):
FILE: HSequences_bench/tools/aux_tools.py
function convert_opencv_matches_to_numpy (line 5) | def convert_opencv_matches_to_numpy(matches):
function create_results (line 18) | def create_results():
function create_overlapping_results (line 36) | def create_overlapping_results(detector_name, overlap):
function check_directory (line 44) | def check_directory(dir):
function convert_openCV_to_np (line 49) | def convert_openCV_to_np(pts, dsc, order_coord):
FILE: HSequences_bench/tools/geometry_tools.py
function remove_borders (line 4) | def remove_borders(image, borders):
function create_common_region_masks (line 20) | def create_common_region_masks(h_dst_2_src, shape_src, shape_dst):
function prepare_homography (line 42) | def prepare_homography(hom):
function apply_homography_to_points (line 55) | def apply_homography_to_points(points, h):
function getAff (line 79) | def getAff(x,y,H):
function find_index_higher_scores (line 100) | def find_index_higher_scores(map, num_points = 1000, threshold = -1):
function get_point_coordinates (line 125) | def get_point_coordinates(map, scale_value=1., num_points=1000, threshol...
function get_point_coordinates3D (line 144) | def get_point_coordinates3D(map, scale_factor=1., up_levels=0, num_point...
FILE: HSequences_bench/tools/matching_tools.py
function create_precision_recall_results (line 4) | def create_precision_recall_results():
function compute_matching_based_distance (line 14) | def compute_matching_based_distance(points_src, points_dst, matches, num...
function compute_precision_recall (line 23) | def compute_precision_recall(matches, true_matches, num_points, eps=1e-6):
function find_matches (line 51) | def find_matches(dsc_src, dsc_dst):
FILE: HSequences_bench/tools/opencv_matcher.py
class OpencvBruteForceMatcher (line 5) | class OpencvBruteForceMatcher(object):
method __init__ (line 11) | def __init__(self, distance='l2'):
method match (line 14) | def match(self, descs1, descs2):
method match_putative (line 24) | def match_putative(self, descs1, descs2, knn=2, threshold_ratio=0.7):
method convert_opencv_matches_to_numpy (line 39) | def convert_opencv_matches_to_numpy(self, matches):
FILE: HSequences_bench/tools/repeatability_tools.py
function check_common_points (line 4) | def check_common_points(kpts, mask):
function select_top_k (line 12) | def select_top_k(kpts, k=1000):
function apply_nms (line 17) | def apply_nms(score_map, size):
function intersection_area (line 24) | def intersection_area(R, r, d = 0):
function union_area (line 44) | def union_area(r, R, intersection):
function compute_repeatability (line 48) | def compute_repeatability(src_indexes, dst_indexes, overlap_err=0.4, eps...
FILE: extract_multiscale_features.py
function check_directory (line 17) | def check_directory(dir):
function create_result_dir (line 21) | def create_result_dir(path):
function extract_multiscale_features (line 30) | def extract_multiscale_features():
FILE: hsequeces_bench.py
function hsequences_metrics (line 14) | def hsequences_metrics():
FILE: keyNet/aux/desc_aux_function.py
function _meshgrid (line 3) | def _meshgrid(height, width):
function transformer_crop (line 23) | def transformer_crop(images, out_size, batch_inds, kpts_xy, kpts_scale=N...
function build_patch_extraction (line 143) | def build_patch_extraction(kpts, batch_inds, images, kpts_scale, name='P...
FILE: keyNet/aux/tools.py
function remove_borders (line 4) | def remove_borders(images, borders=3):
function check_directory (line 28) | def check_directory(file_path):
function check_tensorboard_directory (line 33) | def check_tensorboard_directory(version_network_name):
FILE: keyNet/datasets/dataset_utils.py
function read_bw_image (line 9) | def read_bw_image(path):
function read_color_image (line 14) | def read_color_image(path):
function apply_h_2_source_image (line 18) | def apply_h_2_source_image(source_im, h):
function generate_composed_homography (line 24) | def generate_composed_homography(max_angle=45, max_scaling=2.0, max_shea...
function color_distorsion (line 50) | def color_distorsion(im_c):
function to_black_and_white (line 56) | def to_black_and_white(img):
function colorDistorsion (line 61) | def colorDistorsion(image, lower=0.5, upper=1.5, delta=18.0, delta_brigt...
function check_margins (line 104) | def check_margins(img, axis=-1):
function swap_channels (line 114) | def swap_channels(image, swaps):
FILE: keyNet/datasets/tf_dataset.py
class tf_dataset (line 8) | class tf_dataset(object):
method __init__ (line 10) | def __init__(self, dataset_root, tfrecord_root, size_patches, batch_si...
method get_num_patches (line 51) | def get_num_patches(self, is_val=False):
method create_dataset_object (line 57) | def create_dataset_object(self, is_val=False):
method _compute_num_examples (line 74) | def _compute_num_examples(self):
method _parse_function (line 84) | def _parse_function(self, sample_pair):
method _prepare_data (line 88) | def _prepare_data(self, sample_pair):
method _find_data_path (line 111) | def _find_data_path(self, data_path):
method _load_data_names (line 116) | def _load_data_names(self, data_path):
method _bytes_feature (line 130) | def _bytes_feature(self, value):
method _create_tfrecords (line 133) | def _create_tfrecords(self, is_val):
method _create_pair_images (line 137) | def _create_pair_images(self, is_val):
FILE: keyNet/loss/score_loss_function.py
function ip_layer (line 6) | def ip_layer(scores, window_size, kernels):
function ip_softscores (line 33) | def ip_softscores(scores, window_size, kernels):
function unpool (line 56) | def unpool(pool, ind, ksize=[1, 2, 2, 1], scope='unpool'):
function grid_indexes_nms_conv (line 83) | def grid_indexes_nms_conv(scores, kernels, window_size):
function loss_ln_indexes_norm (line 100) | def loss_ln_indexes_norm(src_indexes, label_indexes, weights_indexes, wi...
function msip_loss_function (line 109) | def msip_loss_function(src_im, src_score_maps, dst_score_maps, window_si...
FILE: keyNet/model/hardnet_pytorch.py
class L2Norm (line 7) | class L2Norm(nn.Module):
method __init__ (line 8) | def __init__(self):
method forward (line 12) | def forward(self, x):
class L1Norm (line 18) | class L1Norm(nn.Module):
method __init__ (line 19) | def __init__(self):
method forward (line 23) | def forward(self, x):
class HardNet (line 29) | class HardNet(nn.Module):
method __init__ (line 33) | def __init__(self):
method input_norm (line 62) | def input_norm(self, x):
method forward (line 69) | def forward(self, input):
FILE: keyNet/model/keynet_architecture.py
function gaussian_multiple_channels (line 5) | def gaussian_multiple_channels(num_channels, sigma):
function ones_multiple_channels (line 23) | def ones_multiple_channels(size, num_channels):
function grid_indexes (line 34) | def grid_indexes(size):
function get_kernel_size (line 54) | def get_kernel_size(factor):
function linear_upsample_weights (line 61) | def linear_upsample_weights(half_factor, number_of_classes):
function create_derivatives_kernel (line 81) | def create_derivatives_kernel():
class keynet (line 99) | class keynet(object):
method __init__ (line 100) | def __init__(self, args, MSIP_sizes=[]):
method create_kernels (line 136) | def create_kernels(self, MSIP_sizes, name_scope):
method get_kernels (line 163) | def get_kernels(self):
method model (line 166) | def model(self, input_data, is_training, dim, reuse=False, train_score...
method compute_handcrafted_features (line 184) | def compute_handcrafted_features(self, image, network, idx, name_scope):
method local_norm_image (line 217) | def local_norm_image(self, x, k_size=65, eps=1e-10):
method compute_features (line 228) | def compute_features(self, input_data, dim, reuse, is_training):
method conv_block (line 264) | def conv_block(self, features, name, reuse, is_training, num_filters, ...
method non_maximum_supression (line 283) | def non_maximum_supression(self, map, thresh=0.):
FILE: train_network.py
function suppress_stdout (line 16) | def suppress_stdout():
function save_log (line 25) | def save_log(str, file):
function train_keynet_architecture (line 36) | def train_keynet_architecture():
Condensed preview — 29 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (1,889K chars).
[
{
"path": "HSequences_bench/HPatches_images.txt",
"chars": 29736,
"preview": "hpatches-sequences-release/v_churchill/1.ppm\nhpatches-sequences-release/v_churchill/2.ppm\nhpatches-sequences-release/v_c"
},
{
"path": "HSequences_bench/splits.json",
"chars": 9329,
"preview": "{\"a\": {\"test\": [\"i_ajuntament\", \"i_resort\", \"i_table\", \"i_troulos\", \"i_bologna\", \"i_lionnight\", \"i_porta\", \"i_zion\", \"i_"
},
{
"path": "HSequences_bench/tools/HSequences_reader.py",
"chars": 2175,
"preview": "import os\nimport json\nimport numpy as np\nfrom skimage import io\n\n\nclass HSequences_dataset(object):\n\n def __init__(se"
},
{
"path": "HSequences_bench/tools/aux_tools.py",
"chars": 1834,
"preview": "from os import path, mkdir\nimport numpy as np\nimport cv2\n\ndef convert_opencv_matches_to_numpy(matches):\n \"\"\"Returns a"
},
{
"path": "HSequences_bench/tools/geometry_tools.py",
"chars": 5425,
"preview": "import numpy as np\r\nfrom cv2 import warpPerspective as applyH\r\n\r\ndef remove_borders(image, borders):\r\n\r\n shape = imag"
},
{
"path": "HSequences_bench/tools/matching_tools.py",
"chars": 2353,
"preview": "import numpy as np\n\n\ndef create_precision_recall_results():\n return {\n 'recall': 0.0,\n 'precision': 0.0"
},
{
"path": "HSequences_bench/tools/opencv_matcher.py",
"chars": 1964,
"preview": "import cv2\nimport numpy as np\n\n\nclass OpencvBruteForceMatcher(object):\n name = 'opencv_brute_force_matcher'\n dis"
},
{
"path": "HSequences_bench/tools/repeatability_tools.py",
"chars": 5507,
"preview": "import numpy as np\nfrom scipy.ndimage.filters import maximum_filter\n\ndef check_common_points(kpts, mask):\n idx_valid_"
},
{
"path": "LICENSE",
"chars": 1733,
"preview": "The Clear BSD License\n\nCopyright (c) 2019 Axel Barroso-Laguna\nAll rights reserved.\n\nRedistribution and use in source and"
},
{
"path": "README.md",
"chars": 5989,
"preview": "# Key.Net: Keypoint Detection by Handcrafted and Learned CNN Filters\nCode for the ICCV19 paper:\n\n```text\n\"Key.Net: Keypo"
},
{
"path": "extract_multiscale_features.py",
"chars": 12984,
"preview": "import os, sys, cv2\nsys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))\nfrom os import path, m"
},
{
"path": "hsequeces_bench.py",
"chars": 11556,
"preview": "import os\nimport argparse\nimport numpy as np\nimport pickle\nfrom tqdm import tqdm\nimport HSequences_bench.tools.aux_tools"
},
{
"path": "keyNet/aux/desc_aux_function.py",
"chars": 5825,
"preview": "import tensorflow as tf\n\ndef _meshgrid(height, width):\n with tf.name_scope('meshgrid'):\n # This should be equi"
},
{
"path": "keyNet/aux/tools.py",
"chars": 1050,
"preview": "import os\n\n\ndef remove_borders(images, borders=3):\n\n shape = images.shape\n\n if len(shape) == 4:\n for batch_"
},
{
"path": "keyNet/datasets/dataset_utils.py",
"chars": 3363,
"preview": "import cv2\nimport numpy as np\nfrom cv2 import warpPerspective as applyH\n\nperms = ((0, 1, 2), (0, 2, 1),\n (1, 0,"
},
{
"path": "keyNet/datasets/tf_dataset.py",
"chars": 11582,
"preview": "import os\nimport cv2\nimport numpy as np\nimport tensorflow as tf\nimport keyNet.datasets.dataset_utils as tools\nfrom tqdm "
},
{
"path": "keyNet/loss/score_loss_function.py",
"chars": 9154,
"preview": "import tensorflow as tf\nimport numpy as np\n\n\n# Index Proposal Layer\ndef ip_layer(scores, window_size, kernels):\n\n exp"
},
{
"path": "keyNet/model/hardnet_pytorch.py",
"chars": 2412,
"preview": "#!/usr/bin/python2 -utt\r\n# -*- coding: utf-8 -*-\r\nimport torch\r\nimport torch.nn as nn\r\nfrom torch.autograd import Variab"
},
{
"path": "keyNet/model/keynet_architecture.py",
"chars": 11486,
"preview": "import math\nimport numpy as np\nimport tensorflow as tf\n\ndef gaussian_multiple_channels(num_channels, sigma):\n\n r = 2*"
},
{
"path": "keyNet/pretrained_nets/KeyNet_default/checkpoint",
"chars": 162,
"preview": "model_checkpoint_path: \"model--1651\"\nall_model_checkpoint_paths: \"model--635\"\nall_model_checkpoint_paths: \"model--1143\"\n"
},
{
"path": "test_im/image.txt",
"chars": 19,
"preview": "test_im/test_im.ppm"
},
{
"path": "test_im/test_im.ppm",
"chars": 850440,
"preview": "P6\n800 640\n255\nϻŸѽjf>dC(Q8'<-$.\"\u001d8('5!#5\"\"7&\u001d8(\u001b7%\u001f9&(3!#3!\u001e4'\u00175&\u00176% :%(:\");\"&8\u001e\u001f9\u001f ;\u001e'@\u001d+9\"+:\u001d\"p$0/@3B/=2EYt̻ͽpy`lV{mRt"
},
{
"path": "train_network.py",
"chars": 18441,
"preview": "import os, argparse, math, cv2, sys, time\nimport numpy as np\nfrom tqdm import tqdm\nimport tensorflow as tf\nfrom keyNet.m"
}
]
// ... and 6 more files (download for full content)
About this extraction
This page contains the full source code of the axelBarroso/Key.Net GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 29 files (981.0 KB), approximately 716.3k tokens, and a symbol index with 100 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.