Repository: NVlabs/GaussianSTORM
Branch: main
Commit: fcd2561de352
Files: 51
Total size: 771.3 KB
Directory structure:
gitextract_we0tkx0h/
├── .gitignore
├── .vscode/
│ └── settings.json
├── LICENSE
├── README.md
├── assets/
│ └── Inter-Regular.otf
├── docs/
│ ├── WAYMO.md
│ └── example_data/
│ ├── segment-990914685337955114_980_000_1000_000_with_camera_labels.json
│ └── waymo_train.txt
├── engine_storm.py
├── extract_sky.py
├── inference.py
├── main_storm.py
├── preproc/
│ ├── utils.py
│ ├── waymo_download.py
│ └── waymo_preprocess.py
├── preprocess.py
├── requirements.txt
├── requirements_data_preprocess.txt
├── storm/
│ ├── dataset/
│ │ ├── constants.py
│ │ ├── data_utils.py
│ │ ├── samplers.py
│ │ └── storm_dataset.py
│ ├── models/
│ │ ├── __init__.py
│ │ ├── decoder.py
│ │ ├── embedders.py
│ │ ├── layers.py
│ │ ├── storm.py
│ │ └── vit.py
│ ├── utils/
│ │ ├── __init__.py
│ │ ├── distributed.py
│ │ ├── logging.py
│ │ ├── losses.py
│ │ ├── lpips.py
│ │ ├── lpips_loss.py
│ │ └── misc.py
│ └── visualization/
│ ├── annotation.py
│ ├── layout.py
│ ├── video_maker.py
│ └── visualization_tools.py
└── third_party/
└── depth_anything_v2/
├── dinov2.py
├── dinov2_layers/
│ ├── __init__.py
│ ├── attention.py
│ ├── block.py
│ ├── drop_path.py
│ ├── layer_scale.py
│ ├── mlp.py
│ ├── patch_embed.py
│ └── swiglu_ffn.py
├── dpt.py
└── util/
├── blocks.py
└── transform.py
================================================
FILE CONTENTS
================================================
================================================
FILE: .gitignore
================================================
data/
data
work_dirs/
local_scripts/
sync_files.sh
*.mp4
*draft*
*.zip
*.tar
*.gz
# media
*.mp4
*.png
*.jpg
*.html
ckpts/
# wandb
wandb/
# work in progress
*wip*
*results*
*debug*
tmp*
# caches
*.pyc
*.swp
__pycache__
# coming soon
*viewer*
third_party/flow3d_viewer/
third_party/viser_extras/
================================================
FILE: .vscode/settings.json
================================================
{
// Automatically format using Black on save.
"editor.formatOnSave": true,
// Draw a ruler at Black's column width.
"editor.rulers": [
100,
],
// Hide non-code files.
"files.exclude": {
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/CVS": true,
"**/.DS_Store": true,
"**/Thumbs.db": true,
"**/__pycache__": true,
"**/venv": true
},
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter",
},
"debug.focusWindowOnBreak": false,
"files.watcherExclude": {
"**/.git/**": true,
"**/checkpoints/**": true,
"**/data/**": true,
"**/work_dirs/**": true,
"**/lightning_logs/**": true,
"**/outputs/**": true,
"**/dataset_cache/**": true,
"**/.ruff_cache/**": true,
"**/venv/**": true,
"**/data": true
},
"editor.mouseWheelZoom": true,
"terminal.integrated.mouseWheelZoom": true
}
================================================
FILE: LICENSE
================================================
NVIDIA License
1. Definitions
“Licensor” means any person or entity that distributes its Work.
“Work” means (a) the original work of authorship made available under this license, which may include software, documentation, or other files, and (b) any additions to or derivative works thereof that are made available under this license.
The terms “reproduce,” “reproduction,” “derivative works,” and “distribution” have the meaning as provided under U.S. copyright law; provided, however, that for the purposes of this license, derivative works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work.
Works are “made available” under this license by including in or with the Work either (a) a copyright notice referencing the applicability of this license to the Work, or (b) a copy of this license.
2. License Grant
2.1 Copyright Grant. Subject to the terms and conditions of this license, each Licensor grants to you a perpetual, worldwide, non-exclusive, royalty-free, copyright license to use, reproduce, prepare derivative works of, publicly display, publicly perform, sublicense and distribute its Work and any resulting derivative works in any form.
3. Limitations
3.1 Redistribution. You may reproduce or distribute the Work only if (a) you do so under this license, (b) you include a complete copy of this license with your distribution, and (c) you retain without modification any copyright, patent, trademark, or attribution notices that are present in the Work.
3.2 Derivative Works. You may specify that additional or different terms apply to the use, reproduction, and distribution of your derivative works of the Work (“Your Terms”) only if (a) Your Terms provide that the use limitation in Section 3.3 applies to your derivative works, and (b) you identify the specific derivative works that are subject to Your Terms. Notwithstanding Your Terms, this license (including the redistribution requirements in Section 3.1) will continue to apply to the Work itself.
3.3 Use Limitation. The Work and any derivative works thereof only may be used or intended for use non-commercially. Notwithstanding the foregoing, NVIDIA Corporation and its affiliates may use the Work and any derivative works commercially. As used herein, “non-commercially” means for research or evaluation purposes only.
3.4 Patent Claims. If you bring or threaten to bring a patent claim against any Licensor (including any claim, cross-claim or counterclaim in a lawsuit) to enforce any patents that you allege are infringed by any Work, then your rights under this license from such Licensor (including the grant in Section 2.1) will terminate immediately.
3.5 Trademarks. This license does not grant any rights to use any Licensor’s or its affiliates’ names, logos, or trademarks, except as necessary to reproduce the notices described in this license.
3.6 Termination. If you violate any term of this license, then your rights under this license (including the grant in Section 2.1) will terminate immediately.
4. Disclaimer of Warranty.
THE WORK IS PROVIDED “AS IS” WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WARRANTIES OR CONDITIONS OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE OR NON-INFRINGEMENT. YOU BEAR THE RISK OF UNDERTAKING ANY ACTIVITIES UNDER THIS LICENSE.
5. Limitation of Liability.
EXCEPT AS PROHIBITED BY APPLICABLE LAW, IN NO EVENT AND UNDER NO LEGAL THEORY, WHETHER IN TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE SHALL ANY LICENSOR BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF OR RELATED TO THIS LICENSE, THE USE OR INABILITY TO USE THE WORK (INCLUDING BUT NOT LIMITED TO LOSS OF GOODWILL, BUSINESS INTERRUPTION, LOST PROFITS OR DATA, COMPUTER FAILURE OR MALFUNCTION, OR ANY OTHER DAMAGES OR LOSSES), EVEN IF THE LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
================================================
FILE: README.md
================================================
# **STORM**
### Spatio-Temporal Reconstruction Model for Large-Scale Outdoor Scenes
[Project Page](https://jiawei-yang.github.io/STORM/) • [arXiv Paper](https://arxiv.org/abs/2501.00602)
---
## Highlights
* **Fast, feed-forward, and self-supervised** dynamic scene reconstruction from sparse multi-view sequences
* Learns **3D Gaussian** *and* **scene flow** jointly; supports real-time rendering (once Gaussians are generated) and motion segmentation
* Outperforms per‑scene optimization and other generalizable models by **+4 dB PSNR** on dynamic regions while being significantly faster
---
## Installation
> Tested with **CUDA 12.1**, **PyTorch 2.3** and an NVIDIA **A100**
> Replace the CUDA/PyTorch versions as needed for your environment
```bash
# clone project
git clone https://github.com/NVlabs/GaussianSTORM.git
cd GaussianSTORM
# create conda environment
conda create -n storm python=3.10 -y
conda activate storm
# install python dependencies
pip install -r requirements.txt
# install gsplat (for batch-wise rendering support)
pip install git+https://github.com/nerfstudio-project/gsplat.git@2b0de894232d21e8963179a7bbbd315f27c52c9c
# └─ if the above fails, drop the commit hash:
# pip install git+https://github.com/nerfstudio-project/gsplat.git
```
> Note: installing `gsplat` can be machine-dependent and sometimes tricky. If you encounter issues, please refer to the original `gsplat` repository for troubleshooting.
## Quick Start (Playground)
We provide a tiny subset of Waymo Open Dataset (3 sequences) for quick experimentation:
```bash
# download dataset subset (≈ 600 MB)
gdown 14fapsAGoMCQ5Ky82cg2X6bk-mLQ7fdCF
tar -xf STORM_subset.tar.gz
```
```bash
# run single-GPU inference demo
python inference.py \
--project storm_playground --exp_name visualization \
--data_root data/STORM_subset \
--model STORM-B/8 --num_motion_tokens 16 \
--use_sky_token --use_affine_token \
--load_depth --load_flow --load_ground \
--load_from $CKPT_PTH
```
> `CKPT_PTH` refers to the checkpoint. We cannot share an official checkpoint at this moment. Please refer to the issue page for an unofficial checkpoint.
## Dataset Preparation
### Waymo Dataset
- To prepare the Waymo Open Dataset, please refer to [Waymo Data](docs/WAYMO.md)
### Other datasets
We haven't included instructions for preparing the NuScenes and Argoverse2 datasets yet. We may add them in the future, depending on our capacity.
## Training
Multi-GPU example that reproduces the paper's STORM-B/8 model:
```bash
# with a global batch size= num_gpus * batch_size = 8 * 4 = 32 (We used 64 global batch size for main experiments)
torchrun --nproc_per_node=8 main_storm.py \
--project 0504_storm \
--exp_name 0504_pixel_storm \
--data_root ../storm2.3/data/STORM2 \ # replace this with your data root.
--batch_size 4 --num_iterations 100000 --lr_sched constant \
--model STORM-B/8 --num_motion_tokens 16 \
--use_sky_token --use_affine_token \
--load_depth --load_flow --load_ground \
--enable_depth_loss --enable_flow_reg_loss --flow_reg_coeff 0.005 --enable_sky_opacity_loss \
--enable_perceptual_loss --perceptual_loss_start_iter 5000 \
--enable_wandb \
--auto_resume
```
> **Tips:**
> - Checkpoints and logs are saved at `work_dirs/<project>/<exp_name>/`
> - `batch_size` is per-GPU; global batch = batch_size × #GPUs × #nodes
> - For additional arguments, see `main_storm.py`
## Evaluation
```bash
torchrun --nproc_per_node=8 main_storm.py \
--project 0504_storm \
--exp_name 0504_pixel_storm \
--data_root ../storm2.3/data/STORM2 \ # replace this with your data root.
--batch_size 4 --num_iterations 100000 --lr_sched constant \
--model STORM-B/8 --num_motion_tokens 16 \
--use_sky_token --use_affine_token \
--load_depth --load_flow --load_ground \
--enable_depth_loss --enable_flow_reg_loss --flow_reg_coeff 0.005 --enable_sky_opacity_loss \
--enable_perceptual_loss --perceptual_loss_start_iter 5000 \
--auto_resume \
--evaluate # this parameter specifies the evaluation mode
```
## TODO
- [ ] Viewers.
## Citation
```bibtex
@inproceedings{yang2025storm,
title = {STORM: Spatio-Temporal Reconstruction Model for Large-Scale Outdoor Scenes},
author = {Jiawei Yang and Jiahui Huang and Yuxiao Chen and Yan Wang and Boyi Li and Yurong You and Maximilian Igl and Apoorva Sharma and Peter Karkus and Danfei Xu and Boris Ivanovic and Yue Wang and Marco Pavone},
booktitle = {ICLR},
year = {2025}
}
```
## License
This project is licensed under the **NVIDIA License**. See the [LICENSE](LICENSE) file for details.
## Acknowledgements
Our implementation builds upon **gsplat**.
We thank the respective authors for open‑sourcing their excellent work.
================================================
FILE: docs/WAYMO.md
================================================
# Preparing Waymo Dataset
> Note: This document is modified from [OmniRe](https://github.com/ziyc/drivestudio/blob/main/docs/Waymo.md) and [EmerNeRF](https://github.com/NVlabs/EmerNeRF/blob/main/docs/NOTR.md)
## 1. Register on Waymo Open Dataset
#### Sign Up for a Waymo Open Dataset Account and Install gcloud SDK
To download the Waymo dataset, you need to register an account at [Waymo Open Dataset](https://waymo.com/open/). You also need to install gcloud SDK and authenticate your account. Please refer to [this page](https://cloud.google.com/sdk/docs/install) for more details.
#### Set Up the Data Directory
Once you've registered and installed the gcloud SDK, create a directory to house the raw data:
```bash
# create the data directory or create a symbolic link to the data directory
mkdir -p ./data/waymo/raw
```
## 2. Environment
We highly recommend setting up another environment for data processing as the TensorFlow dependencies often conflict with our main environment.
```bash
conda create -n storm_data python=3.10
conda activate storm_data
pip install -r requirements_data_preprocess.txt
```
## 3. Download the Raw Data
For the Waymo Open Dataset, we first organize the scene names alphabetically and store them in `data/dataset_scene_list/waymo_train_list.txt` and `data/dataset_scene_list/waymo_val_list.txt`. The scene index is then determined by the line number minus one.
For example, you can download 3 sequences from the dataset by:
```bash
python preproc/waymo_download.py \
--target_dir ./data/waymo/raw/training \
--split_file data/dataset_scene_list/waymo_train_list.txt \
--scene_ids 700 754 23
```
If you wish to run experiments on different scenes, please specify your own list of scenes.
You can also omit the `scene_ids` to download all scenes specified in the `split_file`:
```bash
# training set
python preproc/waymo_download.py \
--target_dir ./data/waymo/raw/training \
--split_file data/dataset_scene_list/waymo_train_list.txt
# validation set
python preproc/waymo_download.py \
--target_dir ./data/waymo/raw/validation \
--split_file data/dataset_scene_list/waymo_val_list.txt
```
If this script doesn't work due to network issues, consider manual download:
Download the [scene flow version](https://console.cloud.google.com/storage/browser/waymo_open_dataset_scene_flow;tab=objects?prefix=&forceOnObjectsSortingFiltering=false) of Waymo.

> **Note**: Ensure you're downloading the scene flow version to avoid errors.
## 4. Preprocess the Data
After downloading the raw dataset, you'll need to preprocess this compressed data to extract and organize various components.
#### Run the Preprocessing Script
To preprocess specific scenes of the dataset, use the following command:
```bash
python preprocess.py \
--data_root data/waymo/raw/ \
--target_dir data/waymo/processed \
--dataset waymo \
--split training \
--scene_list_file data/dataset_scene_list/waymo_train_list.txt \
--scene_ids 700 754 23 \
--num_workers 8 \
--process_keys images lidar calib pose dynamic_masks ground \
--json_folder_to_save data/STORM_data/annotations/waymo
```
Alternatively, preprocess a batch of scenes by providing the split file:
```bash
# training set
python preprocess.py \
--data_root data/waymo/raw/ \
--target_dir data/waymo/processed \
--dataset waymo \
--split training \
--scene_list_file data/dataset_scene_list/waymo_train_list.txt \
--num_workers 8 \
--process_keys images lidar calib pose dynamic_masks ground \
--json_folder_to_save data/STORM_data/annotations/waymo
# validation set
python preprocess.py \
--data_root data/waymo/raw/ \
--target_dir data/waymo/processed \
--dataset waymo \
--split validation \
--scene_list_file data/dataset_scene_list/waymo_val_list.txt \
--num_workers 8 \
--process_keys images lidar calib pose dynamic_masks ground \
--json_folder_to_save data/STORM_data/annotations/waymo
```
The extracted data will be stored in the `data/waymo/processed` directory.
## 5. Extract Sky Masks
- We originally used `ViT-Adapter`, following [EmerNeRF](https://github.com/NVlabs/EmerNeRF/blob/main/docs/NOTR.md#sky-mask), to extract sky masks. We later realized that installing `mmcv` properly is a non-trivial dependency.
- Therefore, we recommend using `DepthAnything-v2` to extract the sky masks. Specifically, the predicted depth map from DepthAnything-v2 is a relative depth map, where lower values indicate farther regions, and zero values indicate infinitely far regions, which represent the sky.
- To do this:
1. Use whatever command you prefer to generate the file list to all image files, e.g., `find data/waymo/processed/training/*/images -name "*.jpg" > file_list.txt`
  2. Download the DepthAnything-v2 checkpoint, e.g., `mkdir ckpts && wget https://huggingface.co/depth-anything/Depth-Anything-V2-Large/resolve/main/depth_anything_v2_vitl.pth -O ckpts/depth_anything_v2_vitl.pth`
  3. Use `extract_sky.py` to extract sky masks for these images by `python extract_sky.py --file_list ./file_list.txt`
## 6. Data Structure
After completing all preprocessing steps, the project files should be organized according to the following structure:
```bash
ProjectPath/data/
└── waymo/
├── raw/
│ ├── segment-454855130179746819_4580_000_4600_000_with_camera_labels.tfrecord
│ └── ...
└── processed/
└──training/
├── 000/
│ ├──cam_to_ego/ # camera to ego-vehicle transformations: {cam_id}.txt
│ ├──cam_to_world/ # camera to world transformations: {timestep:03d}_{cam_id}.txt
│ ├──depth_flows_4/ # downsampled (1/4) depth flow maps: {timestep:03d}_{cam_id}.npy
│ ├──dynamic_masks/ # bounding-box-generated dynamic masks: {timestep:03d}_{cam_id}.png
│ ├──ego_to_world/ # ego-vehicle to world transformations: {timestep:03d}.txt
│ ├──ground_label_4/ # downsampled (1/4) ground labels extracted from point cloud, used for flow evaluation only: {timestep:03d}.txt
│ ├──images/ # original camera images: {timestep:03d}_{cam_id}.jpg
│ ├──images_4/ # downsampled (1/4) camera images: {timestep:03d}_{cam_id}.jpg
│ ├──intrinsics/ # camera intrinsics: {cam_id}.txt
│ ├──lidar/ # lidar data: {timestep:03d}.bin
│ ├──sky_masks/ # sky masks: {timestep:03d}_{cam_id}.png
├── 001/
├── ...
```
================================================
FILE: docs/example_data/segment-990914685337955114_980_000_1000_000_with_camera_labels.json
================================================
{"dataset": "waymo", "scene_id": 796, "scene_name": "segment-990914685337955114_980_000_1000_000_with_camera_labels", "num_timesteps": 198, "camera_list": ["0", "1", "2", "3", "4"], "normalized_time": [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7, 2.8, 2.9, 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8, 3.9, 4.0, 4.1, 4.2, 4.3, 4.4, 4.5, 4.6, 4.7, 4.8, 4.9, 5.0, 5.1, 5.2, 5.3, 5.4, 5.5, 5.6, 5.7, 5.8, 5.9, 6.0, 6.1, 6.2, 6.3, 6.4, 6.5, 6.6, 6.7, 6.8, 6.9, 7.0, 7.1, 7.2, 7.3, 7.4, 7.5, 7.6, 7.7, 7.8, 7.9, 8.0, 8.1, 8.2, 8.3, 8.4, 8.5, 8.6, 8.7, 8.8, 8.9, 9.0, 9.1, 9.2, 9.3, 9.4, 9.5, 9.6, 9.7, 9.8, 9.9, 10.0, 10.1, 10.2, 10.3, 10.4, 10.5, 10.6, 10.7, 10.8, 10.9, 11.0, 11.1, 11.2, 11.3, 11.4, 11.5, 11.6, 11.7, 11.8, 11.9, 12.0, 12.1, 12.2, 12.3, 12.4, 12.5, 12.6, 12.7, 12.8, 12.9, 13.0, 13.1, 13.2, 13.3, 13.4, 13.5, 13.6, 13.7, 13.8, 13.9, 14.0, 14.1, 14.2, 14.3, 14.4, 14.5, 14.6, 14.7, 14.8, 14.9, 15.0, 15.1, 15.2, 15.3, 15.4, 15.5, 15.6, 15.7, 15.8, 15.9, 16.0, 16.1, 16.2, 16.3, 16.4, 16.5, 16.6, 16.7, 16.8, 16.9, 17.0, 17.1, 17.2, 17.3, 17.4, 17.5, 17.6, 17.7, 17.8, 17.9, 18.0, 18.1, 18.2, 18.3, 18.4, 18.5, 18.6, 18.7, 18.8, 18.9, 19.0, 19.1, 19.2, 19.3, 19.4, 19.5, 19.6, 19.7], "normalized_intrinsics": {"0": [1.0760852812653638, 1.6141279218980458, 0.4971965468813711, 0.5072499859713261], "1": [1.0714336048044208, 1.6071504072066312, 0.5112488553103808, 0.5005822834492333], "2": [1.067119656762737, 1.6006794851441053, 0.49226577388679255, 0.4929978469775671], "3": [1.0787773443175446, 2.337756773238923, 0.502880797268437, 0.2763063252339018], "4": [1.0748925209023337, 2.329338194280452, 0.5118216810459222, 0.25624083627281696]}, "camera_to_ego": {"0": [[0.9998842873191989, -0.0024402666225806988, 0.015015227969911943, 1.5389990840669499], [0.002560104988162815, 0.9999649851086498, -0.007967083475600458, -0.023573164304480632], [-0.014995260405451157, 0.008004602143037503, 
0.9998555238182686, 2.115384290448616], [0.0, 0.0, 0.0, 1.0]], "1": [[0.718026611157686, -0.6959597957041816, 0.008817507176236696, 1.492768092127657], [0.6959181048595154, 0.7180804580838422, 0.007645066830298392, 0.09188386825351309], [-0.011652338741628897, 0.000646901455438632, 0.9999318999413697, 2.1150114403293925], [0.0, 0.0, 0.0, 1.0]], "2": [[0.7164452330835464, 0.6976372776735257, -0.00290805641879582, 1.4905599043950848], [-0.6976364192340506, 0.7164504681797242, 0.001467379755150582, -0.09390150748070432], [0.0031071772004358147, 0.0009774688362383767, 0.999994694988188, 2.1157584813144563], [0.0, 0.0, 0.0, 1.0]], "3": [[0.004228593556574258, -0.9999908764017675, 0.0006050700439783404, 1.4312924314965867], [0.9998715975152174, 0.004237440134424988, 0.015454209247788318, 0.11111617069514346], [-0.015456632197880632, 0.0005396427818340781, 0.9998803935005264, 2.1150525835808596], [0.0, 0.0, 0.0, 1.0]], "4": [[-0.004455252331979332, 0.9999900434506952, -0.0002524403592420155, 1.4286445077785184], [-0.9999717124550628, -0.004453640529595324, 0.00606130148749891, -0.1116662927794725], [0.006060116859236573, 0.00027943784591101875, 0.9999815982797596, 2.1159483894553066], [0.0, 0.0, 0.0, 1.0]]}, "ego_to_world": [[[-0.8436173559997825, -0.5369440355300293, 0.0009270192354945404, -25832.554037327885], [0.5369381252180163, -0.8435969239840693, 0.00645596870952176, 42363.018620870294], [-0.002684463316608952, 0.005944119223491552, 0.9999787303254798, -208.16], [0.0, 0.0, 0.0, 1.0]], [[-0.8454966740486528, -0.5339759519702431, -0.002248753018626498, -25833.11342240581], [0.5339296555422304, -0.8454665954261407, 0.010264450841902575, 42363.376008983185], [-0.007382215468369093, 0.007477883123129725, 0.9999447905553464, -208.156], [0.0, 0.0, 0.0, 1.0]], [[-0.8473232172132328, -0.531067868797043, -0.003206914838815649, -25833.67610041366], [0.5310010820583894, -0.8472882351156437, 0.011853163604605085, 42363.732038938055], [-0.009012015547946579, 
0.008340585470128566, 0.9999246062627818, -208.16], [0.0, 0.0, 0.0, 1.0]], [[-0.8490670770713923, -0.5282849916940684, -0.00025726297879027915, -25834.24362188926], [0.528247157822505, -0.8490119014620124, 0.01156423055389164, 42364.085789114506], [-0.006327628772909453, 0.009682908997613546, 0.9999330989558531, -208.169], [0.0, 0.0, 0.0, 1.0]], [[-0.8508483430943125, -0.5254033989457885, 0.002892305287449951, -25834.812169137433], [0.5254034873024637, -0.8507925680719942, 0.010157836574336176, 42364.44132200648], [-0.0028762100189343187, 0.010162405703066299, 0.9999442249076962, -208.178], [0.0, 0.0, 0.0, 1.0]], [[-0.8524585152290614, -0.5227814843959442, 0.003741575368402853, -25835.381603415175], [0.522792025262381, -0.8524084143214997, 0.009401782595047337, 42364.79484502161], [-0.0017257275341621416, 0.00997069539599965, 0.9999488022382936, -208.181], [0.0, 0.0, 0.0, 1.0]], [[-0.8531956791899208, -0.52159065070131, -0.0005710627229586294, -25835.95433401698], [0.5215570919886778, -0.8531522397048138, 0.010462107004502303, 42365.14398306026], [-0.005944140641289175, 0.008628382678334533, 0.9999451075956083, -208.17600000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.8533494872192581, -0.5213044230846621, -0.006029190243886119, -25836.526832612064], [0.5212015328840508, -0.853332790852015, 0.013119076797813854, 42365.49243709409], [-0.011983938498880808, 0.008052734261041508, 0.999895763911907, -208.173], [0.0, 0.0, 0.0, 1.0]], [[-0.8537597382319496, -0.5205767412104872, -0.009703910798340596, -25837.1006393424], [0.5204411764113436, -0.8537900001132388, 0.01355055726576128, 42365.84651909287], [-0.015339206944610172, 0.006518605472434274, 0.9998610986097045, -208.177], [0.0, 0.0, 0.0, 1.0]], [[-0.8541479982984344, -0.5199630750002495, -0.00834251994678907, -25837.680266710522], [0.519891894744314, -0.8541809700891649, 0.009342810963532323, 42366.201697498975], [-0.011983938498880808, 0.003642934780903398, 0.9999215540452346, -208.191], [0.0, 0.0, 0.0, 1.0]], 
[[-0.8540441040647783, -0.5201952247408224, -0.0023656857431811414, -25838.266200493174], [0.520165537646677, -0.8540305053351261, 0.007727185889623107, 42366.55162339002], [-0.0060400129911796965, 0.005368809353540266, 0.9999673465314713, -208.207], [0.0, 0.0, 0.0, 1.0]], [[-0.8541586094817574, -0.5200092959074608, 0.0018444560221343358, -25838.84333949594], [0.5200108347516621, -0.8541595463284376, 0.0004485051585490714, 42366.91271213999], [0.0013422328673810522, 0.0013422316583044758, 0.9999981984109297, -208.219], [0.0, 0.0, 0.0, 1.0]], [[-0.8540929501346284, -0.52001119990405, 0.010657603139430082, -25839.422534565783], [0.5200830681151837, -0.8540978415832993, 0.00552080271761415, 42367.25682549672], [0.006231756592218552, 0.010258117619707154, 0.9999279655218549, -208.225], [0.0, 0.0, 0.0, 1.0]], [[-0.8538080457365581, -0.5205279560375698, 0.00790367122731285, -25839.987918025156], [0.5205828719584011, -0.8537684044467281, 0.008543125425226835, 42367.609923471246], [0.0023009691572484854, 0.011408705090324728, 0.9999322712009546, -208.216], [0.0, 0.0, 0.0, 1.0]], [[-0.8529830714134315, -0.5219207197484518, -0.004317659014690194, -25840.550961443285], [0.5218764451848467, -0.8529799858150386, 0.008373754247908996, 42367.95535023747], [-0.008053312569169932, 0.004889386079535859, 0.9999556180453354, -208.201], [0.0, 0.0, 0.0, 1.0]], [[-0.851247602692154, -0.5246655510809564, -0.01017734836669132, -25841.119989124894], [0.5246360147864287, -0.851308245378375, 0.005596725811279399, 42368.29540028651], [-0.011600469812676378, -0.0005752040584172157, 0.9999325468452441, -208.202], [0.0, 0.0, 0.0, 1.0]], [[-0.8495261433021757, -0.527350776995067, -0.014369754654492979, -25841.679812919312], [0.5274037743378905, -0.8496147624896938, 0.00011905346899262863, 42368.64869187437], [-0.012271538627189446, -0.007477523806729014, 0.9998967426576812, -208.207], [0.0, 0.0, 0.0, 1.0]], [[-0.8465969928469622, -0.5321276408984452, -0.010663277842018953, -25842.24214562359], 
[0.5321299796294581, -0.8466578702795323, 0.00285227510579011, 42368.99462585132], [-0.010545922531160148, -0.003259522293522638, 0.9999390776604267, -208.21800000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.8422580377079749, -0.5390385280539378, -0.006234034789404266, -25842.80013291607], [0.5390278884647001, -0.8422809718201959, 0.0034205214387728116, 42369.348189457385], [-0.00709460172231337, -0.00047935693418921536, 0.9999747181020784, -208.23000000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.8373191093498135, -0.5466637880268013, -0.007443922206978478, -25843.351120640174], [0.5466800344215179, -0.8373415720818542, -0.00017784368651603587, 42369.7022002782], [-0.006135884819898878, -0.004218355565539191, 0.99997227781264, -208.235], [0.0, 0.0, 0.0, 1.0]], [[-0.8330733728426487, -0.5529891231174316, -0.01384865243940232, -25843.89245342679], [0.5530673760011249, -0.833133317270006, -0.0023137108518771566, 42370.05616918209], [-0.01025831681143178, -0.009586728768967758, 0.9999014259254292, -208.232], [0.0, 0.0, 0.0, 1.0]], [[-0.8286717914906484, -0.5594054246427536, -0.01919981426837082, -25844.429748791146], [0.5595806923839474, -0.8287609984657761, -0.00496549424648596, 42370.412357748435], [-0.01313433282589583, -0.014858610374810165, 0.9998033361611413, -208.235], [0.0, 0.0, 0.0, 1.0]], [[-0.8239170070275499, -0.5664319658913278, -0.017764952779270832, -25844.967455503353], [0.5665492342001607, -0.8240255145969404, -0.001979020075399635, 42370.774004978404], [-0.013517794123881979, -0.011695268690068612, 0.9998402322032712, -208.243], [0.0, 0.0, 0.0, 1.0]], [[-0.8185618330155736, -0.574192956794585, -0.01609266595925053, -25845.496712577933], [0.5742682429243348, -0.8186671975020223, -7.001986845055902e-05, 42371.144091636445], [-0.01313433282589583, -0.009298822596253455, 0.9998705022149329, -208.25300000000001], [0.0, 0.0, 0.0, 1.0]], [[-0.8120655964198925, -0.5833250551135611, -0.016773407166255607, -25846.024489867938], [0.5834117918887275, -0.8121764354556117, 
-0.0003446386521238857, 42371.5126716817], [-0.013421929681990666, -0.010065672723530738, 0.9998592571138373, -208.261], [0.0, 0.0, 0.0, 1.0]], [[-0.8057006099872562, -0.5919792315458774, -0.020177127756835687, -25846.542992938463], [0.5921773521229783, -0.8057903766197885, -0.005277554315937626, 42371.88632726767], [-0.01313433282589583, -0.01620056682008183, 0.9997824918130577, -208.268], [0.0, 0.0, 0.0, 1.0]], [[-0.799858530086119, -0.5997806227970108, -0.022125468711488294, -25847.05436933601], [0.6000185654533614, -0.799965814324832, -0.005693594901169755, 42372.26191141069], [-0.014284710699326846, -0.0178297624448059, 0.9997389892423914, -208.275], [0.0, 0.0, 0.0, 1.0]], [[-0.7928386897152805, -0.6090778595142088, -0.020759892585150836, -25847.561979211492], [0.6092526861240822, -0.7929712433543367, -0.0027877703757284693, 42372.64419130533], [-0.014764028621883715, -0.0148582725330702, 0.9997806035306872, -208.284], [0.0, 0.0, 0.0, 1.0]], [[-0.7837916822497101, -0.6208291133602609, -0.01555026818038254, -25848.066585596895], [0.6209190282115767, -0.78387370093921, -0.0012575295849487226, 42373.03275927038], [-0.011408735291905683, -0.010641098635858269, 0.9998782964835577, -208.298], [0.0, 0.0, 0.0, 1.0]], [[-0.7737984000698772, -0.6333896518543825, -0.007320175754874038, -25848.56527453558], [0.6334274158845834, -0.7737849169719382, -0.005158592105384774, 42373.42974280463], [-0.0023968427310167746, -0.00862851032999068, 0.999959901173246, -208.316], [0.0, 0.0, 0.0, 1.0]], [[-0.7638061505807235, -0.6454338804633122, -0.003907721219282518, -25849.047838605213], [0.6454425696858642, -0.7637695508009451, -0.007743546127633784, 42373.82835051099], [0.002013348545398599, -0.008436777784981917, 0.9999623828965976, -208.323], [0.0, 0.0, 0.0, 1.0]], [[-0.7540690337811727, -0.6567616060952945, -0.006639657480449366, -25849.520298606116], [0.656795104706378, -0.7540348562674016, -0.0071851212611230945, 42374.23118020156], [-0.0002876213944907634, 
-0.009778971977061882, 0.999952143345373, -208.318], [0.0, 0.0, 0.0, 1.0]], [[-0.7442163240190387, -0.6678277891088963, -0.01216992841034321, -25849.985517651567], [0.6679193391128421, -0.7442098603389393, -0.005953168346426521, 42374.64026558721], [-0.005081289467609322, -0.01255897560393108, 0.9999082221029719, -208.312], [0.0, 0.0, 0.0, 1.0]], [[-0.7338067021291242, -0.6792253957255704, -0.013438218327626112, -25850.447344691926], [0.6793160965808759, -0.7338380602157435, -0.003367833861263921, 42375.05296340997], [-0.007573957783145835, -0.01160013707837743, 0.9999040313866436, -208.312], [0.0, 0.0, 0.0, 1.0]], [[-0.7229086264317304, -0.6908240086490189, -0.012857173277555468, -25850.898925103163], [0.6909137836755158, -0.7229252165108009, -0.004156303645587852, 42375.475243008754], [-0.006423500429786138, -0.011887825996032688, 0.9999087049502653, -208.318], [0.0, 0.0, 0.0, 1.0]], [[-0.7114274907705765, -0.7026920983622017, -0.00973346162508214, -25851.34377586023], [0.7027500640312183, -0.7114214931572277, -0.004669751391580535, 42375.899821523555], [-0.0036431963987249745, -0.010162380295306429, 0.9999417248753719, -208.32500000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.6994045676205893, -0.7146671855820901, -0.009168677310871166, -25851.775809481886], [0.7147254761494245, -0.6993619943756993, -0.007764957554474861, 42376.331661451215], [-0.0008628640882971438, -0.011983934037652537, 0.9999278177901374, -208.33], [0.0, 0.0, 0.0, 1.0]], [[-0.6860604913103244, -0.7274595134250635, -0.011120188415938504, -25852.200956462868], [0.7275438702131306, -0.6859998947393185, -0.009168496765721032, 42376.76572342466], [-0.0009587378867845129, -0.014380568313298814, 0.9998961346443194, -208.33], [0.0, 0.0, 0.0, 1.0]], [[-0.6721241705952266, -0.7403062007834849, -0.013993869486361117, -25852.615183123464], [0.7404324857957415, -0.6720745700815578, -0.008689431973849877, 42377.206196828774], [-0.0029720834472962386, -0.01620189282805503, 0.9998643234903276, -208.327], [0.0, 0.0, 
0.0, 1.0]], [[-0.65801687270301, -0.7528117520045102, -0.016978259099517983, -25853.01648592975], [0.7529902692246216, -0.6579703609500408, -0.008981011318596455, 42377.654887379824], [-0.00441018040248823, -0.018694120871884608, 0.999815523060952, -208.326], [0.0, 0.0, 0.0, 1.0]], [[-0.6427937812486949, -0.7658058210730272, -0.018910293458103477, -25853.410356686218], [0.766018380015777, -0.6427626594750651, -0.008485579682398398, 42378.10654140614], [-0.005656524198624812, -0.019940110210535336, 0.9997851747944566, -208.327], [0.0, 0.0, 0.0, 1.0]], [[-0.6263417216517739, -0.7793257099242974, -0.01864096481628025, -25853.792909283125], [0.7795205489688516, -0.6263429977430349, -0.006493297588197355, 42378.56145561904], [-0.006615244031179783, -0.01859803831748467, 0.9998051567766338, -208.329], [0.0, 0.0, 0.0, 1.0]], [[-0.6090114462957522, -0.7930502547685031, -0.01327974745123182, -25854.164505117154], [0.7931539231285089, -0.6089931720582967, -0.00584556346551076, 42379.02439981838], [-0.0034514499288699854, -0.014092898849644651, 0.9998947333071627, -208.33700000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.5897494622067978, -0.8075712875580947, -0.0049180624796468266, -25854.55630986662], [0.8075776069118309, -0.5897040099472348, -0.00822128133029122, 42379.54440212416], [0.003739069583861832, -0.008820213371146085, 0.9999541105444462, -208.34900000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.5712288214150251, -0.8207882718437395, -0.0020606767259887812, -25854.898444525337], [0.820739254868612, -0.5711629556356187, -0.012647277465912876, 42380.02317763515], [0.009203754805349615, -0.00891576768157534, 0.9999178966215833, -208.353], [0.0, 0.0, 0.0, 1.0]], [[-0.5516189942103028, -0.8340932054812535, -0.0022382574519436956, -25855.23216761705], [0.8340114861248552, -0.5515227319030296, -0.015732679556561366, 42380.5021381446], [0.011888071257543355, -0.010545177297048966, 0.9998737285265322, -208.35], [0.0, 0.0, 0.0, 1.0]], [[-0.5326969749103455, -0.8463044170367517, 
-0.0016633176036238038, -25855.54919237391], [0.8462379308345013, -0.532628132215428, -0.013734525461274825, 42380.983755757494], [0.010737659815281426, -0.008723902612261424, 0.9999042935125857, -208.341], [0.0, 0.0, 0.0, 1.0]], [[-0.514184780410724, -0.8576192116657604, -0.010163630049109763, -25855.842410997688], [0.8576790000108085, -0.5141364946964869, -0.007099138093365303, 42381.46868980767], [0.0008628640882971438, -0.012367400818642712, 0.9999231484781999, -208.321], [0.0, 0.0, 0.0, 1.0]], [[-0.49487476783679374, -0.8688662924674675, -0.013051052536751448, -25856.13613345838], [0.8689081951943117, -0.49495767405098284, 0.003930549860382583, 42381.950834206524], [-0.009874840892056811, -0.00939503655546707, 0.9999071060880996, -208.308], [0.0, 0.0, 0.0, 1.0]], [[-0.47551062377752107, -0.8796744382837994, -0.007907547331176358, -25856.41973310818], [0.8796523805753236, -0.4755631402910657, 0.007168608279592744, 42382.44812691729], [-0.010066579502441806, -0.0035471434397354824, 0.9999430392530061, -208.315], [0.0, 0.0, 0.0, 1.0]], [[-0.4561020270663573, -0.8899053058377222, -0.006283911825439768, -25856.683196572314], [0.8899135275533976, -0.45612305645318313, 0.002381354504183894, 42382.95500044907], [-0.004985417076655695, -0.00450599752288974, 0.9999774205465319, -208.327], [0.0, 0.0, 0.0, 1.0]], [[-0.43620230003221716, -0.8998445125920851, -0.0027215077652370675, -25856.945372105645], [0.8998485821073104, -0.43619931606671764, -0.0016388844958439602, 42383.459454916694], [0.0002876213944907634, -0.0031638300903168866, 0.9999949537138141, -208.336], [0.0, 0.0, 0.0, 1.0]], [[-0.41660012400625646, -0.9090735827295465, -0.005436714194992916, -25857.18696828147], [0.9090796023599701, -0.4165596013603936, -0.007237063458922461, 42383.97572303921], [0.004314307709267083, -0.007957367512957211, 0.9999590326864664, -208.34], [0.0, 0.0, 0.0, 1.0]], [[-0.3963588797858475, -0.9180642151158214, -0.007598377371454228, -25857.42579107086], [0.9180652028213081, 
-0.39626474541013396, -0.011425187688266732, 42384.498330427334], [0.007478086892950109, -0.011504280456101423, 0.9999058604426764, -208.34], [0.0, 0.0, 0.0, 1.0]], [[-0.3762933056866148, -0.9264506344205558, -0.009621334480705797, -25857.65122103329], [0.9264900962256734, -0.37621932151778154, -0.00866739363607568, 42385.023981108905], [0.00441018040248823, -0.012175553311854548, 0.9999161495897382, -208.332], [0.0, 0.0, 0.0, 1.0]], [[-0.356672555779366, -0.9341305017786272, -0.013597558627373742, -25857.86230697575], [0.9342289702077747, -0.3566495233016061, -0.00416518346026135, 42385.5630027448], [-0.0009587378867845129, -0.014188839823852575, 0.9998988737097954, -208.321], [0.0, 0.0, 0.0, 1.0]], [[-0.3362517888668494, -0.941683004366676, -0.012955839254735008, -25858.07416103911], [0.941752755624803, -0.336301283518899, 0.0017871700061950625, 42386.11136152829], [-0.0060400129911796965, -0.011600258207986422, 0.9999144724688078, -208.316], [0.0, 0.0, 0.0, 1.0]], [[-0.31585622190697704, -0.9487240646213323, -0.0125497526281876, -25858.27201748586], [0.9487752847692464, -0.3159262703820157, 0.004006331681832521, 42386.67384723153], [-0.007765699819449574, -0.010641470334862445, 0.999913222742567, -208.318], [0.0, 0.0, 0.0, 1.0]], [[-0.2959604897386904, -0.9551218045469411, -0.01223629815873718, -25858.460458814654], [0.9551745423141581, -0.2960221213114069, 0.003535167497196001, 42387.24843425161], [-0.006998730497245779, -0.010641530589612931, 0.9999188845092072, -208.323], [0.0, 0.0, 0.0, 1.0]], [[-0.2775920254324555, -0.9606277334615717, -0.011705772968523527, -25858.640834813767], [0.9606735580766933, -0.2776528181364498, 0.003902229254366725, 42387.832119665945], [-0.006998730497245779, -0.010162198845288035, 0.9999238708452037, -208.327], [0.0, 0.0, 0.0, 1.0]], [[-0.26180244834767713, -0.9650703068076677, -0.009938860966729548, -25858.809955741937], [0.965082911682169, -0.2618701715662786, 0.006243942898077593, 42388.42623407599], [-0.008628535114868158, 
-0.007957145342515804, 0.9999311137372261, -208.33], [0.0, 0.0, 0.0, 1.0]], [[-0.2484455236542317, -0.9686089544256998, -0.008456664976629901, -25858.971161119887], [0.9685717355953711, -0.24852505253088572, 0.010202512939999676, 42389.03081532245], [-0.011983938498880808, -0.005656118003795717, 0.9999121929185493, -208.33100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.23728628893240514, -0.9713975060351171, -0.00906103489905532, -25859.12684861997], [0.9713670217000692, -0.23737234409740274, 0.010023941901343742, 42389.64740817296], [-0.011888071257543355, -0.006423046509171861, 0.9999087049502653, -208.339], [0.0, 0.0, 0.0, 1.0]], [[-0.2282533746811618, -0.9735203619454003, -0.012589750766308755, -25859.277370316697], [0.9735772940490377, -0.22832023564924364, 0.0041379348988845235, 42390.27701554791], [-0.006902858742197426, -0.011312597878933331, 0.9999121839793806, -208.352], [0.0, 0.0, 0.0, 1.0]], [[-0.21900769531425168, -0.9756361861350953, -0.013025425018226155, -25859.42971878717], [0.9757226605008229, -0.21900119123774697, -0.001941139264306386, 42390.91483172881], [-0.0009587378867845129, -0.01313432678949735, 0.999913281380666, -208.363], [0.0, 0.0, 0.0, 1.0]], [[-0.20889299323643765, -0.9778280088442, -0.014700425045166419, -25859.572484619588], [0.9779374464792858, -0.20884705967936218, -0.004610470351147703, 42391.56034694312], [0.001438106496578116, -0.015339191082709284, 0.9998813134900725, -208.366], [0.0, 0.0, 0.0, 1.0]], [[-0.19819166461955104, -0.9800451611950394, -0.015216638706387833, -25859.711637517776], [0.9801590643825411, -0.1981213709217824, -0.006010897837689514, 42392.21599785054], [0.0028762100189343187, -0.016106036205810005, 0.9998661525492619, -208.366], [0.0, 0.0, 0.0, 1.0]], [[-0.18831555313453005, -0.9818350124087886, -0.023178888149996572, -25859.841438830026], [0.9821085699940971, -0.1882654016748193, -0.004346869720708183, 42392.877524936135], [-9.587380176390885e-05, -0.023582767870905158, 0.9997218832594199, -208.358], [0.0, 
0.0, 0.0, 1.0]], [[-0.179644064266845, -0.9834112726403946, -0.02510535834218028, -25859.977783213602], [0.9837222153243667, -0.1796957510361093, -0.0002003419307292165, 42393.546626558404], [-0.004314307709267083, -0.024732688963560914, 0.9996847907443733, -208.353], [0.0, 0.0, 0.0, 1.0]], [[-0.1736048675871335, -0.9845405996359485, -0.023262794727653226, -25860.10154966435], [0.9848071566181466, -0.17365155945502153, -1.3134706896712756e-05, 42394.23056210284], [-0.004026688929534903, -0.022911646979783384, 0.9997293844880917, -208.357], [0.0, 0.0, 0.0, 1.0]], [[-0.16926130238853238, -0.9854097079741828, -0.017841495003881627, -25860.21898237728], [0.9855712061144807, -0.16923180422757453, -0.003161347169790285, 42394.92602722371], [9.587380176390885e-05, -0.0181191574891221, 0.9998358299941539, -208.366], [0.0, 0.0, 0.0, 1.0]], [[-0.16557397964248588, -0.9861115320796302, -0.013011670335287892, -25860.342981072554], [0.9861906430226106, -0.1655096978302207, -0.005878395937587334, 42395.62742907455], [0.0036431963987249745, -0.01380529694405635, 0.999898065252797, -208.371], [0.0, 0.0, 0.0, 1.0]], [[-0.1637760343764957, -0.9863541120014124, -0.016821899471047362, -25860.460164562483], [0.9864834543317432, -0.1636587350207061, -0.008137123640072543, 42396.338090331614], [0.0052730345741723575, -0.017927191339622443, 0.9998253902142373, -208.369], [0.0, 0.0, 0.0, 1.0]], [[-0.16368363332550756, -0.9863207813787578, -0.019467521715858117, -25860.58006989817], [0.9865120956026495, -0.16362669745637012, -0.0044932292608315695, 42397.05025527178], [0.0012463591094312608, -0.019940413734878676, 0.9998003933231133, -208.36], [0.0, 0.0, 0.0, 1.0]], [[-0.163304505011713, -0.9863626167280217, -0.020504315748865493, -25860.701278004686], [0.9865700070175915, -0.1633389763984918, 6.511735606752871e-06, 42397.770269504785], [-0.0033555768787436814, -0.02022787953648911, 0.9997897643971297, -208.354], [0.0, 0.0, 0.0, 1.0]], [[-0.16283015314309662, -0.9864401179129276, 
-0.020548357582176803, -25860.823468655148], [0.9866395066395468, -0.16290575347674413, 0.0020492491907787274, 42398.50004120543], [-0.005368907288018821, -0.01994014182756905, 0.9997867600535771, -208.354], [0.0, 0.0, 0.0, 1.0]], [[-0.16321014520690683, -0.9864359314515675, -0.017510044055311872, -25860.947805122363], [0.9865868513321687, -0.16323716326544782, 0.0001153582543233808, 42399.242014885924], [-0.0029720834472962386, -0.017256351593778762, 0.9998466807714342, -208.362], [0.0, 0.0, 0.0, 1.0]], [[-0.16387261664107033, -0.986337879587014, -0.016833086674826532, -25861.071587145558], [0.9864799984691462, -0.16387835757530447, -0.0010471574542626656, 42399.989770423366], [-0.0017257275341621416, -0.016777103749279106, 0.9998577652117644, -208.365], [0.0, 0.0, 0.0, 1.0]], [[-0.1642498289689644, -0.9862602561211204, -0.01768335034906796, -25861.19785250965], [0.98641055319777, -0.1642952455900748, 0.0011370209172676337, 42400.74199325921], [-0.004026688929534903, -0.01725628790901876, 0.9998429908259926, -208.364], [0.0, 0.0, 0.0, 1.0]], [[-0.1644345077778163, -0.9861771486274454, -0.02039421920321183, -25861.326143945247], [0.9863519215492729, -0.16457018738454499, 0.005151725950875572, 42401.50117993767], [-0.008436794884578543, -0.019268755778646642, 0.9997787432941443, -208.36100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.16471509520422614, -0.9861071908357435, -0.0214836122172527, -25861.456711914092], [0.9862858276324495, -0.16489754194654072, 0.00700477489738459, 42402.26932586091], [-0.010450053743254294, -0.02003519009212081, 0.9997446611684084, -208.366], [0.0, 0.0, 0.0, 1.0]], [[-0.16519043729568708, -0.9860126344841563, -0.022163123960155746, -25861.58682945311], [0.9862222478602334, -0.16534308722031343, 0.005228894143522945, 42403.04671032328], [-0.008820275027912486, -0.020994002621448387, 0.9997406936812978, -208.376], [0.0, 0.0, 0.0, 1.0]], [[-0.1654787303349862, -0.9859668340361754, -0.022049761617804555, -25861.71899382119], [0.9862021701911379, 
-0.16554199014334023, 0.0010625486681222073, 42403.830681030624], [-0.004697799166715411, -0.02156969355515612, 0.9997563098090083, -208.388], [0.0, 0.0, 0.0, 1.0]], [[-0.16576410593944382, -0.985920080054976, -0.02199674808889854, -25861.853468042682], [0.986165358402702, -0.16573152613633066, -0.003308644887711946, 42404.61843214948], [-0.0003834951982431209, -0.022240885524465265, 0.9997525673598046, -208.39600000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.16557507847520583, -0.9859184368053713, -0.02344630792219806, -25861.98694349975], [0.9861971878827933, -0.1655282782692366, -0.003936458470852143, 42405.40432908392], [-0.0, -0.023774462359331516, 0.9997173475235511, -208.39600000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1649130649590845, -0.9860044235745505, -0.0244736122633967, -25862.12119558434], [0.9863075427066411, -0.16488872592454207, -0.003023121394949799, 42406.18906358595], [-0.001054611676459362, -0.02463706058765047, 0.9996959052831077, -208.395], [0.0, 0.0, 0.0, 1.0]], [[-0.1634942653839673, -0.9862327118357753, -0.024792403909453063, -25862.255127100692], [0.9865426027068199, -0.1634887538643295, -0.0022628310459219493, 42406.97107991165], [-0.0018216012216118254, -0.024828722579731604, 0.9996900601206596, -208.395], [0.0, 0.0, 0.0, 1.0]], [[-0.1624540980480092, -0.9864334615866983, -0.023615501042571582, -25862.386557767633], [0.9867160275169552, -0.16241685344670542, -0.0034995367737282934, 42407.75342232415], [-0.0003834951982431209, -0.02387030746671062, 0.9997149900611062, -208.398], [0.0, 0.0, 0.0, 1.0]], [[-0.1619807319269902, -0.9865458541342076, -0.022125102824699833, -25862.516440045845], [0.9867922396600948, -0.16189834714117204, -0.00547731134504784, 42408.53505383845], [0.0018216012216118254, -0.022720098669758295, 0.9997402056961728, -208.401], [0.0, 0.0, 0.0, 1.0]], [[-0.1617914123217341, -0.9867116673516103, -0.014954076739932485, -25862.64896636451], [0.9868222962823023, -0.16173706118832895, -0.004783157773554638, 42409.31216089552], 
[0.0023009691572484854, -0.015530890198823024, 0.9998767408988818, -208.40200000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1616969611104298, -0.9867664736961209, -0.012083838668850955, -25862.772176307546], [0.9868385967612381, -0.1616607405292852, -0.003922870611383037, 42410.086697497565], [0.0019174748923177283, -0.012559114652148116, 0.9999192927077623, -208.4], [0.0, 0.0, 0.0, 1.0]], [[-0.16112949170859717, -0.9868856100083222, -0.009699467007560178, -25862.90115196208], [0.9869332318731914, -0.16111883900998358, -0.0018749774431799599, 42410.85759591365], [0.0002876213944907634, -0.009874840483603431, 0.9999512012090174, -208.397], [0.0, 0.0, 0.0, 1.0]], [[-0.16046681731154241, -0.9870010768346086, -0.008903643592772813, -25863.029312973595], [0.9870395545827397, -0.16047666287412882, 0.0003979470941234272, 42411.626403382455], [-0.0018216012216118254, -0.008724391102321578, 0.9999602825957055, -208.395], [0.0, 0.0, 0.0, 1.0]], [[-0.16018269302290342, -0.9870608053085895, -0.007243720003493939, -25863.15299405685], [0.9870837342964538, -0.1601977450157222, 0.0015440140043322635, 42412.394930782415], [-0.002684463316608952, -0.006902833869967699, 0.9999725718944823, -208.39600000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1595199538184389, -0.9871787324800295, -0.005615556329106277, -25863.2772717441], [0.987189634524236, -0.15953478428909443, 0.002297409353479113, 42413.15992414405], [-0.003163830221182504, -0.005177136366211648, 0.9999815935492898, -208.398], [0.0, 0.0, 0.0, 1.0]], [[-0.15838372585959642, -0.9873722605950419, -0.0032579733376769298, -25863.40325007548], [0.9873701882258014, -0.15839448465814943, 0.0033613439016186948, 42413.9217130815], [-0.0038349427346299368, -0.0026844435766229108, 0.9999890434284299, -208.4], [0.0, 0.0, 0.0, 1.0]], [[-0.15762437720431222, -0.9874946288413649, -0.0029855854379887523, -25863.524380758572], [0.9874788690524293, -0.15763979383198296, 0.005931152969276579, 42414.68217569862], [-0.006327628772909453, -0.002013308238878807, 
0.999977953609002, -208.4], [0.0, 0.0, 0.0, 1.0]], [[-0.15724371987564673, -0.9875571306619775, -0.0023078644568141097, -25863.646065717112], [0.9875269903735961, -0.15725711921889832, 0.00778728056774665, 42415.44023071312], [-0.008053312569169932, -0.0010545774770400103, 0.9999670154675144, -208.403], [0.0, 0.0, 0.0, 1.0]], [[-0.15667580361339606, -0.9876496168954138, -0.0009627089066268703, -25863.767520333186], [0.9876180292429624, -0.15667849732976785, 0.007904226004677212, 42416.19683939803], [-0.007957441570230497, 0.0002876122881272282, 0.9999682976988962, -208.41], [0.0, 0.0, 0.0, 1.0]], [[-0.15610783241315615, -0.9877395826086022, -0.000928228124023254, -25863.88646516205], [0.987709490976765, -0.15611032709947983, 0.007715387696885641, 42416.95127685793], [-0.007765699819449574, 0.00028761272169862477, 0.9999698051367534, -208.416], [0.0, 0.0, 0.0, 1.0]], [[-0.15563673434036776, -0.9878134711512175, -0.0013240604398744773, -25864.005502747124], [0.9877992423104128, -0.15564162818606242, 0.005323576482565493, 42417.70617794193], [-0.005464779486860826, -0.00047936184052352554, 0.9999849530854881, -208.425], [0.0, 0.0, 0.0, 1.0]], [[-0.15535381924014385, -0.987857399317748, -0.0017173993933687984, -25864.12409656753], [0.9878518159147898, -0.15535905192541258, 0.003514936512829534, 42418.45644334804], [-0.003739069583861832, -0.0011504772977356248, 0.9999923478510395, -208.431], [0.0, 0.0, 0.0, 1.0]], [[-0.1550683417709215, -0.9878998991231228, -0.0027565726800728523, -25864.2404838355], [0.9878880951473226, -0.15508098760405212, 0.005196032134834907, 42419.2012602925], [-0.005560651635471955, -0.0019174452471150498, 0.9999827011289312, -208.431], [0.0, 0.0, 0.0, 1.0]], [[-0.15468685416641775, -0.9879585600917363, -0.0031401734921046196, -25864.358095565356], [0.9879307269767252, -0.15470684235015775, 0.00765974054931219, 42419.942105563394], [-0.008053312569169932, -0.0019174127115839069, 0.9999657332554736, -208.43200000000002], [0.0, 0.0, 0.0, 1.0]], 
[[-0.15411732502848835, -0.9880474086270917, -0.003188797791013868, -25864.47467221937], [0.9880114542361474, -0.1541395482793854, 0.008623569701705679, 42420.67818522794], [-0.009012015547946579, -0.0018215272481410278, 0.9999577319138283, -208.43800000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1537406838312869, -0.9880995704592422, -0.004800103472745459, -25864.58713854296], [0.9880850667974507, -0.15377014970321, 0.006530071372487861, 42421.41158775701], [-0.007190473347818418, -0.0037389729222289778, 0.9999671580479637, -208.446], [0.0, 0.0, 0.0, 1.0]], [[-0.15326879022696643, -0.9881643400401999, -0.0063178331157322815, -25864.70152750421], [0.9881699513630714, -0.15329813148764573, 0.004453100661522836, 42422.13707640126], [-0.005368907288018821, -0.005560571491542661, 0.9999701269934118, -208.453], [0.0, 0.0, 0.0, 1.0]], [[-0.15307550029780226, -0.9881939556040211, -0.006371602330305301, -25864.814748588182], [0.9881751332415776, -0.15312376893778026, 0.007938351798689623, 42422.85120748239], [-0.008820275027912486, -0.005081091808587414, 0.9999481912851609, -208.453], [0.0, 0.0, 0.0, 1.0]], [[-0.15325228399865068, -0.9881640207930352, -0.006753181423513684, -25864.921970023766], [0.9880635307486215, -0.1533376113956031, 0.014766046730895845, 42423.55626822488], [-0.015626792817622637, -0.0044096418939623344, 0.9998681705127943, -208.451], [0.0, 0.0, 0.0, 1.0]], [[-0.15286363513631274, -0.9882159310271956, -0.007872910323713653, -25865.030133815337], [0.9880594099920783, -0.1529850597585926, 0.018280421679106894, 42424.25431233008], [-0.019269441585535726, -0.004984491419676117, 0.9998019021117478, -208.458], [0.0, 0.0, 0.0, 1.0]], [[-0.15153694833147857, -0.9884203195967116, -0.007862893787742254, -25865.13983045848], [0.9882618775065588, -0.1516589158133042, 0.018385720588120363, 42424.94193257729], [-0.019365297766729923, -0.004984482186509822, 0.9998000500998878, -208.472], [0.0, 0.0, 0.0, 1.0]], [[-0.1503154098212332, -0.9885836320487271, 
-0.010376898169566296, -25865.241576347933], [0.9885175953029112, -0.15045268056873146, 0.01403405451867909, 42425.62156974122], [-0.01543506873404564, -0.008148211768854078, 0.9998476710470177, -208.489], [0.0, 0.0, 0.0, 1.0]], [[-0.14871846355737015, -0.9888486239799116, -0.00782415811258342, -25865.348446391414], [0.9888568043551703, -0.14876408014137177, 0.005609718368326576, 42426.29516369579], [-0.006711115973933693, -0.006902703291251735, 0.9999536557309331, -208.51], [0.0, 0.0, 0.0, 1.0]], [[-0.1479631531648268, -0.9889613773156486, -0.007893002182773948, -25865.444761741524], [0.9889918286594821, -0.14796988953490348, 0.00027319553795685696, 42426.95605918316], [-0.001438106496578116, -0.007765691789128234, 0.9999688124540389, -208.52100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1478684013436023, -0.9889808303862274, -0.007200903599915454, -25865.54179749394], [0.9890062600777902, -0.1478735400034708, 0.0001835586669734846, 42427.60594213957], [-0.0012463591094312608, -0.007094596211894948, 0.9999740563102427, -208.522], [0.0, 0.0, 0.0, 1.0]], [[-0.1474886547421367, -0.9890379567688812, -0.0071426040623391845, -25865.63972231173], [0.9890598396714654, -0.1475045265451217, 0.0017459088620435821, 42428.24901207018], [-0.0027803365641348716, -0.00680696107936877, 0.9999729671393393, -208.52100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1470127964152949, -0.9891036706258345, -0.007820897944342056, -25865.735663113584], [0.9891194942348043, -0.14704964088378134, 0.004362251785019234, 42428.88432180238], [-0.005464779486860826, -0.007094495785586347, 0.9999599013534034, -208.519], [0.0, 0.0, 0.0, 1.0]], [[-0.14748653445154494, -0.9890332042493817, -0.007813005034817355, -25865.829368326577], [0.9890456209895171, -0.14752742270705305, 0.004941573724519288, 42429.511931203495], [-0.0060400129911796965, -0.006998602833088992, 0.999957267987713, -208.522], [0.0, 0.0, 0.0, 1.0]], [[-0.14834035531016096, -0.988893999266994, -0.009154081067251381, -25865.920421344683], 
[0.9889212684393107, -0.14838288591174456, 0.004152589178694458, 42430.13113541156], [-0.005464779486860826, -0.008436668906197849, 0.9999494781252337, -208.526], [0.0, 0.0, 0.0, 1.0]], [[-0.1488140454975921, -0.988832993099527, -0.007980703006095108, -25866.01639587597], [0.9888479041874059, -0.14885432684077682, 0.004712935916628429, 42430.739474224254], [-0.005848268702414403, -0.007190350381594898, 0.9999570473848236, -208.529], [0.0, 0.0, 0.0, 1.0]], [[-0.15108824435342297, -0.9884813683536862, -0.008770794480801378, -25866.104083770373], [0.9884981441381709, -0.15113829042162796, 0.005351280577520545, 42431.33685247802], [-0.006615244031179783, -0.007861398479389325, 0.9999472170871602, -208.532], [0.0, 0.0, 0.0, 1.0]], [[-0.156110673599968, -0.9877367187475911, -0.002373188973975667, -25866.198378679812], [0.9877274674430083, -0.15612020393709894, 0.004575148222705507, 42431.92738057938], [-0.004889544639877367, -0.001629834464162404, 0.99998671790821, -208.53900000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1654794581610623, -0.9862078034064133, 0.003273748160487453, -25866.287837257696], [0.9862065078099624, -0.16546515462163963, 0.004243413639435338, 42432.505016325726], [-0.0036431963987249745, 0.003930789530610641, 0.9999856379036983, -208.54500000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.1791735729045439, -0.9837906127806741, 0.0072705554977548666, -25866.38287757756], [0.9838140728691613, -0.17914861519996889, 0.003955211312433048, 42433.07088515939], [-0.002588589811578258, 0.007861544158708586, 0.9999657470764827, -208.549], [0.0, 0.0, 0.0, 1.0]], [[-0.1968765461463832, -0.980361229983385, 0.011466661368304834, -25866.488575241543], [0.9804278180463544, -0.1968515497453447, 0.0032803907877863524, 42433.62478572335], [-0.0009587378867845129, 0.011888065793913324, 0.9999288748272769, -208.553], [0.0, 0.0, 0.0, 1.0]], [[-0.21797827161519992, -0.9758950815998445, 0.01068937850780159, -25866.596584482962], [0.9759519224186403, -0.21798526004352145, 
0.000521086479174868, 42434.1679882098], [0.0018216012216118254, 0.010545905034242044, 0.9999427311881406, -208.556], [0.0, 0.0, 0.0, 1.0]], [[-0.24176825527902443, -0.9702344056900749, 0.013903551868634923, -25866.722489475942], [0.9703221792018573, -0.2418112720296947, -0.0014755569592442416, 42434.70120214387], [0.00479367169249964, 0.013134181916218589, 0.9999022522112331, -208.559], [0.0, 0.0, 0.0, 1.0]], [[-0.26892125273573875, -0.963030947580696, 0.015898233514068237, -25866.852728849255], [0.9631292950329656, -0.2690118100806648, -0.0038219218237714, 42435.22795873286], [0.007957441570230497, 0.014284258431967343, 0.9998663106059256, -208.56], [0.0, 0.0, 0.0, 1.0]], [[-0.29815692711284125, -0.9543638918198897, 0.01708826512615581, -25867.000638416313], [0.9544890549395034, -0.2982371175779911, -0.002294711194250592, 42435.74103803764], [0.007286344441580877, 0.01562637799253094, 0.999851351699598, -208.555], [0.0, 0.0, 0.0, 1.0]], [[-0.3300159494014289, -0.9437312128913358, 0.02146790523832457, -25867.16560020564], [0.9439606285471089, -0.3300524854741743, 0.001920568995601728, 42436.24321338804], [0.0052730345741723575, 0.02089867572283309, 0.9997676932464918, -208.548], [0.0, 0.0, 0.0, 1.0]], [[-0.3643619374012749, -0.9310255956042638, 0.020777846445565358, -25867.335501675632], [0.931252976365239, -0.36433840754170255, 0.005041706145728966, 42436.73872791111], [0.0028762100189343187, 0.02118643716395836, 0.99977140502028, -208.542], [0.0, 0.0, 0.0, 1.0]], [[-0.39927198099755, -0.9166336163895236, 0.019097080796701596, -25867.528120350024], [0.916830718813065, -0.39922595350836293, 0.006330172671969587, 42437.22074593528], [0.0018216012216118254, 0.020036250896864956, 0.9997975947255461, -208.538], [0.0, 0.0, 0.0, 1.0]], [[-0.4345622882033915, -0.9005024331836803, 0.015842521948983748, -25867.73849222828], [0.9006417758907516, -0.4344969225090456, 0.007537628972144069, 42437.68965095837], [9.587380176390885e-05, 0.017544006396484037, 0.9998460874793551, 
-208.53300000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.47038021312424766, -0.8824115090923689, 0.009611645151347435, -25867.96178014573], [0.8824600581918565, -0.4703183379054308, 0.008056471057374445, 42438.14732775968], [-0.002588589811578258, 0.012271497512574657, 0.9999213516830142, -208.528], [0.0, 0.0, 0.0, 1.0]], [[-0.5057729016644855, -0.8626053299620712, 0.010286722651697305, -25868.20854707111], [0.86266576292957, -0.5057191867417157, 0.007475669230294172, 42438.59269479717], [-0.0012463591094312608, 0.012654994362861138, 0.9999191455846049, -208.53], [0.0, 0.0, 0.0, 1.0]], [[-0.5414613039361097, -0.8405777553952716, 0.015770018213522703, -25868.469206326215], [0.8407244424836906, -0.541397333279031, 0.00844626123450799, 42439.02653615879], [0.001438106496578116, 0.017831563391943233, 0.9998399707936784, -208.534], [0.0, 0.0, 0.0, 1.0]], [[-0.5780772658063301, -0.8157530104876815, 0.01933133824033553, -25868.739372194872], [0.8159779249223994, -0.5779872657667591, 0.010523623463471486, 42439.44681087558], [0.002588589811578258, 0.02185741274146095, 0.9997577470122634, -208.535], [0.0, 0.0, 0.0, 1.0]], [[-0.6103068401530896, -0.791966800263581, 0.01772422473950874, -25868.99909618103], [0.7921648253140763, -0.6101727933974016, 0.012808268139407126, 42439.80692247738], [0.0006711165848251493, 0.02185748105059542, 0.9997608714710996, -208.53], [0.0, 0.0, 0.0, 1.0]], [[-0.6449291348390295, -0.7641308245384124, 0.013057336099950356, -25869.304971474114], [0.7642379931922832, -0.6447734284997405, 0.014405403920809025, 42440.18868187609], [-0.002588589811578258, 0.01926937702511734, 0.9998109772911334, -208.524], [0.0, 0.0, 0.0, 1.0]], [[-0.6793254467465412, -0.7338227808953313, 0.004589514315793068, -25869.621110886557], [0.7338167847547317, -0.6792467512561604, 0.011695183631715336, 42440.56101882516], [-0.005464779486860826, 0.011312698484198939, 0.9999210764046158, -208.519], [0.0, 0.0, 0.0, 1.0]], [[-0.7135594944803137, -0.7005931320727432, 
-0.0014529726435494527, -25869.9534503047], [0.7005438375498529, -0.7135311935017871, 0.010562555128406398, 42440.91359323017], [-0.008436794884578543, 0.006519140466279027, 0.9999431590343806, -208.516], [0.0, 0.0, 0.0, 1.0]], [[-0.7470656939920567, -0.6647482268445574, -0.0016259665382547398, -25870.300057907243], [0.6646796870518651, -0.7470198270950107, 0.012739369983441728, 42441.24489886814], [-0.009683102849878996, 0.008436399347897505, 0.999917528941883, -208.518], [0.0, 0.0, 0.0, 1.0]], [[-0.7789418325409236, -0.6270961793655874, 5.781756401714716e-05, -25870.66016310125], [0.6269834888412639, -0.7788001005628931, 0.01902913775409465, 42441.5613947501], [-0.011888071257543355, 0.014858842091851938, 0.9998189278931786, -208.52100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.8085598322000208, -0.5884058676197996, 0.003087507613764421, -25871.035059029913], [0.5882114895221356, -0.8081328304577667, 0.03047247824626153, 42441.85407293017], [-0.01543506873404564, 0.026454929349919314, 0.9995308376264667, -208.522], [0.0, 0.0, 0.0, 1.0]], [[-0.836847265427225, -0.5474260138437254, 0.003378418882442875, -25871.416538581907], [0.5471733843620615, -0.8362379223148919, 0.036158328615139984, 42442.13326019868], [-0.01696884771407416, 0.03210757931769783, 0.9993403632184653, -208.52700000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.8632497970740202, -0.5047720636019852, 0.002225232270150597, -25871.807611041968], [0.5044482144386156, -0.8625201158828124, 0.03988795115310143, 42442.382684947464], [-0.018215015820889736, 0.0355557801840019, 0.9992016811906151, -208.531], [0.0, 0.0, 0.0, 1.0]], [[-0.8876578411258186, -0.46048573092193745, -0.004055700326686829, -25872.203271996957], [0.46011615430368485, -0.887237822551704, 0.033198957551383544, 42442.61154854872], [-0.018886016960667086, 0.027603221750365465, 0.999440533754941, -208.535], [0.0, 0.0, 0.0, 1.0]], [[-0.9092852206441698, -0.41614327281030083, -0.005016374469179597, -25872.60884010287], [0.41585049549531794, 
-0.9089882855901602, 0.028436983949778966, 42442.808614911155], [-0.016393685198332824, 0.023771267416631083, 0.9995830000210212, -208.544], [0.0, 0.0, 0.0, 1.0]], [[-0.928806202961135, -0.3705043277629191, -0.006751329488792841, -25873.013367710144], [0.3703013880930794, -0.9286775011386519, 0.02085619464829937, 42442.99361777956], [-0.013997118176876577, 0.016871336278331556, 0.9997596904731286, -208.55100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9459413906790162, -0.32428747481407494, -0.005702550215430464, -25873.42056900257], [0.3240057566007664, -0.9456226089371284, 0.02860334170169196, 42443.14706143583], [-0.014668165863996148, 0.025209425730261175, 0.9995745743887929, -208.55700000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9588935509510584, -0.28366660551840234, -0.007510982501443539, -25873.78605751158], [0.2832639408376249, -0.9584354174333026, 0.034104111637593505, 42443.27212220458], [-0.01687298923156206, 0.030574622207282017, 0.9993900613430542, -208.559], [0.0, 0.0, 0.0, 1.0]], [[-0.9705931524662867, -0.24056634696667528, -0.00876156907549135, -25874.188644893526], [0.24005736660208182, -0.9699671179371507, 0.03919503617514951, 42443.38695947161], [-0.017927440576644017, 0.035939154522703826, 0.9991931665331604, -208.56300000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9801602250595209, -0.1980646777346282, -0.0075044416945678274, -25874.590515584354], [0.19760488826049968, -0.9794314897540665, 0.04081990952569038, 42443.478801886566], [-0.01543506873404564, 0.038527137345097454, 0.9991383379398308, -208.572], [0.0, 0.0, 0.0, 1.0]], [[-0.9878720006574763, -0.15516540922911717, -0.00571017475703349, -25874.986671604987], [0.15487165897912367, -0.9873023303362678, 0.03533946459171905, 42443.55825493173], [-0.011121131329558479, 0.03402652335070305, 0.9993590526665652, -208.58100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9935745224732333, -0.11305955811877366, -0.005215803876110732, -25875.374979393273], [0.11288604482239903, -0.9932609659287586, 0.026256322031558876, 
42443.6149778201], [-0.008149182562792977, 0.025498821154266063, 0.9996416362593652, -208.585], [0.0, 0.0, 0.0, 1.0]], [[-0.9972385797153394, -0.07414671793082674, -0.004180830948399298, -25875.75870139491], [0.0740413740065863, -0.9970263268178394, 0.021363018685184254, 42443.648444483035], [-0.005752396244130235, 0.020994471944136662, 0.9997630429683011, -208.589], [0.0, 0.0, 0.0, 1.0]], [[-0.999227416499634, -0.03928944661528834, 0.0009536771629896957, -25876.13860663987], [0.039298095638721546, -0.99915485171926, 0.012051637442006944, 42443.67263156913], [0.00047936899847047203, 0.01207980424212649, 0.9999269215971911, -208.597], [0.0, 0.0, 0.0, 1.0]], [[-0.9999886716361658, -0.004685538172799089, 0.0008380521272126497, -25876.506080283958], [0.00469769359951217, -0.9998742919406933, 0.015143711267130941, 42443.68292629497], [0.0007669903400861504, 0.015147476625773969, 0.9998849762236103, -208.597], [0.0, 0.0, 0.0, 1.0]], [[-0.9996531427849141, 0.026232047802395693, -0.002339612860462064, -25876.866285445354], [-0.026266278957654876, -0.9995261247579426, 0.01605018741599397, 42443.689031028975], [-0.0019174748923177283, 0.016106073216731066, 0.9998684501951117, -208.59300000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9987637274878097, 0.049456797888808794, -0.0050041779786230344, -25877.21730980242], [-0.0495459628115246, -0.998579102728346, 0.01962073299676796, 42443.668503105204], [-0.004026688929534903, 0.019844413239926136, 0.9997949715013712, -208.591], [0.0, 0.0, 0.0, 1.0]], [[-0.997522320892344, 0.07026010372002972, -0.0035689139527467426, -25877.565362995545], [-0.07031304901999938, -0.9973641339512409, 0.017912549935796437, 42443.64813015262], [-0.0023009691572484854, 0.01811910960676331, 0.9998331877908412, -208.594], [0.0, 0.0, 0.0, 1.0]], [[-0.9959067779624488, 0.09038609970639554, -0.00020636937583104595, -25877.903736733893], [-0.09038125043457962, -0.9958239375062633, 0.012880801970514891, 42443.627171654996], [0.0009587378867845129, 
0.01284672991026686, 0.9999170177331103, -208.597], [0.0, 0.0, 0.0, 1.0]], [[-0.994315760941942, 0.106438883834205, 0.002632783807190107, -25878.23030131742], [-0.10640909107048656, -0.9942757575919693, 0.009634479875348096, 42443.59331218086], [0.0036431963987249745, 0.009299563056628374, 0.9999501213795398, -208.6], [0.0, 0.0, 0.0, 1.0]], [[-0.9929785451393636, 0.1181697213349074, 0.005433769643920284, -25878.550968725103], [-0.11812523018824175, -0.9929673791836223, 0.007887576952617133, 42443.556868019376], [0.006327628772909453, 0.007190329397106571, 0.9999541290865663, -208.602], [0.0, 0.0, 0.0, 1.0]], [[-0.9917850664391132, 0.12778856412305897, 0.0056977949884546766, -25878.8588682832], [-0.1277342824422827, -0.9917671627572106, 0.009046986540707631, 42443.518199809725], [0.006806987389349617, 0.008244862392995989, 0.9999428419498795, -208.6], [0.0, 0.0, 0.0, 1.0]], [[-0.9905476957015329, 0.13710198646602545, 0.004278767048231882, -25879.15552136619], [-0.13704806630461364, -0.9905028216472895, 0.011044810135266384, 42443.479942470556], [0.005752396244130235, 0.010354014478820974, 0.9999298497002791, -208.597], [0.0, 0.0, 0.0, 1.0]], [[-0.9894273878869706, 0.14498125697473171, 0.003725483216041022, -25879.44200081182], [-0.14492612008994854, -0.9893663973700301, 0.012269941757049653, 42443.44009440208], [0.005464779486860826, 0.011600296594241857, 0.9999177812720832, -208.595], [0.0, 0.0, 0.0, 1.0]], [[-0.9886556682837112, 0.15010755482588634, 0.005262276568358657, -25879.717825174626], [-0.15004544142748755, -0.9886213117843956, 0.010689592720551594, 42443.39761403146], [0.006806987389349617, 0.009778745824204732, 0.9999290180071718, -208.595], [0.0, 0.0, 0.0, 1.0]], [[-0.9881529270107219, 0.1533175580518737, 0.006893419554424778, -25879.980033518066], [-0.15326601698941406, -0.9881567133012961, 0.007472482437631809, 42443.35583183038], [0.007957441570230497, 0.006327428434238459, 0.9999483200511244, -208.594], [0.0, 0.0, 0.0, 1.0]], [[-0.9877593008629605, 
0.15584589715444291, 0.006604536232178973, -25880.22875430302], [-0.1558243568416604, -0.9877779891656767, 0.0036625038832695128, 42443.3183017026], [0.00709460172231337, 0.00258852466452944, 0.9999714826766125, -208.59], [0.0, 0.0, 0.0, 1.0]], [[-0.9875778135142333, 0.1570723499186926, 0.004282422849973707, -25880.46426287476], [-0.15705757850594385, -0.9875829101888332, 0.003593401842880814, 42443.27997198332], [0.00479367169249964, 0.002876176972114858, 0.9999843740367794, -208.585], [0.0, 0.0, 0.0, 1.0]], [[-0.9876478676936403, 0.15668888178722837, 0.0005326954790095646, -25880.690580263534], [-0.15668028371394796, -0.9876197004450813, 0.007656107894006601, 42443.24305998586], [0.0017257275341621416, 0.007478075757563711, 0.9999705496900606, -208.58100000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9877386229418249, 0.15611592568818408, -0.00048009947812627284, -25880.90893963886], [-0.15611242465758418, -0.9876844682007709, 0.010406831529839086, 42443.211972163546], [0.0011504853400251943, 0.010354178938077958, 0.9999457322085038, -208.58], [0.0, 0.0, 0.0, 1.0]], [[-0.9876734711992424, 0.15651722557914294, 0.001863433993195865, -25881.113047746076], [-0.15649026098332067, -0.9876258637642001, 0.010293271656273428, 42443.17907293308], [0.0034514499288699854, 0.009874782074827938, 0.9999452865894028, -208.583], [0.0, 0.0, 0.0, 1.0]], [[-0.9874206923207304, 0.15803771796248825, 0.0049453086919345355, -25881.307288756736], [-0.15800330239867164, -0.9874157577549869, 0.006713997934101828, 42443.14786241442], [0.005944140641289175, 0.005848165383624268, 0.9999652324724507, -208.585], [0.0, 0.0, 0.0, 1.0]], [[-0.9873868471232673, 0.15819598793057418, 0.006422112630861616, -25881.490027541775], [-0.15819200962838745, -0.9874077274415441, 0.0011260010913581803, 42443.11799708585], [0.006519372493271016, 9.58717643176622e-05, 0.9999787440695422, -208.584], [0.0, 0.0, 0.0, 1.0]], [[-0.9875850324457565, 0.15694319195652182, 0.006681181603031318, -25881.663461631484], 
[-0.15696167971479436, -0.987601931468132, -0.002335821381319208, 42443.08913220996], [0.006231756592218552, -0.0033555117215489996, 0.999974952561744, -208.582], [0.0, 0.0, 0.0, 1.0]], [[-0.9876168688046711, 0.15675459481758683, 0.006396675388909113, -25881.827937282465], [-0.15677240715283242, -0.9876318836009846, -0.0023821944248443665, 42443.06435705908], [0.005944140641289175, -0.0033555175972432997, 0.9999767035754838, -208.58], [0.0, 0.0, 0.0, 1.0]], [[-0.9874223396628445, 0.15799452808461978, 0.005903577734937827, -25881.982944757718], [-0.1580035660000892, -0.9874379350889109, -0.0010942936549452314, 42443.042139753255], [0.005656524198624812, -0.002013316335323132, 0.9999819749832115, -208.579], [0.0, 0.0, 0.0, 1.0]], [[-0.9873647038358937, 0.15838439701462237, 0.005032335578285708, -25882.12859157166], [-0.1583828340332511, -0.9873775233368269, 0.0007101357825758452, 42443.01643650982], [0.005081289467609322, -9.587256404897997e-05, 0.9999870855695077, -208.578], [0.0, 0.0, 0.0, 1.0]], [[-0.9875057751030801, 0.1575330193345196, 0.003961307538536097, -25882.26655928384], [-0.1575313616849983, -0.9875137156656171, 0.0007290116303619751, 42442.99288142726], [0.004026688929534903, 9.587302450112209e-05, 0.9999918882594139, -208.576], [0.0, 0.0, 0.0, 1.0]], [[-0.987539875299688, 0.15734251739593366, 0.002885639025740325, -25882.397536156306], [-0.15734269003993653, -0.9875440493943185, 0.0001685137037544773, 42442.97305506505], [0.0028762100189343187, -0.0002876202048022194, 0.9999958223365459, -208.573], [0.0, 0.0, 0.0, 1.0]], [[-0.987405829590953, 0.1581945974714192, 0.0020486631391374672, -25882.522494291967], [-0.15819505086265107, -0.9874078803933484, -6.016376418183013e-05, 42442.954173591796], [0.002013348545398599, -0.0003834944209795643, 0.9999978996776262, -208.572], [0.0, 0.0, 0.0, 1.0]], [[-0.9873456017817961, 0.15857207953587502, 0.0018863281291725924, -25882.639359131026], [-0.15857390865656498, -0.9873467334118711, -0.0008622727451109027, 
42442.935460109446], [0.0017257275341621416, -0.0011504836268732893, 0.9999978491236379, -208.571], [0.0, 0.0, 0.0, 1.0]], [[-0.9874371158700929, 0.15800404546575286, 0.001632120892934758, -25882.74710483629], [-0.15800593043255481, -0.987437552471604, -0.0010981425365894104, 42442.916543257896], [0.001438106496578116, -0.001342231479410684, 0.9999980651303082, -208.57], [0.0, 0.0, 0.0, 1.0]], [[-0.9874373700069138, 0.15800416055126976, 0.0014579308343807417, -25882.848963879107], [-0.15800597109856201, -0.9874373990797624, -0.0012231090689864344, 42442.901330354674], [0.0012463591094312608, -0.0014381053795924355, 0.9999981892193043, -208.57], [0.0, 0.0, 0.0, 1.0]], [[-0.9873612448902636, 0.15847777400388688, 0.0016022595051636101, -25882.94406982402], [-0.15847934861821408, -0.9873618746604472, -0.0009080355465443196, 42442.88665501285], [0.001438106496578116, -0.0011504841503362812, 0.9999983041165241, -208.57], [0.0, 0.0, 0.0, 1.0]], [[-0.9873457606025391, 0.1585713113518252, 0.0018676820810445266, -25883.032663249865], [-0.15857393416417442, -0.9873461818426855, -0.0013507791701046206, 42442.87170954186], [0.0016298539472650948, -0.0016298517824721761, 0.9999973435761105, -208.57], [0.0, 0.0, 0.0, 1.0]], [[-0.9873916431638762, 0.1582866609993882, 0.001753841442502375, -25883.114317618132], [-0.15828984446212963, -0.9873909479738312, -0.0018549931249812036, 42442.85777875133], [0.001438106496578116, -0.002109219998877822, 0.9999967415150415, -208.57], [0.0, 0.0, 0.0, 1.0]], [[-0.987346305131371, 0.1585710456883948, 0.0015802571684935532, -25883.190419244052], [-0.15857402161892695, -0.9873451073613538, -0.0019795547976040667, 42442.84649693268], [0.0012463591094312608, -0.0022050938496196155, 0.999996792069897, -208.56900000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9872851848314489, 0.1589491420597156, 0.0017703250583382527, -25883.25997128304], [-0.1589524949852513, -0.9872843095087137, -0.0019484701347616262, 42442.835602057916], [0.001438106496578116, 
-0.002205093282094466, 0.9999965347006567, -208.56900000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9872845677368949, 0.1589487163008524, 0.0021184652189307936, -25883.322814461102], [-0.15895239563329047, -0.9872847877018042, -0.00169820347465412, 42442.825014071204], [0.0018216012216118254, -0.0020133452050180387, 0.9999963140982444, -208.57], [0.0, 0.0, 0.0, 1.0]], [[-0.9872683529877357, 0.15904337059326346, 0.0025309008707415236, -25883.381203718494], [-0.15904686331336765, -0.987270251684162, -0.0012431450720558233, 42442.816056993106], [0.0023009691572484854, -0.001629849632661883, 0.999996024557654, -208.571], [0.0, 0.0, 0.0, 1.0]], [[-0.9871587552122865, 0.1597089215771658, 0.0032637978472797717, -25883.43734511131], [-0.15970900519506684, -0.9871641087699002, 0.0002366772717267073, 42442.80786073335], [0.0032597035649443527, -0.0002876198664020766, 0.9999946457894068, -208.572], [0.0, 0.0, 0.0, 1.0]], [[-0.9870447893758111, 0.1603786846286207, 0.004610995863608914, -25883.497483489955], [-0.16037043405589274, -0.9870546469019813, 0.002109006858608524, 42442.80007714828], [0.004889544639877367, 0.0013422168224705043, 0.9999871453209868, -208.57500000000002], [0.0, 0.0, 0.0, 1.0]], [[-0.9870852213471125, 0.1601031854836307, 0.005453053833951075, -25883.560686321092], [-0.16008570513996057, -0.9870968895896388, 0.0035067894579063072, 42442.790180668766], [0.005944140641289175, 0.0025885440801009445, 0.9999789830949357, -208.576], [0.0, 0.0, 0.0, 1.0]], [[-0.9871130017382247, 0.15992001152690308, 0.005788930176363574, -25883.61972446902], [-0.15989577993674556, -0.9871239425209732, 0.004434147073558709, 42442.77961701849], [0.006423500429786138, 0.003451378722380286, 0.9999734129601362, -208.577], [0.0, 0.0, 0.0, 1.0]], [[-0.9870650829519706, 0.1602050506397989, 0.006071553880102253, -25883.68075384933], [-0.1601795334598148, -0.9870777222586965, 0.004481883664240965, 42442.77032343871], [0.006711115973933693, 0.003451372202934183, 0.9999715240707113, -208.577], 
[0.0, 0.0, 0.0, 1.0]]], "camera_to_world": {"0": [[[-0.8449083566665649, -0.5348591804504395, -0.007462343666702509, -25833.837890625], [0.5346195101737976, -0.8448260426521301, 0.021238291636109352, 42363.87890625], [-0.017663875594735146, 0.013954893685877323, 0.999746561050415, -206.04893493652344], [0.0, 0.0, 0.0, 1.0]], [[-0.8467321395874023, -0.5319120287895203, -0.010689521208405495, -25834.40625], [0.5315495133399963, -0.8466578125953674, 0.025015946477651596, 42364.23828125], [-0.022356649860739708, 0.015499796718358994, 0.9996299147605896, -206.05227661132812], [0.0, 0.0, 0.0, 1.0]], [[-0.8485366702079773, -0.5290072560310364, -0.011698140762746334, -25834.974609375], [0.5285927653312683, -0.8484594821929932, 0.02657496929168701, 42364.59375], [-0.023983748629689217, 0.01636628247797489, 0.9995783567428589, -206.058837890625], [0.0, 0.0, 0.0, 1.0]], [[-0.8503174781799316, -0.5261965990066528, -0.00879727117717266, -25835.5390625], [0.5258391499519348, -0.8501787185668945, 0.02625845931470394, 42364.9453125], [-0.021296365186572075, 0.017702078446745872, 0.9996165037155151, -206.06373596191406], [0.0, 0.0, 0.0, 1.0]], [[-0.8521384000778198, -0.5232855677604675, -0.005697860848158598, -25836.103515625], [0.5230122804641724, -0.8519636392593384, 0.024823758751153946, 42365.29296875], [-0.01784428395330906, 0.018173225224018097, 0.9996755719184875, -206.06739807128906], [0.0, 0.0, 0.0, 1.0]], [[-0.8537543416023254, -0.5206530690193176, -0.004893780220299959, -25836.671875], [0.5204083323478699, -0.853579044342041, 0.024041473865509033, 42365.63671875], [-0.01669449359178543, 0.01797875016927719, 0.9996989965438843, -206.06861877441406], [0.0, 0.0, 0.0, 1.0]], [[-0.8544237017631531, -0.5194949507713318, -0.009226350113749504, -25837.2578125], [0.519155740737915, -0.8543113470077515, 0.025089029222726822, 42365.98828125], [-0.0209158007055521, 0.016646748408675194, 0.9996426105499268, -206.07008361816406], [0.0, 0.0, 0.0, 1.0]], [[-0.8544949889183044, 
-0.5192520618438721, -0.014688280411064625, -25837.841796875], [0.5187599062919617, -0.8544698357582092, 0.02774171531200409, 42366.34375], [-0.026955632492899895, 0.016085464507341385, 0.9995071887969971, -206.07647705078125], [0.0, 0.0, 0.0, 1.0]], [[-0.8548482656478882, -0.5185527801513672, -0.018374428153038025, -25838.423828125], [0.5179919600486755, -0.8549216389656067, 0.028165357187390327, 42366.69921875], [-0.030313922092318535, 0.014559299685060978, 0.9994344115257263, -206.08567810058594], [0.0, 0.0, 0.0, 1.0]], [[-0.8552551865577698, -0.5179272890090942, -0.017023950815200806, -25839.0], [0.5175049304962158, -0.8553449511528015, 0.023953087627887726, 42367.04296875], [-0.026967309415340424, 0.011676025576889515, 0.9995681047439575, -206.09429931640625], [0.0, 0.0, 0.0, 1.0]], [[-0.8552415370941162, -0.5181118845939636, -0.011044571176171303, -25839.572265625], [0.5178030729293823, -0.8552080988883972, 0.02234060689806938, 42367.38671875], [-0.02102033980190754, 0.013387701474130154, 0.9996894001960754, -206.10110473632812], [0.0, 0.0, 0.0, 1.0]], [[-0.8554186820983887, -0.5178919434547424, -0.0068382383324205875, -25840.142578125], [0.51775723695755, -0.8553950190544128, 0.015061681158840656, 42367.734375], [-0.013649719767272472, 0.009343497455120087, 0.9998632073402405, -206.10157775878906], [0.0, 0.0, 0.0, 1.0]], [[-0.855485200881958, -0.5178234577178955, 0.0019746352918446064, -25840.701171875], [0.517753541469574, -0.8552929162979126, 0.020133839920163155, 42368.08984375], [-0.008736882358789444, 0.018246576189994812, 0.9997953176498413, -206.1004180908203], [0.0, 0.0, 0.0, 1.0]], [[-0.8551603555679321, -0.5183629989624023, -0.0007705028401687741, -25841.2734375], [0.5182088017463684, -0.8549405336380005, 0.023160606622695923, 42368.44921875], [-0.01266433484852314, 0.019406750798225403, 0.9997314810752869, -206.0974884033203], [0.0, 0.0, 0.0, 1.0]], [[-0.8541557788848877, -0.5198555588722229, -0.012966584414243698, -25841.859375], 
[0.5195067524909973, -0.8541565537452698, 0.023004401475191116, 42368.796875], [-0.023034458979964256, 0.012913113459944725, 0.9996512532234192, -206.0982208251953], [0.0, 0.0, 0.0, 1.0]], [[-0.8523397445678711, -0.5226513743400574, -0.018777500838041306, -25842.4375], [0.5223119854927063, -0.85251384973526, 0.020255891606211662, 42369.13671875], [-0.026594849303364754, 0.007457186467945576, 0.9996184706687927, -206.10459899902344], [0.0, 0.0, 0.0, 1.0]], [[-0.8505624532699585, -0.5253742933273315, -0.022922059521079063, -25843.005859375], [0.525165855884552, -0.8508710861206055, 0.014807076193392277, 42369.48046875], [-0.027282973751425743, 0.0005564593593589962, 0.9996275901794434, -206.1105499267578], [0.0, 0.0, 0.0, 1.0]], [[-0.8477014899253845, -0.5301284193992615, -0.019134078174829483, -25843.5546875], [0.5298581123352051, -0.8479039669036865, 0.017587309703230858, 42369.83984375], [-0.025547394528985023, 0.004770440980792046, 0.9996622204780579, -206.118896484375], [0.0, 0.0, 0.0, 1.0]], [[-0.8434470891952515, -0.5370142459869385, -0.014585265889763832, -25844.09765625], [0.5367578864097595, -0.8435395359992981, 0.01822417601943016, 42370.203125], [-0.022089889273047447, 0.007542372215539217, 0.9997275471687317, -206.12557983398438], [0.0, 0.0, 0.0, 1.0]], [[-0.8385101556777954, -0.5446609854698181, -0.01566006802022457, -25844.642578125], [0.544475793838501, -0.838647723197937, 0.014701876789331436, 42370.5625], [-0.021140819415450096, 0.0038011453580111265, 0.9997692704200745, -206.12901306152344], [0.0, 0.0, 0.0, 1.0]], [[-0.8341850638389587, -0.5510477423667908, -0.021949727088212967, -25845.19140625], [0.550905168056488, -0.8344723582267761, 0.012628699652850628, 42370.921875], [-0.0252754557877779, -0.0015575479483231902, 0.9996792674064636, -206.13238525390625], [0.0, 0.0, 0.0, 1.0]], [[-0.8297201991081238, -0.5575173497200012, -0.02718290686607361, -25845.732421875], [0.5574687123298645, -0.8301372528076172, 0.010040261782705784, 42371.28515625], 
[-0.028163162991404533, -0.006823010742664337, 0.9995800852775574, -206.139892578125], [0.0, 0.0, 0.0, 1.0]], [[-0.8250054717063904, -0.5645437836647034, -0.025620875880122185, -25846.259765625], [0.564403772354126, -0.8253950476646423, 0.013093211688101292, 42371.66015625], [-0.028539037331938744, -0.003658548928797245, 0.9995859861373901, -206.1484832763672], [0.0, 0.0, 0.0, 1.0]], [[-0.819695770740509, -0.5723041296005249, -0.023806588724255562, -25846.775390625], [0.5721069574356079, -0.8200404047966003, 0.01507514901459217, 42372.046875], [-0.028149936348199844, -0.0012628810945898294, 0.9996029138565063, -206.1578826904297], [0.0, 0.0, 0.0, 1.0]], [[-0.8132135272026062, -0.581457257270813, -0.024316933006048203, -25847.296875], [0.5812702775001526, -0.8135744333267212, 0.014886150136590004, 42372.4296875], [-0.028439296409487724, -0.0020290915854275227, 0.9995934963226318, -206.1663360595703], [0.0, 0.0, 0.0, 1.0]], [[-0.8068203330039978, -0.5901538729667664, -0.027555642649531364, -25847.8125], [0.5901250839233398, -0.8072494864463806, 0.010034685023128986, 42372.8046875], [-0.028166286647319794, -0.00816508661955595, 0.9995698928833008, -206.17291259765625], [0.0, 0.0, 0.0, 1.0]], [[-0.800969660282135, -0.5979848504066467, -0.029353827238082886, -25848.318359375], [0.5979864597320557, -0.8014476299285889, 0.009690036997199059, 42373.19140625], [-0.02932005003094673, -0.009791768155992031, 0.9995220899581909, -206.18173217773438], [0.0, 0.0, 0.0, 1.0]], [[-0.7939949631690979, -0.6072880029678345, -0.02780897170305252, -25848.8125], [0.6071938872337341, -0.7944525480270386, 0.01267836894840002, 42373.59375], [-0.02979232929646969, -0.006818879395723343, 0.9995328187942505, -206.1914520263672], [0.0, 0.0, 0.0, 1.0]], [[-0.7850572466850281, -0.6190191507339478, -0.022370634600520134, -25849.291015625], [0.6188592910766602, -0.7853715419769287, 0.014311080798506737, 42374.00390625], [-0.026428092271089554, -0.0026092585176229477, 0.999647319316864, 
-206.20018005371094], [0.0, 0.0, 0.0, 1.0]], [[-0.7752206325531006, -0.6315377950668335, -0.013891609385609627, -25849.755859375], [0.6314505934715271, -0.7753449082374573, 0.010518020018935204, 42374.41015625], [-0.017413314431905746, -0.000618078513070941, 0.9998481869697571, -206.2041778564453], [0.0, 0.0, 0.0, 1.0]], [[-0.7653115391731262, -0.6435787081718445, -0.010233653709292412, -25850.21484375], [0.6435286402702332, -0.7653799057006836, 0.008034056052565575, 42374.82421875], [-0.013003179803490639, -0.0004370941314846277, 0.999915361404419, -206.2043914794922], [0.0, 0.0, 0.0, 1.0]], [[-0.7555636167526245, -0.6549516320228577, -0.012728741392493248, -25850.677734375], [0.6548964381217957, -0.7556687593460083, 0.00868530385196209, 42375.2421875], [-0.015307165682315826, -0.0017737088492140174, 0.9998812675476074, -206.2029266357422], [0.0, 0.0, 0.0, 1.0]], [[-0.7456573843955994, -0.6660857200622559, -0.01802210696041584, -25851.142578125], [0.6660261154174805, -0.7458613514900208, 0.010005835443735123, 42375.671875], [-0.020106738433241844, -0.004542268812656403, 0.9997875094413757, -206.20433044433594], [0.0, 0.0, 0.0, 1.0]], [[-0.7352592349052429, -0.6775185465812683, -0.019043106585741043, -25851.58984375], [0.677409291267395, -0.7354970574378967, 0.012679288163781166, 42376.109375], [-0.02259659953415394, -0.0035774146672338247, 0.9997382760047913, -206.20819091796875], [0.0, 0.0, 0.0, 1.0]], [[-0.7244007587432861, -0.6891387104988098, -0.018206100910902023, -25852.021484375], [0.6890454292297363, -0.7246192097663879, 0.011978130787611008, 42376.546875], [-0.021447082981467247, -0.00386786344461143, 0.9997624754905701, -206.21240234375], [0.0, 0.0, 0.0, 1.0]], [[-0.7129982113838196, -0.7010093331336975, -0.014815893955528736, -25852.443359375], [0.7009174823760986, -0.7131488919258118, 0.011550829745829105, 42376.98828125], [-0.018663177266716957, -0.002148998901247978, 0.9998235106468201, -206.21510314941406], [0.0, 0.0, 0.0, 1.0]], 
[[-0.7010157704353333, -0.7130088210105896, -0.013975257985293865, -25852.853515625], [0.7129687666893005, -0.7011438012123108, 0.008539806120097637, 42377.43359375], [-0.01588762179017067, -0.0039773848839104176, 0.9998658895492554, -206.2158203125], [0.0, 0.0, 0.0, 1.0]], [[-0.6876766681671143, -0.7258488535881042, -0.015624204650521278, -25853.263671875], [0.7258409857749939, -0.6878246665000916, 0.007222483400255442, 42377.8828125], [-0.015989145264029503, -0.0063739558681845665, 0.9998518228530884, -206.21597290039062], [0.0, 0.0, 0.0, 1.0]], [[-0.6737317442893982, -0.7387521266937256, -0.01818586327135563, -25853.662109375], [0.738756537437439, -0.6739274263381958, 0.007784060202538967, 42378.34375], [-0.018006443977355957, -0.008190557360649109, 0.9998043179512024, -206.21609497070312], [0.0, 0.0, 0.0, 1.0]], [[-0.6596134305000305, -0.7513155341148376, -0.020858364179730415, -25854.046875], [0.7513532638549805, -0.6598567366600037, 0.007568711880594492, 42378.8125], [-0.019450021907687187, -0.010679580271244049, 0.9997537732124329, -206.2173614501953], [0.0, 0.0, 0.0, 1.0]], [[-0.6443963646888733, -0.7643618583679199, -0.022458015009760857, -25854.421875], [0.764411449432373, -0.6446774005889893, 0.00813852995634079, 42379.28125], [-0.02069895714521408, -0.011922726407647133, 0.9997146725654602, -206.22030639648438], [0.0, 0.0, 0.0, 1.0]], [[-0.6279848217964172, -0.7779192328453064, -0.021833982318639755, -25854.77734375], [0.7779242992401123, -0.6282753348350525, 0.010202446952462196, 42379.76171875], [-0.02165442891418934, -0.01057820301502943, 0.9997095465660095, -206.22377014160156], [0.0, 0.0, 0.0, 1.0]], [[-0.6107721924781799, -0.7916426062583923, -0.01610397733747959, -25855.111328125], [0.7915907502174377, -0.6109541654586792, 0.010916567407548428, 42380.24609375], [-0.018480811268091202, -0.006080223713070154, 0.9998107552528381, -206.22682189941406], [0.0, 0.0, 0.0, 1.0]], [[-0.5916749238967896, -0.8061432242393494, -0.00733858672901988, 
-25855.455078125], [0.806097686290741, -0.591719925403595, 0.008604087866842747, 42380.78125], [-0.011278515681624413, -0.000824794580694288, 0.999936044216156, -206.22775268554688], [0.0, 0.0, 0.0, 1.0]], [[-0.5732331275939941, -0.8193821310997009, -0.004098220728337765, -25855.76171875], [0.81937175989151, -0.5732470154762268, 0.004228639416396618, 42381.2734375], [-0.005814164411276579, -0.0009339707321487367, 0.9999826550483704, -206.22340393066406], [0.0, 0.0, 0.0, 1.0]], [[-0.5536569356918335, -0.8327358961105347, -0.0038753284607082605, -25856.06640625], [0.8327389359474182, -0.5536645650863647, 0.0011864944826811552, 42381.765625], [-0.0031336687970906496, -0.002570226788520813, 0.9999918341636658, -206.21633911132812], [0.0, 0.0, 0.0, 1.0]], [[-0.534777045249939, -0.8449881672859192, -0.002919065300375223, -25856.3515625], [0.844982385635376, -0.5347844362258911, 0.0032174072694033384, 42382.26953125], [-0.0042797415517270565, -0.00074596400372684, 0.9999905228614807, -206.20909118652344], [0.0, 0.0, 0.0, 1.0]], [[-0.516168475151062, -0.8564158082008362, -0.01105003897100687, -25856.634765625], [0.8563699722290039, -0.516268253326416, 0.009876301512122154, 42382.78515625], [-0.014163006097078323, -0.004365087021142244, 0.9998902082443237, -206.20416259765625], [0.0, 0.0, 0.0, 1.0]], [[-0.49684619903564453, -0.8677327036857605, -0.013557493686676025, -25856.90625], [0.867481529712677, -0.49702927470207214, 0.020920205861330032, 42383.3046875], [-0.02489161677658558, -0.0013667527819052339, 0.9996891617774963, -206.20779418945312], [0.0, 0.0, 0.0, 1.0]], [[-0.47758910059928894, -0.8785465955734253, -0.008037865161895752, -25857.1484375], [0.8782256841659546, -0.4776357114315033, 0.024164604023098946, 42383.828125], [-0.025068901479244232, 0.004481691401451826, 0.9996756315231323, -206.21514892578125], [0.0, 0.0, 0.0, 1.0]], [[-0.4582332372665405, -0.888811469078064, -0.00604152912274003, -25857.376953125], [0.8886071443557739, -0.458259642124176, 
0.01937723532319069, 42384.33984375], [-0.019991297274827957, 0.0035107468720525503, 0.9997939467430115, -206.21922302246094], [0.0, 0.0, 0.0, 1.0]], [[-0.43841472268104553, -0.8987703323364258, -0.0021016548853367567, -25857.6015625], [0.8986523151397705, -0.4383930265903473, 0.015348020009696484, 42384.8515625], [-0.01471569575369358, 0.004840140230953693, 0.9998799562454224, -206.22010803222656], [0.0, 0.0, 0.0, 1.0]], [[-0.41879773139953613, -0.9080686569213867, -0.004448609426617622, -25857.818359375], [0.9080165028572083, -0.4188213646411896, 0.009732785634696484, 42385.37109375], [-0.010701209306716919, 3.665783879114315e-05, 0.9999427795410156, -206.21786499023438], [0.0, 0.0, 0.0, 1.0]], [[-0.3985494077205658, -0.9171257019042969, -0.006234403699636459, -25858.029296875], [0.9171158075332642, -0.39858266711235046, 0.005518494639545679, 42385.8984375], [-0.007546079345047474, -0.0035182777792215347, 0.9999653697013855, -206.21302795410156], [0.0, 0.0, 0.0, 1.0]], [[-0.3784773051738739, -0.92557692527771, -0.007888964377343655, -25858.228515625], [0.9255497455596924, -0.3785364329814911, 0.008242689073085785, 42386.44140625], [-0.010615503415465355, -0.004181958734989166, 0.9999349117279053, -206.209716796875], [0.0, 0.0, 0.0, 1.0]], [[-0.35881882905960083, -0.9333362579345703, -0.011508817784488201, -25858.41796875], [0.933270275592804, -0.3589501678943634, 0.012704535387456417, 42387.0], [-0.01598869450390339, -0.006182211916893721, 0.9998530149459839, -206.20697021484375], [0.0, 0.0, 0.0, 1.0]], [[-0.33842942118644714, -0.9409332275390625, -0.010500396601855755, -25858.59765625], [0.9407559633255005, -0.3385733664035797, 0.0186068844050169, 42387.57421875], [-0.02106298878788948, -0.0035811953712254763, 0.9997717142105103, -206.20980834960938], [0.0, 0.0, 0.0, 1.0]], [[-0.3180603086948395, -0.9480205774307251, -0.009732028469443321, -25858.76171875], [0.947796642780304, -0.31819841265678406, 0.020768841728568077, 42388.15234375], [-0.022786004468798637, 
-0.0026182401925325394, 0.9997369050979614, -206.21449279785156], [0.0, 0.0, 0.0, 1.0]], [[-0.2981879711151123, -0.9544640779495239, -0.009068909101188183, -25858.919921875], [0.9542531967163086, -0.29831433296203613, 0.020235253497958183, 42388.734375], [-0.02201920747756958, -0.0026201263535767794, 0.9997541308403015, -206.2183074951172], [0.0, 0.0, 0.0, 1.0]], [[-0.27984368801116943, -0.9600104093551636, -0.008218787610530853, -25859.0703125], [0.9597930312156677, -0.279956191778183, 0.020538482815027237, 42389.32421875], [-0.022018056362867355, -0.0021407718304544687, 0.9997552633285522, -206.22230529785156], [0.0, 0.0, 0.0, 1.0]], [[-0.2640937864780426, -0.9644772410392761, -0.006179652642458677, -25859.2109375], [0.9642071723937988, -0.26416608691215515, 0.02282032184302807, 42389.9296875], [-0.02364213392138481, 6.823967123636976e-05, 0.9997204542160034, -206.2278594970703], [0.0, 0.0, 0.0, 1.0]], [[-0.250769704580307, -0.9680365324020386, -0.004468920640647411, -25859.34765625], [0.9676703810691833, -0.2507982850074768, 0.026724383234977722, 42390.55078125], [-0.026990976184606552, 0.0023772232234477997, 0.9996328353881836, -206.23410034179688], [0.0, 0.0, 0.0, 1.0]], [[-0.23960985243320465, -0.9708570241928101, -0.004883428104221821, -25859.48828125], [0.970496654510498, -0.23965418338775635, 0.026498954743146896, 42391.171875], [-0.02689702995121479, 0.0016100594075396657, 0.9996368885040283, -206.24195861816406], [0.0, 0.0, 0.0, 1.0]], [[-0.2305305004119873, -0.9730300903320312, -0.008259089663624763, -25859.6328125], [0.9728180766105652, -0.23065491020679474, 0.020574869588017464, 42391.7890625], [-0.02192496508359909, -0.003291457425802946, 0.9997541904449463, -206.24716186523438], [0.0, 0.0, 0.0, 1.0]], [[-0.22128477692604065, -0.9751718640327454, -0.008539018221199512, -25859.771484375], [0.9750782251358032, -0.22139009833335876, 0.01445463951677084, 42392.41796875], [-0.015986211597919464, -0.005127619486302137, 0.9998590350151062, 
-206.2489776611328], [0.0, 0.0, 0.0, 1.0]], [[-0.21115171909332275, -0.9774017333984375, -0.010044438764452934, -25859.90234375], [0.9773588180541992, -0.21126309037208557, 0.011738050729036331, 42393.05859375], [-0.013594809919595718, -0.007338511757552624, 0.9998806715011597, -206.248291015625], [0.0, 0.0, 0.0, 1.0]], [[-0.20044955611228943, -0.9796490669250488, -0.010382231324911118, -25860.025390625], [0.9796285629272461, -0.20055440068244934, 0.01028573140501976, 42393.71484375], [-0.012158608995378017, -0.008108961395919323, 0.9998931884765625, -206.24609375], [0.0, 0.0, 0.0, 1.0]], [[-0.19045980274677277, -0.9815266728401184, -0.01818077825009823, -25860.158203125], [0.9815782308578491, -0.19069021940231323, 0.011900268495082855, 42394.38671875], [-0.01514732651412487, -0.015579331666231155, 0.9997639060020447, -206.2427978515625], [0.0, 0.0, 0.0, 1.0]], [[-0.18176445364952087, -0.9831394553184509, -0.01996420882642269, -25860.28515625], [0.983151376247406, -0.182091623544693, 0.016002152115106583, 42395.06640625], [-0.019367661327123642, -0.016719216480851173, 0.9996726512908936, -206.2443389892578], [0.0, 0.0, 0.0, 1.0]], [[-0.17575646936893463, -0.9842687249183655, -0.018022233620285988, -25860.39453125], [0.9842487573623657, -0.17604878544807434, 0.016157466918230057, 42395.75], [-0.01907608099281788, -0.01489858329296112, 0.999707043170929, -206.24783325195312], [0.0, 0.0, 0.0, 1.0]], [[-0.1714969426393509, -0.985105037689209, -0.012529573403298855, -25860.494140625], [0.9850713014602661, -0.17165623605251312, 0.012985968962311745, 42396.44140625], [-0.014943323098123074, -0.010115469805896282, 0.9998371601104736, -206.25038146972656], [0.0, 0.0, 0.0, 1.0]], [[-0.16788426041603088, -0.9857770800590515, -0.007639489136636257, -25860.603515625], [0.9857409596443176, -0.16795752942562103, 0.010248959064483643, 42397.13671875], [-0.011386299505829811, -0.00580991804599762, 0.9999182820320129, -206.24990844726562], [0.0, 0.0, 0.0, 1.0]], 
[[-0.16603000462055206, -0.9860545992851257, -0.011420238763093948, -25860.724609375], [0.9860723614692688, -0.16612543165683746, 0.007980208843946457, 42397.84375], [-0.009766113013029099, -0.009936229325830936, 0.9999029636383057, -206.24545288085938], [0.0, 0.0, 0.0, 1.0]], [[-0.16589787602424622, -0.9860426783561707, -0.014064355753362179, -25860.849609375], [0.9860464334487915, -0.1660642921924591, 0.011623752303421497, 42398.5625], [-0.013797101564705372, -0.011939752846956253, 0.9998335242271423, -206.24264526367188], [0.0, 0.0, 0.0, 1.0]], [[-0.1655033379793167, -0.9860937595367432, -0.015094975009560585, -25860.97265625], [0.98603755235672, -0.1657407134771347, 0.01612141914665699, 42399.29296875], [-0.018399082124233246, -0.012216064147651196, 0.9997560977935791, -206.24375915527344], [0.0, 0.0, 0.0, 1.0]], [[-0.16502858698368073, -0.9861727952957153, -0.01513126865029335, -25861.095703125], [0.9860775470733643, -0.16529130935668945, 0.018161453306674957, 42400.02734375], [-0.020411398261785507, -0.01192344818264246, 0.9997205138206482, -206.24685668945312], [0.0, 0.0, 0.0, 1.0]], [[-0.16545405983924866, -0.9861433506011963, -0.012099132873117924, -25861.212890625], [0.9860531091690063, -0.16563807427883148, 0.016229692846536636, 42400.765625], [-0.018008878454566002, -0.009245119988918304, 0.9997950792312622, -206.25111389160156], [0.0, 0.0, 0.0, 1.0]], [[-0.16612638533115387, -0.9860382676124573, -0.011433003470301628, -25861.3359375], [0.9859619736671448, -0.16628828644752502, 0.015070848166942596, 42401.5078125], [-0.01676160655915737, -0.008768841624259949, 0.9998210668563843, -206.25218200683594], [0.0, 0.0, 0.0, 1.0]], [[-0.1664905846118927, -0.9859665036201477, -0.012289425358176231, -25861.46484375], [0.9858587384223938, -0.1666874885559082, 0.01725699007511139, 42402.265625], [-0.019063306972384453, -0.009242511354386806, 0.9997755885124207, -206.25473022460938], [0.0, 0.0, 0.0, 1.0]], [[-0.16663438081741333, -0.9859046339988708, 
-0.01500333845615387, -25861.599609375], [0.9857391715049744, -0.16693015396595, 0.02127242460846901, 42403.03125], [-0.023477092385292053, -0.01124466210603714, 0.9996611475944519, -206.25860595703125], [0.0, 0.0, 0.0, 1.0]], [[-0.1668984293937683, -0.9858427047729492, -0.01609734445810318, -25861.732421875], [0.985644519329071, -0.16724249720573425, 0.023126821964979172, 42403.8046875], [-0.025491569191217422, -0.012006430886685848, 0.9996029138565063, -206.2667694091797], [0.0, 0.0, 0.0, 1.0]], [[-0.16736328601837158, -0.9857524037361145, -0.016784649342298508, -25861.86328125], [0.9856064915657043, -0.1677020937204361, 0.021353794261813164, 42404.578125], [-0.023864373564720154, -0.012969216331839561, 0.9996311068534851, -206.2742462158203], [0.0, 0.0, 0.0, 1.0]], [[-0.1676531285047531, -0.9857050180435181, -0.016675997525453568, -25861.99609375], [0.9856483936309814, -0.16793428361415863, 0.017189333215355873, 42405.35546875], [-0.019744083285331726, -0.013554822653532028, 0.9997131824493408, -206.27984619140625], [0.0, 0.0, 0.0, 1.0]], [[-0.16793914139270782, -0.9856571555137634, -0.01662764884531498, -25862.1328125], [0.9856765866279602, -0.16815871000289917, 0.012819726951420307, 42406.1328125], [-0.015431939624249935, -0.014236549846827984, 0.9997795820236206, -206.2812042236328], [0.0, 0.0, 0.0, 1.0]], [[-0.16772837936878204, -0.9856675863265991, -0.018074173480272293, -25862.267578125], [0.985718309879303, -0.16796058416366577, 0.0121908625587821, 42406.91796875], [-0.015051887370646, -0.01577129028737545, 0.9997623562812805, -206.28065490722656], [0.0, 0.0, 0.0, 1.0]], [[-0.1670512706041336, -0.985763430595398, -0.01909070461988449, -25862.404296875], [0.9858167171478271, -0.16731400787830353, 0.013100630603730679, 42407.703125], [-0.016108263283967972, -0.016631457954645157, 0.9997318983078003, -206.28131103515625], [0.0, 0.0, 0.0, 1.0]], [[-0.16562843322753906, -0.9859976172447205, -0.019386325031518936, -25862.537109375], [0.9860438108444214, 
-0.1659085899591446, 0.013853185810148716, 42408.48828125], [-0.01687556691467762, -0.01682128757238388, 0.9997161030769348, -206.28250122070312], [0.0, 0.0, 0.0, 1.0]], [[-0.16460655629634857, -0.9861915111541748, -0.01819237694144249, -25862.6640625], [0.9862385392189026, -0.16484704613685608, 0.012610724195837975, 42409.26953125], [-0.015435547567903996, -0.015866216272115707, 0.9997549653053284, -206.28323364257812], [0.0, 0.0, 0.0, 1.0]], [[-0.16415587067604065, -0.9862931370735168, -0.016694189980626106, -25862.7890625], [0.9863457679748535, -0.16434454917907715, 0.010630249045789242, 42410.046875], [-0.013228139840066433, -0.014721226878464222, 0.9998040795326233, -206.28282165527344], [0.0, 0.0, 0.0, 1.0]], [[-0.16407454013824463, -0.9864020347595215, -0.009520037099719048, -25862.90625], [0.986365795135498, -0.1641777902841568, 0.011323468759655952, 42410.82421875], [-0.012732469476759434, -0.007532346528023481, 0.9998905658721924, -206.282958984375], [0.0, 0.0, 0.0, 1.0]], [[-0.1640232801437378, -0.9864341020584106, -0.0066483584232628345, -25863.0234375], [0.9863693714141846, -0.16409464180469513, 0.012183266691863537, 42411.6015625], [-0.013108949176967144, -0.004559398163110018, 0.999903678894043, -206.2815399169922], [0.0, 0.0, 0.0, 1.0]], [[-0.16349193453788757, -0.9865355491638184, -0.004254861269146204, -25863.146484375], [0.9864346981048584, -0.16353657841682434, 0.014227966777980328, 42412.37890625], [-0.014732220210134983, -0.0018709857249632478, 0.9998896718025208, -206.2810516357422], [0.0, 0.0, 0.0, 1.0]], [[-0.1628415584564209, -0.9866462349891663, -0.0034482835326343775, -25863.271484375], [0.9865085482597351, -0.16287650167942047, 0.016497043892741203, 42413.1484375], [-0.01683839038014412, -0.0007153564947657287, 0.9998579621315002, -206.2823028564453], [0.0, 0.0, 0.0, 1.0]], [[-0.16258253157138824, -0.9866933822631836, -0.0017838568892329931, -25863.390625], [0.9865362644195557, -0.16258853673934937, 0.01764138601720333, 42413.921875], 
[-0.017696673050522804, 0.00110834080260247, 0.9998427629470825, -206.28463745117188], [0.0, 0.0, 0.0, 1.0]], [[-0.16194456815719604, -0.9867998957633972, -0.0001450374984415248, -25863.51171875], [0.9866325855255127, -0.16191981732845306, 0.01839098148047924, 42414.6875], [-0.018171701580286026, 0.002835219958797097, 0.9998308420181274, -206.2873992919922], [0.0, 0.0, 0.0, 1.0]], [[-0.16084431111812592, -0.9869773387908936, 0.00223080744035542, -25863.62890625], [0.9868000745773315, -0.1607714742422104, 0.01944838836789131, 42415.453125], [-0.01883646659553051, 0.005329522769898176, 0.9998083710670471, -206.29046630859375], [0.0, 0.0, 0.0, 1.0]], [[-0.16008946299552917, -0.9870993494987488, 0.002515532774850726, -25863.75], [0.9868720769882202, -0.15999650955200195, 0.02201344445347786, 42416.21875], [-0.021326979622244835, 0.00600662874057889, 0.9997544884681702, -206.29434204101562], [0.0, 0.0, 0.0, 1.0]], [[-0.15971916913986206, -0.9871573448181152, 0.003199369180947542, -25863.869140625], [0.9868934154510498, -0.15959912538528442, 0.023866979405283928, 42416.98046875], [-0.023049848154187202, 0.0069694495759904385, 0.9997100234031677, -206.300048828125], [0.0, 0.0, 0.0, 1.0]], [[-0.15917173027992249, -0.9872404336929321, 0.004553594626486301, -25863.98828125], [0.9869840741157532, -0.15901979804039001, 0.02398066408932209, 42417.734375], [-0.022950569167733192, 0.008311368525028229, 0.9997020363807678, -206.30694580078125], [0.0, 0.0, 0.0, 1.0]], [[-0.15860456228256226, -0.9873315095901489, 0.004597315099090338, -25864.10546875], [0.9870798587799072, -0.15845337510108948, 0.023788699880242348, 42418.4921875], [-0.02275887131690979, 0.008310913108289242, 0.9997063875198364, -206.31263732910156], [0.0, 0.0, 0.0, 1.0]], [[-0.1581277847290039, -0.9874097108840942, 0.004209202714264393, -25864.224609375], [0.9872066974639893, -0.1580040603876114, 0.021394846960902214, 42419.2421875], [-0.02046041004359722, 0.007538472302258015, 0.999762237071991, 
-206.31805419921875], [0.0, 0.0, 0.0, 1.0]], [[-0.1578391194343567, -0.9874575138092041, 0.003820518497377634, -25864.34375], [0.9872871041297913, -0.15773609280586243, 0.019585005939006805, 42419.98828125], [-0.01873672753572464, 0.006863228045403957, 0.9998009204864502, -206.3213653564453], [0.0, 0.0, 0.0, 1.0]], [[-0.15753819048404694, -0.9875089526176453, 0.002786120632663369, -25864.4609375], [0.9872989058494568, -0.15744468569755554, 0.02126418799161911, 42420.73828125], [-0.020559918135404587, 0.0061006550677120686, 0.9997700452804565, -206.32415771484375], [0.0, 0.0, 0.0, 1.0]], [[-0.15715114772319794, -0.987571656703949, 0.0024087706115096807, -25864.578125], [0.9873054623603821, -0.15705092251300812, 0.023725200444459915, 42421.48046875], [-0.02305203676223755, 0.0061066341586411, 0.9997156262397766, -206.32904052734375], [0.0, 0.0, 0.0, 1.0]], [[-0.15658117830753326, -0.9876622557640076, 0.0023694129195064306, -25864.6953125], [0.9873731732368469, -0.15647614002227783, 0.024685584008693695, 42422.22265625], [-0.02401026152074337, 0.006204791832715273, 0.999692440032959, -206.33653259277344], [0.0, 0.0, 0.0, 1.0]], [[-0.15618056058883667, -0.9877282977104187, 0.0007644101860933006, -25864.810546875], [0.9874791502952576, -0.15612368285655975, 0.022590549662709236, 42422.94921875], [-0.02219398133456707, 0.004283043555915356, 0.9997444748878479, -206.34165954589844], [0.0, 0.0, 0.0, 1.0]], [[-0.15568611025810242, -0.9878063201904297, -0.0007454975275322795, -25864.927734375], [0.9875964522361755, -0.15566851198673248, 0.02051139436662197, 42423.671875], [-0.02037733420729637, 0.0024570876266807318, 0.9997893571853638, -206.34580993652344], [0.0, 0.0, 0.0, 1.0]], [[-0.1554921269416809, -0.9878368377685547, -0.0007961216033436358, -25865.041015625], [0.9875497817993164, -0.15546627342700958, 0.02399482950568199, 42424.39453125], [-0.02382674627006054, 0.002944797044619918, 0.9997117519378662, -206.35118103027344], [0.0, 0.0, 0.0, 1.0]], 
[[-0.15566308796405792, -0.9878095388412476, -0.001180538791231811, -25865.1484375], [0.987335205078125, -0.15562519431114197, 0.03082156553864479, 42425.109375], [-0.030629558488726616, 0.0036321922671049833, 0.9995241761207581, -206.35984802246094], [0.0, 0.0, 0.0, 1.0]], [[-0.15525782108306885, -0.9878713488578796, -0.0022938561160117388, -25865.2578125], [0.9872792959213257, -0.15524451434612274, 0.03433256223797798, 42425.81640625], [-0.03427226096391678, 0.003065721830353141, 0.9994077682495117, -206.37255859375], [0.0, 0.0, 0.0, 1.0]], [[-0.15393194556236267, -0.9880788922309875, -0.0022622921969741583, -25865.3671875], [0.9874836206436157, -0.1539180725812912, 0.03443032130599022, 42426.50390625], [-0.03436807915568352, 0.0030659500043839216, 0.9994044899940491, -206.38671875], [0.0, 0.0, 0.0, 1.0]], [[-0.15267328917980194, -0.988265335559845, -0.004756290465593338, -25865.47265625], [0.98780757188797, -0.15274731814861298, 0.030073512345552444, 42427.17578125], [-0.03044711984694004, -0.00010687897156458348, 0.9995363354682922, -206.39749145507812], [0.0, 0.0, 0.0, 1.0]], [[-0.15111547708511353, -0.9885137677192688, -0.002177829621359706, -25865.5703125], [0.9882774353027344, -0.15112704038619995, 0.021642033010721207, 42427.8359375], [-0.021722575649619102, 0.001118146232329309, 0.9997633695602417, -206.40487670898438], [0.0, 0.0, 0.0, 1.0]], [[-0.15035951137542725, -0.9886289238929749, -0.002234424464404583, -25865.666015625], [0.9884944558143616, -0.15037591755390167, 0.01630198210477829, 42428.484375], [-0.016452614217996597, 0.00024244195083156228, 0.9998646378517151, -206.40771484375], [0.0, 0.0, 0.0, 1.0]], [[-0.15027521550655365, -0.9886430501937866, -0.0015408479375764728, -25865.759765625], [0.9885105490684509, -0.15028032660484314, 0.01621180772781372, 42429.1328125], [-0.016259249299764633, 0.0009130883263424039, 0.9998673796653748, -206.40843200683594], [0.0, 0.0, 0.0, 1.0]], [[-0.1498965173959732, -0.9887005686759949, -0.001476399484090507, 
-25865.859375], [0.9885416030883789, -0.149898961186409, 0.017771795392036438, 42429.78125], [-0.017792295664548874, 0.0012044472387060523, 0.9998409748077393, -206.4097900390625], [0.0, 0.0, 0.0, 1.0]], [[-0.14941073954105377, -0.9887729287147522, -0.0021469269413501024, -25865.955078125], [0.9885631799697876, -0.149423286318779, 0.020385032519698143, 42430.41796875], [-0.020476968958973885, 0.0009233688469976187, 0.9997898936271667, -206.4119415283203], [0.0, 0.0, 0.0, 1.0]], [[-0.14988435804843903, -0.9887012243270874, -0.0021467097103595734, -25866.05078125], [0.9884793758392334, -0.14989623427391052, 0.020966967567801476, 42431.046875], [-0.02105185016989708, 0.0010206411825492978, 0.9997778534889221, -206.41583251953125], [0.0, 0.0, 0.0, 1.0]], [[-0.1507176011800766, -0.9885706305503845, -0.0035015216562896967, -25866.14453125], [0.9883647561073303, -0.15075768530368805, 0.02018304541707039, 42431.66796875], [-0.02048024907708168, -0.0004188403836451471, 0.9997901320457458, -206.41893005371094], [0.0, 0.0, 0.0, 1.0]], [[-0.15120868384838104, -0.9884991645812988, -0.0023359120823442936, -25866.23828125], [0.9882817268371582, -0.15122444927692413, 0.02074596658349037, 42432.2734375], [-0.020860616117715836, 0.0008284306386485696, 0.9997820258140564, -206.42254638671875], [0.0, 0.0, 0.0, 1.0]], [[-0.15346986055374146, -0.9881482720375061, -0.0031628378201276064, -25866.33203125], [0.9879165887832642, -0.15350235998630524, 0.021397164091467857, 42432.87109375], [-0.021629072725772858, 0.00015919945144560188, 0.9997659921646118, -206.42672729492188], [0.0, 0.0, 0.0, 1.0]], [[-0.15858574211597443, -0.9873402118682861, 0.0031524975784122944, -25866.421875], [0.9871448874473572, -0.1584884375333786, 0.0206492617726326, 42433.4609375], [-0.01988821104168892, 0.006386650260537863, 0.9997817873954773, -206.43113708496094], [0.0, 0.0, 0.0, 1.0]], [[-0.16803418099880219, -0.9857432246208191, 0.008645763620734215, -25866.51171875], [0.9856051802635193, 
-0.16783201694488525, 0.020369190722703934, 42434.03515625], [-0.01862775720655918, 0.011944029480218887, 0.9997551441192627, -206.4353485107422], [0.0, 0.0, 0.0, 1.0]], [[-0.18178047239780426, -0.9832608103752136, 0.012417115271091461, -25866.619140625], [0.9831822514533997, -0.18151144683361053, 0.02015412412583828, 42434.59765625], [-0.01756291091442108, 0.015871914103627205, 0.9997197389602661, -206.43785095214844], [0.0, 0.0, 0.0, 1.0]], [[-0.19953553378582, -0.9797546863555908, 0.016319477930665016, -25866.744140625], [0.9797612428665161, -0.19921091198921204, 0.019569596275687218, 42435.14453125], [-0.015922386199235916, 0.01989402063190937, 0.9996753334999084, -206.43951416015625], [0.0, 0.0, 0.0, 1.0]], [[-0.22061173617839813, -0.9752434492111206, 0.015189878642559052, -25866.884765625], [0.9752731323242188, -0.22035503387451172, 0.016911858692765236, 42435.67578125], [-0.013146012090146542, 0.018545234575867653, 0.9997415542602539, -206.4381866455078], [0.0, 0.0, 0.0, 1.0]], [[-0.24443267285823822, -0.9694991707801819, 0.01800127699971199, -25867.04296875], [0.9696130156517029, -0.24418248236179352, 0.015020795166492462, 42436.19921875], [-0.010167052038013935, 0.02112584374845028, 0.9997251033782959, -206.436767578125], [0.0, 0.0, 0.0, 1.0]], [[-0.27159398794174194, -0.9622137546539307, 0.01953057013452053, -25867.2109375], [0.9623865485191345, -0.27138328552246094, 0.0127834752202034, 42436.70703125], [-0.0070001655258238316, 0.022267872467637062, 0.9997275471687317, -206.4329833984375], [0.0, 0.0, 0.0, 1.0]], [[-0.3008219599723816, -0.9534661173820496, 0.02021239884197712, -25867.400390625], [0.9536494612693787, -0.30057424306869507, 0.014413570985198021, 42437.21484375], [-0.00766752427443862, 0.023611463606357574, 0.9996917843818665, -206.4290771484375], [0.0, 0.0, 0.0, 1.0]], [[-0.3327157497406006, -0.9427210092544556, 0.024028323590755463, -25867.60546875], [0.9429776668548584, -0.3323290944099426, 0.018723631277680397, 42437.70703125], 
[-0.00966584961861372, 0.02888781949877739, 0.9995359182357788, -206.4254913330078], [0.0, 0.0, 0.0, 1.0]], [[-0.36701491475105286, -0.9299376010894775, 0.02272142469882965, -25867.830078125], [0.9301368594169617, -0.3665578067302704, 0.02192666567862034, 42438.19140625], [-0.012061716057360172, 0.029181450605392456, 0.9995013475418091, -206.42318725585938], [0.0, 0.0, 0.0, 1.0]], [[-0.4018588364124298, -0.9154742956161499, 0.020402057096362114, -25868.080078125], [0.9156076312065125, -0.401398628950119, 0.023276345804333687, 42438.65625], [-0.013119539245963097, 0.028034085407853127, 0.9995208382606506, -206.4207000732422], [0.0, 0.0, 0.0, 1.0]], [[-0.43705496191978455, -0.8992836475372314, 0.016489559784531593, -25868.3515625], [0.8993122577667236, -0.4366191625595093, 0.02452155575156212, 42439.10546875], [-0.014852175489068031, 0.02554653026163578, 0.9995632767677307, -206.418212890625], [0.0, 0.0, 0.0, 1.0]], [[-0.47272899746894836, -0.881155788898468, 0.009577636606991291, -25868.64453125], [0.8810330629348755, -0.4723908305168152, 0.025052711367607117, 42439.53515625], [-0.017550954595208168, 0.020281357690691948, 0.9996402859687805, -206.41705322265625], [0.0, 0.0, 0.0, 1.0]], [[-0.5080769658088684, -0.8612586259841919, 0.009563389234244823, -25868.9453125], [0.8611592054367065, -0.5077467560768127, 0.024456817656755447, 42439.94921875], [-0.0162078645080328, 0.020661547780036926, 0.9996551275253296, -206.41700744628906], [0.0, 0.0, 0.0, 1.0]], [[-0.5437870621681213, -0.8391008377075195, 0.01433452870696783, -25869.248046875], [0.8391144871711731, -0.5433623790740967, 0.025382068008184433, 42440.3515625], [-0.013509269803762436, 0.025830751284956932, 0.9995750188827515, -206.4171600341797], [0.0, 0.0, 0.0, 1.0]], [[-0.5803886651992798, -0.8141590356826782, 0.01714775525033474, -25869.5703125], [0.8142459988594055, -0.5798740386962891, 0.027379069477319717, 42440.73828125], [-0.012347379699349403, 0.02985299378633499, 0.9994780421257019, 
-206.41665649414062], [0.0, 0.0, 0.0, 1.0]], [[-0.6125295162200928, -0.7903079390525818, 0.014867433346807957, -25869.8828125], [0.7903189659118652, -0.6119820475578308, 0.029562249779701233, 42441.0703125], [-0.01426467765122652, 0.02985776588320732, 0.9994523525238037, -206.4145965576172], [0.0, 0.0, 0.0, 1.0]], [[-0.6470065712928772, -0.7624257206916809, 0.009459586814045906, -25870.251953125], [0.7622829079627991, -0.6465004682540894, 0.031015492975711823, 42441.41015625], [-0.017531385645270348, 0.027278108522295952, 0.9994741678237915, -206.4134521484375], [0.0, 0.0, 0.0, 1.0]], [[-0.6811943650245667, -0.732102632522583, 0.00023505184799432755, -25870.638671875], [0.7318176031112671, -0.6809200048446655, 0.028123535215854645, 42441.734375], [-0.02042926289141178, 0.019329609349370003, 0.9996044039726257, -206.41246032714844], [0.0, 0.0, 0.0, 1.0]], [[-0.715248703956604, -0.6988389492034912, -0.00658533675596118, -25871.037109375], [0.6984776854515076, -0.7151312232017517, 0.02676461637020111, 42442.03125], [-0.023413537070155144, 0.01454364787787199, 0.9996200203895569, -206.41387939453125], [0.0, 0.0, 0.0, 1.0]], [[-0.7486567497253418, -0.6629149317741394, -0.007546988781541586, -25871.4375], [0.6624993085861206, -0.7485136985778809, 0.028669416904449463, 42442.3125], [-0.024654407054185867, 0.01646367460489273, 0.9995604157447815, -206.4178924560547], [0.0, 0.0, 0.0, 1.0]], [[-0.7804580330848694, -0.6251729130744934, -0.0066420515067875385, -25871.84375], [0.6246317625045776, -0.7801505327224731, 0.034645453095436096, 42442.5859375], [-0.02684120088815689, 0.0228904839605093, 0.9993776082992554, -206.4246368408203], [0.0, 0.0, 0.0, 1.0]], [[-0.8100190162658691, -0.5863873958587646, -0.0043657696805894375, -25872.259765625], [0.5856176018714905, -0.8092960715293884, 0.045738667249679565, 42442.84375], [-0.030353780835866928, 0.03449251502752304, 0.9989439249038696, -206.43199157714844], [0.0, 0.0, 0.0, 1.0]], [[-0.8382025957107544, -0.5453376173973083, 
-0.004826132673770189, -25872.68359375], [0.5444269776344299, -0.8372544646263123, 0.05103141441941261, 42443.0703125], [-0.03187005594372749, 0.04014718905091286, 0.998685359954834, -206.43988037109375], [0.0, 0.0, 0.0, 1.0]], [[-0.8644755482673645, -0.5026299953460693, -0.006715419702231884, -25873.119140625], [0.5015836358070374, -0.8634015917778015, 0.05432835966348648, 42443.265625], [-0.03310517221689224, 0.04359719902276993, 0.9985005259513855, -206.44618225097656], [0.0, 0.0, 0.0, 1.0]], [[-0.8886732459068298, -0.4583359360694885, -0.013714770786464214, -25873.56640625], [0.45729368925094604, -0.888063907623291, 0.04717160761356354, 42443.4140625], [-0.03380003571510315, 0.035648465156555176, 0.9987926483154297, -206.4505157470703], [0.0, 0.0, 0.0, 1.0]], [[-0.9101701974868774, -0.41394996643066406, -0.015353326685726643, -25874.009765625], [0.4130488634109497, -0.9097436666488647, 0.04191895201802254, 42443.53125], [-0.031319938600063324, 0.031811702996492386, 0.9990030527114868, -206.45529174804688], [0.0, 0.0, 0.0, 1.0]], [[-0.929546058177948, -0.3682788610458374, -0.017744751647114754, -25874.44921875], [0.3675682842731476, -0.9293816685676575, 0.03381219133734703, 42443.62890625], [-0.02894396334886551, 0.024907581508159637, 0.9992706179618835, -206.4580535888672], [0.0, 0.0, 0.0, 1.0]], [[-0.9465766549110413, -0.3220134377479553, -0.017321625724434853, -25874.880859375], [0.32111844420433044, -0.9461512565612793, 0.04099808260798454, 42443.73046875], [-0.02959081158041954, 0.03324553370475769, 0.9990090727806091, -206.4656982421875], [0.0, 0.0, 0.0, 1.0]], [[-0.9593961834907532, -0.28137683868408203, -0.019647905603051186, -25875.26953125], [0.28026607632637024, -0.958820104598999, 0.04598839208483696, 42443.8046875], [-0.03177887573838234, 0.03861444443464279, 0.9987487196922302, -206.4716033935547], [0.0, 0.0, 0.0, 1.0]], [[-0.9709653854370117, -0.23825955390930176, -0.021417368203401566, -25875.6953125], [0.23695862293243408, -0.9702052474021912, 
0.05052169784903526, 42443.86328125], [-0.03281651809811592, 0.04397978633642197, 0.9984933137893677, -206.47776794433594], [0.0, 0.0, 0.0, 1.0]], [[-0.9804413914680481, -0.1957259625196457, -0.020642688497900963, -25876.109375], [0.19446247816085815, -0.979552686214447, 0.051584307104349136, 42443.89453125], [-0.030316989868879318, 0.046561162918806076, 0.998455286026001, -206.4831085205078], [0.0, 0.0, 0.0, 1.0]], [[-0.9880693554878235, -0.1527950018644333, -0.019306257367134094, -25876.515625], [0.15179622173309326, -0.987362802028656, 0.04552571102976799, 42443.89453125], [-0.026018382981419563, 0.04205194115638733, 0.9987766146659851, -206.48487854003906], [0.0, 0.0, 0.0, 1.0]], [[-0.9936707615852356, -0.11067277193069458, -0.019233042374253273, -25876.912109375], [0.10993640869855881, -0.9932915568351746, 0.03586093336343765, 42443.8671875], [-0.023072848096489906, 0.033519547432661057, 0.9991717338562012, -206.4835205078125], [0.0, 0.0, 0.0, 1.0]], [[-0.9972503185272217, -0.07174406200647354, -0.01856325753033161, -25877.298828125], [0.0711599662899971, -0.9970011115074158, 0.030415071174502373, 42443.83203125], [-0.020689690485596657, 0.029010480269789696, 0.999364972114563, -206.4834747314453], [0.0, 0.0, 0.0, 1.0]], [[-0.9992266893386841, -0.03684205934405327, -0.013737065717577934, -25877.673828125], [0.03655489161610603, -0.9991193413734436, 0.020600317046046257, 42443.78125], [-0.014483924955129623, 0.020082227885723114, 0.9996933937072754, -206.4813232421875], [0.0, 0.0, 0.0, 1.0]], [[-0.9998975396156311, -0.0022384272888302803, -0.014139795675873756, -25878.04296875], [0.0019102829974144697, -0.9997295141220093, 0.023178141564130783, 42443.74609375], [-0.014187854714691639, 0.023148756474256516, 0.9996313452720642, -206.4810333251953], [0.0, 0.0, 0.0, 1.0]], [[-0.9994352459907532, 0.028651822358369827, -0.01755828782916069, -25878.412109375], [-0.02906280942261219, -0.999298632144928, 0.023616783320903778, 42443.703125], [-0.016869306564331055, 
0.024113738909363747, 0.9995668530464172, -206.48123168945312], [0.0, 0.0, 0.0, 1.0]], [[-0.998446524143219, 0.051852259784936905, -0.02039414644241333, -25878.765625], [-0.05239091441035271, -0.9982661604881287, 0.026829717680811882, 42443.65625], [-0.018967606127262115, 0.027856506407260895, 0.9994319677352905, -206.4827117919922], [0.0, 0.0, 0.0, 1.0]], [[-0.9971734881401062, 0.07266329973936081, -0.019106190651655197, -25879.109375], [-0.07312687486410141, -0.9970142841339111, 0.02480028010904789, 42443.6015625], [-0.017247075214982033, 0.02612735703587532, 0.9995098114013672, -206.48292541503906], [0.0, 0.0, 0.0, 1.0]], [[-0.9955570697784424, 0.09281156212091446, -0.015880221500992775, -25879.439453125], [-0.09311336278915405, -0.9954653978347778, 0.01945565827190876, 42443.5390625], [-0.014002500101923943, 0.020847877487540245, 0.9996846318244934, -206.48062133789062], [0.0, 0.0, 0.0, 1.0]], [[-0.9939676523208618, 0.10888262838125229, -0.013145482167601585, -25879.7578125], [-0.10908670723438263, -0.993904173374176, 0.015956809744238853, 42443.47265625], [-0.011327928863465786, 0.01729455031454563, 0.9997862577438354, -206.47933959960938], [0.0, 0.0, 0.0, 1.0]], [[-0.9926426410675049, 0.12063221633434296, -0.010418281890451908, -25880.0703125], [-0.12077194452285767, -0.9925812482833862, 0.014023814350366592, 42443.41796875], [-0.008649267256259918, 0.015178871341049671, 0.9998473525047302, -206.47714233398438], [0.0, 0.0, 0.0, 1.0]], [[-0.991428554058075, 0.13024991750717163, -0.01021300908178091, -25880.376953125], [-0.13039419054985046, -0.9913483262062073, 0.015029211528599262, 42443.3671875], [-0.008167095482349396, 0.01623210683465004, 0.9998348951339722, -206.47445678710938], [0.0, 0.0, 0.0, 1.0]], [[-0.9901462197303772, 0.1395486444234848, -0.011687454767525196, -25880.67578125], [-0.13973362743854523, -0.9900453686714172, 0.0168768260627985, 42443.31640625], [-0.00921596959233284, 0.01834365539252758, 0.9997892379760742, -206.47315979003906], [0.0, 
0.0, 0.0, 1.0]], [[-0.9889976382255554, 0.1474204659461975, -0.012286609970033169, -25880.958984375], [-0.14762622117996216, -0.988879919052124, 0.017974436283111572, 42443.26953125], [-0.00950018223375082, 0.019590498879551888, 0.9997629523277283, -206.47164916992188], [0.0, 0.0, 0.0, 1.0]], [[-0.9882359504699707, 0.15255700051784515, -0.01077929325401783, -25881.232421875], [-0.15271934866905212, -0.9881349802017212, 0.016311511397361755, 42443.21484375], [-0.008162961341440678, 0.017765825614333153, 0.9998088479042053, -206.4695281982422], [0.0, 0.0, 0.0, 1.0]], [[-0.987749457359314, 0.15577872097492218, -0.009166410192847252, -25881.490234375], [-0.15589012205600739, -0.9876883029937744, 0.013042805716395378, 42443.16015625], [-0.007021766155958176, 0.014311976730823517, 0.9998729228973389, -206.4666290283203], [0.0, 0.0, 0.0, 1.0]], [[-0.9873450994491577, 0.15830370783805847, -0.00946948491036892, -25881.73828125], [-0.158390074968338, -0.9873338937759399, 0.009191946126520634, 42443.109375], [-0.0078944256529212, 0.010575495660305023, 0.9999129176139832, -206.4638214111328], [0.0, 0.0, 0.0, 1.0]], [[-0.9871256351470947, 0.15951107442378998, -0.011798309162259102, -25881.978515625], [-0.15962161123752594, -0.9871363043785095, 0.009102783165872097, 42443.0703125], [-0.010194545611739159, 0.010868855752050877, 0.9998889565467834, -206.46234130859375], [0.0, 0.0, 0.0, 1.0]], [[-0.9871404767036438, 0.1590977907180786, -0.015545492060482502, -25882.21484375], [-0.1593053787946701, -0.9871415495872498, 0.013170860707759857, 42443.0390625], [-0.013250146061182022, 0.01547796931117773, 0.9997923970222473, -206.46319580078125], [0.0, 0.0, 0.0, 1.0]], [[-0.9872174859046936, 0.15851695835590363, -0.0165549386292696, -25882.43359375], [-0.15877899527549744, -0.9871856570243835, 0.015930229797959328, 42443.015625], [-0.013817586936056614, 0.018355175852775574, 0.9997360706329346, -206.4632110595703], [0.0, 0.0, 0.0, 1.0]], [[-0.9871864318847656, 0.15893684327602386, 
-0.014213962480425835, -25882.6328125], [-0.15915493667125702, -0.9871270060539246, 0.015810545533895493, 42442.984375], [-0.011518108658492565, 0.017870178446173668, 0.9997739791870117, -206.462646484375], [0.0, 0.0, 0.0, 1.0]], [[-0.9869759678840637, 0.16048134863376617, -0.011140851303935051, -25882.8203125], [-0.16061358153820038, -0.9869418740272522, 0.01220739632844925, 42442.94140625], [-0.009036313742399216, 0.013837779872119427, 0.9998634457588196, -206.460693359375], [0.0, 0.0, 0.0, 1.0]], [[-0.9869638681411743, 0.1606513410806656, -0.009665013290941715, -25883.0], [-0.16071845591068268, -0.9869781732559204, 0.006617309525609016, 42442.8984375], [-0.00847607757896185, 0.008084391243755817, 0.9999313950538635, -206.45863342285156], [0.0, 0.0, 0.0, 1.0]], [[-0.9871691465377808, 0.15940114855766296, -0.00939897634088993, -25883.173828125], [-0.15943685173988342, -0.987203061580658, 0.003176008118316531, 42442.8671875], [-0.008772439323365688, 0.004633800126612186, 0.9999507665634155, -206.45700073242188], [0.0, 0.0, 0.0, 1.0]], [[-0.98719722032547, 0.15921036899089813, -0.009682418778538704, -25883.337890625], [-0.1592469960451126, -0.9872338175773621, 0.003132722107693553, 42442.83984375], [-0.009060048498213291, 0.004634510260075331, 0.9999482035636902, -206.4554443359375], [0.0, 0.0, 0.0, 1.0]], [[-0.9869921207427979, 0.16044582426548004, -0.010182402096688747, -25883.494140625], [-0.16049683094024658, -0.9870266318321228, 0.0044004060328006744, 42442.8203125], [-0.00934427510946989, 0.005977408494800329, 0.9999384880065918, -206.45489501953125], [0.0, 0.0, 0.0, 1.0]], [[-0.9869204759597778, 0.16082856059074402, -0.011055760085582733, -25883.640625], [-0.1609029471874237, -0.9869508147239685, 0.006198398303240538, 42442.796875], [-0.009914609603583813, 0.00789622962474823, 0.9999196529388428, -206.45481872558594], [0.0, 0.0, 0.0, 1.0]], [[-0.9870476722717285, 0.1599690020084381, -0.012121967040002346, -25883.78125], [-0.16005222499370575, 
-0.9870889186859131, 0.006231141276657581, 42442.7734375], [-0.010968669317662716, 0.008090580813586712, 0.9999071359634399, -206.45443725585938], [0.0, 0.0, 0.0, 1.0]], [[-0.9870660901069641, 0.15976998209953308, -0.013196473941206932, -25883.916015625], [-0.15985523164272308, -0.9871242046356201, 0.0056737991981208324, 42442.75390625], [-0.01212005689740181, 0.00770993996411562, 0.9998968243598938, -206.45318603515625], [0.0, 0.0, 0.0, 1.0]], [[-0.98691725730896, 0.160615012049675, -0.014038107357919216, -25884.04296875], [-0.16070370376110077, -0.9869877696037292, 0.005431271158158779, 42442.734375], [-0.012983094900846481, 0.007616191171109676, 0.999886691570282, -206.45352172851562], [0.0, 0.0, 0.0, 1.0]], [[-0.9868537187576294, 0.16099101305007935, -0.014202519319951534, -25884.158203125], [-0.1610703468322754, -0.986932098865509, 0.004623102489858866, 42442.7109375], [-0.013272644951939583, 0.00684993015602231, 0.9998884797096252, -206.45294189453125], [0.0, 0.0, 0.0, 1.0]], [[-0.9869428277015686, 0.1604211926460266, -0.014453538693487644, -25884.267578125], [-0.16049911081790924, -0.9870261549949646, 0.004396518692374229, 42442.6953125], [-0.01356072723865509, 0.00665889261290431, 0.9998859167098999, -206.45237731933594], [0.0, 0.0, 0.0, 1.0]], [[-0.9869404435157776, 0.16041991114616394, -0.014627709053456783, -25884.369140625], [-0.1604972779750824, -0.9870270490646362, 0.004271568730473518, 42442.6796875], [-0.013752699829638004, 0.006563491187989712, 0.9998838901519775, -206.45266723632812], [0.0, 0.0, 0.0, 1.0]], [[-0.9868653416633606, 0.1608944833278656, -0.014486031606793404, -25884.462890625], [-0.1609751433134079, -0.9869478344917297, 0.004578886553645134, 42442.6640625], [-0.013560240156948566, 0.0068506356328725815, 0.9998846054077148, -206.452392578125], [0.0, 0.0, 0.0, 1.0]], [[-0.9868535995483398, 0.16099010407924652, -0.014221158809959888, -25884.552734375], [-0.1610630601644516, -0.9869354367256165, 0.0041346619836986065, 42442.6484375], 
[-0.013369726948440075, 0.0063708084635436535, 0.9998902678489685, -206.45208740234375], [0.0, 0.0, 0.0, 1.0]], [[-0.9868984222412109, 0.16070465743541718, -0.014333405531942844, -25884.634765625], [-0.16077153384685516, -0.9869849681854248, 0.003635142929852009, 42442.63671875], [-0.013562670908868313, 0.005891920067369938, 0.9998906254768372, -206.45236206054688], [0.0, 0.0, 0.0, 1.0]], [[-0.986849844455719, 0.16098754107952118, -0.014508550055325031, -25884.708984375], [-0.16105370223522186, -0.9869394302368164, 0.0035059673245996237, 42442.62109375], [-0.013754642568528652, 0.005796518176794052, 0.9998885989189148, -206.45166015625], [0.0, 0.0, 0.0, 1.0]], [[-0.9867905974388123, 0.16136698424816132, -0.01432060357183218, -25884.779296875], [-0.1614324301481247, -0.9868775010108948, 0.0035308804363012314, 42442.609375], [-0.013562913052737713, 0.005796048790216446, 0.9998912215232849, -206.45135498046875], [0.0, 0.0, 0.0, 1.0]], [[-0.98679518699646, 0.16136935353279114, -0.013972500339150429, -25884.841796875], [-0.16143609583377838, -0.9868759512901306, 0.0037811158690601587, 42442.59765625], [-0.01317896880209446, 0.0059868525713682175, 0.9998952150344849, -206.4517822265625], [0.0, 0.0, 0.0, 1.0]], [[-0.9867849349975586, 0.1614672690629959, -0.013560635037720203, -25884.8984375], [-0.16153733432292938, -0.986857533454895, 0.004234574269503355, 42442.59375], [-0.012698669917881489, 0.006369162350893021, 0.9998990893363953, -206.45204162597656], [0.0, 0.0, 0.0, 1.0]], [[-0.9866846203804016, 0.16213838756084442, -0.01283150166273117, -25884.953125], [-0.16222131252288818, -0.9867379069328308, 0.005703394766896963, 42442.5859375], [-0.01173658948391676, 0.007708994671702385, 0.9999014139175415, -206.4516143798828], [0.0, 0.0, 0.0, 1.0]], [[-0.9865891933441162, 0.16281862556934357, -0.01148812286555767, -25885.01171875], [-0.16291047632694244, -0.9866119027137756, 0.007564650382846594, 42442.58203125], [-0.01010265201330185, 0.009334737434983253, 
0.9999053478240967, -206.4521484375], [0.0, 0.0, 0.0, 1.0]], [[-0.9866428971290588, 0.16254998743534088, -0.010644599795341492, -25885.072265625], [-0.1626468449831009, -0.9866436719894409, 0.008966842666268349, 42442.57421875], [-0.009044865146279335, 0.010578381828963757, 0.9999030828475952, -206.4515838623047], [0.0, 0.0, 0.0, 1.0]], [[-0.9866761565208435, 0.16236957907676697, -0.010307729244232178, -25885.12890625], [-0.16247092187404633, -0.9866637587547302, 0.009897134266793728, 42442.56640625], [-0.00856326799839735, 0.011439972557127476, 0.9998978972434998, -206.4518585205078], [0.0, 0.0, 0.0, 1.0]], [[-0.9866317510604858, 0.1626567542552948, -0.01002669706940651, -25885.19140625], [-0.16275523602962494, -0.9866164326667786, 0.009940234944224358, 42442.5546875], [-0.008275656960904598, 0.011439248919487, 0.9999002814292908, -206.45143127441406], [0.0, 0.0, 0.0, 1.0]]], "1": [[[-0.9794196486473083, 0.20155538618564606, -0.010616619139909744, -25833.861328125], [-0.20161369442939758, -0.9794536828994751, 0.004740629345178604, 42363.7578125], [-0.00944298692047596, 0.0067835221998393536, 0.9999323487281799, -206.04849243164062], [0.0, 0.0, 0.0, 1.0]], [[-0.9786664247512817, 0.20499254763126373, -0.013786054216325283, -25834.4296875], [-0.20511941611766815, -0.9786999821662903, 0.008508031256496906, 42364.1171875], [-0.011748328804969788, 0.011154312640428543, 0.9998687505722046, -206.0514373779297], [0.0, 0.0, 0.0, 1.0]], [[-0.9779430031776428, 0.20835138857364655, -0.014738024212419987, -25834.99609375], [-0.20850838720798492, -0.9779688715934753, 0.010056886821985245, 42364.46875], [-0.012317962944507599, 0.012908064760267735, 0.9998407959938049, -206.05783081054688], [0.0, 0.0, 0.0, 1.0]], [[-0.9772928953170776, 0.211565300822258, -0.011782674118876457, -25835.560546875], [-0.21168193221092224, -0.977290153503418, 0.009730513207614422, 42364.8203125], [-0.0094564538449049, 0.012003741227090359, 0.999883234500885, -206.0626983642578], [0.0, 0.0, 0.0, 1.0]], 
[[-0.9766031503677368, 0.2148762047290802, -0.008626995608210564, -25836.125], [-0.21494659781455994, -0.9765906929969788, 0.008285527117550373, 42365.16796875], [-0.00664468202739954, 0.00994601659476757, 0.9999284148216248, -206.06646728515625], [0.0, 0.0, 0.0, 1.0]], [[-0.9759446382522583, 0.21788010001182556, -0.007771937642246485, -25836.693359375], [-0.21793736517429352, -0.9759339690208435, 0.007494145072996616, 42365.515625], [-0.00595207279548049, 0.009007667191326618, 0.9999417066574097, -206.0677490234375], [0.0, 0.0, 0.0, 1.0]], [[-0.9755948781967163, 0.21924544870853424, -0.01208167802542448, -25837.27734375], [-0.2193540632724762, -0.9756079316139221, 0.008537821471691132, 42365.8671875], [-0.009915103204548359, 0.010979621671140194, 0.9998905062675476, -206.06918334960938], [0.0, 0.0, 0.0, 1.0]], [[-0.9754425883293152, 0.21955455839633942, -0.01753860153257847, -25837.861328125], [-0.2197660207748413, -0.9754884839057922, 0.01119009405374527, 42366.21875], [-0.014651867561042309, 0.01476968452334404, 0.9997835159301758, -206.0753631591797], [0.0, 0.0, 0.0, 1.0]], [[-0.9751879572868347, 0.22036024928092957, -0.021211126819252968, -25838.4453125], [-0.22063514590263367, -0.9752872586250305, 0.011611346155405045, 42366.57421875], [-0.01812826469540596, 0.016003165394067764, 0.9997075796127319, -206.08457946777344], [0.0, 0.0, 0.0, 1.0]], [[-0.975055456161499, 0.22107195854187012, -0.01984856091439724, -25839.01953125], [-0.22125263512134552, -0.9751885533332825, 0.007396053988486528, 42366.921875], [-0.017721027135849, 0.01160311046987772, 0.9997756481170654, -206.0937042236328], [0.0, 0.0, 0.0, 1.0]], [[-0.9752121567726135, 0.22083680331707, -0.013872992247343063, -25839.59375], [-0.22093257308006287, -0.9752719402313232, 0.005784102715551853, 42367.265625], [-0.012252596206963062, 0.008705723099410534, 0.9998869895935059, -206.1005859375], [0.0, 0.0, 0.0, 1.0]], [[-0.9752140045166016, 0.22105270624160767, -0.00966272410005331, -25840.162109375], 
[-0.22104868292808533, -0.9752616286277771, -0.0014964330475777388, 42367.61328125], [-0.009754475206136703, 0.0006765904254280031, 0.9999521970748901, -206.1018524169922], [0.0, 0.0, 0.0, 1.0]], [[-0.9752708673477173, 0.22101138532161713, -0.0008496135706081986, -25840.72265625], [-0.22101296484470367, -0.975264310836792, 0.0035766279324889183, 42367.96875], [-3.8122187106637284e-05, 0.0036759567447006702, 0.9999932050704956, -206.0998992919922], [0.0, 0.0, 0.0, 1.0]], [[-0.9753938317298889, 0.22044026851654053, -0.0036047969479113817, -25841.29296875], [-0.22046007215976715, -0.9753736257553101, 0.006605669856071472, 42368.32421875], [-0.0020598683040589094, 0.007237843703478575, 0.9999716877937317, -206.09664916992188], [0.0, 0.0, 0.0, 1.0]], [[-0.9756283164024353, 0.21885806322097778, -0.01582866907119751, -25841.880859375], [-0.2189805507659912, -0.9757078886032104, 0.006453744135797024, 42368.67578125], [-0.014031701721251011, 0.009762627072632313, 0.9998538494110107, -206.09765625], [0.0, 0.0, 0.0, 1.0]], [[-0.9762240648269653, 0.2156754583120346, -0.0216936394572258, -25842.458984375], [-0.2158033698797226, -0.9764297604560852, 0.003714017802849412, 42369.01171875], [-0.020381292328238487, 0.008307276293635368, 0.999757707118988, -206.1044921875], [0.0, 0.0, 0.0, 1.0]], [[-0.9768079519271851, 0.2125464677810669, -0.02589111030101776, -25843.02734375], [-0.21257370710372925, -0.9771435260772705, -0.001725929556414485, 42369.359375], [-0.025666171684861183, 0.003817868186160922, 0.9996632933616638, -206.1112060546875], [0.0, 0.0, 0.0, 1.0]], [[-0.9780721664428711, 0.2070801705121994, -0.02219557762145996, -25843.578125], [-0.20715424418449402, -0.9783077239990234, 0.0010713846422731876, 42369.71875], [-0.02149224281311035, 0.005645800847560167, 0.9997530579566956, -206.11915588378906], [0.0, 0.0, 0.0, 1.0]], [[-0.9798176884651184, 0.19910068809986115, -0.0177812110632658, -25844.12109375], [-0.1991620510816574, -0.9799650311470032, 0.001733875833451748, 
42370.08203125], [-0.017079750075936317, 0.005240225698798895, 0.9998403787612915, -206.12567138671875], [0.0, 0.0, 0.0, 1.0]], [[-0.981563925743103, 0.19018711149692535, -0.01900576241314411, -25844.66796875], [-0.1901882290840149, -0.9817460775375366, -0.0017590085044503212, 42370.44140625], [-0.018993373960256577, 0.0018880937714129686, 0.9998177886009216, -206.1295928955078], [0.0, 0.0, 0.0, 1.0]], [[-0.9828426241874695, 0.1826859414577484, -0.02542097680270672, -25845.216796875], [-0.18264849483966827, -0.9831709265708923, -0.0038062375970184803, 42370.80078125], [-0.02568851225078106, 0.0009021711302921176, 0.9996695518493652, -206.13339233398438], [0.0, 0.0, 0.0, 1.0]], [[-0.9840850830078125, 0.17501170933246613, -0.03078201785683632, -25845.7578125], [-0.17489805817604065, -0.9845659732818604, -0.006366982590407133, 42371.1640625], [-0.031421221792697906, -0.0008819358772598207, 0.9995058178901672, -206.14137268066406], [0.0, 0.0, 0.0, 1.0]], [[-0.9855776429176331, 0.1666579395532608, -0.02935904636979103, -25846.287109375], [-0.1666337549686432, -0.9860134124755859, -0.0032830636482685804, 42371.5390625], [-0.029495561495423317, 0.0016564958496019244, 0.9995635151863098, -206.1495819091797], [0.0, 0.0, 0.0, 1.0]], [[-0.9871529936790466, 0.1573590338230133, -0.02769898809492588, -25846.8046875], [-0.15738460421562195, -0.9875365495681763, -0.001265166443772614, 42371.92578125], [-0.027552848681807518, 0.0031104821246117353, 0.9996154308319092, -206.15872192382812], [0.0, 0.0, 0.0, 1.0]], [[-0.9888357520103455, 0.14627988636493683, -0.028392216190695763, -25847.326171875], [-0.14629900455474854, -0.9892393946647644, -0.0014095205115154386, 42372.30859375], [-0.028292885050177574, 0.0027599709574133158, 0.9995958805084229, -206.16725158691406], [0.0, 0.0, 0.0, 1.0]], [[-0.9902483820915222, 0.1356334686279297, -0.031805746257305145, -25847.841796875], [-0.13550350069999695, -0.9907573461532593, -0.0062159886583685875, 42372.68359375], [-0.032354872673749924, 
-0.0018455823883414268, 0.9994747042655945, -206.17454528808594], [0.0, 0.0, 0.0, 1.0]], [[-0.9914600849151611, 0.12596428394317627, -0.03376208245754242, -25848.3515625], [-0.1258150041103363, -0.9920322895050049, -0.00651833089068532, 42373.0703125], [-0.03431415185332298, -0.002214887412264943, 0.9994086027145386, -206.18350219726562], [0.0, 0.0, 0.0, 1.0]], [[-0.9929057359695435, 0.11440351605415344, -0.032405778765678406, -25848.845703125], [-0.11435090005397797, -0.9934343099594116, -0.0034778090193867683, 42373.4765625], [-0.03259088844060898, 0.0002524942683521658, 0.9994686841964722, -206.19285583496094], [0.0, 0.0, 0.0, 1.0]], [[-0.994648277759552, 0.09967215359210968, -0.027206577360630035, -25849.326171875], [-0.09966078400611877, -0.9950199127197266, -0.0017752525163814425, 42373.8828125], [-0.02724802866578102, 0.0009456787956878543, 0.9996282458305359, -206.20126342773438], [0.0, 0.0, 0.0, 1.0]], [[-0.9963098764419556, 0.08370314538478851, -0.018984956666827202, -25849.79296875], [-0.08361304551362991, -0.9964832067489624, -0.005488627124577761, 42374.29296875], [-0.019377604126930237, -0.003880982520058751, 0.999804675579071, -206.2054443359375], [0.0, 0.0, 0.0, 1.0]], [[-0.997556746006012, 0.06810235977172852, -0.015576706267893314, -25850.25390625], [-0.06798586994409561, -0.9976550936698914, -0.007890894077718258, 42374.703125], [-0.016077568754553795, -0.006812617648392916, 0.9998475313186646, -206.20584106445312], [0.0, 0.0, 0.0, 1.0]], [[-0.9984166026115417, 0.05318977311253548, -0.018309200182557106, -25850.71875], [-0.053066425025463104, -0.9985653162002563, -0.007157983258366585, 42375.125], [-0.018663663417100906, -0.006175044924020767, 0.9998067021369934, -206.20440673828125], [0.0, 0.0, 0.0, 1.0]], [[-0.9989787936210632, 0.03838267922401428, -0.02383681945502758, -25851.18359375], [-0.03825585916638374, -0.9992514252662659, -0.005752913653850555, 42375.55859375], [-0.02403978817164898, -0.004835139494389296, 0.9996992945671082, 
-206.20591735839844], [0.0, 0.0, 0.0, 1.0]], [[-0.9994214177131653, 0.022952817380428314, -0.02510037086904049, -25851.6328125], [-0.022884877398610115, -0.9997336864471436, -0.0029879712965339422, 42375.99609375], [-0.025162268429994583, -0.002411822322756052, 0.9996804594993591, -206.20956420898438], [0.0, 0.0, 0.0, 1.0]], [[-0.9996747970581055, 0.007039799354970455, -0.024511944502592087, -25852.068359375], [-0.0069537959061563015, -0.9999694228172302, -0.003590695094317198, 42376.43359375], [-0.02453647181391716, -0.0034190749283879995, 0.9996930360794067, -206.21385192871094], [0.0, 0.0, 0.0, 1.0]], [[-0.9997265934944153, -0.009470791555941105, -0.021377943456172943, -25852.490234375], [0.009556586854159832, -0.9999467134475708, -0.003911794163286686, 42376.87109375], [-0.02133975550532341, -0.004115025047212839, 0.9997637867927551, -206.2164764404297], [0.0, 0.0, 0.0, 1.0]], [[-0.9994341731071472, -0.026436997577548027, -0.020798735320568085, -25852.904296875], [0.026583770290017128, -0.9996234178543091, -0.006809000391513109, 42377.31640625], [-0.02061089314520359, -0.0073580555617809296, 0.9997604489326477, -206.217529296875], [0.0, 0.0, 0.0, 1.0]], [[-0.998732328414917, -0.04491112381219864, -0.022730249911546707, -25853.31640625], [0.045103028416633606, -0.9989503622055054, -0.007997263222932816, 42377.76953125], [-0.0223472248762846, -0.009012328460812569, 0.9997096061706543, -206.21795654296875], [0.0, 0.0, 0.0, 1.0]], [[-0.9976324439048767, -0.06383708864450455, -0.025579066947102547, -25853.716796875], [0.06404266506433487, -0.9979204535484314, -0.0072981263510882854, 42378.23046875], [-0.025059983134269714, -0.008918997831642628, 0.9996461272239685, -206.21820068359375], [0.0, 0.0, 0.0, 1.0]], [[-0.9961711168289185, -0.08263710141181946, -0.02853446640074253, -25854.103515625], [0.0828782320022583, -0.9965323805809021, -0.007371129468083382, 42378.69921875], [-0.02782639116048813, -0.009707792662084103, 0.999565601348877, -206.2196807861328], [0.0, 
0.0, 0.0, 1.0]], [[-0.994260847568512, -0.10256380587816238, -0.030431479215621948, -25854.48046875], [0.10281030833721161, -0.994678795337677, -0.006644593086093664, 42379.171875], [-0.02958805300295353, -0.009735126979649067, 0.9995147585868835, -206.22271728515625], [0.0, 0.0, 0.0, 1.0]], [[-0.9918596744537354, -0.12372199445962906, -0.03012046404182911, -25854.83984375], [0.12390875816345215, -0.992283821105957, -0.004407861270010471, 42379.65625], [-0.02934270165860653, -0.008104168809950352, 0.9995365142822266, -206.22598266601562], [0.0, 0.0, 0.0, 1.0]], [[-0.9890297055244446, -0.1456349790096283, -0.02471172623336315, -25855.173828125], [0.14576438069343567, -0.9893131256103516, -0.0035073186736553907, 42380.140625], [-0.02393684908747673, -0.007070931605994701, 0.999688446521759, -206.22866821289062], [0.0, 0.0, 0.0, 1.0]], [[-0.98540198802948, -0.1694624423980713, -0.01629178412258625, -25855.521484375], [0.16957233846187592, -0.9855018258094788, -0.005608227103948593, 42380.67578125], [-0.015105198137462139, -0.008288993500173092, 0.9998515248298645, -206.22930908203125], [0.0, 0.0, 0.0, 1.0]], [[-0.9813348650932312, -0.19184106588363647, -0.013372331857681274, -25855.830078125], [0.19197742640972137, -0.9813506603240967, -0.009776121005415916, 42381.16796875], [-0.011247485876083374, -0.012160833925008774, 0.999862790107727, -206.22523498535156], [0.0, 0.0, 0.0, 1.0]], [[-0.9765115976333618, -0.215042844414711, -0.013478707522153854, -25856.13671875], [0.21521112322807312, -0.9764863848686218, -0.012594133615493774, 42381.6640625], [-0.01045349519699812, -0.015199086628854275, 0.9998298287391663, -206.21849060058594], [0.0, 0.0, 0.0, 1.0]], [[-0.9714297652244568, -0.23698006570339203, -0.01283031702041626, -25856.42578125], [0.23711587488651276, -0.9714263081550598, -0.010343858040869236, 42382.16796875], [-0.01001241896301508, -0.013090603053569794, 0.9998641610145569, -206.21096801757812], [0.0, 0.0, 0.0, 1.0]], [[-0.9659126400947571, 
-0.25799426436424255, -0.021253321319818497, -25856.708984375], [0.2581222355365753, -0.9661060571670532, -0.0034666715655475855, 42382.6875], [-0.019638581201434135, -0.008834456093609333, 0.999768078327179, -206.20599365234375], [0.0, 0.0, 0.0, 1.0]], [[-0.9598409533500671, -0.27951136231422424, -0.024056266993284225, -25856.982421875], [0.279403418302536, -0.9601420760154724, 0.007807901594787836, 42383.2109375], [-0.025279831141233444, 0.000772941333707422, 0.9996800422668457, -206.2087860107422], [0.0, 0.0, 0.0, 1.0]], [[-0.9535185098648071, -0.3007458746433258, -0.018824996426701546, -25857.2265625], [0.300577312707901, -0.9536906480789185, 0.011288749054074287, 42383.734375], [-0.021348267793655396, 0.005105664953589439, 0.9997590184211731, -206.21546936035156], [0.0, 0.0, 0.0, 1.0]], [[-0.9467214345932007, -0.32159900665283203, -0.017108552157878876, -25857.458984375], [0.3215295672416687, -0.9468755722045898, 0.006740919779986143, 42384.24609375], [-0.018367541953921318, 0.0008808679995127022, 0.9998309016227722, -206.2198944091797], [0.0, 0.0, 0.0, 1.0]], [[-0.9393911957740784, -0.3425832688808441, -0.013446911238133907, -25857.685546875], [0.3425753712654114, -0.9394857287406921, 0.002960875630378723, 42384.76171875], [-0.013647526502609253, -0.0018251592991873622, 0.9999051690101624, -206.22085571289062], [0.0, 0.0, 0.0, 1.0]], [[-0.9317073822021484, -0.362854540348053, -0.016059646382927895, -25857.904296875], [0.3629363775253296, -0.9318108558654785, -0.0024053798988461494, 42385.28125], [-0.014091748744249344, -0.008069739677011967, 0.9998681545257568, -206.2193603515625], [0.0, 0.0, 0.0, 1.0]], [[-0.9234052300453186, -0.3833990693092346, -0.018111418932676315, -25858.1171875], [0.38356056809425354, -0.9234938025474548, -0.00635883305221796, 42385.80859375], [-0.014287812635302544, -0.012818606570363045, 0.9998157620429993, -206.21507263183594], [0.0, 0.0, 0.0, 1.0]], [[-0.9148102402687073, -0.40338727831840515, -0.020021425560116768, 
-25858.318359375], [0.4035276770591736, -0.9149611592292786, -0.0033736927434802055, 42386.35546875], [-0.016957921907305717, -0.011165487580001354, 0.9997938275337219, -206.21170043945312], [0.0, 0.0, 0.0, 1.0]], [[-0.9060202836990356, -0.4225598871707916, -0.023883085697889328, -25858.509765625], [0.4226509928703308, -0.9062915444374084, 0.0013460617046803236, 42386.9140625], [-0.022213829681277275, -0.008874650113284588, 0.9997137784957886, -206.2089385986328], [0.0, 0.0, 0.0, 1.0]], [[-0.8966209888458252, -0.44219478964805603, -0.023119088262319565, -25858.689453125], [0.44214457273483276, -0.8969122767448425, 0.007519913837313652, 42387.4921875], [-0.024061061441898346, -0.003479466075077653, 0.9997044205665588, -206.21124267578125], [0.0, 0.0, 0.0, 1.0]], [[-0.8868812322616577, -0.4614451229572296, -0.022587019950151443, -25858.857421875], [0.46134042739868164, -0.8871673941612244, 0.009956613183021545, 42388.0703125], [-0.024632899090647697, -0.001589971943758428, 0.9996952414512634, -206.21572875976562], [0.0, 0.0, 0.0, 1.0]], [[-0.8770514726638794, -0.47988563776016235, -0.02214706689119339, -25859.015625], [0.47979244589805603, -0.8773285150527954, 0.009694076143205166, 42388.65625], [-0.024082301184535027, -0.002123790793120861, 0.9997076988220215, -206.21958923339844], [0.0, 0.0, 0.0, 1.0]], [[-0.8677002787590027, -0.4966226816177368, -0.021496707573533058, -25859.16796875], [0.49652013182640076, -0.8679647445678711, 0.010250034742057323, 42389.25], [-0.02374878339469433, -0.0017795891035348177, 0.9997163414955139, -206.22352600097656], [0.0, 0.0, 0.0, 1.0]], [[-0.8594751954078674, -0.5108006000518799, -0.019624656066298485, -25859.310546875], [0.5106422305107117, -0.8596986532211304, 0.01275112759321928, 42389.85546875], [-0.02338457480072975, 0.0009380995761603117, 0.9997260570526123, -206.22874450683594], [0.0, 0.0, 0.0, 1.0]], [[-0.8523644804954529, -0.5226365923881531, -0.018051840364933014, -25859.44921875], [0.5223883390426636, 
-0.8525413274765015, 0.01684221625328064, 42390.4765625], [-0.02419229783117771, 0.004925635643303394, 0.9996951818466187, -206.2345733642578], [0.0, 0.0, 0.0, 1.0]], [[-0.8462854027748108, -0.53240567445755, -0.018579090014100075, -25859.58984375], [0.5321588516235352, -0.846478283405304, 0.016773568466305733, 42391.09765625], [-0.02465713955461979, 0.004308197647333145, 0.9996866583824158, -206.24252319335938], [0.0, 0.0, 0.0, 1.0]], [[-0.8412356972694397, -0.5402188897132874, -0.022044148296117783, -25859.734375], [0.5401141047477722, -0.8415203094482422, 0.010976654477417469, 42391.71875], [-0.024480393156409264, -0.0026723984628915787, 0.9996967315673828, -206.24851989746094], [0.0, 0.0, 0.0, 1.0]], [[-0.8360644578933716, -0.5481731295585632, -0.022414444014430046, -25859.873046875], [0.5482105612754822, -0.836325466632843, 0.004988155793398619, 42392.34765625], [-0.021480143070220947, -0.008117415010929108, 0.9997363090515137, -206.25082397460938], [0.0, 0.0, 0.0, 1.0]], [[-0.8303076028823853, -0.5567876100540161, -0.02401689998805523, -25860.005859375], [0.5568984150886536, -0.8305771350860596, 0.002416164381429553, 42392.98828125], [-0.02129317820072174, -0.011368812993168831, 0.9997085928916931, -206.25050354003906], [0.0, 0.0, 0.0, 1.0]], [[-0.8241606950759888, -0.5658276677131653, -0.024455668404698372, -25860.12890625], [0.5659740567207336, -0.824422299861908, 0.001117419684305787, 42393.6484375], [-0.020794065669178963, -0.012920341454446316, 0.9997002482414246, -206.2484588623047], [0.0, 0.0, 0.0, 1.0]], [[-0.8182222247123718, -0.5739914774894714, -0.03234397619962692, -25860.26171875], [0.5742135047912598, -0.8187006115913391, 0.0028738738037645817, 42394.3203125], [-0.028129613026976585, -0.016220878809690475, 0.9994726181030273, -206.2458953857422], [0.0, 0.0, 0.0, 1.0]], [[-0.8130703568458557, -0.5811595916748047, -0.034205906093120575, -25860.390625], [0.5812875628471375, -0.8136672377586365, 0.007099862676113844, 42395.0], [-0.03195837885141373, 
-0.014110778458416462, 0.9993895888328552, -206.2473602294922], [0.0, 0.0, 0.0, 1.0]], [[-0.8095414042472839, -0.586172342300415, -0.032318852841854095, -25860.5], [0.5862706303596497, -0.810081958770752, 0.007342831697314978, 42395.68359375], [-0.030485086143016815, -0.013003266416490078, 0.9994506239891052, -206.25067138671875], [0.0, 0.0, 0.0, 1.0]], [[-0.807090699672699, -0.5898159742355347, -0.026866266503930092, -25860.599609375], [0.5899317860603333, -0.8074420690536499, 0.0042353603057563305, 42396.375], [-0.024191034957766533, -0.012430942617356777, 0.9996300339698792, -206.2528533935547], [0.0, 0.0, 0.0, 1.0]], [[-0.8049877285957336, -0.5928829908370972, -0.022009622305631638, -25860.708984375], [0.5929983854293823, -0.8052021265029907, 0.0015524150803685188, 42397.07421875], [-0.018642595037817955, -0.01180199719965458, 0.9997565150260925, -206.2520294189453], [0.0, 0.0, 0.0, 1.0]], [[-0.8038212060928345, -0.594310998916626, -0.02580559439957142, -25860.83203125], [0.5945231318473816, -0.804078221321106, -0.0006894266698509455, 42397.78125], [-0.020339980721473694, -0.01589619740843773, 0.9996667504310608, -206.24813842773438], [0.0, 0.0, 0.0, 1.0]], [[-0.8037008047103882, -0.5943530201911926, -0.02844996564090252, -25860.95703125], [0.5945234894752502, -0.8040728569030762, 0.0029547170270234346, 42398.5], [-0.024631988257169724, -0.014539465308189392, 0.9995908141136169, -206.24537658691406], [0.0, 0.0, 0.0, 1.0]], [[-0.8034456968307495, -0.5946476459503174, -0.029483666643500328, -25861.078125], [0.5947129726409912, -0.8039035797119141, 0.007456861436367035, 42399.2265625], [-0.028136229142546654, -0.011543133296072483, 0.9995374083518982, -206.24630737304688], [0.0, 0.0, 0.0, 1.0]], [[-0.8031584620475769, -0.5950334072113037, -0.02952411398291588, -25861.201171875], [0.595040500164032, -0.8036395311355591, 0.009503384120762348, 42399.9609375], [-0.02938157692551613, -0.009935318492352962, 0.9995188117027283, -206.24928283691406], [0.0, 0.0, 0.0, 
1.0]], [[-0.8034638166427612, -0.594763994216919, -0.02648932673037052, -25861.318359375], [0.5947945713996887, -0.8038421869277954, 0.0075666275806725025, 42400.69921875], [-0.025793593376874924, -0.009676195681095123, 0.9996204376220703, -206.2533416748047], [0.0, 0.0, 0.0, 1.0]], [[-0.8038791418075562, -0.5942321419715881, -0.0258175078779459, -25861.443359375], [0.5942851901054382, -0.8042289614677429, 0.006398347206413746, 42401.4453125], [-0.024565288797020912, -0.010199463926255703, 0.9996461868286133, -206.25440979003906], [0.0, 0.0, 0.0, 1.0]], [[-0.8040860295295715, -0.5939143896102905, -0.026670444756746292, -25861.5703125], [0.5939198136329651, -0.8044785857200623, 0.00857857707887888, 42402.203125], [-0.026550741866230965, -0.008942189626395702, 0.9996074438095093, -206.2569122314453], [0.0, 0.0, 0.0, 1.0]], [[-0.8041292428970337, -0.5937278866767883, -0.029382122680544853, -25861.705078125], [0.5936395525932312, -0.8046325445175171, 0.01259038969874382, 42402.96875], [-0.03111707791686058, -0.007318087387830019, 0.9994889497756958, -206.2608184814453], [0.0, 0.0, 0.0, 1.0]], [[-0.8042693138122559, -0.5934830904006958, -0.0304733794182539, -25861.83984375], [0.593342661857605, -0.8048204183578491, 0.014440227299928665, 42403.7421875], [-0.03309563174843788, -0.006467324681580067, 0.9994312524795532, -206.2689666748047], [0.0, 0.0, 0.0, 1.0]], [[-0.8045368790626526, -0.5930848717689514, -0.03115631453692913, -25861.970703125], [0.5930076837539673, -0.8050972819328308, 0.012660500593483448, 42404.515625], [-0.032592613250017166, -0.008290092460811138, 0.9994343519210815, -206.27664184570312], [0.0, 0.0, 0.0, 1.0]], [[-0.8047133684158325, -0.5928512215614319, -0.031045153737068176, -25862.103515625], [0.5929033756256104, -0.8052288293838501, 0.008492741733789444, 42405.29296875], [-0.030033383518457413, -0.011572551913559437, 0.9994819164276123, -206.28250122070312], [0.0, 0.0, 0.0, 1.0]], [[-0.8048863410949707, -0.592619001865387, -0.030994299799203873, 
-25862.23828125], [0.5927959680557251, -0.8053421378135681, 0.004120071418583393, 42406.06640625], [-0.027402648702263832, -0.015057106502354145, 0.9995110630989075, -206.28411865234375], [0.0, 0.0, 0.0, 1.0]], [[-0.8047325611114502, -0.5927503108978271, -0.03244208171963692, -25862.373046875], [0.5929675698280334, -0.8052187561988831, 0.0034941360354423523, 42406.85546875], [-0.0281941220164299, -0.01642525941133499, 0.9994674921035767, -206.2837677001953], [0.0, 0.0, 0.0, 1.0]], [[-0.8043051362037659, -0.5932735204696655, -0.033464137464761734, -25862.509765625], [0.5934812426567078, -0.8048357367515564, 0.004413273185491562, 42407.63671875], [-0.02955140918493271, -0.016310719773173332, 0.9994301199913025, -206.2844696044922], [0.0, 0.0, 0.0, 1.0]], [[-0.8034415245056152, -0.5944250226020813, -0.03377214074134827, -25862.642578125], [0.5946153998374939, -0.8039935827255249, 0.005186287220567465, 42408.42578125], [-0.030235443264245987, -0.0159145575016737, 0.9994160532951355, -206.28564453125], [0.0, 0.0, 0.0, 1.0]], [[-0.8028480410575867, -0.595292329788208, -0.032587680965662, -25862.76953125], [0.5955003499984741, -0.8033453226089478, 0.00395938940346241, 42409.203125], [-0.028536155819892883, -0.01622718572616577, 0.9994609951972961, -206.28634643554688], [0.0, 0.0, 0.0, 1.0]], [[-0.8026037812232971, -0.5957015156745911, -0.03109407052397728, -25862.89453125], [0.5959389805793762, -0.803027331829071, 0.0019863860215991735, 42409.98046875], [-0.026152679696679115, -0.016935886815190315, 0.9995144605636597, -206.28590393066406], [0.0, 0.0, 0.0, 1.0]], [[-0.8026668429374695, -0.5959477424621582, -0.023923132568597794, -25863.01171875], [0.5960646867752075, -0.8029320240020752, 0.0026819901540875435, 42410.76171875], [-0.020806971937417984, -0.012106988579034805, 0.9997101426124573, -206.2852325439453], [0.0, 0.0, 0.0, 1.0]], [[-0.8026705384254456, -0.5960509181022644, -0.02105267532169819, -25863.12890625], [0.5961194634437561, -0.8028879761695862, 
0.0035429452545940876, 42411.53515625], [-0.019014714285731316, -0.00970609113574028, 0.9997720718383789, -206.283447265625], [0.0, 0.0, 0.0, 1.0]], [[-0.8023738265037537, -0.5965299010276794, -0.018664373084902763, -25863.251953125], [0.5965406894683838, -0.8025633692741394, 0.005595676135271788, 42412.3125], [-0.01831732876598835, -0.006644233129918575, 0.9998100996017456, -206.28257751464844], [0.0, 0.0, 0.0, 1.0]], [[-0.8019875884056091, -0.597073495388031, -0.017863644286990166, -25863.37890625], [0.5970374345779419, -0.8021747469902039, 0.007874293252825737, 42413.0859375], [-0.01903129555284977, -0.0043501779437065125, 0.9998094439506531, -206.28359985351562], [0.0, 0.0, 0.0, 1.0]], [[-0.8018444776535034, -0.5973130464553833, -0.01620178483426571, -25863.498046875], [0.5972498655319214, -0.8020044565200806, 0.009022803045809269, 42413.85546875], [-0.018383340910077095, -0.0024416279047727585, 0.999828040599823, -206.2856903076172], [0.0, 0.0, 0.0, 1.0]], [[-0.8014696836471558, -0.5978579521179199, -0.01456878986209631, -25863.6171875], [0.5977785587310791, -0.8016016483306885, 0.009782150387763977, 42414.625], [-0.017526701092720032, -0.0008688123780302703, 0.9998459815979004, -206.28822326660156], [0.0, 0.0, 0.0, 1.0]], [[-0.800815999507904, -0.5987861156463623, -0.01220282819122076, -25863.736328125], [0.5986893177032471, -0.8009077906608582, 0.010856322012841702, 42415.38671875], [-0.01627395488321781, 0.0013882140628993511, 0.9998665452003479, -206.2909698486328], [0.0, 0.0, 0.0, 1.0]], [[-0.8003590703010559, -0.5994023084640503, -0.011924698948860168, -25863.85546875], [0.5992625951766968, -0.8004398345947266, 0.013432683423161507, 42416.15625], [-0.017596585676074028, 0.003604944795370102, 0.999838650226593, -206.29466247558594], [0.0, 0.0, 0.0, 1.0]], [[-0.8001371622085571, -0.5997116565704346, -0.011244145222008228, -25863.9765625], [0.5995419025421143, -0.8001973032951355, 0.015292035415768623, 42416.91796875], [-0.01816834695637226, 
0.005494390614330769, 0.999819815158844, -206.3001708984375], [0.0, 0.0, 0.0, 1.0]], [[-0.7998093962669373, -0.6001724600791931, -0.0098947798833251, -25864.09375], [0.6000086069107056, -0.7998450994491577, 0.015414198860526085, 42417.671875], [-0.017165469005703926, 0.006391468923538923, 0.9998322129249573, -206.30691528320312], [0.0, 0.0, 0.0, 1.0]], [[-0.7994646430015564, -0.6006323099136353, -0.009855980984866619, -25864.212890625], [0.6004717946052551, -0.7995008230209351, 0.01523052342236042, 42418.4296875], [-0.017027810215950012, 0.006258025765419006, 0.9998353719711304, -206.3126220703125], [0.0, 0.0, 0.0, 1.0]], [[-0.79917311668396, -0.6010134816169739, -0.010248198173940182, -25864.33203125], [0.6008903384208679, -0.7992283701896667, 0.01284324936568737, 42419.1796875], [-0.015909617766737938, 0.004105938132852316, 0.9998649954795837, -206.31822204589844], [0.0, 0.0, 0.0, 1.0]], [[-0.7989959716796875, -0.6012422442436218, -0.010639351792633533, -25864.451171875], [0.6011458039283752, -0.7990631461143494, 0.011037357151508331, 42419.92578125], [-0.01513763889670372, 0.002423003315925598, 0.9998824596405029, -206.32168579101562], [0.0, 0.0, 0.0, 1.0]], [[-0.7988084554672241, -0.6014720797538757, -0.011676262132823467, -25864.568359375], [0.6013457179069519, -0.7988876700401306, 0.012720784172415733, 42420.67578125], [-0.016979217529296875, 0.0031400001607835293, 0.999850869178772, -206.3245086669922], [0.0, 0.0, 0.0, 1.0]], [[-0.7985709309577942, -0.6017799377441406, -0.012056921608746052, -25864.685546875], [0.6016080975532532, -0.7986471056938171, 0.015187560580670834, 42421.41796875], [-0.0187687948346138, 0.004874804988503456, 0.9998119473457336, -206.3292694091797], [0.0, 0.0, 0.0, 1.0]], [[-0.7982232570648193, -0.6022401452064514, -0.012101199477910995, -25864.802734375], [0.6020495295524597, -0.798295259475708, 0.016156373545527458, 42422.16015625], [-0.019390346482396126, 0.005610872060060501, 0.9997962117195129, -206.33670043945312], [0.0, 0.0, 
0.0, 1.0]], [[-0.7979703545570374, -0.6025407314300537, -0.013709473423659801, -25864.91796875], [0.6023838520050049, -0.7980825901031494, 0.014066490344703197, 42422.88671875], [-0.01941692642867565, 0.0029662770684808493, 0.9998070597648621, -206.34213256835938], [0.0, 0.0, 0.0, 1.0]], [[-0.7976589202880859, -0.6029167175292969, -0.015223433263599873, -25865.033203125], [0.6027975082397461, -0.797804057598114, 0.011994018219411373, 42423.60546875], [-0.019376711919903755, 0.000390488188713789, 0.9998121857643127, -206.34657287597656], [0.0, 0.0, 0.0, 1.0]], [[-0.7975400686264038, -0.603072464466095, -0.015275721438229084, -25865.146484375], [0.6028819680213928, -0.7976802587509155, 0.015480411238968372, 42424.328125], [-0.0215209499001503, 0.003136791754513979, 0.9997634887695312, -206.3517303466797], [0.0, 0.0, 0.0, 1.0]], [[-0.7976417541503906, -0.6029281616210938, -0.015658603981137276, -25865.255859375], [0.6025733947753906, -0.7977516651153564, 0.022305021062493324, 42425.046875], [-0.025940006598830223, 0.008355958387255669, 0.9996285438537598, -206.36000061035156], [0.0, 0.0, 0.0, 1.0]], [[-0.7973857522010803, -0.6032366752624512, -0.016775228083133698, -25865.365234375], [0.6027748584747314, -0.7974933981895447, 0.02582181617617607, 42425.75390625], [-0.028954800218343735, 0.010478264652192593, 0.9995257258415222, -206.37261962890625], [0.0, 0.0, 0.0, 1.0]], [[-0.7965755462646484, -0.6043067574501038, -0.016755076125264168, -25865.474609375], [0.6038419604301453, -0.7966819405555725, 0.025939032435417175, 42426.44140625], [-0.02902360074222088, 0.010544981807470322, 0.9995230436325073, -206.38677978515625], [0.0, 0.0, 0.0, 1.0]], [[-0.795782744884491, -0.6052758097648621, -0.019259385764598846, -25865.580078125], [0.6049156785011292, -0.7959965467453003, 0.021599138155579567, 42427.11328125], [-0.028403840959072113, 0.005537918768823147, 0.9995811581611633, -206.39810180664062], [0.0, 0.0, 0.0, 1.0]], [[-0.7948502898216248, -0.6065758466720581, 
-0.01669476553797722, -25865.677734375], [0.6064324975013733, -0.7950255274772644, 0.013191276229918003, 42427.76953125], [-0.021274274215102196, 0.00036084221210330725, 0.9997735619544983, -206.40573120117188], [0.0, 0.0, 0.0, 1.0]], [[-0.7943856120109558, -0.6071825623512268, -0.01675780676305294, -25865.7734375], [0.6071444749832153, -0.7945526838302612, 0.007862379774451256, 42428.421875], [-0.01808885857462883, -0.003928645979613066, 0.9998286366462708, -206.40890502929688], [0.0, 0.0, 0.0, 1.0]], [[-0.7943391799926758, -0.6072620153427124, -0.016065068542957306, -25865.8671875], [0.6072227954864502, -0.794493556022644, 0.007773612160235643, 42429.0703125], [-0.017484212294220924, -0.0035801902413368225, 0.9998407363891602, -206.40956115722656], [0.0, 0.0, 0.0, 1.0]], [[-0.7941069602966309, -0.607567310333252, -0.016003860160708427, -25865.966796875], [0.6074999570846558, -0.7942649126052856, 0.009339150041341782, 42429.71484375], [-0.018385466188192368, -0.002306059468537569, 0.9998282790184021, -206.4108123779297], [0.0, 0.0, 0.0, 1.0]], [[-0.7938030958175659, -0.6079460978507996, -0.01667841523885727, -25866.0625], [0.6078287959098816, -0.7939780354499817, 0.011959318071603775, 42430.35546875], [-0.020512916147708893, -0.0006442765006795526, 0.9997893571853638, -206.4128875732422], [0.0, 0.0, 0.0, 1.0]], [[-0.7940943241119385, -0.6075658202171326, -0.016674160957336426, -25866.158203125], [0.6074365377426147, -0.7942693829536438, 0.012534297071397305, 42430.984375], [-0.020859185606241226, -0.0001750797382555902, 0.999782383441925, -206.416748046875], [0.0, 0.0, 0.0, 1.0]], [[-0.7945948243141174, -0.6068724393844604, -0.018021611496806145, -25866.251953125], [0.60676109790802, -0.7947976589202881, 0.011737729422748089, 42431.60546875], [-0.02144683711230755, -0.0016080713830888271, 0.9997686743736267, -206.42002868652344], [0.0, 0.0, 0.0, 1.0]], [[-0.7949062585830688, -0.6064982414245605, -0.01685202307999134, -25866.345703125], [0.6063737869262695, 
-0.7950847148895264, 0.012293786741793156, 42432.2109375], [-0.020854944363236427, -0.0004462165234144777, 0.999782383441925, -206.42347717285156], [0.0, 0.0, 0.0, 1.0]], [[-0.7962852120399475, -0.6046634316444397, -0.017659423872828484, -25866.4375], [0.6045258045196533, -0.7964809536933899, 0.012911544181406498, 42432.80859375], [-0.02187253348529339, -0.00039430538890883327, 0.999760627746582, -206.4276885986328], [0.0, 0.0, 0.0, 1.0]], [[-0.799447774887085, -0.6006292104721069, -0.011300847865641117, -25866.52734375], [0.6005144715309143, -0.7995225191116333, 0.01209058053791523, 42433.39453125], [-0.0162972379475832, 0.0028794670943170786, 0.9998630285263062, -206.4314727783203], [0.0, 0.0, 0.0, 1.0]], [[-0.8051766157150269, -0.5930073261260986, -0.0057252151891589165, -25866.6171875], [0.5929228663444519, -0.8051746487617493, 0.01167401485145092, 42433.96875], [-0.011532575823366642, 0.0060050333850085735, 0.9999154210090637, -206.43508911132812], [0.0, 0.0, 0.0, 1.0]], [[-0.8133738040924072, -0.5817385315895081, -0.0018309488659724593, -25866.724609375], [0.5816858410835266, -0.8133355975151062, 0.01126012671738863, 42434.53125], [-0.0080396244302392, 0.008093655109405518, 0.9999348521232605, -206.43719482421875], [0.0, 0.0, 0.0, 1.0]], [[-0.8237473368644714, -0.5669527053833008, 0.002234992804005742, -25866.84765625], [0.5669425129890442, -0.8236914873123169, 0.01042015291750431, 42435.078125], [-0.004066789522767067, 0.00985068641602993, 0.999943196773529, -206.4384765625], [0.0, 0.0, 0.0, 1.0]], [[-0.8357818126678467, -0.5490601658821106, 0.0013058418408036232, -25866.98828125], [0.5490534901618958, -0.8357539176940918, 0.007460001856088638, 42435.60546875], [-0.003004626603797078, 0.006951911374926567, 0.9999712705612183, -206.4374237060547], [0.0, 0.0, 0.0, 1.0]], [[-0.8489617109298706, -0.5284363627433777, 0.004353304393589497, -25867.142578125], [0.528453528881073, -0.8489461541175842, 0.005231703165918589, 42436.125], [0.0009310990571975708, 
0.0067420341074466705, 0.9999768137931824, -206.4358367919922], [0.0, 0.0, 0.0, 1.0]], [[-0.8634685277938843, -0.5043649673461914, 0.006163499318063259, -25867.310546875], [0.5043867826461792, -0.8634738326072693, 0.00261412444524467, 42436.6328125], [0.00400354852899909, 0.0053660026751458645, 0.9999775290489197, -206.4320831298828], [0.0, 0.0, 0.0, 1.0]], [[-0.8784428238868713, -0.47779378294944763, 0.007161923684179783, -25867.49609375], [0.47782671451568604, -0.8784457445144653, 0.0038416164461523294, 42437.1328125], [0.004455863498151302, 0.006796799600124359, 0.9999669194221497, -206.427978515625], [0.0, 0.0, 0.0, 1.0]], [[-0.8939700126647949, -0.4479832053184509, 0.011341637000441551, -25867.69921875], [0.4480769634246826, -0.8939616084098816, 0.0077205440029501915, 42437.625], [0.006680314429104328, 0.011983862146735191, 0.9999058842658997, -206.42369079589844], [0.0, 0.0, 0.0, 1.0]], [[-0.909781277179718, -0.41495659947395325, 0.010445913299918175, -25867.921875], [0.415056049823761, -0.9097356200218201, 0.01046730112284422, 42438.10546875], [0.005159544758498669, 0.013858593069016933, 0.9998906254768372, -206.42123413085938], [0.0, 0.0, 0.0, 1.0]], [[-0.9248123168945312, -0.380327045917511, 0.00856747105717659, -25868.16796875], [0.38040655851364136, -0.9247495532035828, 0.011361793614923954, 42438.56640625], [0.003601567354053259, 0.013766649179160595, 0.9998987317085266, -206.41885375976562], [0.0, 0.0, 0.0, 1.0]], [[-0.9388878345489502, -0.3441850543022156, 0.005125285126268864, -25868.435546875], [0.344222754240036, -0.938809335231781, 0.01215677335858345, 42439.01171875], [0.0006274865008890629, 0.013178085908293724, 0.9999129772186279, -206.41656494140625], [0.0, 0.0, 0.0, 1.0]], [[-0.9519436359405518, -0.3062704801559448, -0.0012826855527237058, -25868.724609375], [0.30623286962509155, -0.9518779516220093, 0.012241404503583908, 42439.44140625], [-0.004970142152160406, 0.011260327883064747, 0.9999242424964905, -206.4158935546875], [0.0, 0.0, 0.0, 
1.0]], [[-0.9635809063911438, -0.26741576194763184, -0.0007683099247515202, -25869.021484375], [0.26739078760147095, -0.9635229706764221, 0.01121546421200037, 42439.8515625], [-0.0037394752725958824, 0.010601568967103958, 0.9999367594718933, -206.41586303710938], [0.0, 0.0, 0.0, 1.0]], [[-0.9739406704902649, -0.2267569899559021, 0.004568332340568304, -25869.3203125], [0.22679592669010162, -0.9738717675209045, 0.011719761416316032, 42440.25], [0.0017914328491315246, 0.01245043147355318, 0.9999208450317383, -206.41554260253906], [0.0, 0.0, 0.0, 1.0]], [[-0.9829973578453064, -0.18344524502754211, 0.007996334694325924, -25869.63671875], [0.1835394650697708, -0.982922375202179, 0.013299047015607357, 42440.6328125], [0.005420129280537367, 0.01454057078808546, 0.9998795986175537, -206.4146270751953], [0.0, 0.0, 0.0, 1.0]], [[-0.9895671606063843, -0.14393538236618042, 0.0062869940884411335, -25869.947265625], [0.14401590824127197, -0.989459753036499, 0.015127502381801605, 42440.9609375], [0.004043343476951122, 0.015875106677412987, 0.9998657703399658, -206.4124755859375], [0.0, 0.0, 0.0, 1.0]], [[-0.9950008988380432, -0.0998542383313179, 0.0015279481885954738, -25870.310546875], [0.09986590594053268, -0.9948688745498657, 0.016213761642575264, 42441.30078125], [-9.89048057817854e-05, 0.01628529652953148, 0.9998673796653748, -206.41148376464844], [0.0, 0.0, 0.0, 1.0]], [[-0.9985077977180481, -0.05415761098265648, -0.007010878995060921, -25870.693359375], [0.05406364053487778, -0.9984532594680786, 0.012971934862434864, 42441.62109375], [-0.007702564354985952, 0.01257354486733675, 0.9998912811279297, -206.41127014160156], [0.0, 0.0, 0.0, 1.0]], [[-0.9998932480812073, -0.006474431604146957, -0.013100770302116871, -25871.0859375], [0.006326799746602774, -0.9999163150787354, 0.011283891275525093, 42441.91796875], [-0.01317273173481226, 0.011199803091585636, 0.9998504519462585, -206.41311645507812], [0.0, 0.0, 0.0, 1.0]], [[-0.9990044236183167, 0.04258395731449127, 
-0.013295157812535763, -25871.48046875], [-0.04275532066822052, -0.9990024566650391, 0.012888303026556969, 42442.1953125], [-0.012733060866594315, 0.013443912379443645, 0.9998285174369812, -206.41685485839844], [0.0, 0.0, 0.0, 1.0]], [[-0.9957091808319092, 0.09180669486522675, -0.011604703031480312, -25871.880859375], [-0.09201198816299438, -0.9955841302871704, 0.018602294847369194, 42442.46875], [-0.009845643304288387, 0.01959024742245674, 0.9997595548629761, -206.42274475097656], [0.0, 0.0, 0.0, 1.0]], [[-0.9900857210159302, 0.14020438492298126, -0.008540586568415165, -25872.2890625], [-0.1403978168964386, -0.9896562099456787, 0.029478730633854866, 42442.72265625], [-0.004319198429584503, 0.03038555383682251, 0.9995288848876953, -206.4285888671875], [0.0, 0.0, 0.0, 1.0]], [[-0.9818815588951111, 0.18931834399700165, -0.008185825310647488, -25872.708984375], [-0.18948934972286224, -0.9812734127044678, 0.034587472677230835, 42442.94921875], [-0.001484491629526019, 0.035511936992406845, 0.9993681311607361, -206.43576049804688], [0.0, 0.0, 0.0, 1.0]], [[-0.9711422920227051, 0.23832164704799652, -0.009245647117495537, -25873.13671875], [-0.23850084841251373, -0.9704087376594543, 0.03773918375372887, 42443.140625], [2.2007683583069593e-05, 0.03885521739721298, 0.9992448091506958, -206.4416046142578], [0.0, 0.0, 0.0, 1.0]], [[-0.9577749967575073, 0.2871057689189911, -0.015402796678245068, -25873.578125], [-0.28745606541633606, -0.9573090672492981, 0.030470779165625572, 42443.2890625], [-0.005996901076287031, 0.033611781895160675, 0.9994169473648071, -206.44683837890625], [0.0, 0.0, 0.0, 1.0]], [[-0.9424341917037964, 0.3339983820915222, -0.01621510647237301, -25874.015625], [-0.3343210220336914, -0.942123532295227, 0.02515253610908985, 42443.40625], [-0.006875727791339159, 0.02912566065788269, 0.9995520710945129, -206.45216369628906], [0.0, 0.0, 0.0, 1.0]], [[-0.9246695637702942, 0.38035547733306885, -0.017773155122995377, -25874.44921875], [-0.3806402385234833, 
-0.9245665669441223, 0.017020106315612793, 42443.50390625], [-0.009958772920072079, 0.02250315435230732, 0.9996971487998962, -206.45584106445312], [0.0, 0.0, 0.0, 1.0]], [[-0.9048221707344055, 0.42546898126602173, -0.016522206366062164, -25874.873046875], [-0.42576441168785095, -0.9045096039772034, 0.02422896772623062, 42443.60546875], [-0.00463581969961524, 0.02895747683942318, 0.9995698928833008, -206.4624786376953], [0.0, 0.0, 0.0, 1.0]], [[-0.8858323097229004, 0.46365106105804443, -0.01813417114317417, -25875.2578125], [-0.46399885416030884, -0.8853520154953003, 0.0292721688747406, 42443.6796875], [-0.0024830547627061605, 0.03434446454048157, 0.9994069337844849, -206.46766662597656], [0.0, 0.0, 0.0, 1.0]], [[-0.8642241358757019, 0.5027421712875366, -0.01915832981467247, -25875.6796875], [-0.5031067728996277, -0.863559365272522, 0.033893607556819916, 42443.73828125], [0.0004953904426656663, 0.03893036022782326, 0.999241828918457, -206.47315979003906], [0.0, 0.0, 0.0, 1.0]], [[-0.8415305614471436, 0.5399209260940552, -0.0176607184112072, -25876.087890625], [-0.5401941537857056, -0.8408092856407166, 0.03507168963551521, 42443.7734375], [0.004086642991751432, 0.03905411809682846, 0.9992287158966064, -206.4783172607422], [0.0, 0.0, 0.0, 1.0]], [[-0.8172342777252197, 0.5760942697525024, -0.01560660358518362, -25876.48828125], [-0.5762913823127747, -0.8167240619659424, 0.029154645279049873, 42443.7734375], [0.004049533978104591, 0.03282012790441513, 0.999453067779541, -206.48081970214844], [0.0, 0.0, 0.0, 1.0]], [[-0.7920323610305786, 0.6102986335754395, -0.014840645715594292, -25876.87890625], [-0.6104790568351746, -0.7917884588241577, 0.019656360149383545, 42443.74609375], [0.000245597620960325, 0.024628378450870514, 0.9996966123580933, -206.48057556152344], [0.0, 0.0, 0.0, 1.0]], [[-0.7675952315330505, 0.6407919526100159, -0.013540561310946941, -25877.26171875], [-0.6409338712692261, -0.7674611210823059, 0.014392090030014515, 42443.7109375], [-0.001169519149698317, 
0.019725903868675232, 0.9998047351837158, -206.4811553955078], [0.0, 0.0, 0.0, 1.0]], [[-0.7448252439498901, 0.6672097444534302, -0.008157452568411827, -25877.6328125], [-0.667253315448761, -0.7448156476020813, 0.004758721683174372, 42443.6640625], [-0.0029007333796471357, 0.008987504057586193, 0.9999553561210632, -206.48031616210938], [0.0, 0.0, 0.0, 1.0]], [[-0.7212890386581421, 0.6925878524780273, -0.008015234023332596, -25877.998046875], [-0.6926340460777283, -0.7212498188018799, 0.007539995480328798, 42443.62890625], [-0.0005588752101175487, 0.010990139096975327, 0.9999393820762634, -206.4796905517578], [0.0, 0.0, 0.0, 1.0]], [[-0.6994950175285339, 0.7145536541938782, -0.010953356511890888, -25878.361328125], [-0.7146352529525757, -0.6994495391845703, 0.0081760473549366, 42443.58984375], [-0.001819096622057259, 0.013546758331358433, 0.9999065399169922, -206.47964477539062], [0.0, 0.0, 0.0, 1.0]], [[-0.6826627850532532, 0.7306101322174072, -0.013432342559099197, -25878.712890625], [-0.7307332158088684, -0.682565450668335, 0.011548320762813091, 42443.54296875], [-0.0007311335066333413, 0.017699066549539566, 0.9998430609703064, -206.48060607910156], [0.0, 0.0, 0.0, 1.0]], [[-0.6673106551170349, 0.7446855306625366, -0.011827187612652779, -25879.0546875], [-0.744779109954834, -0.667241096496582, 0.009666427969932556, 42443.48828125], [-0.0006931355455890298, 0.01525915414094925, 0.9998832941055298, -206.48110961914062], [0.0, 0.0, 0.0, 1.0]], [[-0.6521838903427124, 0.7580154538154602, -0.00829676166176796, -25879.3828125], [-0.7580581307411194, -0.6521716117858887, 0.0044698468409478664, 42443.4296875], [-0.002022700384259224, 0.009204589761793613, 0.9999555945396423, -206.47955322265625], [0.0, 0.0, 0.0, 1.0]], [[-0.63990318775177, 0.7684372067451477, -0.005321049597114325, -25879.69921875], [-0.7684512734413147, -0.6399073004722595, 0.001094255829229951, 42443.36328125], [-0.0025641105603426695, 0.004789185710251331, 0.9999852180480957, -206.4788055419922], [0.0, 
0.0, 0.0, 1.0]], [[-0.630811870098114, 0.7759320735931396, -0.0024187806993722916, -25880.009765625], [-0.7759329080581665, -0.6308149099349976, -0.0007458329200744629, 42443.30859375], [-0.00210451683960855, 0.001406331779435277, 0.9999967217445374, -206.47698974609375], [0.0, 0.0, 0.0, 1.0]], [[-0.6232641339302063, 0.782008707523346, -0.0020707121584564447, -25880.31640625], [-0.7820107936859131, -0.6232648491859436, 0.0003379464033059776, 42443.2578125], [-0.0010263248113915324, 0.001829949556849897, 0.9999977946281433, -206.4741973876953], [0.0, 0.0, 0.0, 1.0]], [[-0.6158776879310608, 0.7878344058990479, -0.0034075318835675716, -25880.61328125], [-0.7878416776657104, -0.6158736348152161, 0.0022631753236055374, 42443.20703125], [-0.0003156012389808893, 0.004078434314578772, 0.9999915957450867, -206.47259521484375], [0.0, 0.0, 0.0, 1.0]], [[-0.6095835566520691, 0.7927122712135315, -0.0038906617555767298, -25880.896484375], [-0.7927218079566956, -0.609574019908905, 0.003427447285503149, 42443.16015625], [0.0003453338285908103, 0.005173528101295233, 0.9999865293502808, -206.470947265625], [0.0, 0.0, 0.0, 1.0]], [[-0.605479896068573, 0.795857310295105, -0.002307977993041277, -25881.169921875], [-0.7958606481552124, -0.6054771542549133, 0.0018077623099088669, 42443.10546875], [4.129315493628383e-05, 0.0029313918203115463, 0.9999956488609314, -206.46908569335938], [0.0, 0.0, 0.0, 1.0]], [[-0.6029039621353149, 0.7978135347366333, -0.0006479721050709486, -25881.427734375], [-0.797812283039093, -0.6029042601585388, -0.0014339747140184045, 42443.05078125], [-0.0015347091248258948, -0.00034758870606310666, 0.9999987483024597, -206.4666290283203], [0.0, 0.0, 0.0, 1.0]], [[-0.6008584499359131, 0.7993549108505249, -0.0009140354814007878, -25881.673828125], [-0.7993413209915161, -0.600854218006134, -0.0052633569575846195, 42443.0], [-0.004756492096930742, -0.002431905595585704, 0.9999857544898987, -206.46421813964844], [0.0, 0.0, 0.0, 1.0]], [[-0.5998475551605225, 
0.8001077771186829, -0.0032250143121927977, -25881.916015625], [-0.8000901937484741, -0.599855899810791, -0.005341836251318455, 42442.96484375], [-0.00620858883485198, -0.0006239853100851178, 0.9999805092811584, -206.4626007080078], [0.0, 0.0, 0.0, 1.0]], [[-0.600121021270752, 0.7998787760734558, -0.006978036370128393, -25882.150390625], [-0.799892246723175, -0.6001423001289368, -0.0012763612903654575, 42442.93359375], [-0.005208748858422041, 0.004815705120563507, 0.99997478723526, -206.46278381347656], [0.0, 0.0, 0.0, 1.0]], [[-0.6005731225013733, 0.7995299100875854, -0.00799594260752201, -25882.369140625], [-0.799561619758606, -0.6005822420120239, 0.0014786857645958662, 42442.91015625], [-0.0036199663300067186, 0.007281308528035879, 0.9999669194221497, -206.46243286132812], [0.0, 0.0, 0.0, 1.0]], [[-0.6002744436264038, 0.799774169921875, -0.005648926366120577, -25882.568359375], [-0.7997907996177673, -0.6002771854400635, 0.0013622501865029335, 42442.87890625], [-0.0023014280013740063, 0.005335683934390545, 0.9999830722808838, -206.4620361328125], [0.0, 0.0, 0.0, 1.0]], [[-0.59907066822052, 0.800692081451416, -0.002553407335653901, -25882.755859375], [-0.8006892800331116, -0.5990756750106812, -0.0022285140585154295, 42442.8359375], [-0.0033140380401164293, 0.0007094492320902646, 0.9999942183494568, -206.46066284179688], [0.0, 0.0, 0.0, 1.0]], [[-0.5989534258842468, 0.8007831573486328, -0.0010751963127404451, -25882.935546875], [-0.8007540702819824, -0.598942220211029, -0.007817733101546764, 42442.79296875], [-0.006904288195073605, -0.0038214896339923143, 0.9999688267707825, -206.45928955078125], [0.0, 0.0, 0.0, 1.0]], [[-0.5999705791473389, 0.8000216484069824, -0.0008274700958281755, -25883.109375], [-0.799965500831604, -0.5999401211738586, -0.011269955895841122, 42442.76171875], [-0.009512640535831451, -0.006099694408476353, 0.9999361634254456, -206.4580535888672], [0.0, 0.0, 0.0, 1.0]], [[-0.6001214385032654, 0.7999082207679749, -0.0011136796092614532, 
-25883.275390625], [-0.7998498678207397, -0.600093424320221, -0.011314884759485722, 42442.734375], [-0.0097191808745265, -0.005899528507143259, 0.9999353289604187, -206.45648193359375], [0.0, 0.0, 0.0, 1.0]], [[-0.5991130471229553, 0.8006628751754761, -0.0015955487033352256, -25883.4296875], [-0.8006139397621155, -0.5990964770317078, -0.010036446154117584, 42442.71484375], [-0.008991696871817112, -0.0047355471178889275, 0.9999483823776245, -206.4557647705078], [0.0, 0.0, 0.0, 1.0]], [[-0.5987902283668518, 0.8009021878242493, -0.00246324366889894, -25883.578125], [-0.8008652329444885, -0.5987879633903503, -0.008235021494328976, 42442.69140625], [-0.008070405572652817, -0.002958324272185564, 0.9999629855155945, -206.4554443359375], [0.0, 0.0, 0.0, 1.0]], [[-0.5994715690612793, 0.8003882765769958, -0.003541950834915042, -25883.716796875], [-0.8003488779067993, -0.5994783043861389, -0.008209680207073689, 42442.66796875], [-0.008694253861904144, -0.002086672931909561, 0.9999600052833557, -206.45498657226562], [0.0, 0.0, 0.0, 1.0]], [[-0.5996160507202148, 0.8002744913101196, -0.004619303625077009, -25883.8515625], [-0.8002279996871948, -0.5996317863464355, -0.008768708445131779, 42442.6484375], [-0.00978725403547287, -0.0015613622963428497, 0.9999508857727051, -206.4537353515625], [0.0, 0.0, 0.0, 1.0]], [[-0.5989171266555786, 0.8007925152778625, -0.00544852577149868, -25883.978515625], [-0.800742506980896, -0.5989409685134888, -0.009003845043480396, 42442.625], [-0.010473556816577911, -0.0010296894470229745, 0.9999446272850037, -206.4540252685547], [0.0, 0.0, 0.0, 1.0]], [[-0.5986092686653137, 0.8010215759277344, -0.005607432220131159, -25884.09375], [-0.8009626269340515, -0.5986338257789612, -0.00980877224355936, 42442.60546875], [-0.011213837191462517, -0.0013802767498418689, 0.9999361634254456, -206.45352172851562], [0.0, 0.0, 0.0, 1.0]], [[-0.5990673303604126, 0.8006772994995117, -0.005866772495210171, -25884.203125], [-0.8006153106689453, -0.599094569683075, 
-0.010040312074124813, 42442.58984375], [-0.011553800664842129, -0.0013177944347262383, 0.9999324083328247, -206.45297241210938], [0.0, 0.0, 0.0, 1.0]], [[-0.599065363407135, 0.8006773591041565, -0.006040951702743769, -25884.306640625], [-0.8006138205528259, -0.5990945100784302, -0.010165269486606121, 42442.57421875], [-0.011758201755583286, -0.001253190916031599, 0.9999300837516785, -206.45327758789062], [0.0, 0.0, 0.0, 1.0]], [[-0.5986827611923218, 0.8009645938873291, -0.005892341490834951, -25884.3984375], [-0.8009047508239746, -0.5987105965614319, -0.009853813797235489, 42442.55859375], [-0.011420362628996372, -0.0011801040964201093, 0.9999340772628784, -206.4529571533203], [0.0, 0.0, 0.0, 1.0]], [[-0.5986096858978271, 0.801021158695221, -0.005626084748655558, -25884.48828125], [-0.8009566068649292, -0.5986338257789612, -0.010297241620719433, 42442.5390625], [-0.011616271920502186, -0.0016577779315412045, 0.9999310970306396, -206.4527130126953], [0.0, 0.0, 0.0, 1.0]], [[-0.5988394021987915, 0.8008486032485962, -0.005742499139159918, -25884.5703125], [-0.8007779121398926, -0.598863959312439, -0.010799258016049862, 42442.52734375], [-0.012087545357644558, -0.0018685547402128577, 0.9999251365661621, -206.4530487060547], [0.0, 0.0, 0.0, 1.0]], [[-0.5986068844795227, 0.8010210990905762, -0.005913496948778629, -25884.64453125], [-0.8009486198425293, -0.598633348941803, -0.010925966314971447, 42442.515625], [-0.012291946448385715, -0.0018039512215182185, 0.9999228119850159, -206.45233154296875], [0.0, 0.0, 0.0, 1.0]], [[-0.5983021259307861, 0.8012502193450928, -0.005720013286918402, -25884.71484375], [-0.8011783957481384, -0.5983262658119202, -0.010897756554186344, 42442.50390625], [-0.012154263444244862, -0.0019373996183276176, 0.9999242424964905, -206.45205688476562], [0.0, 0.0, 0.0, 1.0]], [[-0.5983059406280518, 0.8012496829032898, -0.005371894221752882, -25884.77734375], [-0.8011816143989563, -0.5983265042304993, -0.010647510178387165, 42442.4921875], 
[-0.011745460331439972, -0.0020666061900556087, 0.9999288320541382, -206.4524688720703], [0.0, 0.0, 0.0, 1.0]], [[-0.5982332825660706, 0.8013066649436951, -0.004958619829267263, -25884.833984375], [-0.8012446165084839, -0.5982500314712524, -0.010193204507231712, 42442.484375], [-0.011134376749396324, -0.0021248464472591877, 0.9999356865882874, -206.4527130126953], [0.0, 0.0, 0.0, 1.0]], [[-0.5976999998092651, 0.8017087578773499, -0.0042197187431156635, -25884.888671875], [-0.8016633987426758, -0.5977120399475098, -0.00871850922703743, 42442.48046875], [-0.00951188150793314, -0.0018282589735463262, 0.999953031539917, -206.45216369628906], [0.0, 0.0, 0.0, 1.0]], [[-0.5971677303314209, 0.8021113276481628, -0.0028664872515946627, -25884.947265625], [-0.8020839691162109, -0.5971719026565552, -0.0068513029254972935, 42442.4765625], [-0.007207292132079601, -0.0017922137631103396, 0.9999723434448242, -206.4525909423828], [0.0, 0.0, 0.0, 1.0]], [[-0.5973982810974121, 0.8019421696662903, -0.0020269486121833324, -25885.0078125], [-0.8019253015518188, -0.5973995327949524, -0.005451427772641182, 42442.46875], [-0.00558262737467885, -0.0016312124207615852, 0.999983012676239, -206.45191955566406], [0.0, 0.0, 0.0, 1.0]], [[-0.5975496768951416, 0.8018301725387573, -0.0016927402466535568, -25885.06640625], [-0.8018185496330261, -0.5975505113601685, -0.004522665869444609, 42442.4609375], [-0.004637907724827528, -0.0013452464481815696, 0.999988317489624, -206.45213317871094], [0.0, 0.0, 0.0, 1.0]], [[-0.5973201990127563, 0.8020016551017761, -0.0014075349317863584, -25885.126953125], [-0.8019906282424927, -0.5973197817802429, -0.004477080889046192, 42442.44921875], [-0.004431373439729214, -0.0015454209642484784, 0.9999889731407166, -206.4517059326172], [0.0, 0.0, 0.0, 1.0]]], "2": [[[-0.22981105744838715, -0.973231852054596, 0.0025924004148691893, -25833.759765625], [0.97323077917099, -0.22980105876922607, 0.003656611079350114, 42363.9140625], [-0.002962993923574686, 
0.0033633331768214703, 0.999989926815033, -206.04884338378906], [0.0, 0.0, 0.0, 1.0]], [[-0.23323796689510345, -0.9724195003509521, -0.0005735348095186055, -25834.328125], [0.9723915457725525, -0.233235701918602, 0.00747107807546854, 42364.2734375], [-0.007398790679872036, 0.0011848390568047762, 0.9999719262123108, -206.05206298828125], [0.0, 0.0, 0.0, 1.0]], [[-0.23657836019992828, -0.9716112017631531, -0.0015221122885122895, -25834.896484375], [0.9715691804885864, -0.23658230900764465, 0.009065626189112663, 42364.625], [-0.009168368764221668, 0.000665893079712987, 0.9999577403068542, -206.05862426757812], [0.0, 0.0, 0.0, 1.0]], [[-0.23976001143455505, -0.9708310961723328, 0.0014366786926984787, -25835.4609375], [0.9707977175712585, -0.23973873257637024, 0.00878217350691557, 42364.9765625], [-0.008181579411029816, 0.0035003384109586477, 0.9999604225158691, -206.06373596191406], [0.0, 0.0, 0.0, 1.0]], [[-0.2430366575717926, -0.9700061678886414, 0.00459563871845603, -25836.025390625], [0.969998300075531, -0.24299974739551544, 0.007381443865597248, 42365.32421875], [-0.0060433074831962585, 0.006251723039895296, 0.9999622106552124, -206.06759643554688], [0.0, 0.0, 0.0, 1.0]], [[-0.24601680040359497, -0.9692502617835999, 0.005453433841466904, -25836.59375], [0.9692522287368774, -0.24597997963428497, 0.00663061672821641, 42365.671875], [-0.005085292272269726, 0.00691699655726552, 0.9999631643295288, -206.06886291503906], [0.0, 0.0, 0.0, 1.0]], [[-0.2473890632390976, -0.9689155220985413, 0.001144709880463779, -25837.1796875], [0.9688896536827087, -0.24737341701984406, 0.007693435996770859, 42366.0234375], [-0.0071711186319589615, 0.0030123696196824312, 0.9999697208404541, -206.0700225830078], [0.0, 0.0, 0.0, 1.0]], [[-0.24771596491336823, -0.9688230752944946, -0.0043125213123857975, -25837.763671875], [0.9687691926956177, -0.2477482408285141, 0.010351160541176796, 42366.37890625], [-0.011096863076090813, -0.0016136893536895514, 0.999937117099762, -206.07608032226562], 
[0.0, 0.0, 0.0, 1.0]], [[-0.24852897226810455, -0.9685915112495422, -0.00798496138304472, -25838.345703125], [0.9685446619987488, -0.24860581755638123, 0.010784178972244263, 42366.73046875], [-0.01243057195097208, -0.0050536105409264565, 0.9999099969863892, -206.0850067138672], [0.0, 0.0, 0.0, 1.0]], [[-0.24923095107078552, -0.9684213995933533, -0.006621548905968666, -25838.921875], [0.968410849571228, -0.24927325546741486, 0.006577478721737862, 42367.078125], [-0.00802034605294466, -0.004773067310452461, 0.9999564290046692, -206.09361267089844], [0.0, 0.0, 0.0, 1.0]], [[-0.2489759624004364, -0.9685094356536865, -0.0006453887326642871, -25839.494140625], [0.9684969186782837, -0.24897609651088715, 0.004961286671459675, 42367.421875], [-0.0049657393246889114, 0.0006101843900978565, 0.9999874830245972, -206.10081481933594], [0.0, 0.0, 0.0, 1.0]], [[-0.24917469918727875, -0.9684519171714783, 0.0035653365775942802, -25840.064453125], [0.9684534668922424, -0.24918362498283386, -0.002317094476893544, 42367.76953125], [0.0031324182637035847, 0.0028755010571330786, 0.9999909996986389, -206.1013641357422], [0.0, 0.0, 0.0, 1.0]], [[-0.2490989714860916, -0.9683988690376282, 0.012378242798149586, -25840.623046875], [0.9684779644012451, -0.24908407032489777, 0.002755056833848357, 42368.125], [0.0004152286273892969, 0.012674336321651936, 0.999919593334198, -206.10107421875], [0.0, 0.0, 0.0, 1.0]], [[-0.24854284524917603, -0.9685730338096619, 0.00962273869663477, -25841.1953125], [0.9686155319213867, -0.2484964281320572, 0.005776393227279186, 42368.484375], [-0.00320364348590374, 0.010756416246294975, 0.9999370574951172, -206.0980224609375], [0.0, 0.0, 0.0, 1.0]], [[-0.24701812863349915, -0.9690073132514954, -0.0026029692962765694, -25841.783203125], [0.9689918160438538, -0.24702924489974976, 0.005604417994618416, 42368.83203125], [-0.006073731929063797, -0.0011378631461411715, 0.9999808669090271, -206.09780883789062], [0.0, 0.0, 0.0, 1.0]], [[-0.24387811124324799, 
-0.9697688817977905, -0.008471702225506306, -25842.359375], [0.969793975353241, -0.2439090609550476, 0.0028218324296176434, 42369.171875], [-0.00480285007506609, -0.007527622859925032, 0.9999601244926453, -206.10362243652344], [0.0, 0.0, 0.0, 1.0]], [[-0.24078446626663208, -0.970495879650116, -0.012673032470047474, -25842.927734375], [0.9705784320831299, -0.24077022075653076, -0.002661374630406499, 42369.515625], [-0.00046843517338857055, -0.012940989807248116, 0.9999161958694458, -206.1090545654297], [0.0, 0.0, 0.0, 1.0]], [[-0.2353418916463852, -0.9718710780143738, -0.008982102386653423, -25843.4765625], [0.9719102382659912, -0.23535190522670746, 6.242725066840649e-05, 42369.875], [-0.002174626337364316, -0.008715106174349785, 0.9999596476554871, -206.11778259277344], [0.0, 0.0, 0.0, 1.0]], [[-0.22739821672439575, -0.9737910628318787, -0.004575642291456461, -25844.01953125], [0.9738004803657532, -0.22740334272384644, 0.0006170338019728661, 42370.23828125], [-0.0016413776902481914, -0.004315449856221676, 0.9999893307685852, -206.12481689453125], [0.0, 0.0, 0.0, 1.0]], [[-0.21854384243488312, -0.9758097529411316, -0.005811074748635292, -25844.564453125], [0.9758257269859314, -0.21852953732013702, -0.002996317110955715, 42370.59765625], [0.0016539444914087653, -0.006325422786176205, 0.9999786615371704, -206.1280517578125], [0.0, 0.0, 0.0, 1.0]], [[-0.21110908687114716, -0.9773858785629272, -0.012237399816513062, -25845.111328125], [0.9774594306945801, -0.21106061339378357, -0.00514457281678915, 42370.953125], [0.002445400459691882, -0.013047627173364162, 0.9999118447303772, -206.13082885742188], [0.0, 0.0, 0.0, 1.0]], [[-0.20349600911140442, -0.9789173603057861, -0.01761074922978878, -25845.65234375], [0.979067325592041, -0.20338669419288635, -0.007808867376297712, 42371.31640625], [0.0040624444372951984, -0.01883118227124214, 0.9998144507408142, -206.13784790039062], [0.0, 0.0, 0.0, 1.0]], [[-0.19518302381038666, -0.980633020401001, -0.016200032085180283, 
-25846.1796875], [0.9807655811309814, -0.19512954354286194, -0.004835725296288729, 42371.69140625], [0.0015809668693691492, -0.01683228462934494, 0.9998571276664734, -206.14662170410156], [0.0, 0.0, 0.0, 1.0]], [[-0.18592678010463715, -0.9824557304382324, -0.014554714784026146, -25846.697265625], [0.9825635552406311, -0.1859036237001419, -0.00294131925329566, 42372.078125], [0.00018394284415990114, -0.014847802929580212, 0.9998897314071655, -206.15621948242188], [0.0, 0.0, 0.0, 1.0]], [[-0.17490385472774506, -0.9844671487808228, -0.015267744660377502, -25847.216796875], [0.9845854043960571, -0.1748746931552887, -0.0032330024987459183, 42372.45703125], [0.000512843020260334, -0.015597864054143429, 0.9998782277107239, -206.1645965576172], [0.0, 0.0, 0.0, 1.0]], [[-0.16431677341461182, -0.9862302541732788, -0.018702656030654907, -25847.73046875], [0.9863949418067932, -0.16418905556201935, -0.008182011544704437, 42372.83203125], [0.004998577293008566, -0.019792648032307625, 0.9997916221618652, -206.17076110839844], [0.0, 0.0, 0.0, 1.0]], [[-0.15469473600387573, -0.9877458214759827, -0.020679423585534096, -25848.23828125], [0.9879480004310608, -0.1545460969209671, -0.008612305857241154, 42373.21875], [0.00531084556132555, -0.02176247350871563, 0.9997490644454956, -206.17941284179688], [0.0, 0.0, 0.0, 1.0]], [[-0.14317506551742554, -0.9895082116127014, -0.019347909837961197, -25848.73046875], [0.9896931648254395, -0.14308995008468628, -0.005723086651414633, 42373.62109375], [0.0028945505619049072, -0.019967898726463318, 0.999796450138092, -206.18931579589844], [0.0, 0.0, 0.0, 1.0]], [[-0.12847909331321716, -0.9916107654571533, -0.014181867241859436, -25849.208984375], [0.9917093515396118, -0.12843161821365356, -0.004213430918753147, 42374.02734375], [0.002356683136895299, -0.014605630189180374, 0.9998905658721924, -206.19851684570312], [0.0, 0.0, 0.0, 1.0]], [[-0.11253122240304947, -0.993630051612854, -0.005999310873448849, -25849.673828125], [0.9936205744743347, 
-0.11248103529214859, -0.008136043325066566, 42374.4375], [0.007409408688545227, -0.0068765971809625626, 0.9999489188194275, -206.2030792236328], [0.0, 0.0, 0.0, 1.0]], [[-0.0969591811299324, -0.9952848553657532, -0.002633605618029833, -25850.1328125], [0.9952335953712463, -0.09692583978176117, -0.010741228237748146, 42374.84375], [0.010435317642986774, -0.003662514267489314, 0.9999388456344604, -206.20352172851562], [0.0, 0.0, 0.0, 1.0]], [[-0.08208899199962616, -0.9966102838516235, -0.005410465877503157, -25850.595703125], [0.9965775609016418, -0.08203093707561493, -0.010201535187661648, 42375.265625], [0.009723130613565445, -0.006229382008314133, 0.9999333024024963, -206.20184326171875], [0.0, 0.0, 0.0, 1.0]], [[-0.06732701510190964, -0.9976704716682434, -0.010985597968101501, -25851.05859375], [0.9976970553398132, -0.06722991168498993, -0.008987522684037685, 42375.6953125], [0.008228025399148464, -0.01156540121883154, 0.9998992681503296, -206.20281982421875], [0.0, 0.0, 0.0, 1.0]], [[-0.051921725273132324, -0.9985753893852234, -0.012300876900553703, -25851.505859375], [0.9986344575881958, -0.05184568464756012, -0.006420124787837267, 42376.12890625], [0.005773231387138367, -0.012617423199117184, 0.9999037384986877, -206.20663452148438], [0.0, 0.0, 0.0, 1.0]], [[-0.036020368337631226, -0.9992817640304565, -0.01176854781806469, -25851.9375], [0.9993279576301575, -0.035936977714300156, -0.007226303685456514, 42376.56640625], [0.0067981877364218235, -0.012020932510495186, 0.9999046325683594, -206.2108917236328], [0.0, 0.0, 0.0, 1.0]], [[-0.01950545236468315, -0.9997719526290894, -0.008695654571056366, -25852.359375], [0.9997810125350952, -0.019438158720731735, -0.007757289335131645, 42377.00390625], [0.007586492225527763, -0.008845060132443905, 0.9999321103096008, -206.21383666992188], [0.0, 0.0, 0.0, 1.0]], [[-0.0025356600526720285, -0.9999632835388184, -0.00818340852856636, -25852.76953125], [0.999937891960144, -0.0024466579779982567, -0.010869608260691166, 
42377.4453125], [0.010849187150597572, -0.00821046344935894, 0.9999074339866638, -206.2145538330078], [0.0, 0.0, 0.0, 1.0]], [[0.015942931175231934, -0.99982088804245, -0.01019248552620411, -25853.1796875], [0.9997953772544861, 0.0160678643733263, -0.012290808372199535, 42377.89453125], [0.012452378869056702, -0.009994449093937874, 0.9998725056648254, -206.21453857421875], [0.0, 0.0, 0.0, 1.0]], [[0.034880973398685455, -0.999305248260498, -0.013125530444085598, -25853.578125], [0.9993159770965576, 0.03503666818141937, -0.01182879414409399, 42378.35546875], [0.012280451133847237, -0.012703953310847282, 0.9998438954353333, -206.2144317626953], [0.0, 0.0, 0.0, 1.0]], [[0.05370311811566353, -0.9984259605407715, -0.01616927795112133, -25853.9609375], [0.9984724521636963, 0.05390208214521408, -0.012136193923652172, 42378.8203125], [0.012988652102649212, -0.01549282856285572, 0.9997956156730652, -206.2154541015625], [0.0, 0.0, 0.0, 1.0]], [[0.07366877049207687, -0.997117280960083, -0.01816464029252529, -25854.3359375], [0.9971984624862671, 0.07388707250356674, -0.011656336486339569, 42379.2890625], [0.012964867055416107, -0.017255043610930443, 0.9997670650482178, -206.21824645996094], [0.0, 0.0, 0.0, 1.0]], [[0.0948885902762413, -0.9953258037567139, -0.017962995916604996, -25854.693359375], [0.9954233169555664, 0.09507250785827637, -0.009679235517978668, 42379.76953125], [0.011341781355440617, -0.016962336376309395, 0.9997918009757996, -206.2217559814453], [0.0, 0.0, 0.0, 1.0]], [[0.1168961226940155, -0.9930632710456848, -0.012672343291342258, -25855.025390625], [0.993088960647583, 0.11701460927724838, -0.009045693092048168, 42380.25], [0.010465795174241066, -0.011527357622981071, 0.9998787641525269, -206.2252960205078], [0.0, 0.0, 0.0, 1.0]], [[0.140852689743042, -0.9900208115577698, -0.004388025496155024, -25855.37109375], [0.9899585247039795, 0.14089445769786835, -0.011435039341449738, 42380.78515625], [0.011939175426959991, -0.0027333074249327183, 0.9999249577522278, 
-206.2269287109375], [0.0, 0.0, 0.0, 1.0]], [[0.16335125267505646, -0.9865666627883911, -0.001603908371180296, -25855.677734375], [0.9864395260810852, 0.1633559912443161, -0.015872079879045486, 42381.2734375], [0.01592087186872959, 0.001010564505122602, 0.9998727440834045, -206.2228546142578], [0.0, 0.0, 0.0, 1.0]], [[0.1866820603609085, -0.9824186563491821, -0.001858038129284978, -25855.98046875], [0.9822369813919067, 0.18668338656425476, -0.018967241048812866, 42381.765625], [0.018980635330080986, 0.001715809921734035, 0.9998183846473694, -206.21580505371094], [0.0, 0.0, 0.0, 1.0]], [[0.20875944197177887, -0.9779660105705261, -0.001356045831926167, -25856.267578125], [0.9778212308883667, 0.20875205099582672, -0.01697692833840847, 42382.265625], [0.01688593626022339, 0.0022181225940585136, 0.9998549222946167, -206.2086181640625], [0.0, 0.0, 0.0, 1.0]], [[0.2298896163702011, -0.9731661081314087, -0.009926751255989075, -25856.548828125], [0.9731383323669434, 0.22998858988285065, -0.01034771278500557, 42382.78125], [0.012353083118796349, -0.00728127034381032, 0.9998971819877625, -206.2029571533203], [0.0, 0.0, 0.0, 1.0]], [[0.2515615224838257, -0.9677554965019226, -0.012886816635727882, -25856.8203125], [0.9678378105163574, 0.2515738904476166, 0.0006774043431505561, 42383.30078125], [0.0025864257477223873, -0.012642757035791874, 0.9999167323112488, -206.20626831054688], [0.0, 0.0, 0.0, 1.0]], [[0.2729910612106323, -0.9619848132133484, -0.007815510034561157, -25857.0625], [0.9620152115821838, 0.2729678452014923, 0.003912659827619791, 42383.8203125], [-0.001630535931326449, -0.008586760610342026, 0.999961793422699, -206.2140350341797], [0.0, 0.0, 0.0, 1.0]], [[0.2940387725830078, -0.9557729959487915, -0.006263337098062038, -25857.29296875], [0.9557896852493286, 0.29404956102371216, -0.0008758823969401419, 42384.328125], [0.0026788765098899603, -0.005728889722377062, 0.9999799728393555, -206.21829223632812], [0.0, 0.0, 0.0, 1.0]], [[0.3152408003807068, 
-0.9490076899528503, -0.0027734062168747187, -25857.517578125], [0.9489956498146057, 0.31525111198425293, -0.004895756486803293, 42384.83984375], [0.005520429462194443, -0.0010886082891374826, 0.9999841451644897, -206.2195281982422], [0.0, 0.0, 0.0, 1.0]], [[0.33571475744247437, -0.9419472813606262, -0.0055591450072824955, -25857.734375], [0.9418904185295105, 0.33575639128685, -0.0104919308796525, 42385.35546875], [0.011749363504350185, -0.00171380874235183, 0.9999295473098755, -206.21714782714844], [0.0, 0.0, 0.0, 1.0]], [[0.35648202896118164, -0.9342697262763977, -0.007792851887643337, -25857.947265625], [0.934156596660614, 0.35656124353408813, -0.014676383696496487, 42385.8828125], [0.016490329056978226, -0.002047878224402666, 0.9998619556427002, -206.2122039794922], [0.0, 0.0, 0.0, 1.0]], [[0.3767022490501404, -0.9262816309928894, -0.009886455722153187, -25858.14453125], [0.926216721534729, 0.37680304050445557, -0.011913689784705639, 42386.421875], [0.014760678634047508, -0.004669087938964367, 0.9998801350593567, -206.20870971679688], [0.0, 0.0, 0.0, 1.0]], [[0.3961048424243927, -0.918099582195282, -0.013930986635386944, -25858.3359375], [0.9181226491928101, 0.3962271511554718, -0.007405291777104139, 42386.98046875], [0.012318631634116173, -0.009857081808149815, 0.9998754858970642, -206.20555114746094], [0.0, 0.0, 0.0, 1.0]], [[0.4160061180591583, -0.9092636108398438, -0.01335973758250475, -25858.513671875], [0.9093357920646667, 0.41606032848358154, -0.0014449912123382092, 42387.55078125], [0.006872335448861122, -0.011547363363206387, 0.9999096989631653, -206.2083282470703], [0.0, 0.0, 0.0, 1.0]], [[0.4355318248271942, -0.9000791311264038, -0.013023296371102333, -25858.6796875], [0.9001596570014954, 0.43555936217308044, 0.0007836347795091569, 42388.12890625], [0.004967086482793093, -0.012064343318343163, 0.9999148845672607, -206.21299743652344], [0.0, 0.0, 0.0, 1.0]], [[0.4542502760887146, -0.8907824754714966, -0.012777090072631836, -25858.837890625], 
[0.8908569812774658, 0.4542836546897888, 0.0003230705624446273, 42388.7109375], [0.005516637582331896, -0.011529314331710339, 0.9999182820320129, -206.2168426513672], [0.0, 0.0, 0.0, 1.0]], [[0.47125306725502014, -0.8819121718406677, -0.012308062985539436, -25858.98828125], [0.8819828033447266, 0.4712809920310974, 0.0007010933477431536, 42389.296875], [0.0051822541281580925, -0.01118589285761118, 0.9999240040779114, -206.2208709716797], [0.0, 0.0, 0.0, 1.0]], [[0.4856702387332916, -0.8740779161453247, -0.01059359684586525, -25859.130859375], [0.8741385340690613, 0.4856669008731842, 0.003053131280466914, 42389.90234375], [0.0024762852117419243, -0.010743086226284504, 0.9999392032623291, -206.22650146484375], [0.0, 0.0, 0.0, 1.0]], [[0.49771299958229065, -0.8672934770584106, -0.009155443869531155, -25859.267578125], [0.8673403859138489, 0.4976658523082733, 0.007021117024123669, 42390.51953125], [-0.0015330164460465312, -0.011435386724770069, 0.9999334216117859, -206.23275756835938], [0.0, 0.0, 0.0, 1.0]], [[0.5076515078544617, -0.8615068197250366, -0.009796353988349438, -25859.408203125], [0.8615620136260986, 0.5076060891151428, 0.006850783713161945, 42391.140625], [-0.0009293073089793324, -0.011917976662516594, 0.9999285340309143, -206.2405548095703], [0.0, 0.0, 0.0, 1.0]], [[0.5155931115150452, -0.8567294478416443, -0.013354434631764889, -25859.552734375], [0.8568122386932373, 0.5156277418136597, 0.000971662753727287, 42391.7578125], [0.006053464487195015, -0.011943223886191845, 0.9999103546142578, -206.24566650390625], [0.0, 0.0, 0.0, 1.0]], [[0.5236918330192566, -0.8517956733703613, -0.013820098713040352, -25859.69140625], [0.8518290519714355, 0.5237950682640076, -0.005099943373352289, 42392.38671875], [0.011583009734749794, -0.009101561270654202, 0.999891459941864, -206.2476348876953], [0.0, 0.0, 0.0, 1.0]], [[0.5324624180793762, -0.8463112115859985, -0.015527719631791115, -25859.822265625], [0.8463236093521118, 0.5326125025749207, -0.00776080135256052, 
42393.02734375], [0.01483831088989973, -0.00900914054363966, 0.9998493194580078, -206.2469024658203], [0.0, 0.0, 0.0, 1.0]], [[0.5416744351387024, -0.8404345512390137, -0.016078302636742592, -25859.947265625], [0.8404282927513123, 0.5418453812599182, -0.009151943027973175, 42393.68359375], [0.0164035651832819, -0.008555286563932896, 0.9998288154602051, -206.24472045898438], [0.0, 0.0, 0.0, 1.0]], [[0.5499740839004517, -0.8348347544670105, -0.02407185733318329, -25860.080078125], [0.8349542617797852, 0.5502685308456421, -0.007479130756109953, 42394.3515625], [0.019489822909235954, -0.015985572710633278, 0.9996822476387024, -206.24075317382812], [0.0, 0.0, 0.0, 1.0]], [[0.5572803616523743, -0.8299164175987244, -0.02602584846317768, -25860.20703125], [0.8301447629928589, 0.5575379729270935, -0.0033247421961277723, 42395.03125], [0.01726965792477131, -0.01975240744650364, 0.9996557831764221, -206.2420196533203], [0.0, 0.0, 0.0, 1.0]], [[0.5624006986618042, -0.826510488986969, -0.024202514439821243, -25860.31640625], [0.8267059326171875, 0.5626254081726074, -0.0031318222172558308, 42395.71484375], [0.016205433756113052, -0.018247023224830627, 0.9997021555900574, -206.24566650390625], [0.0, 0.0, 0.0, 1.0]], [[0.566135823726654, -0.8240976929664612, -0.01879514940083027, -25860.416015625], [0.8241602182388306, 0.566321849822998, -0.006275754421949387, 42396.40234375], [0.015815939754247665, -0.011937285773456097, 0.9998036623001099, -206.24874877929688], [0.0, 0.0, 0.0, 1.0]], [[0.5692822337150574, -0.822023332118988, -0.013977102935314178, -25860.525390625], [0.8219988942146301, 0.5694180727005005, -0.008989128284156322, 42397.1015625], [0.015348088927567005, -0.006371812894940376, 0.9998618960380554, -206.2487335205078], [0.0, 0.0, 0.0, 1.0]], [[0.5707277059555054, -0.8209465742111206, -0.017792897298932076, -25860.6484375], [0.8209103941917419, 0.5709463357925415, -0.011245978996157646, 42397.80859375], [0.01939113810658455, -0.008187981322407722, 0.9997784495353699, 
-206.2440643310547], [0.0, 0.0, 0.0, 1.0]], [[0.5707624554634094, -0.8208608031272888, -0.020438723266124725, -25860.7734375], [0.8209198713302612, 0.5709927678108215, -0.007602140307426453, 42398.52734375], [0.017910664901137352, -0.012439537793397903, 0.9997621774673462, -206.24093627929688], [0.0, 0.0, 0.0, 1.0]], [[0.571060061454773, -0.8206272721290588, -0.02147667668759823, -25860.89453125], [0.8207745552062988, 0.5712437033653259, -0.003102169604972005, 42399.25390625], [0.014814143069088459, -0.01585598662495613, 0.9997645616531372, -206.24179077148438], [0.0, 0.0, 0.0, 1.0]], [[0.5714538097381592, -0.8203519582748413, -0.02152221091091633, -25861.017578125], [0.820528507232666, 0.5716046094894409, -0.0010590095771476626, 42399.9921875], [0.013170954771339893, -0.01705441251397133, 0.9997677803039551, -206.2448272705078], [0.0, 0.0, 0.0, 1.0]], [[0.5711880922317505, -0.8206110596656799, -0.01848280243575573, -25861.134765625], [0.8207160234451294, 0.5713285207748413, -0.002993223490193486, 42400.7265625], [0.01301602553576231, -0.013459437526762486, 0.9998247027397156, -206.24937438964844], [0.0, 0.0, 0.0, 1.0]], [[0.5706471800804138, -0.8210023045539856, -0.017803778871893883, -25861.259765625], [0.8210830688476562, 0.5707934498786926, -0.004156363196671009, 42401.47265625], [0.013574665412306786, -0.012246565893292427, 0.9998328685760498, -206.25054931640625], [0.0, 0.0, 0.0, 1.0]], [[0.5703201293945312, -0.8212106823921204, -0.01865282654762268, -25861.38671875], [0.8213310241699219, 0.5704485177993774, -0.001972606172785163, 42402.23046875], [0.012260401621460915, -0.014195128343999386, 0.9998241066932678, -206.25296020507812], [0.0, 0.0, 0.0, 1.0]], [[0.5701214075088501, -0.8212826251983643, -0.021363021805882454, -25861.521484375], [0.8214933276176453, 0.5702144503593445, 0.0020418446511030197, 42402.99609375], [0.010504573583602905, -0.018713679164648056, 0.9997696876525879, -206.2564697265625], [0.0, 0.0, 0.0, 1.0]], [[0.5698682069778442, 
-0.8214293122291565, -0.0224514901638031, -25861.65625], [0.8216800093650818, 0.5699356198310852, 0.0038945956621319056, 42403.76953125], [0.00959677156060934, -0.02066734805703163, 0.9997403621673584, -206.2644805908203], [0.0, 0.0, 0.0, 1.0]], [[0.5694595575332642, -0.8216938376426697, -0.023129479959607124, -25861.787109375], [0.8219398260116577, 0.5695704221725464, 0.002118255477398634, 42404.54296875], [0.011433308944106102, -0.020217299461364746, 0.9997302889823914, -206.27197265625], [0.0, 0.0, 0.0, 1.0]], [[0.5692214369773865, -0.8218621015548706, -0.023015212267637253, -25861.919921875], [0.8220512270927429, 0.5694097876548767, -0.002048301277682185, 42405.3203125], [0.014788508415222168, -0.01775374449789524, 0.999733030796051, -206.27774047851562], [0.0, 0.0, 0.0, 1.0]], [[0.5689845085144043, -0.8220276236534119, -0.022961298003792763, -25862.0546875], [0.8221434950828552, 0.5692440867424011, -0.006419642828404903, 42406.09375], [0.018347706645727158, -0.015224806033074856, 0.9997157454490662, -206.2792510986328], [0.0, 0.0, 0.0, 1.0]], [[0.5691143274307251, -0.8218960165977478, -0.0244113989174366, -25862.189453125], [0.8220226168632507, 0.5694112181663513, -0.0070472476072609425, 42406.8828125], [0.019692230969667435, -0.016056030988693237, 0.9996771812438965, -206.2786102294922], [0.0, 0.0, 0.0, 1.0]], [[0.569645345211029, -0.8214967250823975, -0.025440750643610954, -25862.326171875], [0.8216582536697388, 0.5699473023414612, -0.006133297923952341, 42407.66796875], [0.01953837275505066, -0.017409797757864, 0.9996575117111206, -206.27914428710938], [0.0, 0.0, 0.0, 1.0]], [[0.570820152759552, -0.8206707835197449, -0.025763999670743942, -25862.458984375], [0.8208523392677307, 0.5711150765419006, -0.005371640436351299, 42408.453125], [0.01912255771458149, -0.01808219961822033, 0.9996536374092102, -206.28028869628906], [0.0, 0.0, 0.0, 1.0]], [[0.5717090964317322, -0.8200877904891968, -0.024590423330664635, -25862.5859375], [0.8202250003814697, 
0.5720028281211853, -0.0066072712652385235, 42409.234375], [0.01948433555662632, -0.016392244026064873, 0.9996757507324219, -206.2811737060547], [0.0, 0.0, 0.0, 1.0]], [[0.5721312761306763, -0.8198366761207581, -0.023101573809981346, -25862.7109375], [0.819911777973175, 0.5724255442619324, -0.008584496565163136, 42410.0078125], [0.020261816680431366, -0.014029793441295624, 0.9996962547302246, -206.28094482421875], [0.0, 0.0, 0.0, 1.0]], [[0.5724048614501953, -0.8198163509368896, -0.015931379050016403, -25862.828125], [0.8198229670524597, 0.5725627541542053, -0.007890197448432446, 42410.7890625], [0.015590227209031582, -0.00854452233761549, 0.9998419284820557, -206.28160095214844], [0.0, 0.0, 0.0, 1.0]], [[0.5725196599960327, -0.8197869062423706, -0.013061512261629105, -25862.9453125], [0.8197840452194214, 0.5726296305656433, -0.007029849570244551, 42411.5625], [0.013242388144135475, -0.0066828918643295765, 0.9998899698257446, -206.28036499023438], [0.0, 0.0, 0.0, 1.0]], [[0.5730167627334595, -0.8194740414619446, -0.010678977705538273, -25863.068359375], [0.8194801211357117, 0.5730859041213989, -0.004981447476893663, 42412.33984375], [0.010202138684689999, -0.005896757356822491, 0.9999305605888367, -206.27999877929688], [0.0, 0.0, 0.0, 1.0]], [[0.5735745429992676, -0.8190937042236328, -0.009885255247354507, -25863.1953125], [0.819115400314331, 0.5736224055290222, -0.0027079018764197826, 42413.11328125], [0.007888429798185825, -0.006543980911374092, 0.9999474883079529, -206.28121948242188], [0.0, 0.0, 0.0, 1.0]], [[0.5738248825073242, -0.8189366459846497, -0.008226254023611546, -25863.314453125], [0.8189559578895569, 0.5738541483879089, -0.0015615603188052773, 42413.8828125], [0.005999489687383175, -0.005840878002345562, 0.9999649524688721, -206.2836456298828], [0.0, 0.0, 0.0, 1.0]], [[0.5743870735168457, -0.8185572028160095, -0.006600199267268181, -25863.43359375], [0.8185716867446899, 0.5744037628173828, -0.0008075042278505862, 42414.65234375], 
[0.004452168010175228, -0.004938916768878698, 0.9999778866767883, -206.2864990234375], [0.0, 0.0, 0.0, 1.0]], [[0.5753434896469116, -0.8179008364677429, -0.00424621719866991, -25863.552734375], [0.8179088234901428, 0.5753477215766907, 0.00025757294497452676, 42415.4140625], [0.0022323820739984512, -0.003621211741119623, 0.9999909400939941, -206.2897186279297], [0.0, 0.0, 0.0, 1.0]], [[0.5759736895561218, -0.8174585700035095, -0.003976218868046999, -25863.671875], [0.8174681663513184, 0.5759667158126831, 0.002828159835189581, 42416.18359375], [-2.1733649191446602e-05, -0.004879377782344818, 0.9999880790710449, -206.29351806640625], [0.0, 0.0, 0.0, 1.0]], [[0.5762921571731567, -0.8172370791435242, -0.0032996998634189367, -25863.79296875], [0.8172414898872375, 0.5762763023376465, 0.004684699233621359, 42416.9453125], [-0.0019269711337983608, -0.005396407563239336, 0.9999836087226868, -206.29920959472656], [0.0, 0.0, 0.0, 1.0]], [[0.5767677426338196, -0.8169058561325073, -0.0019563387613743544, -25863.91015625], [0.8169033527374268, 0.5767545104026794, 0.004802228417247534, 42417.69921875], [-0.002794641302898526, -0.004367910325527191, 0.9999865293502808, -206.3061981201172], [0.0, 0.0, 0.0, 1.0]], [[0.5772375464439392, -0.816573977470398, -0.0019236418884247541, -25864.029296875], [0.8165718913078308, 0.5772251486778259, 0.004613958764821291, 42418.45703125], [-0.0026572642382234335, -0.004234141670167446, 0.9999874830245972, -206.31190490722656], [0.0, 0.0, 0.0, 1.0]], [[0.5776253342628479, -0.8162987232208252, -0.0023209506180137396, -25864.1484375], [0.8163018226623535, 0.5776212215423584, 0.002222586888819933, 42419.20703125], [-0.0004736647242680192, -0.0031784188468009233, 0.9999948740005493, -206.3173828125], [0.0, 0.0, 0.0, 1.0]], [[0.5778574347496033, -0.8161331415176392, -0.002715174574404955, -25864.267578125], [0.8161367177963257, 0.577858567237854, 0.0004142182588111609, 42419.953125], [0.001230929628945887, -0.0024553132243454456, 0.9999962449073792, 
-206.3207244873047], [0.0, 0.0, 0.0, 1.0]], [[0.5780884027481079, -0.815965473651886, -0.0037552351132035255, -25864.384765625], [0.8159739375114441, 0.5780847668647766, 0.0020956078078597784, 42420.703125], [0.00046090068644843996, -0.004275619983673096, 0.9999907612800598, -206.32337951660156], [0.0, 0.0, 0.0, 1.0]], [[0.5784014463424683, -0.8157417178153992, -0.004140028730034828, -25864.501953125], [0.8157511353492737, 0.5783849954605103, 0.004559728316962719, 42421.4453125], [-0.0013250302290543914, -0.006014587357640266, 0.9999810457229614, -206.32814025878906], [0.0, 0.0, 0.0, 1.0]], [[0.5788713693618774, -0.8154081106185913, -0.004190439824014902, -25864.619140625], [0.8154162764549255, 0.578848659992218, 0.005524149164557457, 42422.18359375], [-0.002078806050121784, -0.006614724174141884, 0.9999759793281555, -206.33560180664062], [0.0, 0.0, 0.0, 1.0]], [[0.5791725516319275, -0.8151843547821045, -0.0058029089123010635, -25864.734375], [0.8152047395706177, 0.5791626572608948, 0.0034309900365769863, 42422.91015625], [0.0005639385781250894, -0.006717694457620382, 0.9999772906303406, -206.34068298339844], [0.0, 0.0, 0.0, 1.0]], [[0.5795511603355408, -0.8149030208587646, -0.007322097662836313, -25864.849609375], [0.8149298429489136, 0.5795580148696899, 0.00135447655338794, 42423.6328125], [0.003139813430607319, -0.006751983892172575, 0.9999722838401794, -206.34478759765625], [0.0, 0.0, 0.0, 1.0]], [[0.579710066318512, -0.814789354801178, -0.007376472000032663, -25864.962890625], [0.8148227334022522, 0.5796899795532227, 0.0048399497754871845, 42424.35546875], [0.0003325275029055774, -0.00881628505885601, 0.9999610781669617, -206.3500213623047], [0.0, 0.0, 0.0, 1.0]], [[0.5795613527297974, -0.8148916959762573, -0.007757491432130337, -25865.072265625], [0.8149131536483765, 0.5794655084609985, 0.011667619459331036, 42425.07421875], [-0.005012647248804569, -0.01308378390967846, 0.9999018311500549, -206.3583984375], [0.0, 0.0, 0.0, 1.0]], [[0.5798725485801697, 
-0.8146588206291199, -0.008878420107066631, -25865.181640625], [0.8146752119064331, 0.5797187089920044, 0.01518250536173582, 42425.78125], [-0.007221574895083904, -0.016036946326494217, 0.9998452663421631, -206.37091064453125], [0.0, 0.0, 0.0, 1.0]], [[0.5809656381607056, -0.813879668712616, -0.0088725620880723, -25865.291015625], [0.8138953447341919, 0.5808101892471313, 0.01528916135430336, 42426.46875], [-0.00729026272892952, -0.01610381342470646, 0.9998437166213989, -206.3850555419922], [0.0, 0.0, 0.0, 1.0]], [[0.5819469690322876, -0.8131469488143921, -0.01139034517109394, -25865.39453125], [0.8132236003875732, 0.5818485021591187, 0.010938543826341629, 42427.13671875], [-0.0022671876940876245, -0.015628548339009285, 0.9998752474784851, -206.3957977294922], [0.0, 0.0, 0.0, 1.0]], [[0.5832839012145996, -0.8122202157974243, -0.00884265173226595, -25865.4921875], [0.8122623562812805, 0.583286702632904, 0.0025157437194138765, 42427.796875], [0.003114463062956929, -0.008649946190416813, 0.9999576807022095, -206.4036865234375], [0.0, 0.0, 0.0, 1.0]], [[0.5839034914970398, -0.8117741346359253, -0.008913857862353325, -25865.58984375], [0.8117884993553162, 0.5839446783065796, -0.0028199779335409403, 42428.4453125], [0.007494385354220867, -0.0055895717814564705, 0.9999563097953796, -206.40672302246094], [0.0, 0.0, 0.0, 1.0]], [[0.5839871168136597, -0.8117213249206543, -0.0082220658659935, -25865.68359375], [0.8117313981056213, 0.5840237736701965, -0.002909514820203185, 42429.09375], [0.007163597270846367, -0.00497498968616128, 0.9999619722366333, -206.40748596191406], [0.0, 0.0, 0.0, 1.0]], [[0.5842991471290588, -0.8114972114562988, -0.00816495530307293, -25865.783203125], [0.8115171790122986, 0.5843270421028137, -0.0013467875542119145, 42429.7421875], [0.005863918457180262, -0.005839074961841106, 0.9999657273292542, -206.40879821777344], [0.0, 0.0, 0.0, 1.0]], [[0.5846838355064392, -0.8112130165100098, -0.008844725787639618, -25865.87890625], [0.811250627040863, 
0.5846971273422241, 0.0012700356310233474, 42430.37890625], [0.004141215700656176, -0.007917859591543674, 0.9999600648880005, -206.41079711914062], [0.0, 0.0, 0.0, 1.0]], [[0.5842953324317932, -0.8114930391311646, -0.008835351094603539, -25865.97265625], [0.8115329146385193, 0.5843037962913513, 0.0018488684436306357, 42431.01171875], [0.0036621862091124058, -0.008250463753938675, 0.9999592304229736, -206.41468811035156], [0.0, 0.0, 0.0, 1.0]], [[0.5835822820663452, -0.8119902610778809, -0.010173733346164227, -25866.068359375], [0.8120380640029907, 0.5836033821105957, 0.0010589939774945378, 42431.62890625], [0.005077532026916742, -0.008879469707608223, 0.9999476671218872, -206.41770935058594], [0.0, 0.0, 0.0, 1.0]], [[0.5832040309906006, -0.8122758865356445, -0.008998895063996315, -25866.162109375], [0.8123161792755127, 0.5832149386405945, 0.001618859707377851, 42432.234375], [0.003933330066502094, -0.008254073560237885, 0.9999581575393677, -206.42138671875], [0.0, 0.0, 0.0, 1.0]], [[0.5813269019126892, -0.8136112689971924, -0.0097818523645401, -25866.25390625], [0.8136609792709351, 0.5813353061676025, 0.002254866762086749, 42432.8359375], [0.0038519510999321938, -0.009269925765693188, 0.99994957447052, -206.42547607421875], [0.0, 0.0, 0.0, 1.0]], [[0.5772289633750916, -0.8165753483772278, -0.0033685825765132904, -25866.34375], [0.8165820240974426, 0.5772275328636169, 0.0014736691955477, 42433.421875], [0.0007410770049318671, -0.003601368283852935, 0.9999932050704956, -206.43040466308594], [0.0, 0.0, 0.0, 1.0]], [[0.5694677233695984, -0.8220103979110718, 0.0023078129161149263, -25866.43359375], [0.8220106363296509, 0.569471001625061, 0.0011326466919854283, 42434.0], [-0.002245279960334301, 0.0012520409654825926, 0.9999967217445374, -206.43507385253906], [0.0, 0.0, 0.0, 1.0]], [[0.5579827427864075, -0.8298283219337463, 0.006347969174385071, -25866.54296875], [0.8298417925834656, 0.5579981207847595, 0.0008313246071338654, 42434.5625], [-0.004232012201100588, 
0.004803945310413837, 0.9999794960021973, -206.4379119873047], [0.0, 0.0, 0.0, 1.0]], [[0.5429201126098633, -0.839717447757721, 0.010600566864013672, -25866.666015625], [0.8397638201713562, 0.5429518222808838, 0.00014037807704880834, 42435.11328125], [-0.005873474292457104, 0.008825757540762424, 0.9999437928199768, -206.43992614746094], [0.0, 0.0, 0.0, 1.0]], [[0.5246837139129639, -0.851239800453186, 0.0098912063986063, -25866.806640625], [0.8512921333312988, 0.5246853232383728, -0.0026369064580649137, 42435.64453125], [-0.002945130690932274, 0.009803848341107368, 0.9999476075172424, -206.4386444091797], [0.0, 0.0, 0.0, 1.0]], [[0.5037003755569458, -0.8637778162956238, 0.013182851485908031, -25866.962890625], [0.8638744354248047, 0.503685712814331, -0.004652129951864481, 42436.16796875], [-0.0026216066908091307, 0.013731608167290688, 0.9999022483825684, -206.4375457763672], [0.0, 0.0, 0.0, 1.0]], [[0.4792275130748749, -0.8775578737258911, 0.015267055481672287, -25867.130859375], [0.8776899576187134, 0.4791775345802307, -0.007017477881163359, 42436.6796875], [-0.001157386228442192, 0.016762709245085716, 0.9998587965965271, -206.4340057373047], [0.0, 0.0, 0.0, 1.0]], [[0.4522390365600586, -0.8917431235313416, 0.01655481569468975, -25867.318359375], [0.8918930292129517, 0.45221278071403503, -0.005508034024387598, 42437.1875], [-0.0025745490565896034, 0.017256075516343117, 0.9998477697372437, -206.43016052246094], [0.0, 0.0, 0.0, 1.0]], [[0.42200958728790283, -0.9063470959663391, 0.021042684093117714, -25867.5234375], [0.906558632850647, 0.4220777153968811, -0.0013088444247841835, 42437.68359375], [-0.0076953815296292305, 0.019628772512078285, 0.9997777342796326, -206.4268341064453], [0.0, 0.0, 0.0, 1.0]], [[0.38853657245635986, -0.9212058782577515, 0.020471151918172836, -25867.748046875], [0.9213831424713135, 0.3886512517929077, 0.0017989207990467548, 42438.171875], [-0.00961331743746996, 0.01816282980144024, 0.9997888207435608, -206.4244384765625], [0.0, 0.0, 0.0, 
1.0]], [[0.35347980260849, -0.9352509379386902, 0.018913034349679947, -25867.99609375], [0.935393214225769, 0.35359588265419006, 0.0030781275127083063, 42438.640625], [-0.009566391818225384, 0.01660306751728058, 0.999816358089447, -206.4218292236328], [0.0, 0.0, 0.0, 1.0]], [[0.31693246960639954, -0.9483166933059692, 0.015784790739417076, -25868.267578125], [0.9484047889709473, 0.31703314185142517, 0.004280900117009878, 42439.08984375], [-0.00906395073980093, 0.013613615185022354, 0.999866247177124, -206.41908264160156], [0.0, 0.0, 0.0, 1.0]], [[0.2786306142807007, -0.960349440574646, 0.009684653021395206, -25868.55859375], [0.9603705406188965, 0.2786850929260254, 0.004800049122422934, 42439.5234375], [-0.007308694068342447, 0.00796341523528099, 0.9999415874481201, -206.41741943359375], [0.0, 0.0, 0.0, 1.0]], [[0.23945829272270203, -0.9708499312400818, 0.010491713881492615, -25868.859375], [0.9708841443061829, 0.2395123690366745, 0.004224866628646851, 42439.94140625], [-0.006614607758820057, 0.009174559265375137, 0.9999359846115112, -206.41744995117188], [0.0, 0.0, 0.0, 1.0]], [[0.19853931665420532, -0.9799604415893555, 0.016111087054014206, -25869.1640625], [0.9800577163696289, 0.19864462316036224, 0.005206907168030739, 42440.34765625], [-0.008302944712340832, 0.014756021089851856, 0.9998566508293152, -206.41810607910156], [0.0, 0.0, 0.0, 1.0]], [[0.15499839186668396, -0.9877159595489502, 0.019815297797322273, -25869.484375], [0.987861156463623, 0.15516765415668488, 0.007302531506866217, 42440.73828125], [-0.010287519544363022, 0.018442880362272263, 0.9997770190238953, -206.41795349121094], [0.0, 0.0, 0.0, 1.0]], [[0.11530853807926178, -0.9931604266166687, 0.018336821347475052, -25869.796875], [0.9932612776756287, 0.11549758911132812, 0.00960918515920639, 42441.07421875], [-0.01166132278740406, 0.017105232924222946, 0.9997856616973877, -206.41580200195312], [0.0, 0.0, 0.0, 1.0]], [[0.07106970250606537, -0.9973757266998291, 0.013811486773192883, -25870.166015625], 
[0.9973968267440796, 0.07122679799795151, 0.011236753314733505, 42441.41796875], [-0.012191012501716614, 0.012976941652595997, 0.9998415112495422, -206.414306640625], [0.0, 0.0, 0.0, 1.0]], [[0.025256287306547165, -0.9996659159660339, 0.005488209892064333, -25870.5546875], [0.9996431469917297, 0.025302724912762642, 0.008564428426325321, 42441.74609375], [-0.008700434118509293, 0.005269946064800024, 0.999948263168335, -206.41261291503906], [0.0, 0.0, 0.0, 1.0]], [[-0.022471535950899124, -0.9997473955154419, -0.00040592983714304864, -25870.953125], [0.9997194409370422, -0.02247394062578678, 0.007478256709873676, 42442.046875], [-0.007485490757972002, -0.00023776822490617633, 0.9999719262123108, -206.41355895996094], [0.0, 0.0, 0.0, 1.0]], [[-0.07148410379886627, -0.9974416494369507, -0.0004288868512958288, -25871.35546875], [0.9973944425582886, -0.07148491591215134, 0.009710214100778103, 42442.33203125], [-0.009716032072901726, 0.0002663565974216908, 0.9999527335166931, -206.4176483154297], [0.0, 0.0, 0.0, 1.0]], [[-0.12058379501104355, -0.9927021265029907, 0.0014028357109054923, -25871.76171875], [0.9925777316093445, -0.12054608762264252, 0.016062939539551735, 42442.609375], [-0.015776606276631355, 0.003329354105517268, 0.9998700022697449, -206.42474365234375], [0.0, 0.0, 0.0, 1.0]], [[-0.16878588497638702, -0.9856420755386353, 0.00457541411742568, -25872.177734375], [0.9852988719940186, -0.16859909892082214, 0.02757592685520649, 42442.87109375], [-0.026408584788441658, 0.009162576869130135, 0.9996092319488525, -206.43272399902344], [0.0, 0.0, 0.0, 1.0]], [[-0.21764039993286133, -0.976016104221344, 0.005008718464523554, -25872.60546875], [0.9755221009254456, -0.21735915541648865, 0.03333984687924385, 42443.1015625], [-0.03145154193043709, 0.012142213992774487, 0.9994314908981323, -206.44093322753906], [0.0, 0.0, 0.0, 1.0]], [[-0.26631686091423035, -0.9638772010803223, 0.0039949072524905205, -25873.041015625], [0.9632588624954224, -0.26599204540252686, 
0.03715512901544571, 42443.30078125], [-0.03475036844611168, 0.013743169605731964, 0.9993014931678772, -206.44741821289062], [0.0, 0.0, 0.0, 1.0]], [[-0.31471917033195496, -0.949182391166687, -0.0021500273142009974, -25873.4921875], [0.9487206339836121, -0.31463533639907837, 0.030558820813894272, 42443.453125], [-0.029682371765375137, 0.007577673997730017, 0.9995306730270386, -206.451171875], [0.0, 0.0, 0.0, 1.0]], [[-0.36115196347236633, -0.9325021505355835, -0.002982735401019454, -25873.935546875], [0.9321657419204712, -0.36110448837280273, 0.025893686339259148, 42443.57421875], [-0.02522299811244011, 0.0065711503848433495, 0.9996602535247803, -206.45579528808594], [0.0, 0.0, 0.0, 1.0]], [[-0.4069823920726776, -0.9134243726730347, -0.00459394371137023, -25874.376953125], [0.9132447242736816, -0.4069949984550476, 0.018416503444314003, 42443.67578125], [-0.01869179680943489, 0.0032998002134263515, 0.999819815158844, -206.45819091796875], [0.0, 0.0, 0.0, 1.0]], [[-0.4514981508255005, -0.8922654986381531, -0.0034275217913091183, -25874.810546875], [0.8919220566749573, -0.45142531394958496, 0.026273375377058983, 42443.78125], [-0.02499009482562542, 0.008805298246443272, 0.999648928642273, -206.4663848876953], [0.0, 0.0, 0.0, 1.0]], [[-0.48912185430526733, -0.8722002506256104, -0.005138672888278961, -25875.203125], [0.8716885447502136, -0.4890226423740387, 0.03187379613518715, 42443.859375], [-0.03031325712800026, 0.011110848747193813, 0.9994786977767944, -206.47256469726562], [0.0, 0.0, 0.0, 1.0]], [[-0.527576208114624, -0.849484384059906, -0.006291985046118498, -25875.6328125], [0.8487941026687622, -0.5274221301078796, 0.03707341477274895, 42443.91796875], [-0.03481181710958481, 0.014218452386558056, 0.99929279088974, -206.4790496826172], [0.0, 0.0, 0.0, 1.0]], [[-0.5640772581100464, -0.8257071375846863, -0.004944676999002695, -25876.048828125], [0.82498699426651, -0.5638177394866943, 0.03880785033106804, 42443.953125], [-0.03483181446790695, 0.017811331897974014, 
0.9992344975471497, -206.4846954345703], [0.0, 0.0, 0.0, 1.0]], [[-0.5995249152183533, -0.8003502488136292, -0.003065043594688177, -25876.45703125], [0.7998449206352234, -0.5992743968963623, 0.033440154045820236, 42443.95703125], [-0.02860063686966896, 0.017596643418073654, 0.9994360208511353, -206.48635864257812], [0.0, 0.0, 0.0, 1.0]], [[-0.6329834461212158, -0.7741612792015076, -0.0024923072196543217, -25876.85546875], [0.7738932967185974, -0.6328430771827698, 0.024470413103699684, 42443.9296875], [-0.020521285012364388, 0.013560586608946323, 0.9996974468231201, -206.4845428466797], [0.0, 0.0, 0.0, 1.0]], [[-0.6627523303031921, -0.7488373517990112, -0.0013895839219912887, -25877.24609375], [0.7486748695373535, -0.6626451015472412, 0.019684571772813797, 42443.8984375], [-0.015661343932151794, 0.012005649507045746, 0.9998053312301636, -206.4842987060547], [0.0, 0.0, 0.0, 1.0]], [[-0.688478946685791, -0.725246250629425, 0.003801829181611538, -25877.623046875], [0.7252392172813416, -0.6884173154830933, 0.010471153073012829, 42443.8515625], [-0.00497691985219717, 0.009966404177248478, 0.9999379515647888, -206.4818115234375], [0.0, 0.0, 0.0, 1.0]], [[-0.7131657004356384, -0.7009854316711426, 0.003739195642992854, -25877.994140625], [0.7009614109992981, -0.7130683064460754, 0.013662774115800858, 42443.81640625], [-0.00691110547631979, 0.012364853173494339, 0.9998996257781982, -206.4817657470703], [0.0, 0.0, 0.0, 1.0]], [[-0.7345044016838074, -0.6786035895347595, 0.0006059394800104201, -25878.365234375], [0.6785373687744141, -0.7344196438789368, 0.014659802429378033, 42443.77734375], [-0.009503180161118507, 0.011178841814398766, 0.9998923540115356, -206.4818878173828], [0.0, 0.0, 0.0, 1.0]], [[-0.7500779032707214, -0.6613463163375854, -0.0020271181128919125, -25878.720703125], [0.6612091660499573, -0.7499783635139465, 0.018299417570233345, 42443.73046875], [-0.013622546568512917, 0.012385638430714607, 0.9998304843902588, -206.48355102539062], [0.0, 0.0, 0.0, 1.0]], 
[[-0.7636971473693848, -0.6455743908882141, -0.000564945803489536, -25879.06640625], [0.6454777717590332, -0.7635974884033203, 0.01665341667830944, 42443.67578125], [-0.011182410642504692, 0.012353508733212948, 0.9998611211776733, -206.4837188720703], [0.0, 0.0, 0.0, 1.0]], [[-0.7765699625015259, -0.6300246715545654, 0.0028224156703799963, -25879.3984375], [0.6300098299980164, -0.7764992713928223, 0.011682315729558468, 42443.61328125], [-0.005168544128537178, 0.01085028424859047, 0.9999277591705322, -206.481201171875], [0.0, 0.0, 0.0, 1.0]], [[-0.7866201996803284, -0.6174109578132629, 0.005680482368916273, -25879.716796875], [0.6174366474151611, -0.7865748405456543, 0.008484892547130585, 42443.55078125], [-0.0007705415482632816, 0.010181725956499577, 0.9999478459358215, -206.47979736328125], [0.0, 0.0, 0.0, 1.0]], [[-0.7938373684883118, -0.6080707907676697, 0.008494778536260128, -25880.03125], [0.6081244349479675, -0.7938128113746643, 0.006773989647626877, 42443.4921875], [0.0026241980958729982, 0.010543327778577805, 0.9999409317970276, -206.4775848388672], [0.0, 0.0, 0.0, 1.0]], [[-0.799691915512085, -0.6003465056419373, 0.008769446052610874, -25880.337890625], [0.6004064083099365, -0.7996554374694824, 0.007963098585605621, 42443.44140625], [0.0022319161798805, 0.011633256450295448, 0.9999298453330994, -206.47499084472656], [0.0, 0.0, 0.0, 1.0]], [[-0.8053072094917297, -0.5928120613098145, 0.007360493298619986, -25880.63671875], [0.5928577780723572, -0.8052452802658081, 0.009989851154386997, 42443.390625], [4.898237421002705e-06, 0.012408624403178692, 0.9999229907989502, -206.47378540039062], [0.0, 0.0, 0.0, 1.0]], [[-0.8100031614303589, -0.5863859057426453, 0.006815516855567694, -25880.921875], [0.5864245891571045, -0.80992591381073, 0.011239553801715374, 42443.34375], [-0.0010706528555601835, 0.013100860640406609, 0.9999136328697205, -206.4723663330078], [0.0, 0.0, 0.0, 1.0]], [[-0.8130217790603638, -0.5821732878684998, 0.008357579819858074, -25881.1953125], 
[0.5822320580482483, -0.8129650354385376, 0.009675193578004837, 42443.2890625], [0.0011617809068411589, 0.012732194736599922, 0.9999182820320129, -206.4701690673828], [0.0, 0.0, 0.0, 1.0]], [[-0.8148959875106812, -0.579521119594574, 0.00999196246266365, -25881.453125], [0.5795906782150269, -0.8148820996284485, 0.006468147970736027, 42443.234375], [0.004393843002617359, 0.01106211543083191, 0.9999291896820068, -206.46707153320312], [0.0, 0.0, 0.0, 1.0]], [[-0.816378653049469, -0.5774353742599487, 0.009705645963549614, -25881.701171875], [0.5774816870689392, -0.8163992762565613, 0.002666184911504388, 42443.18359375], [0.00638413242995739, 0.007781448774039745, 0.9999493956565857, -206.46395874023438], [0.0, 0.0, 0.0, 1.0]], [[-0.8171114921569824, -0.5764322876930237, 0.007384817115962505, -25881.943359375], [0.5764617919921875, -0.8171199560165405, 0.0026009557768702507, 42443.1484375], [0.004535005893558264, 0.006382335908710957, 0.9999693632125854, -206.46240234375], [0.0, 0.0, 0.0, 1.0]], [[-0.8169057965278625, -0.5767596364021301, 0.003634750610217452, -25882.177734375], [0.5767704248428345, -0.8168790936470032, 0.006662489380687475, 42443.1171875], [-0.0008735030423849821, 0.007539042271673679, 0.9999712109565735, -206.4634246826172], [0.0, 0.0, 0.0, 1.0]], [[-0.8165742754936218, -0.5772344470024109, 0.002621383871883154, -25882.396484375], [0.5772310495376587, -0.8165266513824463, 0.009411451406776905, 42443.09375], [-0.0032921843230724335, 0.009198293089866638, 0.9999523162841797, -206.463623046875], [0.0, 0.0, 0.0, 1.0]], [[-0.8168002963066101, -0.5768991112709045, 0.004965304397046566, -25882.595703125], [0.5769190788269043, -0.816748321056366, 0.009299077093601227, 42443.0625], [-0.0013092258013784885, 0.010460068471729755, 0.9999444484710693, -206.463134765625], [0.0, 0.0, 0.0, 1.0]], [[-0.8176702857017517, -0.5756304264068604, 0.008048659190535545, -25882.783203125], [0.5756773352622986, -0.8176568746566772, 0.005724531132727861, 42443.01953125], 
[0.003285827115178108, 0.009314210154116154, 0.9999512434005737, -206.4610137939453], [0.0, 0.0, 0.0, 1.0]], [[-0.8177518844604492, -0.5754919648170471, 0.00952558871358633, -25882.962890625], [0.5755192041397095, -0.817788302898407, 0.00013712429790757596, 42442.9765625], [0.007711000274866819, 0.005594292655587196, 0.9999545812606812, -206.4585723876953], [0.0, 0.0, 0.0, 1.0]], [[-0.8170190453529358, -0.5765275359153748, 0.009783394634723663, -25883.13671875], [0.5765253901481628, -0.8170724511146545, -0.003328542457893491, 42442.9453125], [0.00991273857653141, 0.0029208918567746878, 0.9999465942382812, -206.45669555664062], [0.0, 0.0, 0.0, 1.0]], [[-0.8169112801551819, -0.5766851902008057, 0.009498706087470055, -25883.30078125], [0.5766817331314087, -0.8169618844985962, -0.0033755097538232803, 42442.91796875], [0.009706687182188034, 0.0027202379424124956, 0.9999491572380066, -206.4551239013672], [0.0, 0.0, 0.0, 1.0]], [[-0.8176383972167969, -0.5756615996360779, 0.0090068643912673, -25883.45703125], [0.5756683945655823, -0.8176806569099426, -0.0020837511401623487, 42442.8984375], [0.008564273826777935, 0.003481211606413126, 0.999957263469696, -206.45465087890625], [0.0, 0.0, 0.0, 1.0]], [[-0.8178718090057373, -0.5753428936004639, 0.008136031217873096, -25883.60546875], [0.575360119342804, -0.8179001212120056, -0.0002781395160127431, 42442.875], [0.006814486347138882, 0.0044536651112139225, 0.9999668598175049, -206.45468139648438], [0.0, 0.0, 0.0, 1.0]], [[-0.8173822164535522, -0.5760523676872253, 0.007064169738441706, -25883.744140625], [0.5760652422904968, -0.8174036741256714, -0.000261939741903916, 42442.8515625], [0.005925169214606285, 0.0038553173653781414, 0.9999750256538391, -206.4542694091797], [0.0, 0.0, 0.0, 1.0]], [[-0.8172771334648132, -0.5762137174606323, 0.0059883263893425465, -25883.87890625], [0.5762198567390442, -0.8172943592071533, -0.0008230279199779034, 42442.83203125], [0.005368465557694435, 0.0027779503725469112, 0.9999817609786987, 
-206.45294189453125], [0.0, 0.0, 0.0, 1.0]], [[-0.8177781105041504, -0.5755105018615723, 0.005152215715497732, -25884.005859375], [0.5755134224891663, -0.8177916407585144, -0.001049025566317141, 42442.80859375], [0.004817164037376642, 0.00210729893296957, 0.9999861717224121, -206.45321655273438], [0.0, 0.0, 0.0, 1.0]], [[-0.8179988265037537, -0.5751981735229492, 0.004990260116755962, -25884.12109375], [0.5751968026161194, -0.8180128931999207, -0.0018499388825148344, 42442.7890625], [0.005146178882569075, 0.0013571340823546052, 0.9999858140945435, -206.4525604248047], [0.0, 0.0, 0.0, 1.0]], [[-0.8176689147949219, -0.5756692290306091, 0.004735487047582865, -25884.23046875], [0.5756664276123047, -0.8176819682121277, -0.0020875923801213503, 42442.7734375], [0.005073885433375835, 0.0010191012406721711, 0.9999866485595703, -206.4519805908203], [0.0, 0.0, 0.0, 1.0]], [[-0.817669689655304, -0.575669527053833, 0.004561298992484808, -25884.333984375], [0.5756658911705017, -0.8176820278167725, -0.002212557941675186, 42442.7578125], [0.005003394093364477, 0.0008166423649527133, 0.9999871850013733, -206.45225524902344], [0.0, 0.0, 0.0, 1.0]], [[-0.817945122718811, -0.5752769708633423, 0.004706100095063448, -25884.42578125], [0.5752750039100647, -0.8179578185081482, -0.001895998721010983, 42442.7421875], [0.004940116312354803, 0.001156478887423873, 0.9999871850013733, -206.45199584960938], [0.0, 0.0, 0.0, 1.0]], [[-0.8179985284805298, -0.5751988887786865, 0.004971613641828299, -25884.515625], [0.5751949548721313, -0.8180130124092102, -0.002338441787287593, 42442.72265625], [0.005411914084106684, 0.0009468049975112081, 0.999984860420227, -206.45167541503906], [0.0, 0.0, 0.0, 1.0]], [[-0.8178331255912781, -0.5754349231719971, 0.004857489373534918, -25884.59765625], [0.5754281282424927, -0.8178473711013794, -0.002843545051291585, 42442.7109375], [0.0056089600548148155, 0.0004695906536653638, 0.9999841451644897, -206.45191955566406], [0.0, 0.0, 0.0, 1.0]], [[-0.8179996013641357, 
-0.5751997232437134, 0.004684191197156906, -25884.671875], [0.5751921534538269, -0.8180128931999207, -0.002967212349176407, 42442.69921875], [0.005538469180464745, 0.000267131719738245, 0.999984622001648, -206.45118713378906], [0.0, 0.0, 0.0, 1.0]], [[-0.818219006061554, -0.5748860239982605, 0.004874635487794876, -25884.7421875], [0.5748786926269531, -0.8182333707809448, -0.0029349380638450384, 42442.6875], [0.005675844382494688, 0.0004009019467048347, 0.9999838471412659, -206.45089721679688], [0.0, 0.0, 0.0, 1.0]], [[-0.8182171583175659, -0.5748854875564575, 0.005222771316766739, -25884.8046875], [0.5748798251152039, -0.8182334303855896, -0.0026846735272556543, 42442.67578125], [0.00581682613119483, 0.0008058199309743941, 0.9999827742576599, -206.45135498046875], [0.0, 0.0, 0.0, 1.0]], [[-0.8182702660560608, -0.5748059749603271, 0.005635296460241079, -25884.861328125], [0.5748034715652466, -0.8182883858680725, -0.002229321515187621, 42442.66796875], [0.005892725195735693, 0.0014150001807138324, 0.9999816417694092, -206.45166015625], [0.0, 0.0, 0.0, 1.0]], [[-0.8186538219451904, -0.5742520093917847, 0.006368847563862801, -25884.91796875], [0.5742595791816711, -0.8186728954315186, -0.0007474257727153599, 42442.6640625], [0.0056432136334478855, 0.0030454888474196196, 0.9999794363975525, -206.4513702392578], [0.0, 0.0, 0.0, 1.0]], [[-0.8190352320671082, -0.5736913084983826, 0.007716689724475145, -25884.974609375], [0.5737152099609375, -0.8190540671348572, 0.0011269778478890657, 42442.66015625], [0.005673848558217287, 0.005350216291844845, 0.9999695420265198, -206.4521026611328], [0.0, 0.0, 0.0, 1.0]], [[-0.8188693523406982, -0.5739161372184753, 0.008558456785976887, -25885.03515625], [0.5739530324935913, -0.8188843131065369, 0.002523863222450018, 42442.65234375], [0.005559900309890509, 0.006978865712881088, 0.99996018409729, -206.45167541503906], [0.0, 0.0, 0.0, 1.0]], [[-0.8187603950500488, -0.5740663409233093, 0.008894143626093864, -25885.09375], 
[0.5741108655929565, -0.8187702894210815, 0.003450623480603099, 42442.64453125], [0.005301373079419136, 0.007931457832455635, 0.9999545216560364, -206.45204162597656], [0.0, 0.0, 0.0, 1.0]], [[-0.818924069404602, -0.573828399181366, 0.009177044034004211, -25885.154296875], [0.5738754272460938, -0.8189351558685303, 0.003499253187328577, 42442.6328125], [0.0055074323900043964, 0.008132102899253368, 0.9999517798423767, -206.4516143798828], [0.0, 0.0, 0.0, 1.0]]], "3": [[[-0.5404567122459412, 0.841334879398346, -0.007881584577262402, -25833.8203125], [-0.841317892074585, -0.5405044555664062, -0.006257042288780212, 42363.70703125], [-0.009524297900497913, 0.0032492580357939005, 0.9999493360519409, -206.0481719970703], [0.0, 0.0, 0.0, 1.0]], [[-0.5374479293823242, 0.8432250618934631, -0.0110122449696064, -25834.38671875], [-0.8432589769363403, -0.5375018119812012, -0.0024797306396067142, 42364.06640625], [-0.008010071702301502, 0.007953448221087456, 0.9999362826347351, -206.05081176757812], [0.0, 0.0, 0.0, 1.0]], [[-0.5345330834388733, 0.8450633883476257, -0.011926455423235893, -25834.955078125], [-0.8451173305511475, -0.5345802307128906, -0.000921130646020174, 42364.421875], [-0.007154060062021017, 0.0095868781208992, 0.9999284148216248, -206.0570831298828], [0.0, 0.0, 0.0, 1.0]], [[-0.5318034887313843, 0.8468206524848938, -0.008935203775763512, -25835.51953125], [-0.8468478918075562, -0.5318337082862854, -0.001238334341906011, 42364.7734375], [-0.005800689570605755, 0.006908208131790161, 0.9999592900276184, -206.06207275390625], [0.0, 0.0, 0.0, 1.0]], [[-0.5289785265922546, 0.8486157655715942, -0.005742557812482119, -25836.08203125], [-0.8486186265945435, -0.5289984345436096, -0.0026737998705357313, 42365.12109375], [-0.005306830629706383, 0.00345885893329978, 0.9999799132347107, -206.0660400390625], [0.0, 0.0, 0.0, 1.0]], [[-0.5263769030570984, 0.8502375483512878, -0.004853844176977873, -25836.650390625], [-0.8502336144447327, -0.526394248008728, 
-0.003456314792856574, 42365.46484375], [-0.005493722390383482, 0.0023075772915035486, 0.9999822378158569, -206.06741333007812], [0.0, 0.0, 0.0, 1.0]], [[-0.5251227021217346, 0.85097736120224, -0.009148009121418, -25837.236328125], [-0.8509989380836487, -0.525161862373352, -0.0024083589669317007, 42365.8203125], [-0.0068536438047885895, 0.006520261988043785, 0.9999552369117737, -206.068603515625], [0.0, 0.0, 0.0, 1.0]], [[-0.5247527956962585, 0.851129412651062, -0.014601152390241623, -25837.8203125], [-0.8512220978736877, -0.5248056650161743, 0.0002452860353514552, 42366.171875], [-0.0074539948254823685, 0.012557539157569408, 0.999893307685852, -206.07443237304688], [0.0, 0.0, 0.0, 1.0]], [[-0.5239701271057129, 0.8515408039093018, -0.01826443523168564, -25838.40234375], [-0.8516891002655029, -0.5240469574928284, 0.0006691901362501085, 42366.52734375], [-0.009001580066978931, 0.015906257554888725, 0.9998329877853394, -206.08348083496094], [0.0, 0.0, 0.0, 1.0]], [[-0.5233792662620544, 0.8519323468208313, -0.016893960535526276, -25838.978515625], [-0.852017343044281, -0.5235016345977783, -0.003544427454471588, 42366.87109375], [-0.011863627471029758, 0.012538867071270943, 0.9998509883880615, -206.0928497314453], [0.0, 0.0, 0.0, 1.0]], [[-0.5237033367156982, 0.8518307209014893, -0.010921365581452847, -25839.55078125], [-0.8518407344818115, -0.5237755179405212, -0.005157368257641792, 42367.21484375], [-0.010113547556102276, 0.006602332927286625, 0.9999270439147949, -206.10006713867188], [0.0, 0.0, 0.0, 1.0]], [[-0.5235828757286072, 0.8519482612609863, -0.006708923261612654, -25840.12109375], [-0.8518578410148621, -0.5236252546310425, -0.012437266297638416, 42367.5625], [-0.0141088692471385, -0.0007968913414515555, 0.9999001622200012, -206.10186767578125], [0.0, 0.0, 0.0, 1.0]], [[-0.5237208008766174, 0.8518873453140259, 0.002103179693222046, -25840.6796875], [-0.8518742918968201, -0.5236945152282715, -0.007364577613770962, 42367.91796875], [-0.005172365345060825, 
-0.005648627877235413, 0.9999706149101257, -206.1000518798828], [0.0, 0.0, 0.0, 1.0]], [[-0.5241937041282654, 0.8515987992286682, -0.0006582366186194122, -25841.251953125], [-0.8515895009040833, -0.5241912603378296, -0.0043372237123548985, 42368.27734375], [-0.004038614220917225, -0.001712998142465949, 0.9999904036521912, -206.09652709960938], [0.0, 0.0, 0.0, 1.0]], [[-0.5253939032554626, 0.8507612943649292, -0.012899128720164299, -25841.837890625], [-0.8507930636405945, -0.5254815816879272, -0.0044936067424714565, 42368.625], [-0.010601241141557693, 0.00861357618123293, 0.9999066591262817, -206.09703063964844], [0.0, 0.0, 0.0, 1.0]], [[-0.5280404686927795, 0.8490111231803894, -0.01879948563873768, -25842.41796875], [-0.8490669131278992, -0.5282356142997742, -0.007242798339575529, 42368.96484375], [-0.016079772263765335, 0.01213753316551447, 0.999796986579895, -206.103759765625], [0.0, 0.0, 0.0, 1.0]], [[-0.5306532979011536, 0.8472760319709778, -0.023031849414110184, -25842.984375], [-0.8472773432731628, -0.5309990644454956, -0.01269196905195713, 42369.30859375], [-0.02298349142074585, 0.012779328040778637, 0.9996541738510132, -206.11056518554688], [0.0, 0.0, 0.0, 1.0]], [[-0.535474419593811, 0.8443286418914795, -0.019397864118218422, -25843.53515625], [-0.8443431258201599, -0.5357112884521484, -0.009910518303513527, 42369.66796875], [-0.018759388476610184, 0.011071624234318733, 0.9997627139091492, -206.1185302734375], [0.0, 0.0, 0.0, 1.0]], [[-0.5424345135688782, 0.8399628400802612, -0.015073328278958797, -25844.080078125], [-0.8399463891983032, -0.5425902605056763, -0.009270524606108665, 42370.03125], [-0.015965536236763, 0.007632134947925806, 0.999843418598175, -206.1251983642578], [0.0, 0.0, 0.0, 1.0]], [[-0.5500192046165466, 0.8349910378456116, -0.016397925093770027, -25844.626953125], [-0.8349196314811707, -0.5502233505249023, -0.012787493877112865, 42370.390625], [-0.01969996467232704, 0.006657581776380539, 0.9997837543487549, -206.12925720214844], [0.0, 
0.0, 0.0, 1.0]], [[-0.5562267899513245, 0.8307151198387146, -0.02289707399904728, -25845.17578125], [-0.8306519389152527, -0.5565938949584961, -0.014854206703603268, 42370.75], [-0.025083983317017555, 0.01075719017535448, 0.999627411365509, -206.13290405273438], [0.0, 0.0, 0.0, 1.0]], [[-0.5625409483909607, 0.8262833952903748, -0.028344091027975082, -25845.71875], [-0.8262115716934204, -0.5630900859832764, -0.01743416115641594, 42371.11328125], [-0.0303658340126276, 0.013610786758363247, 0.9994462132453918, -206.14080810546875], [0.0, 0.0, 0.0, 1.0]], [[-0.5695686340332031, 0.8214997053146362, -0.027015112340450287, -25846.24609375], [-0.8214934468269348, -0.5700368881225586, -0.014370644465088844, 42371.48828125], [-0.02720509096980095, 0.014007669873535633, 0.9995317459106445, -206.14892578125], [0.0, 0.0, 0.0, 1.0]], [[-0.5773318409919739, 0.8161125779151917, -0.02545972540974617, -25846.765625], [-0.816132664680481, -0.5777320861816406, -0.012374392710626125, 42371.875], [-0.024807797744870186, 0.01363438181579113, 0.9995992183685303, -206.15806579589844], [0.0, 0.0, 0.0, 1.0]], [[-0.5864247679710388, 0.8095773458480835, -0.026277583092451096, -25847.287109375], [-0.8095998167991638, -0.586848258972168, -0.012543137185275555, 42372.25390625], [-0.02557559311389923, 0.013918721117079258, 0.9995759725570679, -206.1665802001953], [0.0, 0.0, 0.0, 1.0]], [[-0.5949983596801758, 0.8031738996505737, -0.029810789972543716, -25847.8046875], [-0.8031012415885925, -0.595589280128479, -0.017371468245983124, 42372.6328125], [-0.03170729801058769, 0.01360508892685175, 0.9994046092033386, -206.17401123046875], [0.0, 0.0, 0.0, 1.0]], [[-0.6027438640594482, 0.7972977757453918, -0.03187593072652817, -25848.3125], [-0.7972378730773926, -0.6034060120582581, -0.017692700028419495, 42373.01953125], [-0.03334047645330429, 0.01474852953106165, 0.9993352293968201, -206.18292236328125], [0.0, 0.0, 0.0, 1.0]], [[-0.6120314002037048, 0.7902393341064453, -0.030649948865175247, 
-25848.80859375], [-0.7902500629425049, -0.612608790397644, -0.014673540368676186, 42373.421875], [-0.03037203662097454, 0.015240456908941269, 0.9994224309921265, -206.1921844482422], [0.0, 0.0, 0.0, 1.0]], [[-0.6238234043121338, 0.7811453938484192, -0.025617079809308052, -25849.291015625], [-0.7811279892921448, -0.6242356300354004, -0.012995827943086624, 42373.83203125], [-0.026142725721001625, 0.01190311647951603, 0.9995872974395752, -206.20071411132812], [0.0, 0.0, 0.0, 1.0]], [[-0.6364672780036926, 0.7711035013198853, -0.01757603883743286, -25849.7578125], [-0.7709273099899292, -0.6367033123970032, -0.016732942312955856, 42374.23828125], [-0.024093549698591232, 0.0028998791240155697, 0.9997054934501648, -206.20541381835938], [0.0, 0.0, 0.0, 1.0]], [[-0.6485204696655273, 0.7610621452331543, -0.014344081282615662, -25850.220703125], [-0.7608225345611572, -0.6486772298812866, -0.019155535846948624, 42374.65234375], [-0.023883230984210968, -0.0015094580594450235, 0.9997135996818542, -206.20608520507812], [0.0, 0.0, 0.0, 1.0]], [[-0.6597632765769958, 0.7512755990028381, -0.017244858667254448, -25850.685546875], [-0.7510496973991394, -0.6599881649017334, -0.018439868465065956, 42375.0703125], [-0.025234824046492577, 0.0007857978926040232, 0.9996812343597412, -206.2045440673828], [0.0, 0.0, 0.0, 1.0]], [[-0.6707009077072144, 0.7413730621337891, -0.022939525544643402, -25851.15234375], [-0.741197943687439, -0.6710700392723083, -0.01704949326813221, 42375.5], [-0.028034063056111336, 0.005567618180066347, 0.9995914101600647, -206.205810546875], [0.0, 0.0, 0.0, 1.0]], [[-0.6820334196090698, 0.7309145927429199, -0.024377506226301193, -25851.6015625], [-0.7308192849159241, -0.6824212670326233, -0.014297284185886383, 42375.9375], [-0.02708582393825054, 0.008064324967563152, 0.9996005892753601, -206.2092742919922], [0.0, 0.0, 0.0, 1.0]], [[-0.6935935020446777, 0.719967782497406, -0.02396918460726738, -25852.037109375], [-0.7198466062545776, -0.6939731240272522, 
-0.014909992925822735, 42376.375], [-0.027368683367967606, 0.006912661250680685, 0.9996014833450317, -206.21365356445312], [0.0, 0.0, 0.0, 1.0]], [[-0.7054597735404968, 0.7084381580352783, -0.021022310480475426, -25852.4609375], [-0.7082863450050354, -0.7057607769966125, -0.015238436870276928, 42376.81640625], [-0.025632211938500404, 0.004139711614698172, 0.9996628165245056, -206.21641540527344], [0.0, 0.0, 0.0, 1.0]], [[-0.7173911929130554, 0.6963649392127991, -0.020635385066270828, -25852.875], [-0.6961298584938049, -0.717686653137207, -0.018139656633138657, 42377.26171875], [-0.027441561222076416, 0.001351678860373795, 0.9996224641799927, -206.21766662597656], [0.0, 0.0, 0.0, 1.0]], [[-0.7300952672958374, 0.6829656362533569, -0.0227762833237648, -25853.287109375], [-0.6826935410499573, -0.7304490804672241, -0.01932877115905285, 42377.7109375], [-0.029837802052497864, 0.0014373790472745895, 0.9995536804199219, -206.2181396484375], [0.0, 0.0, 0.0, 1.0]], [[-0.7428370118141174, 0.668973445892334, -0.025839725509285927, -25853.689453125], [-0.6687229871749878, -0.7432783246040344, -0.018626760691404343, 42378.171875], [-0.031666915863752365, 0.0034429715014994144, 0.9994925260543823, -206.21827697753906], [0.0, 0.0, 0.0, 1.0]], [[-0.7552351355552673, 0.654811680316925, -0.02900848351418972, -25854.076171875], [-0.6545630097389221, -0.7557763457298279, -0.018692737445235252, 42378.640625], [-0.034164149314165115, 0.004870468284934759, 0.9994043707847595, -206.2197265625], [0.0, 0.0, 0.0, 1.0]], [[-0.7681333422660828, 0.6395326852798462, -0.031131889671087265, -25854.455078125], [-0.639309823513031, -0.7687397003173828, -0.01795445941388607, 42379.11328125], [-0.03541478142142296, 0.006111504044383764, 0.9993540048599243, -206.2227020263672], [0.0, 0.0, 0.0, 1.0]], [[-0.7815861105918884, 0.6230235695838928, -0.031061578541994095, -25854.81640625], [-0.6228659749031067, -0.7821710705757141, -0.01570049114525318, 42379.59375], [-0.034077245742082596, 
0.007075912784785032, 0.9993941187858582, -206.22589111328125], [0.0, 0.0, 0.0, 1.0]], [[-0.7953184247016907, 0.6056382656097412, -0.02590261772274971, -25855.15234375], [-0.6054707169532776, -0.795730471611023, -0.014776458032429218, 42380.078125], [-0.029560688883066177, 0.003931286744773388, 0.999555230140686, -206.2286834716797], [0.0, 0.0, 0.0, 1.0]], [[-0.8098854422569275, 0.5863193869590759, -0.0177546888589859, -25855.5], [-0.5860863327980042, -0.8100734949111938, -0.016845066100358963, 42380.6171875], [-0.024259192869067192, -0.0032367927487939596, 0.9997004270553589, -206.22967529296875], [0.0, 0.0, 0.0, 1.0]], [[-0.8230665326118469, 0.5677444338798523, -0.015090697444975376, -25855.8125], [-0.5674235224723816, -0.823158860206604, -0.020976031199097633, 42381.109375], [-0.024331066757440567, -0.008701852522790432, 0.9996660947799683, -206.22593688964844], [0.0, 0.0, 0.0, 1.0]], [[-0.8362841010093689, 0.5480783581733704, -0.015462009236216545, -25856.119140625], [-0.5476820468902588, -0.836349368095398, -0.02374950982630253, 42381.6015625], [-0.025948233902454376, -0.01139307301491499, 0.9995983839035034, -206.21937561035156], [0.0, 0.0, 0.0, 1.0]], [[-0.848422646522522, 0.529105007648468, -0.015064403414726257, -25856.408203125], [-0.5287690162658691, -0.8484946489334106, -0.021452195942401886, 42382.10546875], [-0.02413252927362919, -0.010234937071800232, 0.9996563196182251, -206.21176147460938], [0.0, 0.0, 0.0, 1.0]], [[-0.8595262765884399, 0.5105404853820801, -0.023727359250187874, -25856.6953125], [-0.5103339552879333, -0.8598536252975464, -0.014524905942380428, 42382.625], [-0.027817608788609505, -0.0003756610385607928, 0.9996129274368286, -206.2062530517578], [0.0, 0.0, 0.0, 1.0]], [[-0.8706456422805786, 0.49118146300315857, -0.026776567101478577, -25856.96875], [-0.4912806451320648, -0.8709954619407654, -0.0031933493446558714, 42383.14453125], [-0.024890782311558723, 0.010374532081186771, 0.9996362924575806, -206.20831298828125], [0.0, 0.0, 0.0, 
1.0]], [[-0.8814499974250793, 0.47177445888519287, -0.02178899198770523, -25857.21484375], [-0.47189319133758545, -0.8816556334495544, 0.000350549555150792, 42383.671875], [-0.01904500648379326, 0.010591068305075169, 0.9997624754905701, -206.21487426757812], [0.0, 0.0, 0.0, 1.0]], [[-0.8916226029396057, 0.4523235261440277, -0.02031191624701023, -25857.44921875], [-0.45233821868896484, -0.8918368816375732, -0.004129491746425629, 42384.1796875], [-0.019982783123850822, 0.005505908280611038, 0.9997851252555847, -206.21961975097656], [0.0, 0.0, 0.0, 1.0]], [[-0.901531457901001, 0.43238383531570435, -0.01689150184392929, -25857.67578125], [-0.43231287598609924, -0.9016895890235901, -0.007835332304239273, 42384.6953125], [-0.018618760630488396, 0.00023861470981501043, 0.9998266100883484, -206.2209014892578], [0.0, 0.0, 0.0, 1.0]], [[-0.910634458065033, 0.4127412736415863, -0.01973715052008629, -25857.896484375], [-0.412550151348114, -0.9108403921127319, -0.013123739510774612, 42385.21484375], [-0.02339410036802292, -0.003808366833254695, 0.9997190833091736, -206.21974182128906], [0.0, 0.0, 0.0, 1.0]], [[-0.9195050001144409, 0.3924609124660492, -0.022025249898433685, -25858.111328125], [-0.39215511083602905, -0.9197421073913574, -0.01699228584766388, 42385.74609375], [-0.02692635916173458, -0.006987175438553095, 0.9996129870414734, -206.21571350097656], [0.0, 0.0, 0.0, 1.0]], [[-0.9277741312980652, 0.37235891819000244, -0.024165429174900055, -25858.3125], [-0.37211930751800537, -0.9280804395675659, -0.013919937424361706, 42386.2890625], [-0.027610676363110542, -0.0039221360348165035, 0.9996110200881958, -206.21217346191406], [0.0, 0.0, 0.0, 1.0]], [[-0.9353086352348328, 0.35270366072654724, -0.02824799157679081, -25858.505859375], [-0.3525888919830322, -0.93573397397995, -0.009111147373914719, 42386.8515625], [-0.029646141454577446, 0.0014381929067894816, 0.9995594024658203, -206.2091064453125], [0.0, 0.0, 0.0, 1.0]], [[-0.9427837133407593, 0.3322513997554779, 
-0.02771071158349514, -25858.6875], [-0.3323034644126892, -0.9431682229042053, -0.0028404882177710533, 42387.42578125], [-0.027079619467258453, 0.006530399434268475, 0.9996119141578674, -206.2110595703125], [0.0, 0.0, 0.0, 1.0]], [[-0.9497439861297607, 0.3118264079093933, -0.027401147410273552, -25858.85546875], [-0.3119356632232666, -0.950103223323822, -0.0003024629259016365, 42388.0078125], [-0.02612823247909546, 0.008260131813585758, 0.9996244311332703, -206.21542358398438], [0.0, 0.0, 0.0, 1.0]], [[-0.9560615420341492, 0.29190388321876526, -0.027174564078450203, -25859.015625], [-0.2919996976852417, -0.9564183354377747, -0.0004620955733116716, 42388.58984375], [-0.026125136762857437, 0.007493172772228718, 0.9996305704116821, -206.2193145751953], [0.0, 0.0, 0.0, 1.0]], [[-0.9614972472190857, 0.2735125720500946, -0.026718078181147575, -25859.169921875], [-0.2736152112483978, -0.9618392586708069, 0.00019213232735637575, 42389.18359375], [-0.025645943358540535, 0.007495206780731678, 0.9996429681777954, -206.22325134277344], [0.0, 0.0, 0.0, 1.0]], [[-0.9658998847007751, 0.2577052712440491, -0.025010479614138603, -25859.314453125], [-0.25785210728645325, -0.9661803245544434, 0.002780142007395625, 42389.79296875], [-0.023448176681995392, 0.009134344756603241, 0.9996832609176636, -206.22833251953125], [0.0, 0.0, 0.0, 1.0]], [[-0.9694044589996338, 0.24433428049087524, -0.023575065657496452, -25859.451171875], [-0.24455514550209045, -0.9696105122566223, 0.006946588400751352, 42390.41015625], [-0.021161342039704323, 0.012499457225203514, 0.9996979236602783, -206.23390197753906], [0.0, 0.0, 0.0, 1.0]], [[-0.9721361398696899, 0.2331629991531372, -0.024215707555413246, -25859.59375], [-0.23338928818702698, -0.9723585247993469, 0.006942086387425661, 42391.03515625], [-0.021927712485194206, 0.012400339357554913, 0.999682605266571, -206.2418670654297], [0.0, 0.0, 0.0, 1.0]], [[-0.9741659164428711, 0.22411926090717316, -0.027771340683102608, -25859.73828125], 
[-0.22423802316188812, -0.97453373670578, 0.0011980137787759304, 42391.65234375], [-0.026795608922839165, 0.007394454441964626, 0.9996135830879211, -206.24827575683594], [0.0, 0.0, 0.0, 1.0]], [[-0.9762356877326965, 0.21486447751522064, -0.028234068304300308, -25859.87890625], [-0.2148171365261078, -0.9766428470611572, -0.004735016729682684, 42392.28125], [-0.028591984882950783, 0.0014426691923290491, 0.9995900988578796, -206.2509765625], [0.0, 0.0, 0.0, 1.0]], [[-0.9783585667610168, 0.20473966002464294, -0.029936619102954865, -25860.01171875], [-0.20461368560791016, -0.9788159728050232, -0.007245764601975679, 42392.92578125], [-0.03078593872487545, -0.0009635135065764189, 0.9995255470275879, -206.25083923339844], [0.0, 0.0, 0.0, 1.0]], [[-0.9805221557617188, 0.19402876496315002, -0.030480559915304184, -25860.134765625], [-0.1938583254814148, -0.98099285364151, -0.0084789227694273, 42393.58203125], [-0.031546369194984436, -0.0024048613850027323, 0.9994993805885315, -206.2489013671875], [0.0, 0.0, 0.0, 1.0]], [[-0.9821469783782959, 0.1841408759355545, -0.038463544100522995, -25860.26953125], [-0.18402110040187836, -0.9828997254371643, -0.006661598104983568, 42394.25390625], [-0.039032477885484695, 0.0005354350432753563, 0.9992377758026123, -206.2462921142578], [0.0, 0.0, 0.0, 1.0]], [[-0.9836565852165222, 0.17546173930168152, -0.0404088981449604, -25860.3984375], [-0.17550982534885406, -0.984474778175354, -0.002382152946665883, 42394.93359375], [-0.04019951820373535, 0.004748938139528036, 0.9991803765296936, -206.24752807617188], [0.0, 0.0, 0.0, 1.0]], [[-0.9847887754440308, 0.1694187968969345, -0.03858035057783127, -25860.5078125], [-0.16946470737457275, -0.9855339527130127, -0.002100903308019042, 42395.62109375], [-0.03837818279862404, 0.004469062201678753, 0.9992532730102539, -206.25082397460938], [0.0, 0.0, 0.0, 1.0]], [[-0.9857231974601746, 0.16507451236248016, -0.03317050635814667, -25860.607421875], [-0.16499362885951996, -0.9862810373306274, 
-0.005179973319172859, 42396.3125], [-0.03357052057981491, 0.0003669023863039911, 0.9994362592697144, -206.253173828125], [0.0, 0.0, 0.0, 1.0]], [[-0.9864839315414429, 0.1613868772983551, -0.028349872678518295, -25860.71875], [-0.16122739017009735, -0.9868860840797424, -0.007838799618184566, 42397.0078125], [-0.02924317494034767, -0.0031620743684470654, 0.9995673298835754, -206.2524871826172], [0.0, 0.0, 0.0, 1.0]], [[-0.9866600036621094, 0.1595858633518219, -0.03216230869293213, -25860.83984375], [-0.1593405157327652, -0.9871723651885986, -0.010068475268781185, 42397.71484375], [-0.033356525003910065, -0.0048094033263623714, 0.9994319677352905, -206.24876403808594], [0.0, 0.0, 0.0, 1.0]], [[-0.9865854382514954, 0.15949216485023499, -0.03480704128742218, -25860.96484375], [-0.15936468541622162, -0.9871989488601685, -0.006424503866583109, 42398.43359375], [-0.03538613021373749, -0.0007913089939393103, 0.9993733763694763, -206.2458038330078], [0.0, 0.0, 0.0, 1.0]], [[-0.9866096377372742, 0.15911228954792023, -0.03584412857890129, -25861.087890625], [-0.15914630889892578, -0.9872531294822693, -0.0019208197481930256, 42399.1640625], [-0.03569285571575165, 0.0038093612529337406, 0.9993555545806885, -206.24644470214844], [0.0, 0.0, 0.0, 1.0]], [[-0.9866843819618225, 0.15863759815692902, -0.035889074206352234, -25861.2109375], [-0.1587444245815277, -0.9873196482658386, 0.00012841032003052533, 42399.8984375], [-0.03541361913084984, 0.005823891144245863, 0.9993557333946228, -206.24929809570312], [0.0, 0.0, 0.0, 1.0]], [[-0.9867287874221802, 0.15901923179626465, -0.032851289957761765, -25861.328125], [-0.15904612839221954, -0.9872695207595825, -0.0018104028422385454, 42400.63671875], [-0.03272096440196037, 0.0034384937025606632, 0.9994586110115051, -206.25344848632812], [0.0, 0.0, 0.0, 1.0]], [[-0.9866440296173096, 0.1596824824810028, -0.03217330202460289, -25861.451171875], [-0.15966971218585968, -0.9871659874916077, -0.0029827530961483717, 42401.37890625], 
[-0.03223668038845062, 0.0021941859740763903, 0.9994778633117676, -206.2545928955078], [0.0, 0.0, 0.0, 1.0]], [[-0.9865548014640808, 0.16005955636501312, -0.03302248939871788, -25861.580078125], [-0.16012059152126312, -0.9870971441268921, -0.0008053207420744002, 42402.13671875], [-0.03272530436515808, 0.004493087995797396, 0.9994542598724365, -206.2569580078125], [0.0, 0.0, 0.0, 1.0]], [[-0.9864305853843689, 0.16024312376976013, -0.03573186323046684, -25861.71484375], [-0.1604578047990799, -0.9870374798774719, 0.003204619511961937, 42402.90625], [-0.0347551666200161, 0.008894591592252254, 0.9993562698364258, -206.2606201171875], [0.0, 0.0, 0.0, 1.0]], [[-0.9863449931144714, 0.1605234295129776, -0.03682021051645279, -25861.84765625], [-0.1608140468597412, -0.9869717955589294, 0.005052348133176565, 42403.67578125], [-0.035529494285583496, 0.010904566384851933, 0.9993091225624084, -206.26866149902344], [0.0, 0.0, 0.0, 1.0]], [[-0.9862419962882996, 0.16099880635738373, -0.03749847039580345, -25861.978515625], [-0.1612323522567749, -0.9869111180305481, 0.0032697555143386126, 42404.44921875], [-0.03648122772574425, 0.009270736947655678, 0.9992913603782654, -206.27645874023438], [0.0, 0.0, 0.0, 1.0]], [[-0.9861992001533508, 0.1612873524427414, -0.03738458827137947, -25862.111328125], [-0.16136690974235535, -0.9868940711021423, -0.0008991777431219816, 42405.2265625], [-0.037039656192064285, 0.005145867355167866, 0.9993005394935608, -206.28257751464844], [0.0, 0.0, 0.0, 1.0]], [[-0.9861544370651245, 0.16157294809818268, -0.037331029772758484, -25862.24609375], [-0.16148900985717773, -0.9868603944778442, -0.005272799637168646, 42406.00390625], [-0.03769245743751526, 0.0008287565433420241, 0.9992890357971191, -206.28448486328125], [0.0, 0.0, 0.0, 1.0]], [[-0.986129641532898, 0.16138315200805664, -0.03878027945756912, -25862.3828125], [-0.1612759679555893, -0.9868917465209961, -0.005897378083318472, 42406.79296875], [-0.039223670959472656, 0.0004387473745737225, 
0.9992303252220154, -206.2841796875], [0.0, 0.0, 0.0, 1.0]], [[-0.9861969351768494, 0.16072021424770355, -0.03980838879942894, -25862.517578125], [-0.1606501340866089, -0.9869988560676575, -0.0049741994589567184, 42407.57421875], [-0.04009028896689415, 0.0014896825887262821, 0.9991949200630188, -206.2848358154297], [0.0, 0.0, 0.0, 1.0]], [[-0.9864142537117004, 0.1593002825975418, -0.04012981057167053, -25862.65234375], [-0.15926110744476318, -0.9872275590896606, -0.004192222375422716, 42408.36328125], [-0.040285080671310425, 0.0022558497730642557, 0.9991856813430786, -206.28598022460938], [0.0, 0.0, 0.0, 1.0]], [[-0.9866287112236023, 0.15825992822647095, -0.03895552083849907, -25862.779296875], [-0.15816949307918549, -0.9873971939086914, -0.00541211012750864, 42409.140625], [-0.039321091026067734, 0.0008218316361308098, 0.9992262721061707, -206.2867431640625], [0.0, 0.0, 0.0, 1.0]], [[-0.9867622256278992, 0.15778689086437225, -0.03746675327420235, -25862.904296875], [-0.1576201617717743, -0.9874722957611084, -0.007381588686257601, 42409.91796875], [-0.03816209360957146, -0.001378356944769621, 0.9992705583572388, -206.28640747070312], [0.0, 0.0, 0.0, 1.0]], [[-0.9870380163192749, 0.15760073065757751, -0.030299032106995583, -25863.021484375], [-0.1574694961309433, -0.9875012040138245, -0.0066850073635578156, 42410.6953125], [-0.0309738926589489, -0.0018271830631420016, 0.9995185136795044, -206.28562927246094], [0.0, 0.0, 0.0, 1.0]], [[-0.987136721611023, 0.15750758349895477, -0.02742992714047432, -25863.138671875], [-0.15740641951560974, -0.9875167608261108, -0.005823633633553982, 42411.47265625], [-0.028004778549075127, -0.0014310767874121666, 0.999606728553772, -206.2837677001953], [0.0, 0.0, 0.0, 1.0]], [[-0.9872903227806091, 0.15694092214107513, -0.025047337636351585, -25863.26171875], [-0.15689583122730255, -0.9876079559326172, -0.0037675537168979645, 42412.25], [-0.02532823383808136, 0.00021015360835008323, 0.9996791481971741, -206.28274536132812], [0.0, 0.0, 
0.0, 1.0]], [[-0.9874152541160583, 0.15627820789813995, -0.02425299398601055, -25863.38671875], [-0.15628843009471893, -0.9877103567123413, -0.0014849124709144235, 42413.01953125], [-0.0241869930177927, 0.0023242367897182703, 0.9997047781944275, -206.2836151123047], [0.0, 0.0, 0.0, 1.0]], [[-0.987499475479126, 0.1559947282075882, -0.022594019770622253, -25863.505859375], [-0.15602707862854004, -0.9877527356147766, -0.0003346455632708967, 42413.79296875], [-0.02236950770020485, 0.0031948164105415344, 0.9997446537017822, -206.28561401367188], [0.0, 0.0, 0.0, 1.0]], [[-0.9876397848129272, 0.15533235669136047, -0.020967472344636917, -25863.626953125], [-0.1553753912448883, -0.9878554344177246, 0.00042896941886283457, 42414.55859375], [-0.020646197721362114, 0.003681496251374483, 0.9997800588607788, -206.2880859375], [0.0, 0.0, 0.0, 1.0]], [[-0.9878649115562439, 0.1541965901851654, -0.01861247420310974, -25863.74609375], [-0.15425091981887817, -0.9880306124687195, 0.0015105083584785461, 42415.32421875], [-0.018156778067350388, 0.004363169427961111, 0.9998255968093872, -206.29075622558594], [0.0, 0.0, 0.0, 1.0]], [[-0.9879882335662842, 0.15343688428401947, -0.01834155060350895, -25863.865234375], [-0.15353557467460632, -0.9881346225738525, 0.004091739188879728, 42416.09375], [-0.017496097832918167, 0.006858670152723789, 0.9998233914375305, -206.2942657470703], [0.0, 0.0, 0.0, 1.0]], [[-0.9880595803260803, 0.1530563235282898, -0.017664646729826927, -25863.986328125], [-0.15318144857883453, -0.9881801605224609, 0.005953587591648102, 42416.85546875], [-0.01654461771249771, 0.00858839601278305, 0.9998262524604797, -206.29966735839844], [0.0, 0.0, 0.0, 1.0]], [[-0.9881704449653625, 0.15248873829841614, -0.016320737078785896, -25864.103515625], [-0.15260432660579681, -0.9882686734199524, 0.006079515907913446, 42417.609375], [-0.015202214941382408, 0.008498214185237885, 0.9998483061790466, -206.30638122558594], [0.0, 0.0, 0.0, 1.0]], [[-0.9882584810256958, 0.1519204080104828, 
-0.01628730818629265, -25864.22265625], [-0.15203292667865753, -0.9883577823638916, 0.005899536423385143, 42418.3671875], [-0.015201427042484283, 0.00830647349357605, 0.9998499155044556, -206.3120880126953], [0.0, 0.0, 0.0, 1.0]], [[-0.9883243441581726, 0.1514488011598587, -0.016683949157595634, -25864.341796875], [-0.15152692794799805, -0.9884468913078308, 0.0035153087228536606, 42419.11328125], [-0.01595880836248398, 0.006002333480864763, 0.9998546242713928, -206.31785583496094], [0.0, 0.0, 0.0, 1.0]], [[-0.9883610010147095, 0.15116548538208008, -0.017077747732400894, -25864.4609375], [-0.15121620893478394, -0.9884992241859436, 0.001711284276098013, 42419.859375], [-0.01662265509366989, 0.0042737992480397224, 0.99985271692276, -206.32144165039062], [0.0, 0.0, 0.0, 1.0]], [[-0.9883861541748047, 0.15087926387786865, -0.018117282539606094, -25864.578125], [-0.15096402168273926, -0.9885334372520447, 0.003396497806534171, 42420.609375], [-0.0173970777541399, 0.006092109251767397, 0.9998300671577454, -206.32415771484375], [0.0, 0.0, 0.0, 1.0]], [[-0.988437294960022, 0.15049731731414795, -0.018501512706279755, -25864.6953125], [-0.15062780678272247, -0.9885731339454651, 0.005865719635039568, 42421.35546875], [-0.01740732230246067, 0.008584738709032536, 0.999811589717865, -206.32876586914062], [0.0, 0.0, 0.0, 1.0]], [[-0.9885229468345642, 0.14992742240428925, -0.018551159650087357, -25864.8125], [-0.15007515251636505, -0.9886509776115417, 0.006838249042630196, 42422.09375], [-0.01731538027524948, 0.009543835185468197, 0.9998044967651367, -206.3361358642578], [0.0, 0.0, 0.0, 1.0]], [[-0.9885486364364624, 0.1495496779680252, -0.020162850618362427, -25864.927734375], [-0.14967311918735504, -0.9887241721153259, 0.004750754684209824, 42422.8203125], [-0.019225021824240685, 0.007714189123362303, 0.9997854232788086, -206.3417205810547], [0.0, 0.0, 0.0, 1.0]], [[-0.9885879755020142, 0.1490766853094101, -0.02168111503124237, -25865.04296875], [-0.1491686999797821, 
-0.9888081550598145, 0.0026813787408173084, 42423.54296875], [-0.021038729697465897, 0.0058849225752055645, 0.9997613430023193, -206.3463134765625], [0.0, 0.0, 0.0, 1.0]], [[-0.988615870475769, 0.14888325333595276, -0.021735217422246933, -25865.15625], [-0.14904820919036865, -0.9888107180595398, 0.006168910302221775, 42424.265625], [-0.02057356759905815, 0.009338278323411942, 0.999744713306427, -206.3512420654297], [0.0, 0.0, 0.0, 1.0]], [[-0.9885808229446411, 0.1490599513053894, -0.022116396576166153, -25865.265625], [-0.1493680328130722, -0.988696277141571, 0.012992416508495808, 42424.984375], [-0.01992974989116192, 0.016147535294294357, 0.9996709227561951, -206.3590850830078], [0.0, 0.0, 0.0, 1.0]], [[-0.9886137247085571, 0.1486704796552658, -0.023236557841300964, -25865.375], [-0.1490698754787445, -0.9886887669563293, 0.016511816531419754, 42425.69140625], [-0.02051890268921852, 0.019787680357694626, 0.9995935559272766, -206.37149047851562], [0.0, 0.0, 0.0, 1.0]], [[-0.9888126850128174, 0.14734293520450592, -0.023228898644447327, -25865.484375], [-0.14774467051029205, -0.9888856410980225, 0.01663772016763687, 42426.37890625], [-0.02051927149295807, 0.01988353580236435, 0.9995916485786438, -206.38563537597656], [0.0, 0.0, 0.0, 1.0]], [[-0.9889319539070129, 0.1461193710565567, -0.02574438787996769, -25865.58984375], [-0.14647023379802704, -0.9891385436058044, 0.012305370531976223, 42427.05078125], [-0.023666711524128914, 0.01593996025621891, 0.9995927810668945, -206.3972625732422], [0.0, 0.0, 0.0, 1.0]], [[-0.9892296195030212, 0.14452269673347473, -0.02319508232176304, -25865.6875], [-0.14465020596981049, -0.9894751310348511, 0.003908343613147736, 42427.70703125], [-0.022386109456419945, 0.007221423089504242, 0.9997232556343079, -206.40541076660156], [0.0, 0.0, 0.0, 1.0]], [[-0.9893380999565125, 0.1437668651342392, -0.023265203461050987, -25865.783203125], [-0.14377306401729584, -0.9896097779273987, -0.001415185397490859, 42428.35546875], [-0.02322692610323429, 
0.0019448125967755914, 0.9997283220291138, -206.408935546875], [0.0, 0.0, 0.0, 1.0]], [[-0.9893678426742554, 0.1436724215745926, -0.022573430091142654, -25865.876953125], [-0.1436752825975418, -0.9896237850189209, -0.0015033138915896416, 42429.00390625], [-0.022555187344551086, 0.0017559134867042303, 0.9997440576553345, -206.40957641601562], [0.0, 0.0, 0.0, 1.0]], [[-0.9894242286682129, 0.14329245686531067, -0.022515790536999702, -25865.9765625], [-0.14333024621009827, -0.9896749258041382, 6.458465213654563e-05, 42429.65234375], [-0.022274058312177658, 0.0032910953741520643, 0.9997464418411255, -206.41073608398438], [0.0, 0.0, 0.0, 1.0]], [[-0.9894774556159973, 0.1428159773349762, -0.023194730281829834, -25866.072265625], [-0.14291560649871826, -0.9897312521934509, 0.002687680535018444, 42430.29296875], [-0.022572705522179604, 0.005974288564175367, 0.9997273087501526, -206.4126434326172], [0.0, 0.0, 0.0, 1.0]], [[-0.9894091486930847, 0.14329001307487488, -0.023186037316918373, -25866.16796875], [-0.1434025913476944, -0.9896590709686279, 0.0032595049124211073, 42430.921875], [-0.022479215636849403, 0.006549921352416277, 0.9997258186340332, -206.4164581298828], [0.0, 0.0, 0.0, 1.0]], [[-0.9892528057098389, 0.14414367079734802, -0.024525318294763565, -25866.26171875], [-0.14424626529216766, -0.9895387887954712, 0.0024573185946792364, 42431.5390625], [-0.02391454391181469, 0.005968595389276743, 0.9996961355209351, -206.41981506347656], [0.0, 0.0, 0.0, 1.0]], [[-0.9892119765281677, 0.14461827278137207, -0.02335142344236374, -25866.35546875], [-0.14472663402557373, -0.9894670844078064, 0.0030102685559540987, 42432.1484375], [-0.02267012558877468, 0.006357365753501654, 0.9997227191925049, -206.4232177734375], [0.0, 0.0, 0.0, 1.0]], [[-0.9888578057289124, 0.14689350128173828, -0.024137362837791443, -25866.447265625], [-0.14702165126800537, -0.9891267418861389, 0.0036130284424871206, 42432.74609375], [-0.02334417775273323, 0.007121485657989979, 0.9997020959854126, 
-206.42739868164062], [0.0, 0.0, 0.0, 1.0]], [[-0.9882333874702454, 0.15192249417304993, -0.017732052132487297, -25866.537109375], [-0.15199418365955353, -0.988377571105957, 0.0027595306746661663, 42433.33203125], [-0.01710672676563263, 0.005422229412943125, 0.9998389482498169, -206.43115234375], [0.0, 0.0, 0.0, 1.0]], [[-0.9868314862251282, 0.16130071878433228, -0.012067831121385098, -25866.626953125], [-0.16133923828601837, -0.9868963956832886, 0.002282496774569154, 42433.90625], [-0.01154153048992157, 0.004199454560875893, 0.9999245405197144, -206.43475341796875], [0.0, 0.0, 0.0, 1.0]], [[-0.9845343232154846, 0.17500710487365723, -0.008042433299124241, -25866.732421875], [-0.17502661049365997, -0.9845620393753052, 0.001781414495781064, 42434.46875], [-0.007606512866914272, 0.003161503467708826, 0.999966025352478, -206.4368438720703], [0.0, 0.0, 0.0, 1.0]], [[-0.9812451601028442, 0.1927267163991928, -0.003804541891440749, -25866.85546875], [-0.19273115694522858, -0.9812512397766113, 0.0008310406701639295, 42435.01171875], [-0.0035730476956814528, 0.0015487084165215492, 0.9999924302101135, -206.43814086914062], [0.0, 0.0, 0.0, 1.0]], [[-0.9768567681312561, 0.21384675800800323, -0.004525479860603809, -25866.994140625], [-0.21383842825889587, -0.9768664240837097, -0.0022572462912648916, 42435.54296875], [-0.004903492983430624, -0.001237285090610385, 0.9999871850013733, -206.43728637695312], [0.0, 0.0, 0.0, 1.0]], [[-0.9713470339775085, 0.23766225576400757, -0.0012386039597913623, -25867.146484375], [-0.23765431344509125, -0.9713388085365295, -0.004625269677489996, 42436.0625], [-0.0023023549001663923, -0.00419838260859251, 0.9999884963035583, -206.4358367919922], [0.0, 0.0, 0.0, 1.0]], [[-0.964290201663971, 0.2648466229438782, 0.0008507332531735301, -25867.3125], [-0.26484552025794983, -0.9642625451087952, -0.007396068423986435, 42436.56640625], [-0.001138491672463715, -0.007357269991189241, 0.9999722838401794, -206.4322509765625], [0.0, 0.0, 0.0, 1.0]], 
[[-0.9557662606239319, 0.29411935806274414, 0.0021568750962615013, -25867.49609375], [-0.2941272258758545, -0.9557453393936157, -0.006325922440737486, 42437.0703125], [0.0002008482115343213, -0.006680499762296677, 0.9999776482582092, -206.42808532714844], [0.0, 0.0, 0.0, 1.0]], [[-0.9453373551368713, 0.3260255455970764, 0.006681034807115793, -25867.697265625], [-0.3260481655597687, -0.9453495740890503, -0.0026091989129781723, 42437.5625], [0.005465247668325901, -0.004644912667572498, 0.9999743103981018, -206.42356872558594], [0.0, 0.0, 0.0, 1.0]], [[-0.9327679872512817, 0.360424667596817, 0.006166630424559116, -25867.916015625], [-0.3604316711425781, -0.9327856302261353, -2.598538776510395e-05, 42438.04296875], [0.005742780398577452, -0.0022468881215900183, 0.9999809861183167, -206.4209747314453], [0.0, 0.0, 0.0, 1.0]], [[-0.9184994697570801, 0.395394504070282, 0.004687360487878323, -25868.16015625], [-0.3953956365585327, -0.9185106754302979, 0.0007144407136365771, 42438.50390625], [0.0045878770761191845, -0.0011971485801041126, 0.9999887347221375, -206.4185333251953], [0.0, 0.0, 0.0, 1.0]], [[-0.902469277381897, 0.4307510554790497, 0.001661132089793682, -25868.427734375], [-0.43074917793273926, -0.9024706482887268, 0.0013668725732713938, 42438.94921875], [0.0020879062358289957, 0.0005180284497328103, 0.9999976754188538, -206.4161834716797], [0.0, 0.0, 0.0, 1.0]], [[-0.884435772895813, 0.46664193272590637, -0.0043110898695886135, -25868.7109375], [-0.46665090322494507, -0.8844406008720398, 0.0013210593024268746, 42439.375], [-0.0031964408699423075, 0.0031801664736121893, 0.9999898672103882, -206.4154510498047], [0.0, 0.0, 0.0, 1.0]], [[-0.8647923469543457, 0.5021185874938965, -0.0033514194656163454, -25869.0078125], [-0.5021219849586487, -0.8647968173027039, 0.00018125797214452177, 42439.7890625], [-0.0028072823770344257, 0.0018395715160295367, 0.9999943375587463, -206.41549682617188], [0.0, 0.0, 0.0, 1.0]], [[-0.8430032134056091, 0.5379029512405396, 
0.0024500444997102022, -25869.3046875], [-0.5379032492637634, -0.8430063724517822, 0.0005870807217434049, 42440.1875], [0.0023811981081962585, -0.0008229768136516213, 0.9999967813491821, -206.4152374267578], [0.0, 0.0, 0.0, 1.0]], [[-0.8183915019035339, 0.5746257305145264, 0.006372430361807346, -25869.6171875], [-0.5746252536773682, -0.818414032459259, 0.0020837525371462107, 42440.5703125], [0.00641266442835331, -0.0019564346875995398, 0.9999775290489197, -206.41433715820312], [0.0, 0.0, 0.0, 1.0]], [[-0.7947198152542114, 0.6069549322128296, 0.005113605875521898, -25869.923828125], [-0.6069426536560059, -0.7947362661361694, 0.0038563122507184744, 42440.90234375], [0.006404577754437923, -3.8977013900876045e-05, 0.9999794363975525, -206.4120635986328], [0.0, 0.0, 0.0, 1.0]], [[-0.7669616937637329, 0.6416922807693481, 0.0008565092575736344, -25870.28515625], [-0.6416816711425781, -0.7669554948806763, 0.004901634994894266, 42441.23828125], [0.003802246181294322, 0.003209759946912527, 0.9999876022338867, -206.41091918945312], [0.0, 0.0, 0.0, 1.0]], [[-0.736672043800354, 0.6762122511863708, -0.007162724621593952, -25870.666015625], [-0.6762372851371765, -0.7366820573806763, 0.0016405738424509764, 42441.5625], [-0.004167274106293917, 0.006052267272025347, 0.9999729990959167, -206.41067504882812], [0.0, 0.0, 0.0, 1.0]], [[-0.7034980654716492, 0.710583508014679, -0.012711664661765099, -25871.0546875], [-0.710640549659729, -0.7035552859306335, -4.189064566162415e-05, 42441.859375], [-0.008973125368356705, 0.009003954939544201, 0.9999191761016846, -206.4124298095703], [0.0, 0.0, 0.0, 1.0]], [[-0.667796790599823, 0.7442411780357361, -0.012350957840681076, -25871.447265625], [-0.744310200214386, -0.6678321957588196, 0.0015954222762957215, 42442.140625], [-0.007060986012220383, 0.01025836169719696, 0.9999223947525024, -206.41604614257812], [0.0, 0.0, 0.0, 1.0]], [[-0.6303104162216187, 0.776277482509613, -0.010104779154062271, -25871.84375], [-0.776343047618866, 
-0.6302676200866699, 0.007370491046458483, 42442.4140625], [-0.0006471683736890554, 0.01249047089368105, 0.9999217391014099, -206.42169189453125], [0.0, 0.0, 0.0, 1.0]], [[-0.5917971134185791, 0.806060791015625, -0.006495444569736719, -25872.251953125], [-0.806012749671936, -0.5916140675544739, 0.01833568699657917, 42442.671875], [0.010936884209513664, 0.01608642004430294, 0.9998107552528381, -206.42709350585938], [0.0, 0.0, 0.0, 1.0]], [[-0.5509465932846069, 0.8345217704772949, -0.005588372237980366, -25872.66796875], [-0.8343756794929504, -0.5506923794746399, 0.023561684414744377, 42442.8984375], [0.016585268080234528, 0.017644032835960388, 0.9997067451477051, -206.43405151367188], [0.0, 0.0, 0.0, 1.0]], [[-0.5083919763565063, 0.8611041903495789, -0.006098213605582714, -25873.09375], [-0.8608928322792053, -0.5080770254135132, 0.02685883827507496, 42443.09375], [0.02002990059554577, 0.018904726952314377, 0.9996206164360046, -206.43975830078125], [0.0, 0.0, 0.0, 1.0]], [[-0.46411746740341187, 0.8856962323188782, -0.011708753183484077, -25873.533203125], [-0.8856914043426514, -0.463853657245636, 0.01976182870566845, 42443.2421875], [0.012071832083165646, 0.01954215206205845, 0.9997361302375793, -206.44509887695312], [0.0, 0.0, 0.0, 1.0]], [[-0.41985729336738586, 0.9075108766555786, -0.011997120454907417, -25873.966796875], [-0.9075526595115662, -0.4196831285953522, 0.014637505635619164, 42443.36328125], [0.008248706348240376, 0.017033684998750687, 0.99982088804245, -206.45065307617188], [0.0, 0.0, 0.0, 1.0]], [[-0.3742799460887909, 0.9272240996360779, -0.013038366101682186, -25874.3984375], [-0.9273147583007812, -0.3742219805717468, 0.006725780665874481, 42443.46484375], [0.0013570659793913364, 0.014607994817197323, 0.9998923540115356, -206.45460510253906], [0.0, 0.0, 0.0, 1.0]], [[-0.3281576931476593, 0.9445555210113525, -0.011285834945738316, -25874.822265625], [-0.9445732235908508, -0.32799437642097473, 0.014182115904986858, 42443.56640625], 
[0.009694106876850128, 0.015314268879592419, 0.999835729598999, -206.46104431152344], [0.0, 0.0, 0.0, 1.0]], [[-0.2875688970088959, 0.9576786756515503, -0.012474125251173973, -25875.205078125], [-0.9576417207717896, -0.28730425238609314, 0.019459566101431847, 42443.64453125], [0.015052142553031445, 0.017541706562042236, 0.9997327923774719, -206.46600341796875], [0.0, 0.0, 0.0, 1.0]], [[-0.24450427293777466, 0.9695602059364319, -0.013065560720860958, -25875.623046875], [-0.9694333076477051, -0.24414420127868652, 0.02434552274644375, 42443.70703125], [0.020414570346474648, 0.01861877366900444, 0.9996182322502136, -206.47132873535156], [0.0, 0.0, 0.0, 1.0]], [[-0.20206797122955322, 0.9793079495429993, -0.01115754246711731, -25876.03125], [-0.9791010618209839, -0.2017313539981842, 0.02579825185239315, 42443.7421875], [0.023013610392808914, 0.016137361526489258, 0.9996048808097839, -206.47659301757812], [0.0, 0.0, 0.0, 1.0]], [[-0.15923453867435455, 0.9872024059295654, -0.008705182000994682, -25876.4296875], [-0.9870668649673462, -0.15903480350971222, 0.020170968025922775, 42443.74609375], [0.018528403714299202, 0.011804512701928616, 0.9997586607933044, -206.47943115234375], [0.0, 0.0, 0.0, 1.0]], [[-0.11716584861278534, 0.9930835366249084, -0.0075636086985468864, -25876.8203125], [-0.9930619597434998, -0.1170797273516655, 0.01097142230719328, 42443.71875], [0.01000999379903078, 0.00879660714417696, 0.9999111890792847, -206.4795379638672], [0.0, 0.0, 0.0, 1.0]], [[-0.07828949391841888, 0.9969130158424377, -0.005929608829319477, -25877.203125], [-0.9969154596328735, -0.07825400680303574, 0.005997009575366974, 42443.6875], [0.005514482036232948, 0.0063808211125433445, 0.9999644160270691, -206.4803466796875], [0.0, 0.0, 0.0, 1.0]], [[-0.043524470180273056, 0.9990523457527161, -0.0002582268207333982, -25877.5703125], [-0.9990466833114624, -0.0435250923037529, -0.003367174416780472, 42443.64453125], [-0.003375222207978368, 0.00011142613948322833, 0.9999942779541016, 
-206.48007202148438], [0.0, 0.0, 0.0, 1.0]], [[-0.008926436305046082, 0.99996018409729, 0.00016047740064095706, -25877.935546875], [-0.9999601244926453, -0.008926386013627052, -0.000307524751406163, 42443.609375], [-0.0003060789604205638, -0.0001632161729503423, 0.9999998807907104, -206.4794158935547], [0.0, 0.0, 0.0, 1.0]], [[0.02203771471977234, 0.9997539520263672, -0.002538797678425908, -25878.30078125], [-0.999756932258606, 0.022039268165826797, 0.000585488392971456, 42443.57421875], [0.0006412986549548805, 0.002525277668610215, 0.9999966025352478, -206.47918701171875], [0.0, 0.0, 0.0, 1.0]], [[0.04530442878603935, 0.9989615082740784, -0.004843585658818483, -25878.65234375], [-0.9989636540412903, 0.04532467946410179, 0.004156157374382019, 42443.52734375], [0.004371375776827335, 0.004650273825973272, 0.9999796152114868, -206.47994995117188], [0.0, 0.0, 0.0, 1.0]], [[0.06608812510967255, 0.9978089928627014, -0.003086243523284793, -25878.9921875], [-0.9978103041648865, 0.06609580665826797, 0.002454388188198209, 42443.4765625], [0.0026529997121542692, 0.002917279489338398, 0.9999921917915344, -206.48057556152344], [0.0, 0.0, 0.0, 1.0]], [[0.0861663967370987, 0.9962806105613708, 0.0005879076779820025, -25879.3203125], [-0.9962773323059082, 0.08616763353347778, -0.0025650975294411182, 42443.41796875], [-0.0026062149554491043, -0.0003646938712336123, 0.999996542930603, -206.4793243408203], [0.0, 0.0, 0.0, 1.0]], [[0.10217996686697006, 0.9947591423988342, 0.0036757669877260923, -25879.63671875], [-0.9947469234466553, 0.10220013558864594, -0.005796803627163172, 42443.3515625], [-0.006142085418105125, -0.003064140910282731, 0.999976396560669, -206.4788055419922], [0.0, 0.0, 0.0, 1.0]], [[0.11387165635824203, 0.9934731721878052, 0.0066585177555680275, -25879.947265625], [-0.9934612512588501, 0.11392076313495636, -0.0075303660705685616, 42443.296875], [-0.008239759132266045, -0.005757484119385481, 0.9999494552612305, -206.47718811035156], [0.0, 0.0, 0.0, 1.0]], 
[[0.12349022924900055, 0.9923205971717834, 0.007071885280311108, -25880.251953125], [-0.9923198223114014, 0.12353545427322388, -0.00635836087167263, 42443.24609375], [-0.007183160167187452, -0.0062323762103915215, 0.9999547600746155, -206.4744110107422], [0.0, 0.0, 0.0, 1.0]], [[0.13282963633537292, 0.9911219477653503, 0.005797707475721836, -25880.548828125], [-0.9911258816719055, 0.1328555792570114, -0.004346872214227915, 42443.19921875], [-0.005078537855297327, -0.0051688640378415585, 0.999973714351654, -206.47271728515625], [0.0, 0.0, 0.0, 1.0]], [[0.1407211720943451, 0.9900347590446472, 0.005366935394704342, -25880.833984375], [-0.9900419116020203, 0.14073902368545532, -0.003109091892838478, 42443.1484375], [-0.0038334454875439405, -0.004875975660979748, 0.9999807476997375, -206.47100830078125], [0.0, 0.0, 0.0, 1.0]], [[0.1458263248205185, 0.9892856478691101, 0.0069832345470786095, -25881.10546875], [-0.9892940521240234, 0.14586061239242554, -0.004680834710597992, 42443.09765625], [-0.0056492602452635765, -0.006225883960723877, 0.9999646544456482, -206.46926879882812], [0.0, 0.0, 0.0, 1.0]], [[0.14901283383369446, 0.9887973070144653, 0.008664094842970371, -25881.36328125], [-0.9887934923171997, 0.14908139407634735, -0.00789232924580574, 42443.04296875], [-0.009095568209886551, -0.007390942424535751, 0.999931275844574, -206.4669647216797], [0.0, 0.0, 0.0, 1.0]], [[0.15154697000980377, 0.9884142279624939, 0.00841455813497305, -25881.611328125], [-0.9883667230606079, 0.15163928270339966, -0.011697547510266304, 42442.9921875], [-0.012837998569011688, -0.006543940864503384, 0.9998961687088013, -206.46456909179688], [0.0, 0.0, 0.0, 1.0]], [[0.15280993282794952, 0.9882367253303528, 0.0061117857694625854, -25881.8515625], [-0.9881758093833923, 0.15287326276302338, -0.011764372698962688, 42442.953125], [-0.012560312636196613, -0.004241805989295244, 0.9999120831489563, -206.46279907226562], [0.0, 0.0, 0.0, 1.0]], [[0.15248416364192963, 0.9883031249046326, 
0.00235653854906559, -25882.0859375], [-0.9882737398147583, 0.1524980068206787, -0.00770249217748642, 42442.92578125], [-0.007971763610839844, -0.0011543970322236419, 0.9999675154685974, -206.46270751953125], [0.0, 0.0, 0.0, 1.0]], [[0.15192654728889465, 0.9883909225463867, 0.001334954984486103, -25882.3046875], [-0.9883787035942078, 0.15193134546279907, -0.00495275529101491, 42442.8984375], [-0.005098079331219196, -0.0005669862148351967, 0.9999868273735046, -206.46226501464844], [0.0, 0.0, 0.0, 1.0]], [[0.15229184925556183, 0.9883286952972412, 0.003684449475258589, -25882.505859375], [-0.9883198738098145, 0.152309387922287, -0.0050656236708164215, 42442.8671875], [-0.005567676387727261, -0.0028699615504592657, 0.9999803304672241, -206.46202087402344], [0.0, 0.0, 0.0, 1.0]], [[0.15376558899879456, 0.9880840182304382, 0.0067896065302193165, -25882.69140625], [-0.9880608320236206, 0.15382136404514313, -0.008642137981951237, 42442.828125], [-0.009583544917404652, -0.005379681475460529, 0.9999396204948425, -206.46087646484375], [0.0, 0.0, 0.0, 1.0]], [[0.153901144862175, 0.9880516529083252, 0.00826869998127222, -25882.873046875], [-0.987967312335968, 0.15400709211826324, -0.014229456894099712, 42442.78515625], [-0.015332875773310661, -0.005979275796562433, 0.9998645186424255, -206.45965576171875], [0.0, 0.0, 0.0, 1.0]], [[0.15264368057250977, 0.9882445931434631, 0.008508256636559963, -25883.046875], [-0.9881027340888977, 0.15277408063411713, -0.01769312098622322, 42442.75], [-0.01878497377038002, -0.005706289317458868, 0.9998072385787964, -206.4584503173828], [0.0, 0.0, 0.0, 1.0]], [[0.15245936810970306, 0.9882755875587463, 0.008220851421356201, -25883.2109375], [-0.9881311655044556, 0.15258465707302094, -0.017739836126565933, 42442.72265625], [-0.018786223605275154, -0.005418675485998392, 0.9998087882995605, -206.4568634033203], [0.0, 0.0, 0.0, 1.0]], [[0.1537075787782669, 0.9880859851837158, 0.00774709228426218, -25883.365234375], [-0.9879623651504517, 
0.1538173109292984, -0.016449838876724243, 42442.703125], [-0.01744549162685871, -0.005125370807945728, 0.9998346567153931, -206.4561004638672], [0.0, 0.0, 0.0, 1.0]], [[0.1541111171245575, 0.9880295991897583, 0.0068820142187178135, -25883.513671875], [-0.987931489944458, 0.1541978269815445, -0.014644920825958252, 42442.6796875], [-0.015530805103480816, -0.004542013164609671, 0.9998690485954285, -206.45571899414062], [0.0, 0.0, 0.0, 1.0]], [[0.15327580273151398, 0.9881664514541626, 0.005797871854156256, -25883.65234375], [-0.9880643486976624, 0.15334579348564148, -0.014627636410295963, 42442.66015625], [-0.01534361857920885, -0.0034866079222410917, 0.9998762011528015, -206.4552001953125], [0.0, 0.0, 0.0, 1.0]], [[0.1531018167734146, 0.9881991744041443, 0.004719367250800133, -25883.7890625], [-0.9880852103233337, 0.15315668284893036, -0.015188422054052353, 42442.63671875], [-0.015731988474726677, -0.0023377621546387672, 0.9998735189437866, -206.45387268066406], [0.0, 0.0, 0.0, 1.0]], [[0.15396729111671448, 0.9880682826042175, 0.0038957407232373953, -25883.9140625], [-0.9879490733146667, 0.15400949120521545, -0.015415484085679054, 42442.6171875], [-0.015831531956791878, -0.0014753134455531836, 0.9998735785484314, -206.45411682128906], [0.0, 0.0, 0.0, 1.0]], [[0.1543474793434143, 0.988009512424469, 0.003739295294508338, -25884.029296875], [-0.9878771305084229, 0.1543881744146347, -0.016216780990362167, 42442.59375], [-0.016599636524915695, -0.0011909453896805644, 0.999861478805542, -206.45361328125], [0.0, 0.0, 0.0, 1.0]], [[0.15378306806087494, 0.9880985617637634, 0.0034762846771627665, -25884.140625], [-0.9879619479179382, 0.15381968021392822, -0.016453681513667107, 42442.578125], [-0.016792580485343933, -0.0009041392477229238, 0.9998586177825928, -206.4530487060547], [0.0, 0.0, 0.0, 1.0]], [[0.15378586947917938, 0.988098680973053, 0.003302117111161351, -25884.2421875], [-0.9879599213600159, 0.15381966531276703, -0.016578633338212967, 42442.5625], 
[-0.01688925363123417, -0.0007127997814677656, 0.9998571276664734, -206.45333862304688], [0.0, 0.0, 0.0, 1.0]], [[0.15425750613212585, 0.9880247116088867, 0.00345379370264709, -25884.3359375], [-0.9878911972045898, 0.15429352223873138, -0.016262715682387352, 42442.546875], [-0.01660086214542389, -0.0009033266105689108, 0.9998617768287659, -206.45303344726562], [0.0, 0.0, 0.0, 1.0]], [[0.1543470174074173, 0.9880096912384033, 0.0037206397391855717, -25884.42578125], [-0.9878690838813782, 0.1543879508972168, -0.016705220565199852, 42442.53125], [-0.017079340294003487, -0.0010971041629090905, 0.9998534917831421, -206.4528045654297], [0.0, 0.0, 0.0, 1.0]], [[0.15406395494937897, 0.9880543351173401, 0.0036023857537657022, -25884.5078125], [-0.9879048466682434, 0.15410339832305908, -0.017209893092513084, 42442.51953125], [-0.01755945011973381, -0.0009073900291696191, 0.9998453855514526, -206.45314025878906], [0.0, 0.0, 0.0, 1.0]], [[0.15435117483139038, 0.988010048866272, 0.0034332447685301304, -25884.58203125], [-0.9878582954406738, 0.1543876826763153, -0.017333904281258583, 42442.5078125], [-0.017656123265624046, -0.0007160506211221218, 0.9998438358306885, -206.45242309570312], [0.0, 0.0, 0.0, 1.0]], [[0.1547265499830246, 0.9879506826400757, 0.003629169659689069, -25884.65234375], [-0.9877995848655701, 0.15476642549037933, -0.01730211265385151, 42442.49609375], [-0.017655307427048683, -0.0009077964350581169, 0.9998437166213989, -206.4521484375], [0.0, 0.0, 0.0, 1.0]], [[0.1547207534313202, 0.9879502058029175, 0.003977262414991856, -25884.712890625], [-0.9878039360046387, 0.15476647019386292, -0.017051883041858673, 42442.484375], [-0.017461959272623062, -0.0012904751347377896, 0.9998466372489929, -206.45257568359375], [0.0, 0.0, 0.0, 1.0]], [[0.15480907261371613, 0.9879346489906311, 0.004391121212393045, -25884.771484375], [-0.9877968430519104, 0.1548612415790558, -0.016596712172031403, 42442.4765625], [-0.017076481133699417, -0.001768214046023786, 0.9998525977134705, 
-206.45285034179688], [0.0, 0.0, 0.0, 1.0]], [[0.15546368062496185, 0.9878283143043518, 0.0051342821680009365, -25884.826171875], [-0.9877163171768188, 0.1555246263742447, -0.015115826390683651, 42442.47265625], [-0.015730347484350204, -0.002721252618357539, 0.9998725652694702, -206.45233154296875], [0.0, 0.0, 0.0, 1.0]], [[0.15611299872398376, 0.9877179265022278, 0.006491738371551037, -25884.8828125], [-0.9876386523246765, 0.1561875194311142, -0.013242430053651333, 42442.46484375], [-0.014093711972236633, -0.00434417650103569, 0.9998912215232849, -206.45281982421875], [0.0, 0.0, 0.0, 1.0]], [[0.15582436323165894, 0.9877575635910034, 0.0073294141329824924, -25884.943359375], [-0.9877012968063354, 0.15590336918830872, -0.011845295317471027, 42442.45703125], [-0.012842959724366665, -0.0053934864699840546, 0.9999029636383057, -206.4521942138672], [0.0, 0.0, 0.0, 1.0]], [[0.15563589334487915, 0.987784743309021, 0.007662402465939522, -25885.001953125], [-0.9877418875694275, 0.15571384131908417, -0.010918352752923965, 42442.453125], [-0.01197812333703041, -0.005869188345968723, 0.9999110102653503, -206.45242309570312], [0.0, 0.0, 0.0, 1.0]], [[0.1559167355298996, 0.9877381920814514, 0.007949426770210266, -25885.064453125], [-0.9876976013183594, 0.15599781274795532, -0.010870078578591347, 42442.44140625], [-0.011976883746683598, -0.0061568026430904865, 0.9999092817306519, -206.45201110839844], [0.0, 0.0, 0.0, 1.0]]], "4": [[[0.5406929850578308, -0.8412173986434937, -0.0021146144717931747, -25833.697265625], [0.841219961643219, 0.540691614151001, 0.0012070093071088195, 42363.89453125], [0.00012799681280739605, -0.002431477652862668, 0.9999970197677612, -206.04859924316406], [0.0, 0.0, 0.0, 1.0]], [[0.5377140641212463, -0.8431107401847839, -0.005271863657981157, -25834.265625], [0.843126118183136, 0.5376926064491272, 0.005004847887903452, 42364.25390625], [-0.0013849992537871003, -0.007136023137718439, 0.9999735951423645, -206.0515594482422], [0.0, 0.0, 0.0, 1.0]], 
[[0.5348083972930908, -0.8449504971504211, -0.0062119197100400925, -25834.833984375], [0.8449703454971313, 0.5347726345062256, 0.0065832301042973995, 42364.609375], [-0.00224053836427629, -0.008769655600190163, 0.9999589920043945, -206.05801391601562], [0.0, 0.0, 0.0, 1.0]], [[0.5320512652397156, -0.8467059135437012, -0.0032450139988213778, -25835.3984375], [0.8467044830322266, 0.5320263504981995, 0.006284549832344055, 42364.9609375], [-0.0035947321448475122, -0.006091271061450243, 0.9999749660491943, -206.06332397460938], [0.0, 0.0, 0.0, 1.0]], [[0.5291967988014221, -0.848499059677124, -7.758773426758125e-05, -25835.962890625], [0.8484892845153809, 0.5291902422904968, 0.004868105985224247, 42365.30859375], [-0.00408952496945858, -0.0026420189533382654, 0.9999881386756897, -206.06741333007812], [0.0, 0.0, 0.0, 1.0]], [[0.5265872478485107, -0.8501207232475281, 0.0007879652548581362, -25836.533203125], [0.8501120805740356, 0.5265858173370361, 0.0041029308922588825, 42365.65625], [-0.0039029179606586695, -0.0014906927244737744, 0.999991238117218, -206.06874084472656], [0.0, 0.0, 0.0, 1.0]], [[0.525373637676239, -0.8508644104003906, -0.0035171895287930965, -25837.1171875], [0.8508678078651428, 0.5253544449806213, 0.00515903951600194, 42366.0078125], [-0.002541871042922139, -0.005703086964786053, 0.9999805092811584, -206.06961059570312], [0.0, 0.0, 0.0, 1.0]], [[0.5250549912452698, -0.8510209321975708, -0.008973442949354649, -25837.701171875], [0.8510661125183105, 0.5250005125999451, 0.007814954966306686, 42366.359375], [-0.0019396304851397872, -0.011740274727344513, 0.9999291896820068, -206.07530212402344], [0.0, 0.0, 0.0, 1.0]], [[0.5243068337440491, -0.8514354825019836, -0.012643581256270409, -25838.283203125], [0.8515292406082153, 0.5242422819137573, 0.008243848569691181, 42366.71484375], [-0.0003908059443347156, -0.015088686719536781, 0.9998860955238342, -206.083984375], [0.0, 0.0, 0.0, 1.0]], [[0.5237032771110535, -0.8518260717391968, -0.011278398334980011, 
-25838.859375], [0.8518971800804138, 0.5236935615539551, 0.004033948294818401, 42367.0625], [0.0024702013470232487, -0.011720627546310425, 0.9999282360076904, -206.09274291992188], [0.0, 0.0, 0.0, 1.0]], [[0.5239711403846741, -0.8517194390296936, -0.00530310720205307, -25839.43359375], [0.8517357110977173, 0.5239660739898682, 0.0024191963020712137, 42367.40625], [0.0007181716500781476, -0.005784434732049704, 0.999983012676239, -206.10035705566406], [0.0, 0.0, 0.0, 1.0]], [[0.5238112807273865, -0.8518335819244385, -0.0010918868938460946, -25840.001953125], [0.8518213033676147, 0.5238099098205566, -0.004860093351453543, 42367.75390625], [0.004711932502686977, 0.0016156790079548955, 0.9999876022338867, -206.10128784179688], [0.0, 0.0, 0.0, 1.0]], [[0.5238662958145142, -0.851765513420105, 0.007721069268882275, -25840.560546875], [0.8517900109291077, 0.5238832831382751, 0.00021246664982754737, 42368.109375], [-0.004225911572575569, 0.006465426180511713, 0.9999701380729675, -206.10145568847656], [0.0, 0.0, 0.0, 1.0]], [[0.5243650674819946, -0.8514790534973145, 0.004963983781635761, -25841.1328125], [0.8514767289161682, 0.5243824124336243, 0.0032366039231419563, 42368.46484375], [-0.005358927417546511, 0.0025295550003647804, 0.9999824166297913, -206.0981903076172], [0.0, 0.0, 0.0, 1.0]], [[0.5256800651550293, -0.8506513237953186, -0.007265771273523569, -25841.720703125], [0.8506814241409302, 0.525672435760498, 0.0030716885812580585, 42368.81640625], [0.001206479500979185, -0.00779558252543211, 0.9999688267707825, -206.09719848632812], [0.0, 0.0, 0.0, 1.0]], [[0.5283815264701843, -0.8489053249359131, -0.013142428360879421, -25842.298828125], [0.8489806652069092, 0.5284237265586853, 0.00030414742650464177, 42369.15234375], [0.006686578970402479, -0.011318373493850231, 0.9999135732650757, -206.10269165039062], [0.0, 0.0, 0.0, 1.0]], [[0.5310336351394653, -0.8471730947494507, -0.017351467162370682, -25842.865234375], [0.8472416996955872, 0.5311824679374695, 
-0.005163857713341713, 42369.49609375], [0.013591475784778595, -0.011958705261349678, 0.9998361468315125, -206.10797119140625], [0.0, 0.0, 0.0, 1.0]], [[0.5358197689056396, -0.8442217111587524, -0.013674751855432987, -25843.4140625], [0.8442804217338562, 0.5358961820602417, -0.002413957379758358, 42369.85546875], [0.009366163052618504, -0.010251879692077637, 0.9999035596847534, -206.11688232421875], [0.0, 0.0, 0.0, 1.0]], [[0.5427379608154297, -0.839850664138794, -0.009288575500249863, -25843.95703125], [0.8398763537406921, 0.5427746772766113, -0.0018209327245131135, 42370.21875], [0.006570915225893259, -0.006812965031713247, 0.9999551773071289, -206.12417602539062], [0.0, 0.0, 0.0, 1.0]], [[0.5503336191177368, -0.8348782062530518, -0.01054590567946434, -25844.501953125], [0.8348811864852905, 0.5504037737846375, -0.005391223821789026, 42370.578125], [0.010305522009730339, -0.005837606266140938, 0.9999298453330994, -206.12741088867188], [0.0, 0.0, 0.0, 1.0]], [[0.5566011071205139, -0.8306061625480652, -0.016989929601550102, -25845.05078125], [0.830631673336029, 0.5567716956138611, -0.007503157015889883, 42370.93359375], [0.01569168083369732, -0.009936108253896236, 0.99982750415802, -206.1298370361328], [0.0, 0.0, 0.0, 1.0]], [[0.5629652142524719, -0.8261775374412537, -0.022380996495485306, -25845.591796875], [0.8262143731117249, 0.563264787197113, -0.010130033828318119, 42371.296875], [0.02097563073039055, -0.012788644060492516, 0.9996982216835022, -206.13656616210938], [0.0, 0.0, 0.0, 1.0]], [[0.5699790120124817, -0.8213910460472107, -0.020989948883652687, -25846.119140625], [0.8214660882949829, 0.5702129602432251, -0.007116670720279217, 42371.671875], [0.017814312130212784, -0.013186180964112282, 0.9997543692588806, -206.1453857421875], [0.0, 0.0, 0.0, 1.0]], [[0.5777260661125183, -0.8160008788108826, -0.019366087391972542, -25846.634765625], [0.8160850405693054, 0.5779085755348206, -0.005177175626158714, 42372.0546875], [0.015416407957673073, 
-0.012813386507332325, 0.9997990131378174, -206.15505981445312], [0.0, 0.0, 0.0, 1.0]], [[0.5868248343467712, -0.8094642758369446, -0.020103808492422104, -25847.15625], [0.8095520734786987, 0.5870230793952942, -0.005414755083620548, 42372.43359375], [0.016184451058506966, -0.013097568415105343, 0.9997832179069519, -206.1634063720703], [0.0, 0.0, 0.0, 1.0]], [[0.5954297780990601, -0.8030617237091064, -0.02356152981519699, -25847.669921875], [0.8030972480773926, 0.5957586169242859, -0.010311085730791092, 42372.8125], [0.022317424416542053, -0.012782673351466656, 0.9996691942214966, -206.1694793701172], [0.0, 0.0, 0.0, 1.0]], [[0.6031931042671204, -0.7971855401992798, -0.0255585964769125, -25848.177734375], [0.7972354292869568, 0.603573739528656, -0.010693793185055256, 42373.1953125], [0.023951435461640358, -0.013925795443356037, 0.9996160864830017, -206.17800903320312], [0.0, 0.0, 0.0, 1.0]], [[0.6124671101570129, -0.7901239991188049, -0.024251168593764305, -25848.671875], [0.7902175784111023, 0.6127774715423584, -0.007747957017272711, 42373.59765625], [0.020982416346669197, -0.014418331906199455, 0.9996758699417114, -206.1879425048828], [0.0, 0.0, 0.0, 1.0]], [[0.6242092847824097, -0.7810232639312744, -0.019115153700113297, -25849.150390625], [0.78107750415802, 0.6244035959243774, -0.006165546365082264, 42374.00390625], [0.016751006245613098, -0.011081825941801071, 0.9997982382774353, -206.19741821289062], [0.0, 0.0, 0.0, 1.0]], [[0.6367748379707336, -0.7709718346595764, -0.010963868349790573, -25849.615234375], [0.7709097266197205, 0.6368658542633057, -0.010008543729782104, 42374.41015625], [0.014698818325996399, -0.002078964142128825, 0.9998897910118103, -206.2025909423828], [0.0, 0.0, 0.0, 1.0]], [[0.6487948894500732, -0.7609250545501709, -0.0076270028948783875, -25850.07421875], [0.7608253955841064, 0.6488355398178101, -0.012535776942968369, 42374.8203125], [0.01448745746165514, 0.0023303302004933357, 0.9998923540115356, -206.20330810546875], [0.0, 0.0, 0.0, 
1.0]], [[0.660062313079834, -0.7511383891105652, -0.010430008172988892, -25850.537109375], [0.7510437965393066, 0.6601446866989136, -0.011921223253011703, 42375.23828125], [0.015839803963899612, 3.535793803166598e-05, 0.9998745322227478, -206.2014617919922], [0.0, 0.0, 0.0, 1.0]], [[0.6710508465766907, -0.7412380576133728, -0.016029739752411842, -25851.0], [0.7411770224571228, 0.6712254285812378, -0.010632549412548542, 42375.6640625], [0.01864081807434559, -0.004745893180370331, 0.9998149275779724, -206.2021026611328], [0.0, 0.0, 0.0, 1.0]], [[0.6823940277099609, -0.7307780981063843, -0.017369717359542847, -25851.447265625], [0.7307704091072083, 0.6825766563415527, -0.00798727199435234, 42376.1015625], [0.017693087458610535, -0.0072428081184625626, 0.9998171925544739, -206.20578002929688], [0.0, 0.0, 0.0, 1.0]], [[0.6939473152160645, -0.7198283076286316, -0.01686173863708973, -25851.880859375], [0.7198013663291931, 0.6941254734992981, -0.008712509647011757, 42376.53515625], [0.017975671216845512, -0.006091080140322447, 0.999819815158844, -206.2100830078125], [0.0, 0.0, 0.0, 1.0]], [[0.7057827711105347, -0.708293616771698, -0.013812918215990067, -25852.302734375], [0.7082421183586121, 0.7059102058410645, -0.00915920827537775, 42376.97265625], [0.01623808778822422, -0.0033184790518134832, 0.9998626112937927, -206.21324157714844], [0.0, 0.0, 0.0, 1.0]], [[0.7177074551582336, -0.6962172985076904, -0.013323763385415077, -25852.71484375], [0.6961108446121216, 0.7178309559822083, -0.012184283696115017, 42377.4140625], [0.018047118559479713, -0.0005300656775943935, 0.9998369812965393, -206.21409606933594], [0.0, 0.0, 0.0, 1.0]], [[0.7304280400276184, -0.6828168630599976, -0.015356145799160004, -25853.123046875], [0.682683527469635, 0.7305893301963806, -0.013510040938854218, 42377.86328125], [0.02044392004609108, -0.0006152736023068428, 0.9997907876968384, -206.2140350341797], [0.0, 0.0, 0.0, 1.0]], [[0.7431948781013489, -0.6688243746757507, -0.018311159685254097, 
-25853.521484375], [0.668704092502594, 0.7434158325195312, -0.012949833646416664, 42378.3203125], [0.022273970767855644, -0.0026204963214695454, 0.9997484683990479, -206.21377563476562], [0.0, 0.0, 0.0, 1.0]], [[0.7556191682815552, -0.6546623110771179, -0.021374855190515518, -25853.908203125], [0.6545425653457642, 0.7559105753898621, -0.013159086927771568, 42378.78515625], [0.024772239848971367, -0.004047493450343609, 0.9996849298477173, -206.21466064453125], [0.0, 0.0, 0.0, 1.0]], [[0.7685333490371704, -0.6393820643424988, -0.023389456793665886, -25854.283203125], [0.6392802000045776, 0.768871009349823, -0.012574776075780392, 42379.25390625], [0.026023562997579575, -0.005288283806294203, 0.999647319316864, -206.2173614501953], [0.0, 0.0, 0.0, 1.0]], [[0.7819812297821045, -0.6228697896003723, -0.023206235840916634, -25854.640625], [0.6228129863739014, 0.7823005318641663, -0.010486414656043053, 42379.73046875], [0.024685921147465706, -0.0062529658898711205, 0.9996756911277771, -206.2208251953125], [0.0, 0.0, 0.0, 1.0]], [[0.7956606149673462, -0.6054771542549133, -0.017932679504156113, -25854.974609375], [0.6054068207740784, 0.7958567142486572, -0.009736970998346806, 42380.2109375], [0.020167356356978416, -0.003109242534264922, 0.9997918009757996, -206.22463989257812], [0.0, 0.0, 0.0, 1.0]], [[0.8101460933685303, -0.5861483812332153, -0.009664028882980347, -25855.3203125], [0.5860395431518555, 0.8101935982704163, -0.011999369598925114, 42380.74609375], [0.014863143675029278, 0.004057739395648241, 0.9998812675476074, -206.22682189941406], [0.0, 0.0, 0.0, 1.0]], [[0.8232975006103516, -0.5675682425498962, -0.006891482509672642, -25855.626953125], [0.5674135088920593, 0.8232713341712952, -0.01631622388958931, 42381.234375], [0.014934130012989044, 0.009522785432636738, 0.9998431205749512, -206.22308349609375], [0.0, 0.0, 0.0, 1.0]], [[0.8365136384963989, -0.5478993654251099, -0.007154656108468771, -25855.931640625], [0.5476959943771362, 0.8364551067352295, 
-0.01928587444126606, 42381.72265625], [0.016551265493035316, 0.01221431978046894, 0.9997884035110474, -206.21615600585938], [0.0, 0.0, 0.0, 1.0]], [[0.8486436605453491, -0.528922975063324, -0.006658518686890602, -25856.21875], [0.5287595987319946, 0.8485978841781616, -0.017176317051053047, 42382.22265625], [0.01473535317927599, 0.01105581596493721, 0.9998302459716797, -206.2089385986328], [0.0, 0.0, 0.0, 1.0]], [[0.8598241806030273, -0.510362982749939, -0.015231931582093239, -25856.501953125], [0.5102577209472656, 0.859958291053772, -0.010431856848299503, 42382.73828125], [0.01842285878956318, 0.0011973517248407006, 0.9998295903205872, -206.20260620117188], [0.0, 0.0, 0.0, 1.0]], [[0.8709673881530762, -0.4910038709640503, -0.018192345276474953, -25856.7734375], [0.49109628796577454, 0.8711049556732178, 0.0007110424339771271, 42383.25390625], [0.015498319640755653, -0.009553488343954086, 0.9998341798782349, -206.20530700683594], [0.0, 0.0, 0.0, 1.0]], [[0.8817201256752014, -0.4715903401374817, -0.013119335286319256, -25857.017578125], [0.4716740548610687, 0.8817636370658875, 0.004063884727656841, 42383.7734375], [0.009651663713157177, -0.009771258570253849, 0.9999056458473206, -206.21316528320312], [0.0, 0.0, 0.0, 1.0]], [[0.8918740749359131, -0.45213592052459717, -0.011562642641365528, -25857.25], [0.4521597623825073, 0.8919366598129272, -0.0006080386810936034, 42384.28125], [0.010588061064481735, -0.004685867577791214, 0.9999329447746277, -206.21771240234375], [0.0, 0.0, 0.0, 1.0]], [[0.9017459154129028, -0.4321911334991455, -0.008065571077167988, -25857.474609375], [0.43216797709465027, 0.9017818570137024, -0.00450994772836566, 42384.79296875], [0.0092225456610322, 0.0005811455193907022, 0.999957263469696, -206.21929931640625], [0.0, 0.0, 0.0, 1.0]], [[0.9108709692955017, -0.41254884004592896, -0.010841616429388523, -25857.693359375], [0.4124537706375122, 0.9109237194061279, -0.009991311468183994, 42385.3046875], [0.013997789472341537, 0.004629130475223064, 
0.9998913407325745, -206.2170867919922], [0.0, 0.0, 0.0, 1.0]], [[0.9197580218315125, -0.3922683298587799, -0.01306284498423338, -25857.90625], [0.39209407567977905, 0.9198176860809326, -0.014058614149689674, 42385.83203125], [0.017530184239149094, 0.007808659691363573, 0.999815821647644, -206.2122802734375], [0.0, 0.0, 0.0, 1.0]], [[0.9280425906181335, -0.37216615676879883, -0.015141662210226059, -25858.10546875], [0.3720284104347229, 0.928153932094574, -0.01118149608373642, 42386.37109375], [0.01821516826748848, 0.00474377628415823, 0.9998227953910828, -206.2085723876953], [0.0, 0.0, 0.0, 1.0]], [[0.9356107115745544, -0.35251250863075256, -0.019169315695762634, -25858.296875], [0.35245195031166077, 0.9358068704605103, -0.006562703754752874, 42386.9296875], [0.02025221288204193, -0.0006161267519928515, 0.9997946619987488, -206.20504760742188], [0.0, 0.0, 0.0, 1.0]], [[0.9430758953094482, -0.3320581316947937, -0.01857854053378105, -25858.4765625], [0.3321068584918976, 0.9432415962219238, -0.0004890229902230203, 42387.5], [0.01768643781542778, -0.005708875600248575, 0.9998272657394409, -206.2075653076172], [0.0, 0.0, 0.0, 1.0]], [[0.9500284194946289, -0.31163132190704346, -0.018220288679003716, -25858.642578125], [0.3117145895957947, 0.9501740336418152, 0.0018518243450671434, 42388.07421875], [0.01673535816371441, -0.007438815664499998, 0.999832272529602, -206.21212768554688], [0.0, 0.0, 0.0, 1.0]], [[0.9563391804695129, -0.29170718789100647, -0.017950640991330147, -25858.802734375], [0.29177960753440857, 0.9564844369888306, 0.0014996986137703061, 42388.65625], [0.01673203520476818, -0.006671851500868797, 0.9998377561569214, -206.21603393554688], [0.0, 0.0, 0.0, 1.0]], [[0.9617663621902466, -0.2733142375946045, -0.01745813712477684, -25858.955078125], [0.2733885645866394, 0.9619016647338867, 0.0019767070189118385, 42389.2421875], [0.016252748668193817, -0.006673985160887241, 0.9998456239700317, -206.22007751464844], [0.0, 0.0, 0.0, 1.0]], [[0.966149091720581, 
-0.25750452280044556, -0.01572217047214508, -25859.09765625], [0.25760093331336975, 0.9662413001060486, 0.0044129276648163795, 42389.84765625], [0.014055062085390091, -0.00831359252333641, 0.9998666048049927, -206.2256317138672], [0.0, 0.0, 0.0, 1.0]], [[0.9696371555328369, -0.24413157999515533, -0.01426482293754816, -25859.236328125], [0.244264617562294, 0.9696717858314514, 0.008451432920992374, 42390.46484375], [0.011768934316933155, -0.011679215356707573, 0.999862551689148, -206.23171997070312], [0.0, 0.0, 0.0, 1.0]], [[0.97237229347229, -0.23296019434928894, -0.014888900332152843, -25859.376953125], [0.23309868574142456, 0.9724172949790955, 0.008339759893715382, 42391.08203125], [0.012535392306745052, -0.011579934507608414, 0.999854326248169, -206.23951721191406], [0.0, 0.0, 0.0, 1.0]], [[0.974433422088623, -0.22391891479492188, -0.018432699143886566, -25859.521484375], [0.22400131821632385, 0.9745855927467346, 0.0025081709027290344, 42391.703125], [0.017402615398168564, -0.006572993937879801, 0.999826967716217, -206.24484252929688], [0.0, 0.0, 0.0, 1.0]], [[0.9765053987503052, -0.214664027094841, -0.01888352446258068, -25859.662109375], [0.21463614702224731, 0.9766877889633179, -0.0035148474853485823, 42392.328125], [0.019197817891836166, -0.0006208191625773907, 0.9998154640197754, -206.2471466064453], [0.0, 0.0, 0.0, 1.0]], [[0.978641927242279, -0.20454011857509613, -0.020574331283569336, -25859.79296875], [0.20445625483989716, 0.9788565635681152, -0.006123141385614872, 42392.96875], [0.021391747519373894, 0.0017858120845630765, 0.9997695684432983, -206.2465362548828], [0.0, 0.0, 0.0, 1.0]], [[0.9808081984519958, -0.19382916390895844, -0.021106675267219543, -25859.916015625], [0.19371247291564941, 0.9810299873352051, -0.007459092419594526, 42393.625], [0.022152071818709373, 0.0032273123506456614, 0.9997493624687195, -206.2444305419922], [0.0, 0.0, 0.0, 1.0]], [[0.982505738735199, -0.18394741415977478, -0.029082121327519417, -25860.05078125], 
[0.18385818600654602, 0.9829360842704773, -0.005735847167670727, 42394.29296875], [0.029640959575772285, 0.0002885164285544306, 0.999560534954071, -206.24014282226562], [0.0, 0.0, 0.0, 1.0]], [[0.984031617641449, -0.17526952922344208, -0.031020300462841988, -25860.177734375], [0.17530672252178192, 0.9845126867294312, -0.0015378595562651753, 42394.97265625], [0.030809415504336357, -0.003924764692783356, 0.999517560005188, -206.24111938476562], [0.0, 0.0, 0.0, 1.0]], [[0.9851452112197876, -0.1692248433828354, -0.02918613888323307, -25860.2890625], [0.16925901174545288, 0.9855707287788391, -0.0013142939424142241, 42395.65625], [0.02898741513490677, -0.003645246382802725, 0.9995731115341187, -206.24481201171875], [0.0, 0.0, 0.0, 1.0]], [[0.9860278367996216, -0.16487595438957214, -0.02377130463719368, -25860.388671875], [0.16481688618659973, 0.9863141775131226, -0.004435851704329252, 42396.34765625], [0.024177338927984238, 0.00045596103882417083, 0.9997075796127319, -206.24822998046875], [0.0, 0.0, 0.0, 1.0]], [[0.9867424368858337, -0.1611841768026352, -0.018946751952171326, -25860.498046875], [0.1610756516456604, 0.9869162440299988, -0.007130445912480354, 42397.04296875], [0.0198481734842062, 0.003984053153544664, 0.9997950792312622, -206.24851989746094], [0.0, 0.0, 0.0, 1.0]], [[0.9869539141654968, -0.15938624739646912, -0.022758835926651955, -25860.62109375], [0.15920975804328918, 0.9872002601623535, -0.009377987124025822, 42397.75], [0.023962249979376793, 0.00563221238553524, 0.9996970295906067, -206.243896484375], [0.0, 0.0, 0.0, 1.0]], [[0.9869041442871094, -0.1592947393655777, -0.025404229760169983, -25860.744140625], [0.15919968485832214, 0.9872297644615173, -0.005733972415328026, 42398.46875], [0.02599320374429226, 0.0016145361587405205, 0.9996607899665833, -206.24046325683594], [0.0, 0.0, 0.0, 1.0]], [[0.9869379997253418, -0.158915713429451, -0.026441356167197227, -25860.8671875], [0.15893897414207458, 0.9872876405715942, -0.0012325852876529098, 
42399.1953125], [0.026301100850105286, -0.0029860767535865307, 0.9996495842933655, -206.24102783203125], [0.0, 0.0, 0.0, 1.0]], [[0.9870131015777588, -0.15844102203845978, -0.0264859851449728, -25860.990234375], [0.15851782262325287, 0.9873557686805725, 0.0008127228356897831, 42399.93359375], [0.026022322475910187, -0.00500066950917244, 0.9996488094329834, -206.24395751953125], [0.0, 0.0, 0.0, 1.0]], [[0.9870290756225586, -0.15882016718387604, -0.023447606712579727, -25861.107421875], [0.15883775055408478, 0.9873040914535522, -0.0011231278767809272, 42400.671875], [0.02332829311490059, -0.002615805249661207, 0.9997244477272034, -206.24868774414062], [0.0, 0.0, 0.0, 1.0]], [[0.9869380593299866, -0.1594829112291336, -0.022769900038838387, -25861.232421875], [0.15947234630584717, 0.9871997237205505, -0.00228948169387877, 42401.4140625], [0.02284357324242592, -0.0013715930981561542, 0.9997380971908569, -206.2499542236328], [0.0, 0.0, 0.0, 1.0]], [[0.9868569374084473, -0.15986068546772003, -0.023619581013917923, -25861.359375], [0.15990276634693146, 0.987132728099823, -0.00010785296035464853, 42402.171875], [0.023332905024290085, -0.0036704018712043762, 0.9997209906578064, -206.25221252441406], [0.0, 0.0, 0.0, 1.0]], [[0.9867582321166992, -0.16004648804664612, -0.026329850777983665, -25861.494140625], [0.160202294588089, 0.987076461315155, 0.0039051263593137264, 42402.9375], [0.025364574044942856, -0.008071519434452057, 0.9996456503868103, -206.2554168701172], [0.0, 0.0, 0.0, 1.0]], [[0.9866829514503479, -0.1603277027606964, -0.027418727055191994, -25861.626953125], [0.16054117679595947, 0.9870123267173767, 0.005756173748522997, 42403.7109375], [0.026139751076698303, -0.010081354528665543, 0.9996074438095093, -206.26327514648438], [0.0, 0.0, 0.0, 1.0]], [[0.9865863919258118, -0.1608036607503891, -0.02809753641486168, -25861.7578125], [0.16097623109817505, 0.9869502782821655, 0.003977641463279724, 42404.484375], [0.02709124982357025, -0.008447322063148022, 
0.9995973110198975, -206.27085876464844], [0.0, 0.0, 0.0, 1.0]], [[0.9865425825119019, -0.16109208762645721, -0.027983825653791428, -25861.892578125], [0.16114996373653412, 0.9869298934936523, -0.0001898280024761334, 42405.26171875], [0.027648653835058212, -0.00432231929153204, 0.9996083974838257, -206.27687072753906], [0.0, 0.0, 0.0, 1.0]], [[0.9864974021911621, -0.1613776534795761, -0.027930455282330513, -25862.02734375], [0.16131316125392914, 0.9868926405906677, -0.004562080837786198, 42406.0390625], [0.028300581499934196, -5.069787675893167e-06, 0.9995994567871094, -206.2786407470703], [0.0, 0.0, 0.0, 1.0]], [[0.9864861369132996, -0.16118904948234558, -0.02938002720475197, -25862.162109375], [0.16110597550868988, 0.9869234561920166, -0.005188658833503723, 42406.82421875], [0.02983219362795353, 0.00038524175761267543, 0.9995548129081726, -206.27798461914062], [0.0, 0.0, 0.0, 1.0]], [[0.9865629076957703, -0.16052694618701935, -0.03040800243616104, -25862.298828125], [0.16047149896621704, 0.9870312213897705, -0.004271490033715963, 42407.609375], [0.030699335038661957, -0.0006655236938968301, 0.9995284080505371, -206.27845764160156], [0.0, 0.0, 0.0, 1.0]], [[0.9867829084396362, -0.1591072380542755, -0.030728528276085854, -25862.431640625], [0.15907512605190277, 0.9872602224349976, -0.0035027870908379555, 42408.39453125], [0.03089437447488308, -0.0014316535089164972, 0.9995216131210327, -206.279541015625], [0.0, 0.0, 0.0, 1.0]], [[0.9869861602783203, -0.15806585550308228, -0.0295531265437603, -25862.55859375], [0.15799498558044434, 0.987428605556488, -0.004733016714453697, 42409.17578125], [0.02992972917854786, 2.1765681594843045e-06, 0.9995519518852234, -206.28053283691406], [0.0, 0.0, 0.0, 1.0]], [[0.9871054887771606, -0.1575915813446045, -0.02806355617940426, -25862.68359375], [0.1574641466140747, 0.9875019192695618, -0.006707631517201662, 42409.953125], [0.028769882395863533, 0.0022021355107426643, 0.9995836019515991, -206.28045654296875], [0.0, 0.0, 0.0, 1.0]], 
[[0.987313985824585, -0.157399520277977, -0.02089371532201767, -25862.80078125], [0.1573069542646408, 0.9875314831733704, -0.006012520287185907, 42410.73046875], [0.021579569205641747, 0.002649518661201, 0.9997636079788208, -206.28128051757812], [0.0, 0.0, 0.0, 1.0]], [[0.9873856902122498, -0.15730401873588562, -0.018023885786533356, -25862.91796875], [0.157235786318779, 0.9875476360321045, -0.00515179056674242, 42411.50390625], [0.018609844148159027, 0.002252805046737194, 0.9998242855072021, -206.2800750732422], [0.0, 0.0, 0.0, 1.0]], [[0.9875167608261108, -0.156735360622406, -0.01564042456448078, -25863.041015625], [0.15670587122440338, 0.9876404404640198, -0.0031006745994091034, 42412.28515625], [0.015933100134134293, 0.0006110217655077577, 0.9998728632926941, -206.27964782714844], [0.0, 0.0, 0.0, 1.0]], [[0.9876341223716736, -0.1560719609260559, -0.014845483005046844, -25863.16796875], [0.15607702732086182, 0.9877445697784424, -0.0008239263552241027, 42413.0546875], [0.01479213684797287, -0.0015033009694889188, 0.9998894929885864, -206.28076171875], [0.0, 0.0, 0.0, 1.0]], [[0.9877026081085205, -0.1557871252298355, -0.013186022639274597, -25863.287109375], [0.1558048576116562, 0.9877877831459045, 0.00032379882759414613, 42413.82421875], [0.01297454908490181, -0.0023742637131363153, 0.9999129772186279, -206.28317260742188], [0.0, 0.0, 0.0, 1.0]], [[0.987827479839325, -0.15512338280677795, -0.011558771133422852, -25863.40625], [0.15514600276947021, 0.9878909587860107, 0.0010811721440404654, 42414.59375], [0.01125109102576971, -0.002861308865249157, 0.999932587146759, -206.28602600097656], [0.0, 0.0, 0.0, 1.0]], [[0.9880302548408508, -0.1539856493473053, -0.009202691726386547, -25863.525390625], [0.15401138365268707, 0.9880667328834534, 0.0021519530564546585, 42415.35546875], [0.008761503733694553, -0.0035435142926871777, 0.9999552965164185, -206.28924560546875], [0.0, 0.0, 0.0, 1.0]], [[0.9881508350372314, -0.15322570502758026, -0.008931242860853672, 
-25863.64453125], [0.15327179431915283, 0.9881727695465088, 0.004726261831820011, 42416.125], [0.008101425133645535, -0.0060391672886908054, 0.9999489188194275, -206.29290771484375], [0.0, 0.0, 0.0, 1.0]], [[0.9882156848907471, -0.15284456312656403, -0.008254008367657661, -25863.765625], [0.15290018916130066, 0.9882197380065918, 0.006584662478417158, 42416.88671875], [0.007150344550609589, -0.007769107818603516, 0.999944269657135, -206.2985076904297], [0.0, 0.0, 0.0, 1.0]], [[0.9883138537406921, -0.15227587521076202, -0.006909582298249006, -25863.8828125], [0.15232187509536743, 0.9883081912994385, 0.006705089937895536, 42417.640625], [0.00580777321010828, -0.007679214235395193, 0.9999536275863647, -206.30552673339844], [0.0, 0.0, 0.0, 1.0]], [[0.988401472568512, -0.1517075002193451, -0.0068757906556129456, -25864.001953125], [0.15175217390060425, 0.9883970618247986, 0.006519676186144352, 42418.3984375], [0.005806927569210529, -0.007487474009394646, 0.9999550580978394, -206.31124877929688], [0.0, 0.0, 0.0, 1.0]], [[0.9884708523750305, -0.15123619139194489, -0.007272182498127222, -25864.12109375], [0.1512685865163803, 0.9884840250015259, 0.004130727145820856, 42419.1484375], [0.006563721224665642, -0.005183156579732895, 0.9999650120735168, -206.31683349609375], [0.0, 0.0, 0.0, 1.0]], [[0.9885111451148987, -0.15095317363739014, -0.00766585161909461, -25864.240234375], [0.1509748101234436, 0.9885348081588745, 0.00232382002286613, 42419.89453125], [0.0072271740064024925, -0.0034544728696346283, 0.9999679327011108, -206.32028198242188], [0.0, 0.0, 0.0, 1.0]], [[0.9885461330413818, -0.15066781640052795, -0.008705335669219494, -25864.357421875], [0.15070679783821106, 0.9885703921318054, 0.004006560891866684, 42420.64453125], [0.008002176880836487, -0.005272623151540756, 0.999954104423523, -206.32281494140625], [0.0, 0.0, 0.0, 1.0]], [[0.9886007308959961, -0.15028618276119232, -0.009089380502700806, -25864.474609375], [0.15034739673137665, 0.9886120557785034, 
0.006472481414675713, 42421.38671875], [0.008013146929442883, -0.007765265181660652, 0.9999377131462097, -206.32742309570312], [0.0, 0.0, 0.0, 1.0]], [[0.988686740398407, -0.14971628785133362, -0.009138686582446098, -25864.591796875], [0.1497856080532074, 0.9886904954910278, 0.007439710199832916, 42422.125], [0.007921487092971802, -0.008724387735128403, 0.9999305605888367, -206.33480834960938], [0.0, 0.0, 0.0, 1.0]], [[0.9887274503707886, -0.14933985471725464, -0.010750374756753445, -25864.70703125], [0.14940319955348969, 0.9887619614601135, 0.005348470993340015, 42422.8515625], [0.009830820374190807, -0.00689432118088007, 0.9999278783798218, -206.33998107910156], [0.0, 0.0, 0.0, 1.0]], [[0.9887809753417969, -0.14886809885501862, -0.012268587946891785, -25864.822265625], [0.14891822636127472, 0.988844096660614, 0.003274378599599004, 42423.57421875], [0.011644269339740276, -0.0050646597519516945, 0.9999193549156189, -206.3441619873047], [0.0, 0.0, 0.0, 1.0]], [[0.98880934715271, -0.14867469668388367, -0.012322584167122841, -25864.935546875], [0.148764967918396, 0.9888494610786438, 0.006760620512068272, 42424.296875], [0.011180047877132893, -0.008518134243786335, 0.9999012351036072, -206.3491973876953], [0.0, 0.0, 0.0, 1.0]], [[0.9887778759002686, -0.14885173738002777, -0.012703930027782917, -25865.044921875], [0.14902068674564362, 0.9887406826019287, 0.0135869225487113, 42425.015625], [0.010538456961512566, -0.01532759703695774, 0.999826967716217, -206.35716247558594], [0.0, 0.0, 0.0, 1.0]], [[0.9888213276863098, -0.1484631597995758, -0.01382405124604702, -25865.154296875], [0.14868944883346558, 0.9887360334396362, 0.017103370279073715, 42425.72265625], [0.011129116639494896, -0.01896766945719719, 0.9997581243515015, -206.3694305419922], [0.0, 0.0, 0.0, 1.0]], [[0.9890198111534119, -0.14713555574417114, -0.01381560880690813, -25865.263671875], [0.14736308157444, 0.9889326095581055, 0.017216654494404793, 42426.41015625], [0.011129523627460003, -0.019063524901866913, 
0.9997562766075134, -206.3835906982422], [0.0, 0.0, 0.0, 1.0]], [[0.9891624450683594, -0.1459140181541443, -0.016330866143107414, -25865.369140625], [0.14612935483455658, 0.9891817569732666, 0.012872315011918545, 42427.078125], [0.014275941997766495, -0.015119231306016445, 0.9997837543487549, -206.39451599121094], [0.0, 0.0, 0.0, 1.0]], [[0.9894358515739441, -0.1443151980638504, -0.013780181296169758, -25865.466796875], [0.14438824355602264, 0.9895110726356506, 0.004458283539861441, 42427.73828125], [0.01299224328249693, -0.0064008827321231365, 0.9998950362205505, -206.4029541015625], [0.0, 0.0, 0.0, 1.0]], [[0.9895448088645935, -0.14355939626693726, -0.013849898241460323, -25865.5625], [0.14356113970279694, 0.9896410703659058, -0.0008733610739000142, 42428.38671875], [0.013831807300448418, -0.0011240774765610695, 0.999903678894043, -206.4062957763672], [0.0, 0.0, 0.0, 1.0]], [[0.9895679950714111, -0.1434643715620041, -0.013157954439520836, -25865.65625], [0.14346419274806976, 0.989655077457428, -0.0009624159429222345, 42429.03515625], [0.013159907422959805, -0.0009353192872367799, 0.9999129772186279, -206.4071044921875], [0.0, 0.0, 0.0, 1.0]], [[0.9896237850189209, -0.1430843621492386, -0.013100096955895424, -25865.755859375], [0.1431044191122055, 0.9897074103355408, 0.0006021286826580763, 42429.68359375], [0.01287910807877779, -0.002470562933012843, 0.9999139904975891, -206.40830993652344], [0.0, 0.0, 0.0, 1.0]], [[0.9896832704544067, -0.1426084190607071, -0.013778897002339363, -25865.8515625], [0.14266513288021088, 0.9897657632827759, 0.0032211653888225555, 42430.3203125], [0.013178515248000622, -0.0051537021063268185, 0.9998998641967773, -206.41015625], [0.0, 0.0, 0.0, 1.0]], [[0.9896149635314941, -0.14308245480060577, -0.013770457357168198, -25865.947265625], [0.14314675331115723, 0.9896941781044006, 0.0037975995801389217, 42430.953125], [0.01308517251163721, -0.005729357711970806, 0.9998979568481445, -206.4139862060547], [0.0, 0.0, 0.0, 1.0]], 
[[0.9894713759422302, -0.14393727481365204, -0.015110449865460396, -25866.041015625], [0.14399796724319458, 0.9895734190940857, 0.0030034754890948534, 42431.5703125], [0.014520587399601936, -0.00514772767201066, 0.9998812675476074, -206.41702270507812], [0.0, 0.0, 0.0, 1.0]], [[0.98941969871521, -0.1444109082221985, -0.013936605304479599, -25866.134765625], [0.1444731205701828, 0.9895023107528687, 0.003560973098501563, 42432.17578125], [0.013276059180498123, -0.005536761600524187, 0.9998965263366699, -206.4207000732422], [0.0, 0.0, 0.0, 1.0]], [[0.9890733361244202, -0.1466868370771408, -0.014723976142704487, -25866.228515625], [0.1467624306678772, 0.9891629219055176, 0.004185550846159458, 42432.77734375], [0.013950444757938385, -0.006300743669271469, 0.9998828172683716, -206.4247283935547], [0.0, 0.0, 0.0, 1.0]], [[0.9883899092674255, -0.15171076357364655, -0.008320706896483898, -25866.31640625], [0.15174295008182526, 0.9884141683578491, 0.0033794299233704805, 42433.36328125], [0.007711608894169331, -0.0046028029173612595, 0.9999596476554871, -206.4298858642578], [0.0, 0.0, 0.0, 1.0]], [[0.9869369864463806, -0.16108466684818268, -0.0026622414588928223, -25866.40625], [0.16109240055084229, 0.9869348406791687, 0.0029914428014308214, 42433.94140625], [0.002145583275705576, -0.003381232498213649, 0.9999919533729553, -206.43472290039062], [0.0, 0.0, 0.0, 1.0]], [[0.984605073928833, -0.17478831112384796, 0.0013526005204766989, -25866.513671875], [0.17478437721729279, 0.9846032857894897, 0.0026209105271846056, 42434.50390625], [-0.001789879985153675, -0.0023441484663635492, 0.9999955892562866, -206.4376983642578], [0.0, 0.0, 0.0, 1.0]], [[0.9812800884246826, -0.19250519573688507, 0.005573884584009647, -25866.63671875], [0.19249780476093292, 0.981295645236969, 0.0018396541709080338, 42435.0546875], [-0.0058237710036337376, -0.0007322555175051093, 0.9999827742576599, -206.4398956298828], [0.0, 0.0, 0.0, 1.0]], [[0.9769034385681152, -0.21362683176994324, 
0.004829013720154762, -25866.775390625], [0.21363413333892822, 0.9769131541252136, -0.0010465671075507998, 42435.5859375], [-0.004493952263146639, 0.002054037293419242, 0.999987781047821, -206.43875122070312], [0.0, 0.0, 0.0, 1.0]], [[0.9713683724403381, -0.23744088411331177, 0.008083445020020008, -25866.9296875], [0.23747245967388153, 0.9713890552520752, -0.0031861693132668734, 42436.11328125], [-0.007095642387866974, 0.005014540161937475, 0.9999622106552124, -206.43788146972656], [0.0, 0.0, 0.0, 1.0]], [[0.9642981290817261, -0.2646251320838928, 0.010128606110811234, -25867.095703125], [0.2646900415420532, 0.9643166661262512, -0.005695545580238104, 42436.625], [-0.008259999565780163, 0.008173146285116673, 0.9999324679374695, -206.43455505371094], [0.0, 0.0, 0.0, 1.0]], [[0.9557687640190125, -0.2938987612724304, 0.011378529481589794, -25867.283203125], [0.2939622700214386, 0.9558071494102478, -0.004343325737863779, 42437.1328125], [-0.00959918275475502, 0.007496073842048645, 0.9999257922172546, -206.43069458007812], [0.0, 0.0, 0.0, 1.0]], [[0.9453048706054688, -0.32580363750457764, 0.01583058014512062, -25867.486328125], [0.3258492052555084, 0.9454216361045837, -0.0003183078661095351, 42437.6328125], [-0.014862867072224617, 0.005459280218929052, 0.9998746514320374, -206.42735290527344], [0.0, 0.0, 0.0, 1.0]], [[0.932748556137085, -0.3602060377597809, 0.015226216055452824, -25867.708984375], [0.3602096736431122, 0.932867705821991, 0.002598162740468979, 42438.12109375], [-0.015139920637011528, 0.003061198629438877, 0.9998806715011597, -206.4248046875], [0.0, 0.0, 0.0, 1.0]], [[0.9185022711753845, -0.395180344581604, 0.013641527853906155, -25867.955078125], [0.3951683044433594, 0.918601393699646, 0.0036787819117307663, 42438.58984375], [-0.013984909281134605, 0.0020117301028221846, 0.9999001622200012, -206.42210388183594], [0.0, 0.0, 0.0, 1.0]], [[0.9025090336799622, -0.4305430054664612, 0.010493714362382889, -25868.224609375], [0.43051770329475403, 
0.9025700092315674, 0.004676514770835638, 42439.04296875], [-0.011484753340482712, 0.00029713299591094255, 0.9999340176582336, -206.41920471191406], [0.0, 0.0, 0.0, 1.0]], [[0.8845403790473938, -0.46644291281700134, 0.004381648730486631, -25868.513671875], [0.46642225980758667, 0.8845481872558594, 0.00498281279578805, 42439.48046875], [-0.006199976895004511, -0.0023638010025024414, 0.9999780058860779, -206.4172821044922], [0.0, 0.0, 0.0, 1.0]], [[0.8648965954780579, -0.5019232630729675, 0.005185699090361595, -25868.8125], [0.5019067525863647, 0.8649115562438965, 0.0041924430988729, 42439.8984375], [-0.006589456927031279, -0.0010232921922579408, 0.999977707862854, -206.41741943359375], [0.0, 0.0, 0.0, 1.0]], [[0.8430618643760681, -0.5377078652381897, 0.01081141922622919, -25869.115234375], [0.5376875400543213, 0.8431296944618225, 0.00495230033993721, 42440.30859375], [-0.011778319254517555, 0.0016380699817091227, 0.9999292492866516, -206.41831970214844], [0.0, 0.0, 0.0, 1.0]], [[0.8184224963188171, -0.5744329690933228, 0.014532387256622314, -25869.43359375], [0.5743992924690247, 0.8185468912124634, 0.006814089138060808, 42440.69921875], [-0.01580967754125595, 0.0027705891989171505, 0.9998711943626404, -206.41830444335938], [0.0, 0.0, 0.0, 1.0]], [[0.7947708368301392, -0.6067686676979065, 0.013077614828944206, -25869.74609375], [0.6067038178443909, 0.7948780059814453, 0.008909616619348526, 42441.03515625], [-0.015801185742020607, 0.0008531354833394289, 0.9998747706413269, -206.41603088378906], [0.0, 0.0, 0.0, 1.0]], [[0.7670616507530212, -0.641515851020813, 0.008588274009525776, -25870.11328125], [0.6414375901222229, 0.7671059966087341, 0.010304048657417297, 42441.3828125], [-0.013198326341807842, -0.0023949977476149797, 0.9999100565910339, -206.414306640625], [0.0, 0.0, 0.0, 1.0]], [[0.7368564009666443, -0.6760492324829102, 0.00031299772672355175, -25870.5], [0.6760290265083313, 0.7368378639221191, 0.007392603904008865, 42441.7109375], [-0.005228393245488405, 
-0.005235692020505667, 0.9999726414680481, -206.41229248046875], [0.0, 0.0, 0.0, 1.0]], [[0.7037435173988342, -0.7104326486587524, -0.00551932118833065, -25870.896484375], [0.7104539275169373, 0.7037176489830017, 0.006060586776584387, 42442.015625], [-0.0004215957596898079, -0.008186323568224907, 0.9999663829803467, -206.4129638671875], [0.0, 0.0, 0.0, 1.0]], [[0.6680479645729065, -0.7440981864929199, -0.005466586444526911, -25871.296875], [0.7441145777702332, 0.6680036187171936, 0.008043430745601654, 42442.3046875], [-0.002333403332158923, -0.009441165253520012, 0.9999526739120483, -206.41700744628906], [0.0, 0.0, 0.0, 1.0]], [[0.6305491328239441, -0.7761412262916565, -0.0035465662367641926, -25871.703125], [0.7761000394821167, 0.6304510235786438, 0.014149969443678856, 42442.5859375], [-0.008746436797082424, -0.01167474128305912, 0.9998935461044312, -206.424072265625], [0.0, 0.0, 0.0, 1.0]], [[0.5920102596282959, -0.8059303760528564, -0.000274941383395344, -25872.1171875], [0.8056739568710327, 0.5918132662773132, 0.02542509138584137, 42442.8515625], [-0.020328139886260033, -0.015273429453372955, 0.9996767044067383, -206.4320526123047], [0.0, 0.0, 0.0, 1.0]], [[0.5511593222618103, -0.834399938583374, 0.00027149677043780684, -25872.54296875], [0.833995521068573, 0.5509023666381836, 0.030950842425227165, 42443.0859375], [-0.025974951684474945, -0.016832420602440834, 0.9995208382606506, -206.4402618408203], [0.0, 0.0, 0.0, 1.0]], [[0.5086172223091125, -0.8609925508499146, -0.0006164652295410633, -25872.978515625], [0.8604899048805237, 0.5082957148551941, 0.03453187644481659, 42443.28515625], [-0.0294183436781168, -0.018093973398208618, 0.9994033575057983, -206.44674682617188], [0.0, 0.0, 0.0, 1.0]], [[0.46440285444259644, -0.8855993151664734, -0.006622687913477421, -25873.427734375], [0.8853639364242554, 0.46407225728034973, 0.027704376727342606, 42443.44140625], [-0.021461572498083115, -0.01872948184609413, 0.9995942115783691, -206.45030212402344], [0.0, 0.0, 0.0, 
1.0]], [[0.4201521873474121, -0.90742427110672, -0.007309111766517162, -25873.873046875], [0.9072821736335754, 0.41990262269973755, 0.022821832448244095, 42443.5625], [-0.017639966681599617, -0.01622007042169571, 0.9997128248214722, -206.45501708984375], [0.0, 0.0, 0.0, 1.0]], [[0.3745909631252289, -0.9271487593650818, -0.008762475103139877, -25874.314453125], [0.9271277785301208, 0.374439537525177, 0.015133337117731571, 42443.66796875], [-0.010749838314950466, -0.01379274670034647, 0.9998470544815063, -206.4574432373047], [0.0, 0.0, 0.0, 1.0]], [[0.32845813035964966, -0.9444893002510071, -0.007429255172610283, -25874.748046875], [0.9443256258964539, 0.32822197675704956, 0.02278931811451912, 42443.77734375], [-0.019085822626948357, -0.014500974677503109, 0.9997127056121826, -206.46572875976562], [0.0, 0.0, 0.0, 1.0]], [[0.2878851890563965, -0.957622766494751, -0.008988169953227043, -25875.138671875], [0.9573529362678528, 0.28753918409347534, 0.02822261117398739, 42443.85546875], [-0.024442162364721298, -0.016729721799492836, 0.9995612502098083, -206.47186279296875], [0.0, 0.0, 0.0, 1.0]], [[0.2448306828737259, -0.9695145487785339, -0.009974535554647446, -25875.568359375], [0.9691076874732971, 0.24438582360744476, 0.03325444832444191, 42443.921875], [-0.029803037643432617, -0.017808109521865845, 0.9993971586227417, -206.4783935546875], [0.0, 0.0, 0.0, 1.0]], [[0.2023804634809494, -0.9792704582214355, -0.008457401767373085, -25875.984375], [0.9787707328796387, 0.20197635889053345, 0.03483264520764351, 42443.95703125], [-0.03240238502621651, -0.015327303670346737, 0.9993574023246765, -206.48423767089844], [0.0, 0.0, 0.0, 1.0]], [[0.15952762961387634, -0.9871727228164673, -0.00640119519084692, -25876.392578125], [0.9867985248565674, 0.15927709639072418, 0.02931538037955761, 42443.96484375], [-0.02791977860033512, -0.01099330373108387, 0.9995497465133667, -206.486083984375], [0.0, 0.0, 0.0, 1.0]], [[0.11745137721300125, -0.9930624961853027, -0.005650177597999573, 
-25876.79296875], [0.9928890466690063, 0.1173158809542656, 0.020206887274980545, 42443.94140625], [-0.019403845071792603, -0.007983325980603695, 0.9997798800468445, -206.4842987060547], [0.0, 0.0, 0.0, 1.0]], [[0.07856222987174988, -0.9968996047973633, -0.004378436133265495, -25877.18359375], [0.9967976808547974, 0.07848700135946274, 0.015300656668841839, 42443.91015625], [-0.01490956824272871, -0.005566469393670559, 0.9998733401298523, -206.48411560058594], [0.0, 0.0, 0.0, 1.0]], [[0.04374592378735542, -0.9990422129631042, 0.0009677597554400563, -25877.560546875], [0.999024510383606, 0.04375094920396805, 0.005985315889120102, 42443.86328125], [-0.0060219238512218, 0.0007049825508147478, 0.9999815821647644, -206.48187255859375], [0.0, 0.0, 0.0, 1.0]], [[0.009145686402916908, -0.9999576210975647, 0.0010620737448334694, -25877.931640625], [0.9999168515205383, 0.009154959581792355, 0.009081707336008549, 42443.8359375], [-0.009091045707464218, 0.0009789270116016269, 0.9999581575393677, -206.4818878173828], [0.0, 0.0, 0.0, 1.0]], [[-0.021791774779558182, -0.9997606873512268, -0.0019282166613265872, -25878.302734375], [0.9997121691703796, -0.021810002624988556, 0.009998093359172344, 42443.796875], [-0.010037754662334919, -0.0017097854288294911, 0.9999481439590454, -206.48187255859375], [0.0, 0.0, 0.0, 1.0]], [[-0.04503598064184189, -0.998975396156311, -0.004452184773981571, -25878.66015625], [0.9988904595375061, -0.04509267583489418, 0.013580190017819405, 42443.75], [-0.013767036609351635, -0.00383564829826355, 0.999897837638855, -206.48345947265625], [0.0, 0.0, 0.0, 1.0]], [[-0.06583552807569504, -0.9978262782096863, -0.0028911656700074673, -25879.005859375], [0.9977577328681946, -0.06586544215679169, 0.011884644627571106, 42443.6953125], [-0.012049240060150623, -0.0021022511646151543, 0.9999251365661621, -206.48370361328125], [0.0, 0.0, 0.0, 1.0]], [[-0.08594777435064316, -0.9962995052337646, 0.0005928988684900105, -25879.337890625], [0.9962764978408813, 
-0.08594170957803726, 0.006867391988635063, 42443.63671875], [-0.006791023537516594, 0.001180928316898644, 0.9999762177467346, -206.48129272460938], [0.0, 0.0, 0.0, 1.0]], [[-0.1019899919629097, -0.9947791695594788, 0.0035288988146930933, -25879.658203125], [0.9947800636291504, -0.10197719186544418, 0.0036345594562590122, 42443.57421875], [-0.003255717223510146, 0.003881166921928525, 0.9999871253967285, -206.47999572753906], [0.0, 0.0, 0.0, 1.0]], [[-0.1137094795703888, -0.9934934973716736, 0.006400599610060453, -25879.970703125], [0.9935133457183838, -0.1136995255947113, 0.0018985765054821968, 42443.515625], [-0.0011584783205762506, 0.006574967876076698, 0.9999776482582092, -206.47792053222656], [0.0, 0.0, 0.0, 1.0]], [[-0.12333176285028458, -0.9923426508903503, 0.0067226216197013855, -25880.279296875], [0.9923630356788635, -0.1233135238289833, 0.0030676652677357197, 42443.46875], [-0.002215186133980751, 0.007049621548503637, 0.9999727010726929, -206.47537231445312], [0.0, 0.0, 0.0, 1.0]], [[-0.13265904784202576, -0.9911472797393799, 0.005359759088605642, -25880.578125], [0.991152286529541, -0.1326322704553604, 0.00507546728476882, 42443.41796875], [-0.004319657105952501, 0.005985644180327654, 0.9999727606773376, -206.47413635253906], [0.0, 0.0, 0.0, 1.0]], [[-0.14054642617702484, -0.9900622367858887, 0.004853961057960987, -25880.86328125], [0.9900584816932678, -0.14051495492458344, 0.006309453397989273, 42443.37109375], [-0.005564697086811066, 0.0056924764066934586, 0.9999683499336243, -206.47271728515625], [0.0, 0.0, 0.0, 1.0]], [[-0.14566670358181, -0.9893128871917725, 0.006421603262424469, -25881.13671875], [0.9893265962600708, -0.14563800394535065, 0.004734941758215427, 42443.31640625], [-0.003749108873307705, 0.007042786572128534, 0.9999681711196899, -206.47056579589844], [0.0, 0.0, 0.0, 1.0]], [[-0.14886897802352905, -0.9888240098953247, 0.00807204656302929, -25881.39453125], [0.9888569116592407, -0.14886151254177094, 0.0015215196181088686, 42443.26171875], 
[-0.0003028979990631342, 0.008208606392145157, 0.9999662637710571, -206.46749877929688], [0.0, 0.0, 0.0, 1.0]], [[-0.15140074491500854, -0.9884417057037354, 0.007798394188284874, -25881.642578125], [0.9884664416313171, -0.15142257511615753, -0.0022854479029774666, 42443.2109375], [0.003439884399995208, 0.007362432312220335, 0.9999669790267944, -206.46426391601562], [0.0, 0.0, 0.0, 1.0]], [[-0.15264204144477844, -0.9882662892341614, 0.005483711138367653, -25881.884765625], [0.9882764220237732, -0.1526566743850708, -0.002353054704144597, 42443.17578125], [0.003162569832056761, 0.005060248542577028, 0.9999821782112122, -206.4625701904297], [0.0, 0.0, 0.0, 1.0]], [[-0.1522809863090515, -0.9883357882499695, 0.0017317464808002114, -25882.119140625], [0.9883361458778381, -0.15227806568145752, 0.001709258765913546, 42443.14453125], [-0.001425613765604794, 0.001971835270524025, 0.9999970197677612, -206.46347045898438], [0.0, 0.0, 0.0, 1.0]], [[-0.1517137736082077, -0.9884241819381714, 0.0007155201165005565, -25882.337890625], [0.9884151220321655, -0.1517091691493988, 0.004459395073354244, 42443.12109375], [-0.00429922342300415, 0.0013837829465046525, 0.9999898076057434, -206.46368408203125], [0.0, 0.0, 0.0, 1.0]], [[-0.15210115909576416, -0.9883601665496826, 0.0030614263378083706, -25882.537109375], [0.9883574843406677, -0.15208730101585388, 0.004346288274973631, 42443.08984375], [-0.0038300948217511177, 0.00368685950525105, 0.9999858736991882, -206.4633331298828], [0.0, 0.0, 0.0, 1.0]], [[-0.15360407531261444, -0.9881133437156677, 0.006152397021651268, -25882.724609375], [0.9881324172019958, -0.1536022424697876, 0.0007687362376600504, 42443.046875], [0.00018542389443609864, 0.006197463721036911, 0.9999808073043823, -206.46128845214844], [0.0, 0.0, 0.0, 1.0]], [[-0.15375353395938873, -0.9880797266960144, 0.007630124222487211, -25882.904296875], [0.9880913496017456, -0.15379256010055542, -0.004819062072783709, 42443.00390625], [0.005935073364526033, 0.006798312533646822, 
0.9999592304229736, -206.4587860107422], [0.0, 0.0, 0.0, 1.0]], [[-0.15249831974506378, -0.9882723093032837, 0.007881645113229752, -25883.078125], [0.9882591366767883, -0.15256235003471375, -0.008282307535409927, 42442.97265625], [0.00938761793076992, 0.006526069715619087, 0.9999346137046814, -206.4568328857422], [0.0, 0.0, 0.0, 1.0]], [[-0.1523113250732422, -0.9883034229278564, 0.007596008945256472, -25883.2421875], [0.9882879257202148, -0.15237295627593994, -0.008328909054398537, 42442.9453125], [0.00938891526311636, 0.006238457281142473, 0.9999364614486694, -206.45523071289062], [0.0, 0.0, 0.0, 1.0]], [[-0.15355505049228668, -0.9881144762039185, 0.007110387086868286, -25883.3984375], [0.9881073236465454, -0.15360461175441742, -0.00703954603523016, 42442.92578125], [0.008048065938055515, 0.00594486715272069, 0.9999499320983887, -206.45477294921875], [0.0, 0.0, 0.0, 1.0]], [[-0.15395045280456543, -0.9880588054656982, 0.006241508759558201, -25883.546875], [0.9880595207214355, -0.15398363769054413, -0.005234688054770231, 42442.90234375], [0.006133269984275103, 0.005361099727451801, 0.9999668002128601, -206.45481872558594], [0.0, 0.0, 0.0, 1.0]], [[-0.1531049609184265, -0.9881964325904846, 0.005165375769138336, -25883.685546875], [0.9881920218467712, -0.15313155949115753, -0.0052168527618050575, 42442.87890625], [0.005946257617324591, 0.004305657465010881, 0.9999730587005615, -206.45433044433594], [0.0, 0.0, 0.0, 1.0]], [[-0.15292084217071533, -0.9882299900054932, 0.004088581074029207, -25883.8203125], [0.9882181286811829, -0.15294291079044342, -0.005777571815997362, 42442.859375], [0.006334889680147171, 0.003156899008899927, 0.9999749660491943, -206.45291137695312], [0.0, 0.0, 0.0, 1.0]], [[-0.1537785679101944, -0.9880999326705933, 0.0032567516900599003, -25883.947265625], [0.9880843758583069, -0.15379592776298523, -0.006005204748362303, 42442.8359375], [0.006434618029743433, 0.0022944738157093525, 0.999976634979248, -206.45314025878906], [0.0, 0.0, 0.0, 1.0]], 
[[-0.15415728092193604, -0.9880414009094238, 0.0030966924969106913, -25884.0625], [0.9880200028419495, -0.1541752815246582, -0.006806832738220692, 42442.81640625], [0.0072028664872050285, 0.002010271418839693, 0.9999720454216003, -206.45245361328125], [0.0, 0.0, 0.0, 1.0]], [[-0.1535903960466385, -0.988130509853363, 0.0028390700463205576, -25884.173828125], [0.9881068468093872, -0.15360696613788605, -0.007043391931802034, 42442.80078125], [0.007395891938358545, 0.001723507302813232, 0.9999711513519287, -206.4518585205078], [0.0, 0.0, 0.0, 1.0]], [[-0.1535915732383728, -0.9881308078765869, 0.0026648840866982937, -25884.275390625], [0.9881060123443604, -0.15360704064369202, -0.007168355397880077, 42442.78515625], [0.007492617703974247, 0.0015321887331083417, 0.9999707341194153, -206.45211791992188], [0.0, 0.0, 0.0, 1.0]], [[-0.1540646255016327, -0.9880567193031311, 0.0028120612259954214, -25884.369140625], [0.9880344867706299, -0.15408067405223846, -0.0068527101539075375, 42442.76953125], [0.007204151246696711, 0.0017226533964276314, 0.9999725818634033, -206.45187377929688], [0.0, 0.0, 0.0, 1.0]], [[-0.1541566252708435, -0.9880415797233582, 0.003078042296692729, -25884.45703125], [0.9880165457725525, -0.15417544543743134, -0.007295326795428991, 42442.75], [0.007682644762098789, 0.0019165335688740015, 0.9999685883522034, -206.45155334472656], [0.0, 0.0, 0.0, 1.0]], [[-0.15387247502803802, -0.9880862832069397, 0.0029624898452311754, -25884.5390625], [0.9880569577217102, -0.15389131009578705, -0.007799874525517225, 42442.73828125], [0.008162850514054298, 0.0017269228119403124, 0.9999651312828064, -206.4517822265625], [0.0, 0.0, 0.0, 1.0]], [[-0.1541581004858017, -0.9880422353744507, 0.002790621016174555, -25884.615234375], [0.9880116581916809, -0.15417571365833282, -0.007924084551632404, 42442.7265625], [0.008259575814008713, 0.0015356042422354221, 0.9999646544456482, -206.45103454589844], [0.0, 0.0, 0.0, 1.0]], [[-0.15453530848026276, -0.9879828095436096, 
0.0029829617124050856, -25884.68359375], [0.987952709197998, -0.15455445647239685, -0.007892535999417305, 42442.71484375], [0.008258719928562641, 0.0017273497069254518, 0.9999644160270691, -206.4507598876953], [0.0, 0.0, 0.0, 1.0]], [[-0.1545327752828598, -0.9879820346832275, 0.0033310928847640753, -25884.74609375], [0.9879546761512756, -0.15455429255962372, -0.007642277050763369, 42442.703125], [0.008065267466008663, 0.0021099864970892668, 0.999965250492096, -206.45123291015625], [0.0, 0.0, 0.0, 1.0]], [[-0.15462499856948853, -0.9879661202430725, 0.0037440904416143894, -25884.8046875], [0.9879433512687683, -0.15464867651462555, -0.007187115028500557, 42442.6953125], [0.007679644972085953, 0.0025876418221741915, 0.9999671578407288, -206.4515838623047], [0.0, 0.0, 0.0, 1.0]], [[-0.15528658032417297, -0.987859308719635, 0.004480980336666107, -25884.859375], [0.9878491163253784, -0.15531088411808014, -0.005706509575247765, 42442.69140625], [0.006333173252642155, 0.0035403885412961245, 0.9999736547470093, -206.45138549804688], [0.0, 0.0, 0.0, 1.0]], [[-0.155948668718338, -0.9877479076385498, 0.0058321841061115265, -25884.916015625], [0.9877539277076721, -0.15597225725650787, -0.0038333842530846596, 42442.6875], [0.004696075804531574, 0.005162952002137899, 0.9999756217002869, -206.45223999023438], [0.0, 0.0, 0.0, 1.0]], [[-0.15566788613796234, -0.9877868890762329, 0.006672567222267389, -25884.9765625], [0.9878034591674805, -0.15568695962429047, -0.002435954986140132, 42442.6796875], [0.0034450360108166933, 0.006211985368281603, 0.9999747276306152, -206.451904296875], [0.0, 0.0, 0.0, 1.0]], [[-0.15548256039619446, -0.9878137707710266, 0.007007334381341934, -25885.03515625], [0.9878352880477905, -0.15549665689468384, -0.0015088266227394342, 42442.671875], [0.002580056432634592, 0.006687495857477188, 0.9999743103981018, -206.4523162841797], [0.0, 0.0, 0.0, 1.0]], [[-0.155766099691391, -0.9877670407295227, 0.007291668560355902, -25885.09765625], [0.987790584564209, 
-0.1557805985212326, -0.0014607385965064168, 42442.66015625], [0.0025787698104977608, 0.006975108291953802, 0.9999723434448242, -206.451904296875], [0.0, 0.0, 0.0, 1.0]]]}, "original_image_size": {"0": [1280, 1920], "1": [1280, 1920], "2": [1280, 1920], "3": [886, 1920], "4": [886, 1920]}, "relative_image_path": {"0": ["training/796/images/000_0.jpg", "training/796/images/001_0.jpg", "training/796/images/002_0.jpg", "training/796/images/003_0.jpg", "training/796/images/004_0.jpg", "training/796/images/005_0.jpg", "training/796/images/006_0.jpg", "training/796/images/007_0.jpg", "training/796/images/008_0.jpg", "training/796/images/009_0.jpg", "training/796/images/010_0.jpg", "training/796/images/011_0.jpg", "training/796/images/012_0.jpg", "training/796/images/013_0.jpg", "training/796/images/014_0.jpg", "training/796/images/015_0.jpg", "training/796/images/016_0.jpg", "training/796/images/017_0.jpg", "training/796/images/018_0.jpg", "training/796/images/019_0.jpg", "training/796/images/020_0.jpg", "training/796/images/021_0.jpg", "training/796/images/022_0.jpg", "training/796/images/023_0.jpg", "training/796/images/024_0.jpg", "training/796/images/025_0.jpg", "training/796/images/026_0.jpg", "training/796/images/027_0.jpg", "training/796/images/028_0.jpg", "training/796/images/029_0.jpg", "training/796/images/030_0.jpg", "training/796/images/031_0.jpg", "training/796/images/032_0.jpg", "training/796/images/033_0.jpg", "training/796/images/034_0.jpg", "training/796/images/035_0.jpg", "training/796/images/036_0.jpg", "training/796/images/037_0.jpg", "training/796/images/038_0.jpg", "training/796/images/039_0.jpg", "training/796/images/040_0.jpg", "training/796/images/041_0.jpg", "training/796/images/042_0.jpg", "training/796/images/043_0.jpg", "training/796/images/044_0.jpg", "training/796/images/045_0.jpg", "training/796/images/046_0.jpg", "training/796/images/047_0.jpg", "training/796/images/048_0.jpg", "training/796/images/049_0.jpg", 
"training/796/images/050_0.jpg", "training/796/images/051_0.jpg", "training/796/images/052_0.jpg", "training/796/images/053_0.jpg", "training/796/images/054_0.jpg", "training/796/images/055_0.jpg", "training/796/images/056_0.jpg", "training/796/images/057_0.jpg", "training/796/images/058_0.jpg", "training/796/images/059_0.jpg", "training/796/images/060_0.jpg", "training/796/images/061_0.jpg", "training/796/images/062_0.jpg", "training/796/images/063_0.jpg", "training/796/images/064_0.jpg", "training/796/images/065_0.jpg", "training/796/images/066_0.jpg", "training/796/images/067_0.jpg", "training/796/images/068_0.jpg", "training/796/images/069_0.jpg", "training/796/images/070_0.jpg", "training/796/images/071_0.jpg", "training/796/images/072_0.jpg", "training/796/images/073_0.jpg", "training/796/images/074_0.jpg", "training/796/images/075_0.jpg", "training/796/images/076_0.jpg", "training/796/images/077_0.jpg", "training/796/images/078_0.jpg", "training/796/images/079_0.jpg", "training/796/images/080_0.jpg", "training/796/images/081_0.jpg", "training/796/images/082_0.jpg", "training/796/images/083_0.jpg", "training/796/images/084_0.jpg", "training/796/images/085_0.jpg", "training/796/images/086_0.jpg", "training/796/images/087_0.jpg", "training/796/images/088_0.jpg", "training/796/images/089_0.jpg", "training/796/images/090_0.jpg", "training/796/images/091_0.jpg", "training/796/images/092_0.jpg", "training/796/images/093_0.jpg", "training/796/images/094_0.jpg", "training/796/images/095_0.jpg", "training/796/images/096_0.jpg", "training/796/images/097_0.jpg", "training/796/images/098_0.jpg", "training/796/images/099_0.jpg", "training/796/images/100_0.jpg", "training/796/images/101_0.jpg", "training/796/images/102_0.jpg", "training/796/images/103_0.jpg", "training/796/images/104_0.jpg", "training/796/images/105_0.jpg", "training/796/images/106_0.jpg", "training/796/images/107_0.jpg", "training/796/images/108_0.jpg", "training/796/images/109_0.jpg", 
"training/796/images/110_0.jpg", "training/796/images/111_0.jpg", "training/796/images/112_0.jpg", "training/796/images/113_0.jpg", "training/796/images/114_0.jpg", "training/796/images/115_0.jpg", "training/796/images/116_0.jpg", "training/796/images/117_0.jpg", "training/796/images/118_0.jpg", "training/796/images/119_0.jpg", "training/796/images/120_0.jpg", "training/796/images/121_0.jpg", "training/796/images/122_0.jpg", "training/796/images/123_0.jpg", "training/796/images/124_0.jpg", "training/796/images/125_0.jpg", "training/796/images/126_0.jpg", "training/796/images/127_0.jpg", "training/796/images/128_0.jpg", "training/796/images/129_0.jpg", "training/796/images/130_0.jpg", "training/796/images/131_0.jpg", "training/796/images/132_0.jpg", "training/796/images/133_0.jpg", "training/796/images/134_0.jpg", "training/796/images/135_0.jpg", "training/796/images/136_0.jpg", "training/796/images/137_0.jpg", "training/796/images/138_0.jpg", "training/796/images/139_0.jpg", "training/796/images/140_0.jpg", "training/796/images/141_0.jpg", "training/796/images/142_0.jpg", "training/796/images/143_0.jpg", "training/796/images/144_0.jpg", "training/796/images/145_0.jpg", "training/796/images/146_0.jpg", "training/796/images/147_0.jpg", "training/796/images/148_0.jpg", "training/796/images/149_0.jpg", "training/796/images/150_0.jpg", "training/796/images/151_0.jpg", "training/796/images/152_0.jpg", "training/796/images/153_0.jpg", "training/796/images/154_0.jpg", "training/796/images/155_0.jpg", "training/796/images/156_0.jpg", "training/796/images/157_0.jpg", "training/796/images/158_0.jpg", "training/796/images/159_0.jpg", "training/796/images/160_0.jpg", "training/796/images/161_0.jpg", "training/796/images/162_0.jpg", "training/796/images/163_0.jpg", "training/796/images/164_0.jpg", "training/796/images/165_0.jpg", "training/796/images/166_0.jpg", "training/796/images/167_0.jpg", "training/796/images/168_0.jpg", "training/796/images/169_0.jpg", 
"training/796/images/170_0.jpg", "training/796/images/171_0.jpg", "training/796/images/172_0.jpg", "training/796/images/173_0.jpg", "training/796/images/174_0.jpg", "training/796/images/175_0.jpg", "training/796/images/176_0.jpg", "training/796/images/177_0.jpg", "training/796/images/178_0.jpg", "training/796/images/179_0.jpg", "training/796/images/180_0.jpg", "training/796/images/181_0.jpg", "training/796/images/182_0.jpg", "training/796/images/183_0.jpg", "training/796/images/184_0.jpg", "training/796/images/185_0.jpg", "training/796/images/186_0.jpg", "training/796/images/187_0.jpg", "training/796/images/188_0.jpg", "training/796/images/189_0.jpg", "training/796/images/190_0.jpg", "training/796/images/191_0.jpg", "training/796/images/192_0.jpg", "training/796/images/193_0.jpg", "training/796/images/194_0.jpg", "training/796/images/195_0.jpg", "training/796/images/196_0.jpg", "training/796/images/197_0.jpg"], "1": ["training/796/images/000_1.jpg", "training/796/images/001_1.jpg", "training/796/images/002_1.jpg", "training/796/images/003_1.jpg", "training/796/images/004_1.jpg", "training/796/images/005_1.jpg", "training/796/images/006_1.jpg", "training/796/images/007_1.jpg", "training/796/images/008_1.jpg", "training/796/images/009_1.jpg", "training/796/images/010_1.jpg", "training/796/images/011_1.jpg", "training/796/images/012_1.jpg", "training/796/images/013_1.jpg", "training/796/images/014_1.jpg", "training/796/images/015_1.jpg", "training/796/images/016_1.jpg", "training/796/images/017_1.jpg", "training/796/images/018_1.jpg", "training/796/images/019_1.jpg", "training/796/images/020_1.jpg", "training/796/images/021_1.jpg", "training/796/images/022_1.jpg", "training/796/images/023_1.jpg", "training/796/images/024_1.jpg", "training/796/images/025_1.jpg", "training/796/images/026_1.jpg", "training/796/images/027_1.jpg", "training/796/images/028_1.jpg", "training/796/images/029_1.jpg", "training/796/images/030_1.jpg", "training/796/images/031_1.jpg", 
"training/796/images/032_1.jpg", "training/796/images/033_1.jpg", "training/796/images/034_1.jpg", "training/796/images/035_1.jpg", "training/796/images/036_1.jpg", "training/796/images/037_1.jpg", "training/796/images/038_1.jpg", "training/796/images/039_1.jpg", "training/796/images/040_1.jpg", "training/796/images/041_1.jpg", "training/796/images/042_1.jpg", "training/796/images/043_1.jpg", "training/796/images/044_1.jpg", "training/796/images/045_1.jpg", "training/796/images/046_1.jpg", "training/796/images/047_1.jpg", "training/796/images/048_1.jpg", "training/796/images/049_1.jpg", "training/796/images/050_1.jpg", "training/796/images/051_1.jpg", "training/796/images/052_1.jpg", "training/796/images/053_1.jpg", "training/796/images/054_1.jpg", "training/796/images/055_1.jpg", "training/796/images/056_1.jpg", "training/796/images/057_1.jpg", "training/796/images/058_1.jpg", "training/796/images/059_1.jpg", "training/796/images/060_1.jpg", "training/796/images/061_1.jpg", "training/796/images/062_1.jpg", "training/796/images/063_1.jpg", "training/796/images/064_1.jpg", "training/796/images/065_1.jpg", "training/796/images/066_1.jpg", "training/796/images/067_1.jpg", "training/796/images/068_1.jpg", "training/796/images/069_1.jpg", "training/796/images/070_1.jpg", "training/796/images/071_1.jpg", "training/796/images/072_1.jpg", "training/796/images/073_1.jpg", "training/796/images/074_1.jpg", "training/796/images/075_1.jpg", "training/796/images/076_1.jpg", "training/796/images/077_1.jpg", "training/796/images/078_1.jpg", "training/796/images/079_1.jpg", "training/796/images/080_1.jpg", "training/796/images/081_1.jpg", "training/796/images/082_1.jpg", "training/796/images/083_1.jpg", "training/796/images/084_1.jpg", "training/796/images/085_1.jpg", "training/796/images/086_1.jpg", "training/796/images/087_1.jpg", "training/796/images/088_1.jpg", "training/796/images/089_1.jpg", "training/796/images/090_1.jpg", "training/796/images/091_1.jpg", 
"training/796/images/092_1.jpg", "training/796/images/093_1.jpg", "training/796/images/094_1.jpg", "training/796/images/095_1.jpg", "training/796/images/096_1.jpg", "training/796/images/097_1.jpg", "training/796/images/098_1.jpg", "training/796/images/099_1.jpg", "training/796/images/100_1.jpg", "training/796/images/101_1.jpg", "training/796/images/102_1.jpg", "training/796/images/103_1.jpg", "training/796/images/104_1.jpg", "training/796/images/105_1.jpg", "training/796/images/106_1.jpg", "training/796/images/107_1.jpg", "training/796/images/108_1.jpg", "training/796/images/109_1.jpg", "training/796/images/110_1.jpg", "training/796/images/111_1.jpg", "training/796/images/112_1.jpg", "training/796/images/113_1.jpg", "training/796/images/114_1.jpg", "training/796/images/115_1.jpg", "training/796/images/116_1.jpg", "training/796/images/117_1.jpg", "training/796/images/118_1.jpg", "training/796/images/119_1.jpg", "training/796/images/120_1.jpg", "training/796/images/121_1.jpg", "training/796/images/122_1.jpg", "training/796/images/123_1.jpg", "training/796/images/124_1.jpg", "training/796/images/125_1.jpg", "training/796/images/126_1.jpg", "training/796/images/127_1.jpg", "training/796/images/128_1.jpg", "training/796/images/129_1.jpg", "training/796/images/130_1.jpg", "training/796/images/131_1.jpg", "training/796/images/132_1.jpg", "training/796/images/133_1.jpg", "training/796/images/134_1.jpg", "training/796/images/135_1.jpg", "training/796/images/136_1.jpg", "training/796/images/137_1.jpg", "training/796/images/138_1.jpg", "training/796/images/139_1.jpg", "training/796/images/140_1.jpg", "training/796/images/141_1.jpg", "training/796/images/142_1.jpg", "training/796/images/143_1.jpg", "training/796/images/144_1.jpg", "training/796/images/145_1.jpg", "training/796/images/146_1.jpg", "training/796/images/147_1.jpg", "training/796/images/148_1.jpg", "training/796/images/149_1.jpg", "training/796/images/150_1.jpg", "training/796/images/151_1.jpg", 
"training/796/images/152_1.jpg", "training/796/images/153_1.jpg", "training/796/images/154_1.jpg", "training/796/images/155_1.jpg", "training/796/images/156_1.jpg", "training/796/images/157_1.jpg", "training/796/images/158_1.jpg", "training/796/images/159_1.jpg", "training/796/images/160_1.jpg", "training/796/images/161_1.jpg", "training/796/images/162_1.jpg", "training/796/images/163_1.jpg", "training/796/images/164_1.jpg", "training/796/images/165_1.jpg", "training/796/images/166_1.jpg", "training/796/images/167_1.jpg", "training/796/images/168_1.jpg", "training/796/images/169_1.jpg", "training/796/images/170_1.jpg", "training/796/images/171_1.jpg", "training/796/images/172_1.jpg", "training/796/images/173_1.jpg", "training/796/images/174_1.jpg", "training/796/images/175_1.jpg", "training/796/images/176_1.jpg", "training/796/images/177_1.jpg", "training/796/images/178_1.jpg", "training/796/images/179_1.jpg", "training/796/images/180_1.jpg", "training/796/images/181_1.jpg", "training/796/images/182_1.jpg", "training/796/images/183_1.jpg", "training/796/images/184_1.jpg", "training/796/images/185_1.jpg", "training/796/images/186_1.jpg", "training/796/images/187_1.jpg", "training/796/images/188_1.jpg", "training/796/images/189_1.jpg", "training/796/images/190_1.jpg", "training/796/images/191_1.jpg", "training/796/images/192_1.jpg", "training/796/images/193_1.jpg", "training/796/images/194_1.jpg", "training/796/images/195_1.jpg", "training/796/images/196_1.jpg", "training/796/images/197_1.jpg"], "2": ["training/796/images/000_2.jpg", "training/796/images/001_2.jpg", "training/796/images/002_2.jpg", "training/796/images/003_2.jpg", "training/796/images/004_2.jpg", "training/796/images/005_2.jpg", "training/796/images/006_2.jpg", "training/796/images/007_2.jpg", "training/796/images/008_2.jpg", "training/796/images/009_2.jpg", "training/796/images/010_2.jpg", "training/796/images/011_2.jpg", "training/796/images/012_2.jpg", "training/796/images/013_2.jpg", 
"training/796/images/014_2.jpg", "training/796/images/015_2.jpg", "training/796/images/016_2.jpg", "training/796/images/017_2.jpg", "training/796/images/018_2.jpg", "training/796/images/019_2.jpg", "training/796/images/020_2.jpg", "training/796/images/021_2.jpg", "training/796/images/022_2.jpg", "training/796/images/023_2.jpg", "training/796/images/024_2.jpg", "training/796/images/025_2.jpg", "training/796/images/026_2.jpg", "training/796/images/027_2.jpg", "training/796/images/028_2.jpg", "training/796/images/029_2.jpg", "training/796/images/030_2.jpg", "training/796/images/031_2.jpg", "training/796/images/032_2.jpg", "training/796/images/033_2.jpg", "training/796/images/034_2.jpg", "training/796/images/035_2.jpg", "training/796/images/036_2.jpg", "training/796/images/037_2.jpg", "training/796/images/038_2.jpg", "training/796/images/039_2.jpg", "training/796/images/040_2.jpg", "training/796/images/041_2.jpg", "training/796/images/042_2.jpg", "training/796/images/043_2.jpg", "training/796/images/044_2.jpg", "training/796/images/045_2.jpg", "training/796/images/046_2.jpg", "training/796/images/047_2.jpg", "training/796/images/048_2.jpg", "training/796/images/049_2.jpg", "training/796/images/050_2.jpg", "training/796/images/051_2.jpg", "training/796/images/052_2.jpg", "training/796/images/053_2.jpg", "training/796/images/054_2.jpg", "training/796/images/055_2.jpg", "training/796/images/056_2.jpg", "training/796/images/057_2.jpg", "training/796/images/058_2.jpg", "training/796/images/059_2.jpg", "training/796/images/060_2.jpg", "training/796/images/061_2.jpg", "training/796/images/062_2.jpg", "training/796/images/063_2.jpg", "training/796/images/064_2.jpg", "training/796/images/065_2.jpg", "training/796/images/066_2.jpg", "training/796/images/067_2.jpg", "training/796/images/068_2.jpg", "training/796/images/069_2.jpg", "training/796/images/070_2.jpg", "training/796/images/071_2.jpg", "training/796/images/072_2.jpg", "training/796/images/073_2.jpg", 
"training/796/images/074_2.jpg", "training/796/images/075_2.jpg", "training/796/images/076_2.jpg", "training/796/images/077_2.jpg", "training/796/images/078_2.jpg", "training/796/images/079_2.jpg", "training/796/images/080_2.jpg", "training/796/images/081_2.jpg", "training/796/images/082_2.jpg", "training/796/images/083_2.jpg", "training/796/images/084_2.jpg", "training/796/images/085_2.jpg", "training/796/images/086_2.jpg", "training/796/images/087_2.jpg", "training/796/images/088_2.jpg", "training/796/images/089_2.jpg", "training/796/images/090_2.jpg", "training/796/images/091_2.jpg", "training/796/images/092_2.jpg", "training/796/images/093_2.jpg", "training/796/images/094_2.jpg", "training/796/images/095_2.jpg", "training/796/images/096_2.jpg", "training/796/images/097_2.jpg", "training/796/images/098_2.jpg", "training/796/images/099_2.jpg", "training/796/images/100_2.jpg", "training/796/images/101_2.jpg", "training/796/images/102_2.jpg", "training/796/images/103_2.jpg", "training/796/images/104_2.jpg", "training/796/images/105_2.jpg", "training/796/images/106_2.jpg", "training/796/images/107_2.jpg", "training/796/images/108_2.jpg", "training/796/images/109_2.jpg", "training/796/images/110_2.jpg", "training/796/images/111_2.jpg", "training/796/images/112_2.jpg", "training/796/images/113_2.jpg", "training/796/images/114_2.jpg", "training/796/images/115_2.jpg", "training/796/images/116_2.jpg", "training/796/images/117_2.jpg", "training/796/images/118_2.jpg", "training/796/images/119_2.jpg", "training/796/images/120_2.jpg", "training/796/images/121_2.jpg", "training/796/images/122_2.jpg", "training/796/images/123_2.jpg", "training/796/images/124_2.jpg", "training/796/images/125_2.jpg", "training/796/images/126_2.jpg", "training/796/images/127_2.jpg", "training/796/images/128_2.jpg", "training/796/images/129_2.jpg", "training/796/images/130_2.jpg", "training/796/images/131_2.jpg", "training/796/images/132_2.jpg", "training/796/images/133_2.jpg", 
"training/796/images/134_2.jpg", "training/796/images/135_2.jpg", "training/796/images/136_2.jpg", "training/796/images/137_2.jpg", "training/796/images/138_2.jpg", "training/796/images/139_2.jpg", "training/796/images/140_2.jpg", "training/796/images/141_2.jpg", "training/796/images/142_2.jpg", "training/796/images/143_2.jpg", "training/796/images/144_2.jpg", "training/796/images/145_2.jpg", "training/796/images/146_2.jpg", "training/796/images/147_2.jpg", "training/796/images/148_2.jpg", "training/796/images/149_2.jpg", "training/796/images/150_2.jpg", "training/796/images/151_2.jpg", "training/796/images/152_2.jpg", "training/796/images/153_2.jpg", "training/796/images/154_2.jpg", "training/796/images/155_2.jpg", "training/796/images/156_2.jpg", "training/796/images/157_2.jpg", "training/796/images/158_2.jpg", "training/796/images/159_2.jpg", "training/796/images/160_2.jpg", "training/796/images/161_2.jpg", "training/796/images/162_2.jpg", "training/796/images/163_2.jpg", "training/796/images/164_2.jpg", "training/796/images/165_2.jpg", "training/796/images/166_2.jpg", "training/796/images/167_2.jpg", "training/796/images/168_2.jpg", "training/796/images/169_2.jpg", "training/796/images/170_2.jpg", "training/796/images/171_2.jpg", "training/796/images/172_2.jpg", "training/796/images/173_2.jpg", "training/796/images/174_2.jpg", "training/796/images/175_2.jpg", "training/796/images/176_2.jpg", "training/796/images/177_2.jpg", "training/796/images/178_2.jpg", "training/796/images/179_2.jpg", "training/796/images/180_2.jpg", "training/796/images/181_2.jpg", "training/796/images/182_2.jpg", "training/796/images/183_2.jpg", "training/796/images/184_2.jpg", "training/796/images/185_2.jpg", "training/796/images/186_2.jpg", "training/796/images/187_2.jpg", "training/796/images/188_2.jpg", "training/796/images/189_2.jpg", "training/796/images/190_2.jpg", "training/796/images/191_2.jpg", "training/796/images/192_2.jpg", "training/796/images/193_2.jpg", 
"training/796/images/194_2.jpg", "training/796/images/195_2.jpg", "training/796/images/196_2.jpg", "training/796/images/197_2.jpg"], "3": ["training/796/images/000_3.jpg", "training/796/images/001_3.jpg", "training/796/images/002_3.jpg", "training/796/images/003_3.jpg", "training/796/images/004_3.jpg", "training/796/images/005_3.jpg", "training/796/images/006_3.jpg", "training/796/images/007_3.jpg", "training/796/images/008_3.jpg", "training/796/images/009_3.jpg", "training/796/images/010_3.jpg", "training/796/images/011_3.jpg", "training/796/images/012_3.jpg", "training/796/images/013_3.jpg", "training/796/images/014_3.jpg", "training/796/images/015_3.jpg", "training/796/images/016_3.jpg", "training/796/images/017_3.jpg", "training/796/images/018_3.jpg", "training/796/images/019_3.jpg", "training/796/images/020_3.jpg", "training/796/images/021_3.jpg", "training/796/images/022_3.jpg", "training/796/images/023_3.jpg", "training/796/images/024_3.jpg", "training/796/images/025_3.jpg", "training/796/images/026_3.jpg", "training/796/images/027_3.jpg", "training/796/images/028_3.jpg", "training/796/images/029_3.jpg", "training/796/images/030_3.jpg", "training/796/images/031_3.jpg", "training/796/images/032_3.jpg", "training/796/images/033_3.jpg", "training/796/images/034_3.jpg", "training/796/images/035_3.jpg", "training/796/images/036_3.jpg", "training/796/images/037_3.jpg", "training/796/images/038_3.jpg", "training/796/images/039_3.jpg", "training/796/images/040_3.jpg", "training/796/images/041_3.jpg", "training/796/images/042_3.jpg", "training/796/images/043_3.jpg", "training/796/images/044_3.jpg", "training/796/images/045_3.jpg", "training/796/images/046_3.jpg", "training/796/images/047_3.jpg", "training/796/images/048_3.jpg", "training/796/images/049_3.jpg", "training/796/images/050_3.jpg", "training/796/images/051_3.jpg", "training/796/images/052_3.jpg", "training/796/images/053_3.jpg", "training/796/images/054_3.jpg", "training/796/images/055_3.jpg", 
"training/796/images/056_3.jpg", "training/796/images/057_3.jpg", "training/796/images/058_3.jpg", "training/796/images/059_3.jpg", "training/796/images/060_3.jpg", "training/796/images/061_3.jpg", "training/796/images/062_3.jpg", "training/796/images/063_3.jpg", "training/796/images/064_3.jpg", "training/796/images/065_3.jpg", "training/796/images/066_3.jpg", "training/796/images/067_3.jpg", "training/796/images/068_3.jpg", "training/796/images/069_3.jpg", "training/796/images/070_3.jpg", "training/796/images/071_3.jpg", "training/796/images/072_3.jpg", "training/796/images/073_3.jpg", "training/796/images/074_3.jpg", "training/796/images/075_3.jpg", "training/796/images/076_3.jpg", "training/796/images/077_3.jpg", "training/796/images/078_3.jpg", "training/796/images/079_3.jpg", "training/796/images/080_3.jpg", "training/796/images/081_3.jpg", "training/796/images/082_3.jpg", "training/796/images/083_3.jpg", "training/796/images/084_3.jpg", "training/796/images/085_3.jpg", "training/796/images/086_3.jpg", "training/796/images/087_3.jpg", "training/796/images/088_3.jpg", "training/796/images/089_3.jpg", "training/796/images/090_3.jpg", "training/796/images/091_3.jpg", "training/796/images/092_3.jpg", "training/796/images/093_3.jpg", "training/796/images/094_3.jpg", "training/796/images/095_3.jpg", "training/796/images/096_3.jpg", "training/796/images/097_3.jpg", "training/796/images/098_3.jpg", "training/796/images/099_3.jpg", "training/796/images/100_3.jpg", "training/796/images/101_3.jpg", "training/796/images/102_3.jpg", "training/796/images/103_3.jpg", "training/796/images/104_3.jpg", "training/796/images/105_3.jpg", "training/796/images/106_3.jpg", "training/796/images/107_3.jpg", "training/796/images/108_3.jpg", "training/796/images/109_3.jpg", "training/796/images/110_3.jpg", "training/796/images/111_3.jpg", "training/796/images/112_3.jpg", "training/796/images/113_3.jpg", "training/796/images/114_3.jpg", "training/796/images/115_3.jpg", 
"training/796/images/116_3.jpg", "training/796/images/117_3.jpg", "training/796/images/118_3.jpg", "training/796/images/119_3.jpg", "training/796/images/120_3.jpg", "training/796/images/121_3.jpg", "training/796/images/122_3.jpg", "training/796/images/123_3.jpg", "training/796/images/124_3.jpg", "training/796/images/125_3.jpg", "training/796/images/126_3.jpg", "training/796/images/127_3.jpg", "training/796/images/128_3.jpg", "training/796/images/129_3.jpg", "training/796/images/130_3.jpg", "training/796/images/131_3.jpg", "training/796/images/132_3.jpg", "training/796/images/133_3.jpg", "training/796/images/134_3.jpg", "training/796/images/135_3.jpg", "training/796/images/136_3.jpg", "training/796/images/137_3.jpg", "training/796/images/138_3.jpg", "training/796/images/139_3.jpg", "training/796/images/140_3.jpg", "training/796/images/141_3.jpg", "training/796/images/142_3.jpg", "training/796/images/143_3.jpg", "training/796/images/144_3.jpg", "training/796/images/145_3.jpg", "training/796/images/146_3.jpg", "training/796/images/147_3.jpg", "training/796/images/148_3.jpg", "training/796/images/149_3.jpg", "training/796/images/150_3.jpg", "training/796/images/151_3.jpg", "training/796/images/152_3.jpg", "training/796/images/153_3.jpg", "training/796/images/154_3.jpg", "training/796/images/155_3.jpg", "training/796/images/156_3.jpg", "training/796/images/157_3.jpg", "training/796/images/158_3.jpg", "training/796/images/159_3.jpg", "training/796/images/160_3.jpg", "training/796/images/161_3.jpg", "training/796/images/162_3.jpg", "training/796/images/163_3.jpg", "training/796/images/164_3.jpg", "training/796/images/165_3.jpg", "training/796/images/166_3.jpg", "training/796/images/167_3.jpg", "training/796/images/168_3.jpg", "training/796/images/169_3.jpg", "training/796/images/170_3.jpg", "training/796/images/171_3.jpg", "training/796/images/172_3.jpg", "training/796/images/173_3.jpg", "training/796/images/174_3.jpg", "training/796/images/175_3.jpg", 
"training/796/images/176_3.jpg", "training/796/images/177_3.jpg", "training/796/images/178_3.jpg", "training/796/images/179_3.jpg", "training/796/images/180_3.jpg", "training/796/images/181_3.jpg", "training/796/images/182_3.jpg", "training/796/images/183_3.jpg", "training/796/images/184_3.jpg", "training/796/images/185_3.jpg", "training/796/images/186_3.jpg", "training/796/images/187_3.jpg", "training/796/images/188_3.jpg", "training/796/images/189_3.jpg", "training/796/images/190_3.jpg", "training/796/images/191_3.jpg", "training/796/images/192_3.jpg", "training/796/images/193_3.jpg", "training/796/images/194_3.jpg", "training/796/images/195_3.jpg", "training/796/images/196_3.jpg", "training/796/images/197_3.jpg"], "4": ["training/796/images/000_4.jpg", "training/796/images/001_4.jpg", "training/796/images/002_4.jpg", "training/796/images/003_4.jpg", "training/796/images/004_4.jpg", "training/796/images/005_4.jpg", "training/796/images/006_4.jpg", "training/796/images/007_4.jpg", "training/796/images/008_4.jpg", "training/796/images/009_4.jpg", "training/796/images/010_4.jpg", "training/796/images/011_4.jpg", "training/796/images/012_4.jpg", "training/796/images/013_4.jpg", "training/796/images/014_4.jpg", "training/796/images/015_4.jpg", "training/796/images/016_4.jpg", "training/796/images/017_4.jpg", "training/796/images/018_4.jpg", "training/796/images/019_4.jpg", "training/796/images/020_4.jpg", "training/796/images/021_4.jpg", "training/796/images/022_4.jpg", "training/796/images/023_4.jpg", "training/796/images/024_4.jpg", "training/796/images/025_4.jpg", "training/796/images/026_4.jpg", "training/796/images/027_4.jpg", "training/796/images/028_4.jpg", "training/796/images/029_4.jpg", "training/796/images/030_4.jpg", "training/796/images/031_4.jpg", "training/796/images/032_4.jpg", "training/796/images/033_4.jpg", "training/796/images/034_4.jpg", "training/796/images/035_4.jpg", "training/796/images/036_4.jpg", "training/796/images/037_4.jpg", 
"training/796/images/038_4.jpg", "training/796/images/039_4.jpg", "training/796/images/040_4.jpg", "training/796/images/041_4.jpg", "training/796/images/042_4.jpg", "training/796/images/043_4.jpg", "training/796/images/044_4.jpg", "training/796/images/045_4.jpg", "training/796/images/046_4.jpg", "training/796/images/047_4.jpg", "training/796/images/048_4.jpg", "training/796/images/049_4.jpg", "training/796/images/050_4.jpg", "training/796/images/051_4.jpg", "training/796/images/052_4.jpg", "training/796/images/053_4.jpg", "training/796/images/054_4.jpg", "training/796/images/055_4.jpg", "training/796/images/056_4.jpg", "training/796/images/057_4.jpg", "training/796/images/058_4.jpg", "training/796/images/059_4.jpg", "training/796/images/060_4.jpg", "training/796/images/061_4.jpg", "training/796/images/062_4.jpg", "training/796/images/063_4.jpg", "training/796/images/064_4.jpg", "training/796/images/065_4.jpg", "training/796/images/066_4.jpg", "training/796/images/067_4.jpg", "training/796/images/068_4.jpg", "training/796/images/069_4.jpg", "training/796/images/070_4.jpg", "training/796/images/071_4.jpg", "training/796/images/072_4.jpg", "training/796/images/073_4.jpg", "training/796/images/074_4.jpg", "training/796/images/075_4.jpg", "training/796/images/076_4.jpg", "training/796/images/077_4.jpg", "training/796/images/078_4.jpg", "training/796/images/079_4.jpg", "training/796/images/080_4.jpg", "training/796/images/081_4.jpg", "training/796/images/082_4.jpg", "training/796/images/083_4.jpg", "training/796/images/084_4.jpg", "training/796/images/085_4.jpg", "training/796/images/086_4.jpg", "training/796/images/087_4.jpg", "training/796/images/088_4.jpg", "training/796/images/089_4.jpg", "training/796/images/090_4.jpg", "training/796/images/091_4.jpg", "training/796/images/092_4.jpg", "training/796/images/093_4.jpg", "training/796/images/094_4.jpg", "training/796/images/095_4.jpg", "training/796/images/096_4.jpg", "training/796/images/097_4.jpg", 
"training/796/images/098_4.jpg", "training/796/images/099_4.jpg", "training/796/images/100_4.jpg", "training/796/images/101_4.jpg", "training/796/images/102_4.jpg", "training/796/images/103_4.jpg", "training/796/images/104_4.jpg", "training/796/images/105_4.jpg", "training/796/images/106_4.jpg", "training/796/images/107_4.jpg", "training/796/images/108_4.jpg", "training/796/images/109_4.jpg", "training/796/images/110_4.jpg", "training/796/images/111_4.jpg", "training/796/images/112_4.jpg", "training/796/images/113_4.jpg", "training/796/images/114_4.jpg", "training/796/images/115_4.jpg", "training/796/images/116_4.jpg", "training/796/images/117_4.jpg", "training/796/images/118_4.jpg", "training/796/images/119_4.jpg", "training/796/images/120_4.jpg", "training/796/images/121_4.jpg", "training/796/images/122_4.jpg", "training/796/images/123_4.jpg", "training/796/images/124_4.jpg", "training/796/images/125_4.jpg", "training/796/images/126_4.jpg", "training/796/images/127_4.jpg", "training/796/images/128_4.jpg", "training/796/images/129_4.jpg", "training/796/images/130_4.jpg", "training/796/images/131_4.jpg", "training/796/images/132_4.jpg", "training/796/images/133_4.jpg", "training/796/images/134_4.jpg", "training/796/images/135_4.jpg", "training/796/images/136_4.jpg", "training/796/images/137_4.jpg", "training/796/images/138_4.jpg", "training/796/images/139_4.jpg", "training/796/images/140_4.jpg", "training/796/images/141_4.jpg", "training/796/images/142_4.jpg", "training/796/images/143_4.jpg", "training/796/images/144_4.jpg", "training/796/images/145_4.jpg", "training/796/images/146_4.jpg", "training/796/images/147_4.jpg", "training/796/images/148_4.jpg", "training/796/images/149_4.jpg", "training/796/images/150_4.jpg", "training/796/images/151_4.jpg", "training/796/images/152_4.jpg", "training/796/images/153_4.jpg", "training/796/images/154_4.jpg", "training/796/images/155_4.jpg", "training/796/images/156_4.jpg", "training/796/images/157_4.jpg", 
"training/796/images/158_4.jpg", "training/796/images/159_4.jpg", "training/796/images/160_4.jpg", "training/796/images/161_4.jpg", "training/796/images/162_4.jpg", "training/796/images/163_4.jpg", "training/796/images/164_4.jpg", "training/796/images/165_4.jpg", "training/796/images/166_4.jpg", "training/796/images/167_4.jpg", "training/796/images/168_4.jpg", "training/796/images/169_4.jpg", "training/796/images/170_4.jpg", "training/796/images/171_4.jpg", "training/796/images/172_4.jpg", "training/796/images/173_4.jpg", "training/796/images/174_4.jpg", "training/796/images/175_4.jpg", "training/796/images/176_4.jpg", "training/796/images/177_4.jpg", "training/796/images/178_4.jpg", "training/796/images/179_4.jpg", "training/796/images/180_4.jpg", "training/796/images/181_4.jpg", "training/796/images/182_4.jpg", "training/796/images/183_4.jpg", "training/796/images/184_4.jpg", "training/796/images/185_4.jpg", "training/796/images/186_4.jpg", "training/796/images/187_4.jpg", "training/796/images/188_4.jpg", "training/796/images/189_4.jpg", "training/796/images/190_4.jpg", "training/796/images/191_4.jpg", "training/796/images/192_4.jpg", "training/796/images/193_4.jpg", "training/796/images/194_4.jpg", "training/796/images/195_4.jpg", "training/796/images/196_4.jpg", "training/796/images/197_4.jpg"]}, "fps": 10}
================================================
FILE: docs/example_data/waymo_train.txt
================================================
annotations/waymo/training/segment-10231929575853664160_1160_000_1180_000_with_camera_labels.json
annotations/waymo/training/segment-10391312872392849784_4099_400_4119_400_with_camera_labels.json
annotations/waymo/training/segment-12339284075576056695_1920_000_1940_000_with_camera_labels.json
annotations/waymo/training/segment-14810689888487451189_720_000_740_000_with_camera_labels.json
annotations/waymo/training/segment-7670103006580549715_360_000_380_000_with_camera_labels.json
annotations/waymo/training/segment-8822503619482926605_1080_000_1100_000_with_camera_labels.json
annotations/waymo/training/segment-9907794657177651763_1126_570_1146_570_with_camera_labels.json
annotations/waymo/training/segment-990914685337955114_980_000_1000_000_with_camera_labels.json
================================================
FILE: engine_storm.py
================================================
import datetime
import logging
import os
import numpy as np
import torch
import torch.nn.functional as F
from skimage.metrics import structural_similarity as ssim
from tqdm import tqdm
import storm.utils.distributed as distributed
from storm.dataset.constants import MEAN, STD
from storm.dataset.data_utils import prepare_inputs_and_targets
from storm.utils.losses import compute_scene_flow_metrics
from storm.visualization.video_maker import make_video
logger = logging.getLogger("STORM")
@torch.no_grad()
def visualize(args, model, dset_train, step, train_vis_id, device, dset_val=None, val_vis_id=None):
    """Render one qualitative video per available split (train, and val if given).

    Args:
        args: Namespace providing ``video_dir`` for output files.
        model: Model to visualize; switched to eval mode here.
        dset_train: Training dataset to sample from.
        step: Current training step, embedded in the output filename.
        train_vis_id: Index of the train sample to render (advanced by 1 in the return).
        device: Device passed through to ``make_video``.
        dset_val: Optional validation dataset.
        val_vis_id: Optional index of the val sample to render.

    Returns:
        Tuple of (train_vis_id + 1, val_vis_id + 1 or None) so the caller can
        rotate through samples across invocations.
    """
    model.eval()
    global_rank = distributed.get_global_rank()
    # Pair each split name with its vis id/dataset directly. The previous
    # version advanced a `split` variable at the loop tail, which the
    # `continue` below skipped — so when only the val entry was rendered, its
    # video was mislabeled "-train".
    for split, vis_id, dataset in zip(
        ("train", "val"), (train_vis_id, val_vis_id), (dset_train, dset_val)
    ):
        if vis_id is None or dataset is None:  # sometimes there is no validation set
            continue
        # Spread samples across ranks so each rank renders a distinct scene.
        sample_id = global_rank * 80 + vis_id
        out_pth = f"{args.video_dir}/step{step}-rank{global_rank}-sample{sample_id}-{split}.mp4"
        logger.info(f"saving video to {out_pth}")
        make_video(
            dataset,
            model,
            device,
            output_filename=out_pth,
            scene_id=sample_id,
            skip_plot_gt_depth_and_flow=False,
        )
        logger.info(f"saved video to {out_pth}")
    torch.cuda.empty_cache()
    return train_vis_id + 1, (val_vis_id + 1) if val_vis_id is not None else None
@torch.no_grad()
def evaluate(dataloader, model, args, name_str=None):
    """Evaluate reconstruction quality (PSNR / SSIM / depth RMSE) on held-out frames.

    Metrics are accumulated as running sums, all-reduced across processes when
    distributed training is enabled, and written to a text file by the main
    process.

    Args:
        dataloader: Yields data dicts consumed by ``prepare_inputs_and_targets``.
        model: Model under evaluation; switched to eval mode here.
        args: Namespace providing ``log_dir`` for the results directory.
        name_str: Optional tag for the results file; defaults to a timestamp.

    Returns:
        Dict of averaged metrics on the main process, ``None`` elsewhere.
    """
    torch.cuda.empty_cache()
    model.eval()
    device = next(model.parameters()).device
    # De-normalization constants: images are stored normalized with MEAN/STD.
    mean = torch.tensor(MEAN).to(device)
    std = torch.tensor(STD).to(device)
    eval_result_dir = os.path.join(args.log_dir, "eval_results")
    os.makedirs(eval_result_dir, exist_ok=True)
    logger.info(f"Saving evaluation results to {eval_result_dir}")
    # use yr-mo-dy-hr-min
    if name_str is None:
        name_str = datetime.datetime.now().strftime("%y-%m-%d-%H-%M")

    def get_numpy(tensor):
        # skimage's SSIM wants channel-last numpy arrays on the CPU.
        return tensor.squeeze().detach().cpu().numpy()

    # Initialize running sums and counts
    total_samples, total_dynamic_samples, total_valid_dynamic_depth_samples = 0, 0, 0
    total_psnr, total_ssim, total_depth_rmse = 0.0, 0.0, 0.0
    total_occupied_psnr, total_occupied_ssim = 0.0, 0.0
    total_dynamic_psnr, total_dynamic_ssim, total_dynamic_rmse = 0.0, 0.0, 0.0
    printed = False
    pbar = tqdm(dataloader, desc="Evaluating")
    for data_dict in pbar:
        input_dict, target_dict = prepare_inputs_and_targets(data_dict, device)
        # Re-base frame indices so context frames start at 0.
        input_indices = input_dict["context_frame_idx"][0].cpu().numpy().tolist()
        input_indice_start = input_indices[0]
        input_indices = [idx - input_indice_start for idx in input_indices]
        # target indices include all frames between t+0 to t+19, including the input frames
        target_indices = target_dict["target_frame_idx"][0].cpu().numpy().tolist()
        # remove the input frames from target indices to get test indices
        target_indices = [idx - input_indice_start for idx in target_indices]
        test_indices = [idx for idx in target_indices if idx not in input_indices]
        if not printed:
            # Log the index split once so the evaluation protocol is visible.
            logger.info(f"Input indices: {input_indices}")
            logger.info(f"Test indices: {test_indices}")
            printed = True
        pred_dict = model(input_dict)
        # evaluate on real target images:
        # b, t, v, c, h, w
        gt_rgb = target_dict["target_image"][:, test_indices]
        # b, t, v, h, w, c
        gt_rgb = gt_rgb.permute(0, 1, 2, 4, 5, 3) * std + mean  # de-normalize to [0, 1]
        height, width = gt_rgb.shape[-3], gt_rgb.shape[-2]
        # btv, h, w, c
        gt_rgb = gt_rgb.reshape(-1, height, width, 3)
        gt_depth = target_dict["target_depth"][:, test_indices].view(-1, height, width)
        gt_sky_mask = target_dict["target_sky_masks"][:, test_indices].view(-1, height, width)
        occupied_mask = (gt_sky_mask == 0).bool()
        if "target_dynamic_masks" in target_dict:
            gt_dynamic_mask = target_dict["target_dynamic_masks"][:, test_indices]
            gt_dynamic_mask = gt_dynamic_mask.view(-1, height, width)
            dynamic_mask = gt_dynamic_mask.bool()
        else:
            # Without annotations, treat every pixel as "dynamic" so the
            # dynamic metrics degenerate to the full-image metrics.
            dynamic_mask = torch.ones_like(occupied_mask)
        valid_depth_mask = gt_depth > 0.0
        rendered_results = pred_dict["render_results"]
        pred_rgb = rendered_results[rendered_results["rgb_key"]][:, test_indices] * std + mean
        pred_rgb = pred_rgb.reshape(-1, height, width, 3).detach()
        pred_rgb = torch.clamp(pred_rgb, 0, 1)
        # Prefer the decoder's depth when the model provides one.
        if rendered_results["decoder_depth_key"] is None:
            pred_depth = rendered_results[rendered_results["depth_key"]][:, test_indices].view(
                -1, height, width
            )
        else:
            pred_depth = rendered_results[rendered_results["decoder_depth_key"]][
                :, test_indices
            ].view(-1, height, width)
        psnrs, ssim_scores, depth_rmses = [], [], []
        occupied_ssims, occupied_psnrs = [], []
        dynamic_ssims, dynamic_psnrs, dynamic_depth_rmses = [], [], []
        for i in range(len(gt_rgb)):
            ssim_score = ssim(
                get_numpy(pred_rgb[i]),
                get_numpy(gt_rgb[i]),
                data_range=1.0,
                channel_axis=-1,
            )
            ssim_scores.append(ssim_score)
            # full=True returns the per-pixel SSIM map so it can be averaged
            # over the non-sky (occupied) region only.
            occupied_ssims.append(
                ssim(
                    get_numpy(pred_rgb[i]),
                    get_numpy(gt_rgb[i]),
                    data_range=1.0,
                    channel_axis=-1,
                    full=True,
                )[1][get_numpy(occupied_mask[i])].mean()
            )
            psnrs.append(
                -10
                * torch.log10(
                    F.mse_loss(
                        pred_rgb[i],
                        gt_rgb[i],
                    )
                ).item()
            )
            occupied_psnrs.append(
                -10
                * torch.log10(
                    F.mse_loss(
                        pred_rgb[i][occupied_mask[i]],
                        gt_rgb[i][occupied_mask[i]],
                    )
                ).item()
            )
            depth_rms = torch.sqrt(
                F.mse_loss(
                    pred_depth[i][valid_depth_mask[i]],
                    gt_depth[i][valid_depth_mask[i]],
                )
            ).item()
            depth_rmses.append(depth_rms)
            # Dynamic metrics are only defined for images that contain
            # dynamic pixels.
            if dynamic_mask[i].sum() == 0:
                continue
            dynamic_ssims.append(
                ssim(
                    get_numpy(pred_rgb[i]),
                    get_numpy(gt_rgb[i]),
                    data_range=1.0,
                    channel_axis=-1,
                    full=True,
                )[1][get_numpy(dynamic_mask[i])].mean()
            )
            dynamic_psnrs.append(
                -10
                * torch.log10(
                    F.mse_loss(
                        pred_rgb[i][dynamic_mask[i]],
                        gt_rgb[i][dynamic_mask[i]],
                    )
                ).item()
            )
            total_dynamic_samples += 1
            _valid_depth_mask = dynamic_mask[i] & valid_depth_mask[i]
            if _valid_depth_mask.sum() == 0:
                continue
            dynamic_depth_rms = torch.sqrt(
                F.mse_loss(
                    pred_depth[i][_valid_depth_mask],
                    gt_depth[i][_valid_depth_mask],
                )
            ).item()
            dynamic_depth_rmses.append(dynamic_depth_rms)
            total_valid_dynamic_depth_samples += 1
        psnr_sum = np.sum(psnrs)
        ssim_sum = np.sum(ssim_scores)
        depth_rmse_sum = np.sum(depth_rmses)
        occupied_ssim_sum = np.sum(occupied_ssims)
        occupied_psnr_sum = np.sum(occupied_psnrs)
        dynamic_ssim_sum = np.sum(dynamic_ssims)
        dynamic_psnr_sum = np.sum(dynamic_psnrs)
        dynamic_depth_rmse_sum = np.sum(dynamic_depth_rmses)
        batch_size = len(gt_rgb)
        # Update running sums and counts
        total_psnr += psnr_sum
        total_ssim += ssim_sum
        total_depth_rmse += depth_rmse_sum
        total_occupied_psnr += occupied_psnr_sum
        total_occupied_ssim += occupied_ssim_sum
        total_dynamic_psnr += dynamic_psnr_sum
        total_dynamic_ssim += dynamic_ssim_sum
        total_dynamic_rmse += dynamic_depth_rmse_sum
        total_samples += batch_size
        # Fix: the dynamic-sample counters can still be 0 here (no dynamic
        # pixels seen yet), which previously raised ZeroDivisionError.
        pbar.set_postfix(
            psnr=psnr_sum / batch_size,
            ssim=ssim_sum / batch_size,
            depth_rmse=depth_rmse_sum / batch_size,
            avg_psnr=total_psnr / total_samples,
            avg_depth_rmse=total_depth_rmse / total_samples,
            avg_dynamic_psnr=total_dynamic_psnr / max(total_dynamic_samples, 1),
            avg_dynamic_depth_rmse=total_dynamic_rmse / max(total_valid_dynamic_depth_samples, 1),
        )
    # Create tensors for sums and counts
    total_psnr_tensor = torch.tensor(total_psnr, device=device)
    total_ssim_tensor = torch.tensor(total_ssim, device=device)
    total_depth_rmse_tensor = torch.tensor(total_depth_rmse, device=device)
    total_occupied_psnr_tensor = torch.tensor(total_occupied_psnr, device=device)
    total_occupied_ssim_tensor = torch.tensor(total_occupied_ssim, device=device)
    total_dynamic_psnr_tensor = torch.tensor(total_dynamic_psnr, device=device)
    total_dynamic_ssim_tensor = torch.tensor(total_dynamic_ssim, device=device)
    total_dynamic_rmse_tensor = torch.tensor(total_dynamic_rmse, device=device)
    total_samples_tensor = torch.tensor(total_samples, device=device)
    total_dynamic_samples_tensor = torch.tensor(total_dynamic_samples, device=device)
    total_valid_dynamic_depth_samples_tensor = torch.tensor(
        total_valid_dynamic_depth_samples, device=device
    )
    torch.cuda.synchronize()
    if distributed.is_enabled():
        # Aggregate sums across all processes
        torch.distributed.all_reduce(total_psnr_tensor)
        torch.distributed.all_reduce(total_ssim_tensor)
        torch.distributed.all_reduce(total_depth_rmse_tensor)
        torch.distributed.all_reduce(total_occupied_psnr_tensor)
        torch.distributed.all_reduce(total_occupied_ssim_tensor)
        torch.distributed.all_reduce(total_dynamic_psnr_tensor)
        torch.distributed.all_reduce(total_dynamic_ssim_tensor)
        torch.distributed.all_reduce(total_dynamic_rmse_tensor)
        torch.distributed.all_reduce(total_samples_tensor)
        torch.distributed.all_reduce(total_dynamic_samples_tensor)
        torch.distributed.all_reduce(total_valid_dynamic_depth_samples_tensor)
    result = None
    if distributed.is_main_process():
        # Guard every denominator: an empty dataloader or a run without any
        # dynamic pixels must not crash the final report.
        num_samples = max(total_samples_tensor.item(), 1)
        num_dynamic = max(total_dynamic_samples_tensor.item(), 1)
        num_dynamic_depth = max(total_valid_dynamic_depth_samples_tensor.item(), 1)
        avg_psnr = total_psnr_tensor.item() / num_samples
        avg_ssim = total_ssim_tensor.item() / num_samples
        avg_depth_rmse = total_depth_rmse_tensor.item() / num_samples
        avg_occupied_psnr = total_occupied_psnr_tensor.item() / num_samples
        avg_occupied_ssim = total_occupied_ssim_tensor.item() / num_samples
        avg_dynamic_psnr = total_dynamic_psnr_tensor.item() / num_dynamic
        avg_dynamic_ssim = total_dynamic_ssim_tensor.item() / num_dynamic
        avg_dynamic_rmse = total_dynamic_rmse_tensor.item() / num_dynamic_depth
        with open(os.path.join(eval_result_dir, f"eval_{name_str}.txt"), "w") as f:
            f.write(f"Average PSNR: {avg_psnr:.4f}\n")
            f.write(f"Average SSIM: {avg_ssim:.4f}\n")
            f.write(f"Average Depth RMSE: {avg_depth_rmse:.4f}\n")
            f.write(f"Average Occupied PSNR: {avg_occupied_psnr:.4f}\n")
            f.write(f"Average Occupied SSIM: {avg_occupied_ssim:.4f}\n")
            f.write(f"Average Dynamic PSNR: {avg_dynamic_psnr:.4f}\n")
            f.write(f"Average Dynamic SSIM: {avg_dynamic_ssim:.4f}\n")
            f.write(f"Average Dynamic Depth RMSE: {avg_dynamic_rmse:.4f}\n")
        logger.info("Evaluation results saved.")
        logger.info(f"Evaluated on {total_samples_tensor.item()} samples.")
        logger.info(
            f"Average PSNR: {avg_psnr:.4f}, Average SSIM: {avg_ssim:.4f}, Average Depth RMSE: {avg_depth_rmse:.4f}"
        )
        logger.info(
            f"Average Occupied PSNR: {avg_occupied_psnr:.4f}, Average Occupied SSIM: {avg_occupied_ssim:.4f}"
        )
        logger.info(
            f"Average Dynamic PSNR: {avg_dynamic_psnr:.4f}, Average Dynamic SSIM: {avg_dynamic_ssim:.4f}, Average Dynamic Depth RMSE: {avg_dynamic_rmse:.4f}"
        )
        result = {
            "psnr": avg_psnr,
            "ssim": avg_ssim,
            "depth_rmse": avg_depth_rmse,
            "occupied_psnr": avg_occupied_psnr,
            "occupied_ssim": avg_occupied_ssim,
            "dynamic_psnr": avg_dynamic_psnr,
            "dynamic_ssim": avg_dynamic_ssim,
            "dynamic_depth_rmse": avg_dynamic_rmse,
        }
    torch.cuda.empty_cache()
    return result
@torch.no_grad()
def evaluate_flow(dataloader, model, args, name_str=None):
    """Evaluate 3D scene-flow predictions (EPE, accuracy, angle error, RMSE).

    Flow evaluation only runs when the model renders flow (``rendered_flow``),
    the decoder is "dummy", flow loading is enabled, and ground-truth flow is
    present in the targets. Results are all-reduced across processes and
    written to a text file by the main process.

    Args:
        dataloader: Yields data dicts consumed by ``prepare_inputs_and_targets``.
        model: Model under evaluation; switched to eval mode here.
        args: Namespace providing ``log_dir``, ``load_ground``, ``decoder_type``,
            and ``load_flow``.
        name_str: Optional tag for the results file; defaults to a timestamp.

    Returns:
        Dict of averaged flow metrics on the main process when flow was
        evaluated on at least one sample, ``None`` otherwise.
    """
    torch.cuda.empty_cache()
    model.eval()
    device = next(model.parameters()).device
    eval_result_dir = os.path.join(args.log_dir, "eval_results")
    os.makedirs(eval_result_dir, exist_ok=True)
    logger.info(f"Saving evaluation results to {eval_result_dir}")
    # use yr-mo-dy-hr-min
    if name_str is None:
        name_str = datetime.datetime.now().strftime("%y-%m-%d-%H-%M")
    (
        total_flow_epes,
        total_flow_accs_strict,
        total_flow_accs_relax,
        total_flow_angles,
        total_flow_rmse,
        total_numb_flow_samples,
    ) = (0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
    # Fix: initialize before the loop so the post-loop `if eval_flow:` cannot
    # raise NameError when the dataloader is empty.
    eval_flow = False
    pbar = tqdm(dataloader, desc="Evaluating")
    for data_dict in pbar:
        input_dict, target_dict = prepare_inputs_and_targets(data_dict, device)
        pred_dict = model(input_dict)
        # evaluate on real target images:
        # b, t, v, c, h, w
        b, t, v, height, width = target_dict["target_depth"].shape
        gt_depth = target_dict["target_depth"].view(b * t, -1, height, width)
        num_imgs = gt_depth.shape[0]
        valid_depth_mask = gt_depth > 0.0
        rendered_results = pred_dict["render_results"]
        if args.load_ground:
            gt_ground_mask = target_dict["target_ground_masks"].view(b * t, -1, height, width)
            gt_ground_mask = gt_ground_mask.bool()
        num_valid_samples = 0
        eval_flow = (
            "rendered_flow" in rendered_results
            and args.decoder_type == "dummy"
            and args.load_flow
            and "target_flow" in target_dict
        )
        if eval_flow:
            gt_flow = target_dict["target_flow"].view(b * t, -1, height, width, 3)
            pred_flow = rendered_results["rendered_flow"].view(b * t, -1, height, width, 3)
            flow_epes, flow_accs_strict, flow_accs_relax, flow_angles = [], [], [], []
            flow_rmse = []
            # Hoisted out of the per-image loop: the condition does not depend
            # on `i`, so it is computed once per batch (identical behavior).
            # NOTE(review): this tests the whole batch rather than gt_flow[i] —
            # presumably intentional (skip batches with no motion); confirm.
            batch_has_motion = torch.max(gt_flow.norm(dim=-1)) > 1.0
            for i in range(num_imgs):
                if batch_has_motion:
                    # Optionally exclude ground points from the flow metrics.
                    if args.load_ground:
                        non_ground_gt_flow = gt_flow[i][~gt_ground_mask[i] & valid_depth_mask[i]]
                        non_ground_pred_flow = pred_flow[i][
                            ~gt_ground_mask[i] & valid_depth_mask[i]
                        ]
                    else:
                        non_ground_gt_flow = gt_flow[i][valid_depth_mask[i]]
                        non_ground_pred_flow = pred_flow[i][valid_depth_mask[i]]
                    flow_metrics = compute_scene_flow_metrics(
                        non_ground_pred_flow, non_ground_gt_flow
                    )
                    flow_epes.append(flow_metrics["EPE3D"])
                    flow_accs_strict.append(flow_metrics["acc3d_strict"] * 100)
                    flow_accs_relax.append(flow_metrics["acc3d_relax"] * 100)
                    flow_angles.append(flow_metrics["angle_error"])
                    # RMSE is computed over all valid-depth pixels, including
                    # ground, unlike the metrics above.
                    flow_rmse.append(
                        torch.sqrt(
                            F.mse_loss(
                                pred_flow[i][valid_depth_mask[i]],
                                gt_flow[i][valid_depth_mask[i]],
                            )
                        ).item()
                    )
                    num_valid_samples += 1
            flow_epe_sum = np.sum(flow_epes)
            flow_acc_strict_sum = np.sum(flow_accs_strict)
            flow_acc_relax_sum = np.sum(flow_accs_relax)
            flow_angle_sum = np.sum(flow_angles)
            flow_rmse_sum = np.sum(flow_rmse)
            valid_flow_samples = num_valid_samples
            # Update running sums and counts
            total_flow_epes += flow_epe_sum
            total_flow_accs_strict += flow_acc_strict_sum
            total_flow_accs_relax += flow_acc_relax_sum
            total_flow_angles += flow_angle_sum
            total_flow_rmse += flow_rmse_sum
            total_numb_flow_samples += valid_flow_samples
            # Fix: guard against division by zero when no valid flow samples
            # have been seen yet.
            denom = max(total_numb_flow_samples, 1)
            pbar.set_postfix(
                avg_flow_epe=total_flow_epes / denom,
                avg_flow_acc_relax=total_flow_accs_relax / denom,
                avg_flow_acc_strict=total_flow_accs_strict / denom,
                avg_flow_angle=total_flow_angles / denom,
                avg_flow_rmse=total_flow_rmse / denom,
            )
    # Create tensors for sums and counts
    result = None
    if eval_flow:
        total_flow_epes_tensor = torch.tensor(total_flow_epes, device=device)
        total_flow_accs_strict_tensor = torch.tensor(total_flow_accs_strict, device=device)
        total_flow_accs_relax_tensor = torch.tensor(total_flow_accs_relax, device=device)
        total_flow_angles_tensor = torch.tensor(total_flow_angles, device=device)
        total_flow_rmse_tensor = torch.tensor(total_flow_rmse, device=device)
        total_numb_flow_samples_tensor = torch.tensor(total_numb_flow_samples, device=device)
        torch.cuda.synchronize()
        if distributed.is_enabled():
            # Aggregate sums across all processes
            torch.distributed.all_reduce(total_flow_epes_tensor)
            torch.distributed.all_reduce(total_flow_accs_strict_tensor)
            torch.distributed.all_reduce(total_flow_accs_relax_tensor)
            torch.distributed.all_reduce(total_flow_angles_tensor)
            torch.distributed.all_reduce(total_flow_rmse_tensor)
            torch.distributed.all_reduce(total_numb_flow_samples_tensor)
        if distributed.is_main_process() and total_numb_flow_samples_tensor.item() > 0:
            avg_flow_epe = total_flow_epes_tensor.item() / total_numb_flow_samples_tensor.item()
            avg_flow_acc_strict = (
                total_flow_accs_strict_tensor.item() / total_numb_flow_samples_tensor.item()
            )
            avg_flow_acc_relax = (
                total_flow_accs_relax_tensor.item() / total_numb_flow_samples_tensor.item()
            )
            avg_flow_angle = total_flow_angles_tensor.item() / total_numb_flow_samples_tensor.item()
            avg_flow_rmse = total_flow_rmse_tensor.item() / total_numb_flow_samples_tensor.item()
            with open(os.path.join(eval_result_dir, f"eval_{name_str}_flow.txt"), "w") as f:
                f.write(f"Average Flow EPE: {avg_flow_epe:.4f}\n")
                f.write(f"Average Flow Acc Strict: {avg_flow_acc_strict:.4f}\n")
                f.write(f"Average Flow Acc Relax: {avg_flow_acc_relax:.4f}\n")
                f.write(f"Average Flow Angle: {avg_flow_angle:.4f}\n")
                f.write(f"Average Flow RMSE: {avg_flow_rmse:.4f}\n")
            logger.info("Evaluation results saved.")
            logger.info(f"Evaluated on {total_numb_flow_samples_tensor.item()} samples.")
            logger.info(
                f"Average Flow EPE: {avg_flow_epe:.4f}, Average Flow Acc Strict: {avg_flow_acc_strict:.4f}, Average Flow Acc Relax: {avg_flow_acc_relax:.4f}, Average Flow Angle: {avg_flow_angle:.4f}"
            )
            logger.info(f"Average Flow RMSE: {avg_flow_rmse:.4f}")
            result = {
                "flow_epe": avg_flow_epe,
                "flow_acc_strict": avg_flow_acc_strict,
                "flow_acc_relax": avg_flow_acc_relax,
                "flow_angle": avg_flow_angle,
                "flow_rmse": avg_flow_rmse,
            }
    torch.cuda.empty_cache()
    return result
================================================
FILE: extract_sky.py
================================================
import argparse
import os
from typing import Optional, Tuple
import numpy as np
import torch
import torch.utils.data
import torchvision.transforms as transforms
from PIL import Image
from tqdm import tqdm
from torchvision.datasets.folder import default_loader
from storm.dataset.constants import IMGNET_MEAN, IMGNET_STD
from third_party.depth_anything_v2.dpt import DepthAnythingV2
class ListDataset:
    """Dataset backed by a text file of image paths, loaded lazily per item."""

    def __init__(
        self,
        data_list: str,
        transform: Optional[transforms.Compose] = None,
        return_path: bool = False,
    ):
        """
        Initialize the dataset.

        Args:
            data_list: Path to a text file with one image path per line.
            transform: Optional transforms applied to each loaded image.
            return_path: If True, ``__getitem__`` also yields the image path.
        """
        self.transform = transform
        self.return_path = return_path
        self.loader = default_loader
        self.samples = self._load_samples(data_list)

    def _load_samples(self, data_list: str) -> list:
        """Read the list file and return one stripped path per line."""
        with open(data_list, "r") as f:
            return [line.strip() for line in f]

    def __getitem__(self, index: int) -> Tuple[torch.Tensor, str]:
        """Load the image at *index*, falling through to the next one on failure."""
        path = self.samples[index]
        try:
            image = self.loader(path)
        except Exception as e:
            # Best-effort: report the broken file and try the next sample.
            print(f"Error loading '{path}': {e}")
            return self.__getitem__((index + 1) % len(self.samples))
        if self.transform is not None:
            image = self.transform(image)
        if self.return_path:
            return image, path
        return image

    def __len__(self) -> int:
        return len(self.samples)
def get_args_parser() -> argparse.ArgumentParser:
    """Build the command-line parser for the sky-mask extraction script."""
    # Flag -> add_argument keyword arguments; registered in one pass below.
    option_specs = {
        "--eval_batch_size": {"type": int, "default": 16},
        "--num_workers": {"type": int, "default": 10},
        "--file_list": {"type": str, "default": "file_list.txt"},
        "--depth_ckpt": {"type": str, "default": "ckpts/depth_anything_v2_vitl.pth"},
    }
    parser = argparse.ArgumentParser("extract sky masks", add_help=False)
    for flag, spec in option_specs.items():
        parser.add_argument(flag, **spec)
    return parser
def setup_model(device: torch.device, depth_ckpt: str) -> DepthAnythingV2:
    """Load a frozen DepthAnythingV2 (ViT-L) in eval mode on *device*.

    Args:
        device: Target device for the model.
        depth_ckpt: Path to the checkpoint file with the model weights.

    Returns:
        The frozen, eval-mode depth model.
    """
    model = DepthAnythingV2(encoder="vitl", features=256, out_channels=[256, 512, 1024, 1024])
    state_dict = torch.load(depth_ckpt, map_location="cpu")
    model.load_state_dict(state_dict)
    model = model.eval().to(device)
    # Inference only: disable gradients for every parameter in one call.
    model.requires_grad_(False)
    return model
@torch.no_grad()
def get_sky_mask(dataloader: torch.utils.data.DataLoader, depthv2: DepthAnythingV2) -> None:
    """
    Extract sky masks from images using depth estimation.

    A pixel is labeled sky where the model predicts exactly zero depth.
    Masks are written as 8-bit PNGs next to the source images (path scheme
    shown below must be adapted by the user — see the deliberate raise).

    Args:
        dataloader: DataLoader yielding (image batch, path batch) pairs
            (i.e. a dataset constructed with return_path=True)
        depthv2: Depth estimation model
    """
    torch.cuda.empty_cache()
    # Run on whatever device the model already lives on.
    device = next(depthv2.parameters()).device
    pbar = tqdm(dataloader, desc=f"Extracting sky masks")
    for samples, paths in pbar:
        samples = samples.to(device)
        # predict depth using the model (bf16 autocast for speed/memory)
        with torch.autocast(device.type, dtype=torch.bfloat16):
            outputs = depthv2(samples)
        # identify sky regions (depth = 0)
        sky_masks = (outputs == 0).float()
        sky_masks = sky_masks.cpu().numpy()
        # save masks
        for i in range(len(sky_masks)):
            mask = sky_masks[i]
            # scale {0, 1} to {0, 255} for an 8-bit grayscale PNG
            mask = (mask * 255).astype(np.uint8)
            # Intentional guard: the output-path scheme below is only an
            # example; the user must choose their own and remove this line.
            # Everything after it is unreachable until then.
            raise NotImplementedError("please specify the target path by yourself and comment this line")
            # e.g:
            tgt_pth = paths[i].replace("images", "sky_masks_518")
            tgt_pth = tgt_pth.replace("jpg", "png")
            # ensure directory exists and save mask
            os.makedirs(os.path.dirname(tgt_pth), exist_ok=True)
            Image.fromarray(mask).save(tgt_pth)
def main(args: argparse.Namespace) -> None:
    """Extract sky masks for every image listed in ``args.file_list``."""
    # Prefer GPU when available.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # Resize to the model's 518x518 input and normalize with ImageNet stats.
    preprocess = transforms.Compose(
        [
            transforms.Resize([518, 518], interpolation=Image.BICUBIC, antialias=True),
            transforms.ToTensor(),
            transforms.Normalize(mean=IMGNET_MEAN, std=IMGNET_STD),
        ]
    )
    # Deterministic order, paths returned so masks can be saved next to images.
    dataset = ListDataset(data_list=args.file_list, transform=preprocess, return_path=True)
    loader = torch.utils.data.DataLoader(
        dataset,
        batch_size=args.eval_batch_size,
        num_workers=args.num_workers,
        shuffle=False,
        drop_last=False,
    )
    depth_model = setup_model(device, args.depth_ckpt)
    get_sky_mask(loader, depth_model)
if __name__ == "__main__":
    # Script entry point: parse CLI args and run sky-mask extraction.
    args = get_args_parser()
    args = args.parse_args()
    main(args)
================================================
FILE: inference.py
================================================
import argparse
import copy
import datetime
import json
import logging
import math
import os
import time
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.utils.data
import storm.models as models
import storm.utils.misc as misc
from storm.dataset.constants import DATASET_DICT
from storm.dataset.data_utils import to_batch_tensor
from storm.dataset.storm_dataset import SingleSequenceDataset
from storm.utils.logging import setup_logging
from storm.visualization.video_maker import make_video
# Enable TF32 for matmul and cuDNN kernels (faster on supported GPUs at
# slightly reduced float32 precision).
torch.backends.cuda.matmul.allow_tf32 = True
torch.backends.cudnn.allow_tf32 = True
def get_args_parser():
    """Build the CLI parser for STORM inference (arg names shared with training)."""
    p = argparse.ArgumentParser("STORM training", add_help=False)
    # model
    p.add_argument("--model", type=str, default="STORM-B/8")
    p.add_argument("--num_context_timesteps", type=int, default=4)
    p.add_argument("--num_target_timesteps", type=int, default=4)
    p.add_argument("--gs_dim", type=int, default=3, help="Number of gs dimensions")
    p.add_argument("--use_sky_token", action="store_true")
    p.add_argument("--use_affine_token", action="store_true")
    p.add_argument("--use_latest_gsplat", action="store_true")
    p.add_argument(
        "--decoder_type",
        type=str,
        default="dummy",
        choices=["dummy", "conv"],
        help="STORM or LatentSTORM",
    )
    p.add_argument("--num_motion_tokens", type=int, default=16, help="Number of motion tokens")
    # checkpoints
    p.add_argument("--auto_resume", action="store_true")
    p.add_argument("--resume_from", default=None, help="resume from checkpoint")
    p.add_argument("--load_from", type=str, default=None)
    # dataset
    p.add_argument("--data_root", type=str, default="./data/STORM2", help="dataset path")
    p.add_argument("--overwrite_train_ctx_view_with", type=int, default=None)
    p.add_argument("--overwrite_train_tgt_view_with", type=int, default=None)
    p.add_argument("--overwrite_test_ctx_view_with", type=int, default=None)
    p.add_argument("--input_size", type=int, nargs=2, default=(160, 240))
    p.add_argument("--num_max_cameras", type=int, default=3)
    p.add_argument("--timespan", type=float, default=2.0)
    p.add_argument("--load_ground", action="store_true")
    p.add_argument("--load_depth", action="store_true")
    p.add_argument("--load_flow", action="store_true")
    p.add_argument("--dataset", type=str, default="waymo", choices=DATASET_DICT.keys())
    p.add_argument("--skip_sky_mask", action="store_true", help="skip sky mask loading")
    # logging
    p.add_argument("--output_dir", default="./work_dirs")
    p.add_argument("--num_vis_samples", type=int, default=1)
    # misc
    p.add_argument("--seed", type=int, default=1)
    p.add_argument("--device", default="cuda", help="device to use for training / testing")
    # wandb
    p.add_argument("--project", type=str, default="debug")
    p.add_argument("--exp_name", type=str, default=None)
    return p
def main(args):
    """Run offline STORM inference: load a checkpoint, roll one sequence out,
    and render the predictions as mp4 videos in the working directory.
    """
    global logger
    # Derive experiment name and output directories from CLI args.
    args.exp_name = args.model.replace("/", "-") if args.exp_name is None else args.exp_name
    log_dir = os.path.join(args.output_dir, args.project, args.exp_name)
    checkpoint_dir = os.path.join(log_dir, "checkpoints")
    video_dir = os.path.join(log_dir, "videos")
    args.log_dir, args.ckpt_dir, args.video_dir = log_dir, checkpoint_dir, video_dir
    device = torch.device(args.device)
    seed = args.seed
    misc.fix_random_seeds(seed)
    cudnn.benchmark = True
    # set up logging
    setup_logging(output=log_dir, level=logging.INFO)
    logger = logging.getLogger("STORM")
    logger.info(f"hostname: {os.uname().nodename}\n")
    logger.info(f"job dir: {os.path.dirname(os.path.realpath(__file__))}")
    logger.info(f"Logging to {log_dir}")
    logger.info(json.dumps(args.__dict__, indent=4, sort_keys=True))
    # NOTE(review): assumes setup_logging() created log_dir -- confirm, otherwise
    # this open() fails on a fresh run.
    with open(os.path.join(log_dir, "args.json"), "w") as f:
        json.dump(args.__dict__, f, indent=4)
    # Resolve annotation files; nuScenes uses fixed scene lists outside data_root.
    dataset_meta = DATASET_DICT[args.dataset]
    train_annotation = dataset_meta["annotation_txt_file_train"]
    val_annotation = dataset_meta["annotation_txt_file_val"]
    if train_annotation is not None:
        if args.dataset == "nuscenes":
            train_annotation = f"data/dataset_scene_list/nuscenes_train.txt"
        else:
            train_annotation = f"{args.data_root}/{train_annotation}"
    if val_annotation is not None:
        if args.dataset == "nuscenes":
            val_annotation = f"data/dataset_scene_list/nuscenes_val.txt"
        else:
            val_annotation = f"{args.data_root}/{val_annotation}"
        # Disable validation when the annotation file does not exist on disk.
        if not os.path.exists(val_annotation):
            val_annotation = None
    # Context/target horizons default to the dataset meta; CLI overrides win,
    # with the *test* context override applied last (takes precedence).
    num_context_timesteps = dataset_meta["num_context_timesteps"]
    num_target_timesteps = dataset_meta["num_target_timesteps"]
    if args.overwrite_train_ctx_view_with is not None:
        num_context_timesteps = args.overwrite_train_ctx_view_with
    if args.overwrite_test_ctx_view_with is not None:
        num_context_timesteps = args.overwrite_test_ctx_view_with
    if args.overwrite_train_tgt_view_with is not None:
        num_target_timesteps = args.overwrite_train_tgt_view_with
    input_size = dataset_meta["size"]
    logger.info(f"Dataset: {args.dataset}")
    logger.info(f"annotation_txt_file_list_train: {train_annotation}")
    # Instantiate the requested STORM variant from the model registry.
    if args.model in models.STORM_models:
        model = models.STORM_models[args.model](
            img_size=args.input_size,
            gs_dim=args.gs_dim,
            decoder_type=args.decoder_type,
            use_sky_token=args.use_sky_token,
            use_affine_token=args.use_affine_token,
            num_motion_tokens=args.num_motion_tokens,
            use_latest_gsplat=args.use_latest_gsplat,
        )
    else:
        raise ValueError(f"Invalid model name: {args.model}")
    logger.info(f"Model = {str(model)}")
    n_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    logger.info(f"{args.model} Parameters: {n_params / 1e6:.2f}M ({n_params:,})")
    model.to(device)
    # Single-sequence dataset used to fetch one contiguous clip for rendering.
    dataset = SingleSequenceDataset(
        data_root=args.data_root,
        annotation_txt_file_list=train_annotation,
        target_size=input_size,
        num_context_timesteps=num_context_timesteps,
        num_target_timesteps=num_target_timesteps,
        timespan=args.timespan,
        num_max_cams=args.num_max_cameras,
        load_depth=args.load_depth,
        load_flow=args.load_flow,
    )
    logger.info(f"Dataset contains {len(dataset):,} sequences using {train_annotation}.")
    # Load weights from --load_from / --resume_from (see misc.load_model).
    misc.load_model(args, model)
    num_trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    logger.info(f"{args.model} Trainable Parameters: {num_trainable_params / 1e6:.2f}M")
    model.eval().cuda()
    logger.info(f"Preparing data... (This may take a while)")
    # Frames 0..60 of the first sequence, converted to batched tensors.
    data_dict_list = dataset.__getitem__(index=0, start_index=0, end_index=60)
    data_dict_list = to_batch_tensor(data_dict_list)
    logger.info(f"Done preparing data.")
    # Render one video per returned clip dict.
    for i in range(len(data_dict_list)):
        data_dict = data_dict_list[i]
        output_name = f"test_{i}.mp4"
        make_video(
            dataset=None,
            model=model,
            device=device,
            output_filename=output_name,
            data_dict=data_dict,
        )
        print(f"Saved video to {output_name}")
if __name__ == "__main__":
    # Script entry point: parse CLI args and run inference.
    args = get_args_parser().parse_args()
    main(args)
================================================
FILE: main_storm.py
================================================
import argparse
import copy
import datetime
import json
import logging
import math
import os
import sys
import time
import numpy as np
import timm.optim.optim_factory as optim_factory
import torch
import torch.backends.cudnn as cudnn
import torch.distributed
import torch.utils.data
# STORM imports
import storm.models as models
import storm.utils.distributed as distributed
import storm.utils.misc as misc
from engine_storm import evaluate, evaluate_flow, visualize
from storm.dataset.constants import DATASET_DICT
from storm.dataset.data_utils import prepare_inputs_and_targets
from storm.dataset.samplers import InfiniteSampler, NoPaddingDistributedSampler
from storm.dataset.storm_dataset import STORMDataset, STORMDatasetEval
from storm.utils.logging import MetricLogger, WandbLogger, setup_logging
from storm.utils.losses import compute_loss
from storm.utils.lpips_loss import RGBLpipsLoss
from storm.utils.misc import NativeScalerWithGradNormCount as NativeScaler
torch.backends.cuda.matmul.allow_tf32 = True
torch.backends.cudnn.allow_tf32 = True
cudnn.benchmark = True
def get_args_parser():
    """Build the CLI parser for STORM training."""
    p = argparse.ArgumentParser("STORM training", add_help=False)
    # model
    p.add_argument("--model", type=str, default="STORM-B/8")
    p.add_argument("--num_context_timesteps", type=int, default=4)
    p.add_argument("--num_target_timesteps", type=int, default=4)
    p.add_argument("--gs_dim", type=int, default=3, help="Number of gs dimensions")
    p.add_argument("--use_sky_token", action="store_true")
    p.add_argument("--use_affine_token", action="store_true")
    p.add_argument("--use_latest_gsplat", action="store_true")
    p.add_argument(
        "--decoder_type",
        type=str,
        default="dummy",
        choices=["dummy", "conv"],
        help="STORM or LatentSTORM",
    )
    p.add_argument("--num_motion_tokens", type=int, default=16, help="Number of motion tokens")
    # losses
    p.add_argument("--enable_depth_loss", action="store_true")
    # sky, option 1: push the sky depth to a fixed value
    p.add_argument("--enable_sky_depth_loss", action="store_true")
    p.add_argument("--sky_depth", type=float, default=300.0)
    # sky, option 2: transparent sky gaussians plus a dedicated sky token
    p.add_argument("--enable_sky_opacity_loss", action="store_true")
    p.add_argument("--sky_opacity_loss_coeff", type=float, default=0.1)
    # flow regularization
    p.add_argument("--enable_flow_reg_loss", action="store_true")
    p.add_argument("--flow_reg_coeff", type=float, default=0.005)
    # perceptual (LPIPS)
    p.add_argument("--enable_perceptual_loss", action="store_true")
    p.add_argument("--perceptual_weight", type=float, default=0.05, help="LPIPS weight")
    p.add_argument("--perceptual_loss_start_iter", type=int, default=5000)
    # optimizer / schedule
    p.add_argument("--lr", type=float, default=4e-4, help="learning rate (absolute lr)")
    p.add_argument("--blr", type=float, default=8e-4, help="base learning rate")
    p.add_argument("--min_lr", type=float, default=0.0)
    p.add_argument("--lr_sched", type=str, default="cosine", choices=["constant", "cosine"])
    p.add_argument("--warmup_iters", type=int, default=5000, help="iters to warmup LR")
    p.add_argument("--weight_decay", type=float, default=0.05)
    p.add_argument("--grad_clip", type=float, default=3.0, help="Gradient clip")
    p.add_argument("--disable_grad_checkpointing", action="store_true")
    p.add_argument("--start_iteration", type=int, default=0, help="start iteration")
    p.add_argument("--num_iterations", type=int, default=200_000, help="num of iterations")
    p.add_argument("--resume_from", default=None, help="resume from checkpoint")
    p.add_argument("--auto_resume", action="store_true")
    p.add_argument("--load_from", type=str, default=None)
    # dataset
    p.add_argument("--data_root", type=str, default="./data/STORM2", help="dataset path")
    p.add_argument("--batch_size", type=int, default=8, help="Batch size per GPU")
    p.add_argument("--eval_batch_size", type=int, default=1)
    p.add_argument("--input_size", type=int, nargs=2, default=(160, 240))
    p.add_argument("--num_max_cameras", type=int, default=3)
    p.add_argument("--timespan", type=float, default=2.0)
    p.add_argument("--load_ground", action="store_true")
    p.add_argument("--load_depth", action="store_true")
    p.add_argument("--load_flow", action="store_true")
    p.add_argument("--dataset", type=str, default="waymo", choices=DATASET_DICT.keys())
    p.add_argument("--subset_ratio", type=float, default=1.0)
    p.add_argument("--num_workers", type=int, default=16)
    p.add_argument("--skip_sky_mask", action="store_true", help="skip sky mask loading")
    # logging
    p.add_argument("--output_dir", default="./work_dirs")
    p.add_argument("--num_vis_samples", type=int, default=1)
    p.add_argument("--log_every_n_iters", type=int, default=50)
    p.add_argument("--vis_every_n_iters", type=int, default=5000)
    p.add_argument("--ckpt_every_n_iters", type=int, default=5000)
    p.add_argument("--eval_every_n_iters", type=int, default=50000)
    p.add_argument("--total_elapsed_time", type=float, default=0.0, help="total time elapsed")
    p.add_argument("--keep_n_ckpts", type=int, default=1)
    # misc
    p.add_argument("--seed", type=int, default=1)
    p.add_argument("--device", default="cuda", help="device to use for training / testing")
    p.add_argument("--visualization_only", action="store_true")
    p.add_argument("--evaluate", action="store_true")
    # wandb
    p.add_argument("--enable_wandb", action="store_true")
    p.add_argument("--project", type=str, default="debug")
    p.add_argument("--entity", type=str, default="YOUR_ENTITY")
    p.add_argument("--exp_name", type=str, default=None)
    p.add_argument("--overwrite_wandb", action="store_true")
    return p
def main(args):
    """End-to-end STORM training (single- or multi-GPU via DDP).

    Sets up logging/W&B, datasets and loaders, builds the model and optimizer,
    optionally resumes from a checkpoint, then runs an iteration-based training
    loop with periodic checkpointing, visualization, and evaluation.
    """
    # Prepare distributed training
    distributed.enable(overwrite=True)
    global logger
    args.exp_name = args.model.replace("/", "-") if args.exp_name is None else args.exp_name
    log_dir = os.path.join(args.output_dir, args.project, args.exp_name)
    checkpoint_dir = os.path.join(log_dir, "checkpoints")
    video_dir = os.path.join(log_dir, "videos")
    args.log_dir, args.ckpt_dir, args.video_dir = log_dir, checkpoint_dir, video_dir
    device = torch.device(args.device)
    world_size, global_rank = distributed.get_world_size(), distributed.get_global_rank()
    # Per-rank seed offset so each process draws a different random stream.
    seed = args.seed + global_rank
    misc.fix_random_seeds(seed)
    log_writer = None
    if global_rank == 0:
        [os.makedirs(d, exist_ok=True) for d in [log_dir, checkpoint_dir, video_dir]]
        if args.enable_wandb:
            # Reuse the last recorded W&B run id so restarts resume the same run.
            run_id_path, run_id = os.path.join(log_dir, "wandb_run_id.txt"), None
            if os.path.exists(run_id_path) and not args.overwrite_wandb:
                with open(run_id_path, "r") as f:
                    run_id = f.readlines()[-1].strip()
            log_writer = WandbLogger(args=args, resume="must", id=run_id)
            if run_id is None:
                with open(run_id_path, "a") as f:
                    f.write(log_writer.run_id + "\n")
    # set up logging
    setup_logging(output=log_dir, level=logging.INFO)
    logger = logging.getLogger("STORM")
    logger.info(f"hostname: {os.uname().nodename}\n")
    logger.info(f"job dir: {os.path.dirname(os.path.realpath(__file__))}")
    logger.info(f"Logging to {log_dir}")
    logger.info(json.dumps(args.__dict__, indent=4, sort_keys=True))
    if global_rank == 0:
        with open(os.path.join(log_dir, "args.json"), "w") as f:
            json.dump(args.__dict__, f, indent=4)
    # Resolve annotation files; nuScenes uses fixed scene lists outside data_root.
    dataset_meta = DATASET_DICT[args.dataset]
    train_annotation = dataset_meta["annotation_txt_file_train"]
    val_annotation = dataset_meta["annotation_txt_file_val"]
    if train_annotation is not None:
        if args.dataset == "nuscenes":
            train_annotation = f"data/dataset_scene_list/nuscenes_train.txt"
        else:
            train_annotation = f"{args.data_root}/{train_annotation}"
    if val_annotation is not None:
        if args.dataset == "nuscenes":
            val_annotation = f"data/dataset_scene_list/nuscenes_val.txt"
        else:
            val_annotation = f"{args.data_root}/{val_annotation}"
        # Disable validation entirely when the annotation file is missing.
        if not os.path.exists(val_annotation):
            val_annotation = None
    dataset_train = STORMDataset(
        data_root=args.data_root,
        annotation_txt_file_list=train_annotation,
        target_size=args.input_size,
        num_context_timesteps=args.num_context_timesteps,
        num_target_timesteps=args.num_target_timesteps,
        timespan=args.timespan,
        num_max_cams=args.num_max_cameras,
        load_depth=args.load_depth,
        load_flow=args.load_flow,
        skip_sky_mask=args.skip_sky_mask,
    )
    # Infinite sampler: the loop below is iteration-based, not epoch-based.
    sampler_train = InfiniteSampler(sample_count=len(dataset_train), shuffle=True, seed=seed)
    data_loader_train = torch.utils.data.DataLoader(
        dataset_train,
        sampler=sampler_train,
        batch_size=args.batch_size,
        num_workers=args.num_workers,
        pin_memory=False,
        persistent_workers=True,
        drop_last=True,
    )
    if val_annotation is not None:
        dataset_val = STORMDataset(
            data_root=args.data_root,
            annotation_txt_file_list=val_annotation,
            target_size=args.input_size,
            num_context_timesteps=args.num_context_timesteps,
            num_target_timesteps=args.num_target_timesteps,
            timespan=args.timespan,
            num_max_cams=args.num_max_cameras,
            load_depth=args.load_depth,
            load_flow=args.load_flow,
            skip_sky_mask=args.skip_sky_mask,
        )
        # Eval split with dynamic-object masks for reconstruction metrics.
        dataset_eval = STORMDatasetEval(
            data_root=args.data_root,
            annotation_txt_file_list=val_annotation,
            target_size=args.input_size,
            num_context_timesteps=args.num_context_timesteps,
            num_target_timesteps=args.num_target_timesteps,
            timespan=args.timespan,
            num_max_cams=args.num_max_cameras,
            load_depth=args.load_depth,
            load_flow=args.load_flow,
            load_dynamic_mask=True,
            load_ground_label=args.load_ground,
            skip_sky_mask=args.skip_sky_mask,
        )
        # Flow-eval split: context frames double as targets (return_context_as_target).
        dataset_eval_flow = STORMDatasetEval(
            data_root=args.data_root,
            annotation_txt_file_list=val_annotation,
            target_size=args.input_size,
            num_context_timesteps=args.num_context_timesteps,
            num_target_timesteps=args.num_target_timesteps,
            timespan=args.timespan,
            num_max_cams=args.num_max_cameras,
            load_depth=args.load_depth,
            load_flow=args.load_flow,
            load_dynamic_mask=False,
            load_ground_label=args.load_ground,
            return_context_as_target=True,
            skip_sky_mask=args.skip_sky_mask,
        )
        # No-padding sampler so eval metrics are exact (no duplicated samples).
        sampler = NoPaddingDistributedSampler(
            dataset_eval,
            num_replicas=world_size,
            rank=global_rank,
            shuffle=False,
        )
        data_loader_eval = torch.utils.data.DataLoader(
            dataset_eval,
            batch_size=args.eval_batch_size,
            num_workers=args.num_workers,
            sampler=sampler,
            pin_memory=False,
            persistent_workers=True,
            shuffle=False,
            drop_last=False,
        )
        data_loader_eval_flow = torch.utils.data.DataLoader(
            dataset_eval_flow,
            batch_size=args.eval_batch_size,
            num_workers=args.num_workers,
            sampler=sampler,
            pin_memory=False,
            persistent_workers=True,
            shuffle=False,
            drop_last=False,
        )
    else:
        dataset_val = None
        dataset_eval = None
        dataset_eval_flow = None
        data_loader_eval = None
        data_loader_eval_flow = None
    logger.info(f"Dataset: {args.dataset}, train: {train_annotation}, val: {val_annotation}")
    logger.info(f"Dataset contains {len(dataset_train):,} sequences using {train_annotation}.")
    # Instantiate the requested STORM variant from the model registry.
    if args.model in models.STORM_models:
        model = models.STORM_models[args.model](
            img_size=args.input_size,
            gs_dim=args.gs_dim,
            decoder_type=args.decoder_type,
            grad_checkpointing=not args.disable_grad_checkpointing,
            use_sky_token=args.use_sky_token,
            use_affine_token=args.use_affine_token,
            num_motion_tokens=args.num_motion_tokens,
            use_latest_gsplat=args.use_latest_gsplat,
        )
    else:
        raise ValueError(f"Invalid model name: {args.model}")
    logger.info(f"Model = {str(model)}")
    n_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    logger.info(f"{args.model} Parameters: {n_params / 1e6:.2f}M ({n_params:,})")
    model.to(device)
    model_without_ddp = model
    if distributed.is_enabled():
        model = torch.nn.parallel.DistributedDataParallel(model)
        model_without_ddp = model.module
    global_batch_size = args.batch_size * world_size
    # Linear lr scaling rule: lr = blr * global_batch_size / 256.
    if args.lr is None:  # only base_lr is specified
        args.lr = args.blr * global_batch_size / 256
    logger.info("Global batch size: %d" % global_batch_size)
    logger.info(f"Base lr: {args.lr * 256 / global_batch_size:.2e}, Actual lr: {args.lr:.2e}")
    param_groups = optim_factory.param_groups_weight_decay(model_without_ddp, args.weight_decay)
    optimizer = torch.optim.AdamW(param_groups, lr=args.lr, betas=(0.9, 0.95))
    loss_scaler = NativeScaler()
    logger.info(f"Optimizer = {optimizer}")
    logger.info(f"Loss Scaler = {loss_scaler}")
    # Load checkpoint or resume training
    logger.info(f"Original start Iteration: {args.start_iteration}")
    vis_slice_id = misc.load_model(args, model_without_ddp, optimizer, loss_scaler)
    logger.info(f"New start iteration {args.start_iteration}")
    num_trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
    logger.info(f"{args.model} Trainable Parameters: {num_trainable_params / 1e6:.2f}M")
    logger.info(f"Training with {world_size} GPUs")
    data_iter_step = args.start_iteration
    if log_writer is not None:
        log_writer.set_step(data_iter_step)
    # --evaluate: run metrics once and exit without training.
    if args.evaluate:
        eval_result = evaluate(data_loader_eval, model_without_ddp, args)
        if log_writer is not None and eval_result is not None:
            eval_result = {f"eval/{k}": v for k, v in eval_result.items()}
            log_writer.update(eval_result)
        if args.dataset == "waymo":
            if args.decoder_type != "conv":
                flow_eval_result = evaluate_flow(data_loader_eval_flow, model_without_ddp, args)
                if log_writer is not None and flow_eval_result is not None:
                    flow_eval_result = {f"eval/{k}": v for k, v in flow_eval_result.items()}
                    log_writer.update(flow_eval_result)
        logger.info("Evaluation done, exiting.")
        exit()
    # Keep the validation visualization index in range after a resume.
    valid_slice_id = copy.deepcopy(vis_slice_id)
    if dataset_val is not None and valid_slice_id >= len(dataset_val):
        valid_slice_id = 0
    # Initial qualitative visualization before training starts.
    for _ in range(args.num_vis_samples):
        vis_slice_id, valid_slice_id = visualize(
            args=args,
            model=model_without_ddp,
            dset_train=dataset_train,
            step=data_iter_step,
            train_vis_id=vis_slice_id,
            device=device,
            dset_val=dataset_val,
            val_vis_id=valid_slice_id,
        )
    if args.visualization_only:
        logger.info("Visualization done, exiting.")
        exit()
    rgb_and_lpips_loss = RGBLpipsLoss(
        perceptual_weight=args.perceptual_weight,
        enable_perceptual_loss=args.enable_perceptual_loss,
    ).to(device)
    # will turn on perceptual loss after a certain number of iterations
    rgb_and_lpips_loss.set_perceptual_loss(False)
    logger.info(f"Starting training from iteration {args.start_iteration} to {args.num_iterations}")
    metrics_file = os.path.join(args.log_dir, "training_metrics.json")
    metric_logger = MetricLogger(delimiter=" ", output_file=metrics_file)
    start_time = time.time()
    num_tokens_printed = False
    for data_dict in metric_logger.log_every(
        data_loader_train,
        print_freq=args.log_every_n_iters,
        header="Training",
        n_iterations=args.num_iterations,
        start_iteration=args.start_iteration,
    ):
        if data_iter_step > args.num_iterations:
            break
        if log_writer is not None:
            log_writer.set_step(data_iter_step)
        if args.enable_perceptual_loss and data_iter_step >= args.perceptual_loss_start_iter:
            rgb_and_lpips_loss.set_perceptual_loss(True)
        model.train()
        misc.adjust_learning_rate(optimizer, data_iter_step, args)
        # Forward pass and loss computation under bf16 autocast.
        with torch.autocast("cuda", dtype=torch.bfloat16):
            input_dict, target_dict = prepare_inputs_and_targets(data_dict, device)
            pred_dict = model(input_dict)
            loss_dict = compute_loss(pred_dict, target_dict, args, rgb_and_lpips_loss)
        # Only entries whose key contains "loss" participate in the total.
        loss_value = sum(loss for k, loss in loss_dict.items() if "loss" in k)
        if not math.isfinite(loss_value):
            logger.info("NaN detected")
            raise AssertionError
        grad_norm = loss_scaler(
            loss_value,
            optimizer,
            parameters=model.parameters(),
            clip_grad=args.grad_clip,
        )
        optimizer.zero_grad()
        torch.cuda.synchronize()
        # Average per-rank losses for logging only (does not affect gradients).
        if world_size > 1:
            [torch.distributed.all_reduce(v) for v in loss_dict.values()]
        loss_dict_reduced = {k: v.item() / world_size for k, v in loss_dict.items()}
        total_loss_reduced = sum(loss for k, loss in loss_dict_reduced.items() if "loss" in k)
        lr = optimizer.param_groups[0]["lr"]
        # PSNR derived from rgb_loss (assumes rgb_loss is an MSE -- see compute_loss).
        psnr = -10 * np.log10(loss_dict_reduced["rgb_loss"])
        metric_logger.update(lr=lr, psnr=psnr, loss=total_loss_reduced, **loss_dict_reduced)
        metric_logger.update(grad_norm=grad_norm)
        if "num_tokens" in pred_dict and not num_tokens_printed:
            logger.info(f"num_tokens: {pred_dict['num_tokens']}")
            num_tokens_printed = True
        if log_writer is not None:
            log_writer.update(
                {
                    "psnr": psnr,
                    "loss": total_loss_reduced,
                    **loss_dict_reduced,
                    "lr": lr,
                    "grad_norm": grad_norm,
                }
            )
            log_writer.set_step()
        # Periodic checkpointing: rank 0 writes, all ranks sync afterwards.
        if (data_iter_step + 1) % args.ckpt_every_n_iters == 0:
            if distributed.is_main_process():
                elapsed_t = time.time() - start_time + args.total_elapsed_time
                checkpoint = {
                    "model": model_without_ddp.state_dict(),
                    "optimizer": optimizer.state_dict(),
                    "loss_scaler": loss_scaler.state_dict(),
                    "latest_step": data_iter_step,
                    "vis_slice_id": vis_slice_id,
                    "args": args,
                    "total_elapsed_time": elapsed_t,
                }
                checkpoint_path = os.path.join(args.ckpt_dir, f"ckpt_{data_iter_step:06d}.pth")
                torch.save(checkpoint, checkpoint_path)
                misc.cleanup_checkpoints(args.ckpt_dir, keep_num=args.keep_n_ckpts)
                logger.info(f"Saved checkpoint to {checkpoint_path}")
            torch.distributed.barrier()
            torch.cuda.empty_cache()
        # Periodic qualitative visualization.
        if (data_iter_step + 1) % args.vis_every_n_iters == 0:
            for _ in range(args.num_vis_samples):
                vis_slice_id, valid_slice_id = visualize(
                    args=args,
                    model=model_without_ddp,
                    dset_train=dataset_train,
                    step=data_iter_step,
                    train_vis_id=vis_slice_id,
                    device=device,
                    dset_val=dataset_val,
                    val_vis_id=valid_slice_id,
                )
            torch.distributed.barrier()
            torch.cuda.empty_cache()
        # Periodic evaluation; the last iteration is covered by the final eval below.
        if (data_iter_step + 1) % args.eval_every_n_iters == 0 and (
            data_iter_step + 1
        ) != args.num_iterations:
            eval_result = evaluate(data_loader_eval, model_without_ddp, args, f"{data_iter_step}")
            if log_writer is not None and eval_result is not None:
                log_writer.update({f"eval/{k}": v for k, v in eval_result.items()})
            if args.decoder_type != "conv" and args.dataset == "waymo":
                flow_eval_result = evaluate_flow(
                    data_loader_eval_flow,
                    model_without_ddp,
                    args,
                    name_str=f"{data_iter_step}",
                )
                if log_writer is not None and flow_eval_result is not None:
                    log_writer.update({f"eval/{k}": v for k, v in flow_eval_result.items()})
            torch.distributed.barrier()
            torch.cuda.empty_cache()
        data_iter_step += 1
    metric_logger.synchronize_between_processes()
    total_time = time.time() - start_time + args.total_elapsed_time
    total_time_str = str(datetime.timedelta(seconds=int(total_time)))
    logger.info("Training time {}".format(total_time_str))
    # Final evaluation after training completes.
    eval_result = evaluate(data_loader_eval, model_without_ddp, args)
    if log_writer is not None and eval_result is not None:
        log_writer.update({f"eval/{k}": v for k, v in eval_result.items()})
    if args.decoder_type != "conv" and args.dataset == "waymo":
        flow_eval_result = evaluate_flow(data_loader_eval_flow, model_without_ddp, args)
        if log_writer is not None and flow_eval_result is not None:
            log_writer.update({f"eval/{k}": v for k, v in flow_eval_result.items()})
    logger.info("Done!")
if __name__ == "__main__":
    # Script entry point: parse CLI args and launch training.
    args = get_args_parser().parse_args()
    main(args)
================================================
FILE: preproc/utils.py
================================================
# To track process without installing mmcv
# Code is modified from mmcv.utils
import sys
import time
from collections.abc import Iterable
from multiprocessing import Pool
from shutil import get_terminal_size
import numpy as np
class ProgressBar:
    """Console progress bar with ETA and throughput reporting.

    When ``task_num`` is positive, a bounded bar with an ETA is rendered;
    otherwise only the completed count and throughput are shown.
    """

    def __init__(self, task_num=0, bar_width=50, start=True, file=sys.stdout):
        self.task_num = task_num
        self.bar_width = bar_width
        self.completed = 0
        self.file = file
        # Timing may be deferred until the caller invokes start() explicitly.
        if start:
            self.start()

    @property
    def terminal_width(self):
        """Current terminal width in columns (fallback when not a tty)."""
        cols, _ = get_terminal_size()
        return cols

    def start(self):
        """Render the initial (empty) bar and start the clock."""
        if self.task_num > 0:
            self.file.write(f'[{" " * self.bar_width}] 0/{self.task_num}, ' "elapsed: 0s, ETA:")
        else:
            self.file.write("completed: 0, elapsed: 0s")
        self.file.flush()
        self.start_time = time.time()

    def update(self, num_tasks=1):
        """Advance the bar by ``num_tasks`` finished tasks and redraw it."""
        assert num_tasks > 0
        self.completed += num_tasks
        elapsed = time.time() - self.start_time
        fps = self.completed / elapsed if elapsed > 0 else float("inf")
        if self.task_num > 0:
            percentage = self.completed / float(self.task_num)
            eta = int(elapsed * (1 - percentage) / percentage + 0.5)
            msg = (
                f"\r[{{}}] {self.completed}/{self.task_num}, "
                f"{fps:.1f} task/s, elapsed: {int(elapsed + 0.5)}s, "
                f"ETA: {eta:5}s"
            )
            # Shrink the bar so the full line fits inside the terminal.
            width = max(
                2,
                min(
                    self.bar_width,
                    int(self.terminal_width - len(msg)) + 2,
                    int(self.terminal_width * 0.6),
                ),
            )
            filled = int(width * percentage)
            self.file.write(msg.format(">" * filled + " " * (width - filled)))
        else:
            self.file.write(
                f"completed: {self.completed}, elapsed: {int(elapsed + 0.5)}s,"
                f" {fps:.1f} tasks/s"
            )
        self.file.flush()
def init_pool(process_num, initializer=None, initargs=None):
    """Create a :class:`multiprocessing.Pool`, validating ``initargs``.

    ``initargs`` is only honored together with ``initializer`` and must be a
    tuple, mirroring the ``Pool`` constructor's contract.
    """
    if initializer is None:
        return Pool(process_num)
    if initargs is None:
        return Pool(process_num, initializer)
    if not isinstance(initargs, tuple):
        raise TypeError('"initargs" must be a tuple')
    return Pool(process_num, initializer, initargs)
def track_parallel_progress(
    func,
    tasks,
    nproc,
    initializer=None,
    initargs=None,
    bar_width=50,
    chunksize=1,
    skip_first=False,
    keep_order=True,
    file=sys.stdout,
):
    """Track the progress of parallel task execution with a progress bar.
    The built-in :mod:`multiprocessing` module is used for process pools and
    tasks are done with :func:`Pool.map` or :func:`Pool.imap_unordered`.
    Args:
        func (callable): The function to be applied to each task.
        tasks (list or tuple[Iterable, int]): A list of tasks or
            (tasks, total num).
        nproc (int): Process (worker) number.
        initializer (None or callable): Refer to :class:`multiprocessing.Pool`
            for details.
        initargs (None or tuple): Refer to :class:`multiprocessing.Pool` for
            details.
        chunksize (int): Refer to :class:`multiprocessing.Pool` for details.
        bar_width (int): Width of progress bar.
        skip_first (bool): Whether to skip the first sample for each worker
            when estimating fps, since the initialization step may takes
            longer.
        keep_order (bool): If True, :func:`Pool.imap` is used, otherwise
            :func:`Pool.imap_unordered` is used.
    Returns:
        list: The task results.
    """
    # Accept either (iterable, total) or a sized iterable.
    if isinstance(tasks, tuple):
        assert len(tasks) == 2
        assert isinstance(tasks[0], Iterable)
        assert isinstance(tasks[1], int)
        task_num = tasks[1]
        tasks = tasks[0]
    elif isinstance(tasks, Iterable):
        # NOTE(review): requires len(); a bare generator (Iterable without
        # __len__) raises TypeError here -- pass (generator, total) instead.
        task_num = len(tasks)
    else:
        raise TypeError('"tasks" must be an iterable object or a (iterator, int) tuple')
    pool = init_pool(nproc, initializer, initargs)
    # With skip_first, the bar/clock start only after the warm-up batch.
    start = not skip_first
    task_num -= nproc * chunksize * int(skip_first)
    prog_bar = ProgressBar(task_num, bar_width, start, file=file)
    results = []
    if keep_order:
        gen = pool.imap(func, tasks, chunksize)
    else:
        gen = pool.imap_unordered(func, tasks, chunksize)
    for result in gen:
        results.append(result)
        if skip_first:
            # Ignore the first nproc*chunksize results (worker warm-up), then
            # start timing on the batch boundary.
            if len(results) < nproc * chunksize:
                continue
            elif len(results) == nproc * chunksize:
                prog_bar.start()
                continue
        prog_bar.update()
    prog_bar.file.write("\n")
    pool.close()
    pool.join()
    return results
================================================
FILE: preproc/waymo_download.py
================================================
import argparse
import os
import subprocess
from concurrent.futures import ThreadPoolExecutor
from typing import List
def download_file(filename, target_dir, source):
    """Download a single Waymo ``.tfrecord`` via ``gsutil cp``.

    Args:
        filename: Segment name as listed in the split file (no extension).
        target_dir: Local directory the file is copied into.
        source: GCS prefix, e.g. ``gs://waymo_open_dataset_scene_flow/train``.

    Raises:
        Exception: If ``gsutil`` exits with a non-zero status; the message is
            gsutil's stderr.
    """
    # Bug fix: `filename` was never interpolated into the source URL, so every
    # call pointed at the same bogus object. `-n` skips files that already
    # exist locally, making reruns resumable.
    result = subprocess.run(
        [
            "gsutil",
            "cp",
            "-n",
            f"{source}/{filename}.tfrecord",
            target_dir,
        ],
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        raise Exception(result.stderr)
def download_files(
    file_names: List[str],
    target_dir: str,
    source: str,
    max_workers: int = 10,
):
    """Download many tfrecords concurrently, printing per-file progress.

    Failures are reported but do not abort the remaining downloads.
    """
    total_files = len(file_names)
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        pending = [
            pool.submit(download_file, name, target_dir, source) for name in file_names
        ]
        # Iterate in submission order so the progress counter is monotonic.
        for counter, future in enumerate(pending, start=1):
            try:
                future.result()
                print(f"[{counter}/{total_files}] Downloaded successfully!")
            except Exception as e:
                print(f"[{counter}/{total_files}] Failed to download. Error: {e}")
if __name__ == "__main__":
    # CLI entry point: download (a subset of) the Waymo Open Dataset tfrecords.
    print("note: `gcloud auth login` is required before running this script")
    print("Downloading Waymo dataset from Google Cloud Storage...")
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--target_dir",
        type=str,
        default="data/waymo/raw",
        help="Path to the target directory",
    )
    parser.add_argument(
        "--split",
        type=str,
        default="train",
    )
    parser.add_argument(
        "--scene_ids", type=int, nargs="+", help="scene ids to download", default=None
    )
    parser.add_argument(
        "--split_file",
        type=str,
        default="data/dataset_scene_list/waymo_train_list.txt",
        help="",
    )
    parser.add_argument(
        "--max_workers",
        type=int,
        default=10,
        help="Number of threads to use for downloading",
    )
    args = parser.parse_args()
    os.makedirs(args.target_dir, exist_ok=True)
    # The split file lists one segment name per line.
    total_list = open(args.split_file, "r").readlines()
    total_list = [x.strip() for x in total_list]
    # --scene_ids selects rows of the split file by index; default: whole split.
    if args.scene_ids is not None:
        file_names = [total_list[i] for i in args.scene_ids]
    else:
        file_names = total_list
    download_files(
        file_names,
        args.target_dir,
        source=f"gs://waymo_open_dataset_scene_flow/{args.split}",
        max_workers=args.max_workers,
    )
================================================
FILE: preproc/waymo_preprocess.py
================================================
import json
import os
import time
import imageio
import imageio_ffmpeg as imageio_ffmpeg # Ensure ffmpeg is installed
import numpy as np
import tensorflow as tf
from PIL import Image
from tqdm import tqdm, trange
from waymo_open_dataset.utils.frame_utils import parse_range_image_and_camera_projection
from storm.visualization.visualization_tools import depth_visualizer, scene_flow_to_rgb
from .utils import track_parallel_progress
# Original image resolution as (height, width), keyed by the zero-based
# camera id used on disk (Waymo proto camera name - 1). The side cameras
# (ids 3 and 4) are shorter than the front-facing ones.
ORIGINAL_SIZE = {
    "0": (1280, 1920),
    "1": (1280, 1920),
    "2": (1280, 1920),
    "3": (886, 1920),
    "4": (886, 1920),
}
# 4x4 change-of-basis applied as `extrinsic @ OPENCV2DATASET` before inverting
# to build world-to-camera matrices. NOTE(review): presumably maps OpenCV
# camera axes (z-forward, y-down) to the dataset's camera axes — confirm
# against the Waymo calibration convention.
OPENCV2DATASET = np.array([[0, 0, 1, 0], [-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 0, 1]])
# Acknowledgement:
# 1. https://github.com/open-mmlab/mmdetection3d/blob/main/tools/dataset_converters/waymo_converter.py
# 2. https://github.com/leolyj/DCA-SRSFE/blob/main/data_preprocess/Waymo/generate_flow.py
try:
from waymo_open_dataset import dataset_pb2
except ImportError:
raise ImportError(
'Please run "pip install waymo-open-dataset-tf-2-6-0" '
">1.4.5 to install the official devkit first."
)
import numpy as np
import tensorflow as tf
from waymo_open_dataset.utils import box_utils, range_image_utils, transform_utils
from waymo_open_dataset.utils.frame_utils import parse_range_image_and_camera_projection
from waymo_open_dataset.wdl_limited.camera.ops import py_camera_model_ops
##### Very important to set memory growth for GPU to avoid OOM errors
# TensorFlow pre-allocates all GPU memory by default; enabling memory growth
# makes it allocate lazily, so the parallel preprocessing workers can share
# the device without OOM-ing each other.
gpu_devices = tf.config.experimental.list_physical_devices("GPU")
for device in gpu_devices:
    tf.config.experimental.set_memory_growth(device, True)
def project_vehicle_to_image(vehicle_pose, calibration, points):
    """Projects from vehicle coordinate system to image with global shutter.

    Arguments:
      vehicle_pose: Vehicle pose transform from vehicle into world coordinate
        system.
      calibration: Camera calibration details (including intrinsics/extrinsics).
      points: Points to project of shape [N, 3] in vehicle coordinate system.

    Returns:
      Array of shape [N, 3], with the latter dimension composed of (u, v, ok).
    """
    # Transform points from vehicle to world coordinate system. Vectorized:
    # append a homogeneous coordinate and apply the 4x4 pose in one matmul
    # (replaces the original per-point Python loop).
    pose_matrix = np.array(vehicle_pose.transform).reshape(4, 4)
    points = np.asarray(points)
    homogeneous = np.hstack([points, np.ones((points.shape[0], 1))])
    # astype mirrors the original `np.zeros_like(points)` buffer, which kept
    # the input dtype.
    world_points = (homogeneous @ pose_matrix.T)[:, :3].astype(points.dtype, copy=False)
    # Populate camera image metadata. Velocity and latency stats are filled with
    # zeroes.
    extrinsic = tf.reshape(
        tf.constant(list(calibration.extrinsic.transform), dtype=tf.float32), [4, 4]
    )
    intrinsic = tf.constant(list(calibration.intrinsic), dtype=tf.float32)
    metadata = tf.constant(
        [
            calibration.width,
            calibration.height,
            dataset_pb2.CameraCalibration.GLOBAL_SHUTTER,
        ],
        dtype=tf.int32,
    )
    camera_image_metadata = list(vehicle_pose.transform) + [0.0] * 10
    # Perform projection and return projected image coordinates (u, v, ok).
    return py_camera_model_ops.world_to_image(
        extrinsic, intrinsic, metadata, camera_image_metadata, world_points
    ).numpy()
def get_ground_np(pts):
    """Label ground points in a lidar point cloud via iterative plane fitting.

    Modified from
    https://github.com/tusen-ai/LiDAR_SOT/blob/main/waymo_data/data_preprocessing/ground_removal.py

    Args:
        pts (numpy.ndarray): Input point cloud; columns 0..2 are x, y, z.

    Returns:
        numpy.ndarray: Boolean array of shape [N, 1]; True where the point is
        classified as ground.
    """
    seed_threshold = 1.2  # z-margin above the lowest points for initial seeds
    num_lowest = 20  # number of lowest points defining the base height
    num_refine_iters = 10  # plane re-estimation iterations
    dist_threshold = 0.3  # max point-to-plane distance for "ground"

    # Seed the ground set with every point close in height to the lowest ones.
    sorted_by_z = pts[pts[:, 2].argsort(), :]
    base_height = np.mean(sorted_by_z[:num_lowest, 2])
    seeds = sorted_by_z[sorted_by_z[:, 2] < base_height + seed_threshold, :]

    # Alternate between fitting a plane to the current ground set (smallest
    # principal direction of the covariance = plane normal) and re-selecting
    # the points that lie close to that plane.
    for _ in range(num_refine_iters):
        centroid = np.mean(seeds, axis=0)[:3]
        dx = seeds[:, 0] - centroid[0]
        dy = seeds[:, 1] - centroid[1]
        dz = seeds[:, 2] - centroid[2]
        cov = np.array(
            [
                [np.mean(dx * dx), np.mean(dx * dy), np.mean(dx * dz)],
                [np.mean(dx * dy), np.mean(dy * dy), np.mean(dy * dz)],
                [np.mean(dx * dz), np.mean(dy * dz), np.mean(dz * dz)],
            ],
            dtype=np.float32,
        )
        basis, _, _ = np.linalg.svd(cov)
        plane_normal = basis[:, 2]
        plane_offset = -plane_normal.dot(centroid)
        cutoff = dist_threshold - plane_offset
        projection = pts[:, :3] @ plane_normal[..., np.newaxis]
        seeds = pts[projection.squeeze(-1) < cutoff]

    # Final labels come from the last fitted plane.
    return projection < cutoff
def compute_range_image_cartesian(
    range_image_polar,
    extrinsic,
    pixel_pose=None,
    frame_pose=None,
    dtype=tf.float32,
    scope=None,
):
    """Computes range image cartesian coordinates from polar ones.

    Extended from the Waymo devkit version to also return the per-pixel sensor
    origins (the lidar origin transformed through the same chain of poses).

    Args:
      range_image_polar: [B, H, W, 3] float tensor. Lidar range image in polar
        coordinate in sensor frame.
      extrinsic: [B, 4, 4] float tensor. Lidar extrinsic.
      pixel_pose: [B, H, W, 4, 4] float tensor. If not None, it sets pose for each
        range image pixel.
      frame_pose: [B, 4, 4] float tensor. This must be set when pixel_pose is set.
        It decides the vehicle frame at which the cartesian points are computed.
      dtype: float type to use internally. This is needed as extrinsic and
        inclination sometimes have higher resolution than range_image.
      scope: the name scope.

    Returns:
      (range_image_points, range_image_origins): two [B, H, W, 3] tensors of
      cartesian point coordinates and ray-origin coordinates, cast back to the
      input dtype.
    """
    range_image_polar_dtype = range_image_polar.dtype
    # Do the math in `dtype` (typically float32/float64) regardless of input.
    range_image_polar = tf.cast(range_image_polar, dtype=dtype)
    extrinsic = tf.cast(extrinsic, dtype=dtype)
    if pixel_pose is not None:
        pixel_pose = tf.cast(pixel_pose, dtype=dtype)
    if frame_pose is not None:
        frame_pose = tf.cast(frame_pose, dtype=dtype)
    with tf.compat.v1.name_scope(
        scope,
        "ComputeRangeImageCartesian",
        [range_image_polar, extrinsic, pixel_pose, frame_pose],
    ):
        # Polar (azimuth, inclination, range) -> sensor-frame Cartesian.
        azimuth, inclination, range_image_range = tf.unstack(range_image_polar, axis=-1)

        cos_azimuth = tf.cos(azimuth)
        sin_azimuth = tf.sin(azimuth)
        cos_incl = tf.cos(inclination)
        sin_incl = tf.sin(inclination)

        # [B, H, W].
        x = cos_azimuth * cos_incl * range_image_range
        y = sin_azimuth * cos_incl * range_image_range
        z = sin_incl * range_image_range

        # [B, H, W, 3]
        range_image_points = tf.stack([x, y, z], -1)
        # Ray origins start at the sensor origin (zeros in sensor frame).
        range_image_origins = tf.zeros_like(range_image_points)
        # [B, 3, 3]
        rotation = extrinsic[..., 0:3, 0:3]
        # translation [B, 1, 3]
        translation = tf.expand_dims(tf.expand_dims(extrinsic[..., 0:3, 3], 1), 1)

        # To vehicle frame.
        # [B, H, W, 3]
        range_image_points = tf.einsum("bkr,bijr->bijk", rotation, range_image_points) + translation
        range_image_origins = (
            tf.einsum("bkr,bijr->bijk", rotation, range_image_origins) + translation
        )
        if pixel_pose is not None:
            # The TOP lidar supplies a per-pixel pose (rolling capture), so each
            # pixel is first lifted to the global frame...
            # [B, H, W, 3, 3]
            pixel_pose_rotation = pixel_pose[..., 0:3, 0:3]
            # [B, H, W, 3]
            pixel_pose_translation = pixel_pose[..., 0:3, 3]
            # [B, H, W, 3]
            range_image_points = (
                tf.einsum("bhwij,bhwj->bhwi", pixel_pose_rotation, range_image_points)
                + pixel_pose_translation
            )
            range_image_origins = (
                tf.einsum("bhwij,bhwj->bhwi", pixel_pose_rotation, range_image_origins)
                + pixel_pose_translation
            )
            if frame_pose is None:
                raise ValueError("frame_pose must be set when pixel_pose is set.")
            # ...then brought back to the vehicle frame corresponding to the
            # given frame_pose.
            # [B, 4, 4]
            world_to_vehicle = tf.linalg.inv(frame_pose)
            world_to_vehicle_rotation = world_to_vehicle[:, 0:3, 0:3]
            world_to_vehicle_translation = world_to_vehicle[:, 0:3, 3]
            # [B, H, W, 3]
            range_image_points = (
                tf.einsum("bij,bhwj->bhwi", world_to_vehicle_rotation, range_image_points)
                + world_to_vehicle_translation[:, tf.newaxis, tf.newaxis, :]
            )
            range_image_origins = (
                tf.einsum("bij,bhwj->bhwi", world_to_vehicle_rotation, range_image_origins)
                + world_to_vehicle_translation[:, tf.newaxis, tf.newaxis, :]
            )

        # Restore the caller's dtype.
        range_image_points = tf.cast(range_image_points, dtype=range_image_polar_dtype)
        range_image_origins = tf.cast(range_image_origins, dtype=range_image_polar_dtype)
        return range_image_points, range_image_origins
def extract_point_cloud_from_range_image(
    range_image,
    extrinsic,
    inclination,
    pixel_pose=None,
    frame_pose=None,
    dtype=tf.float32,
    scope=None,
):
    """Extracts point cloud from range image.

    Args:
      range_image: [B, H, W] tensor. Lidar range images.
      extrinsic: [B, 4, 4] tensor. Lidar extrinsic.
      inclination: [B, H] tensor. Inclination for each row of the range image.
        0-th entry corresponds to the 0-th row of the range image.
      pixel_pose: [B, H, W, 4, 4] tensor. If not None, it sets pose for each range
        image pixel.
      frame_pose: [B, 4, 4] tensor. This must be set when pixel_pose is set. It
        decides the vehicle frame at which the cartesian points are computed.
      dtype: float type to use internally. This is needed as extrinsic and
        inclination sometimes have higher resolution than range_image.
      scope: the name scope.

    Returns:
      range_image_origins: [B, H, W, 3] with {x, y, z}, the per-pixel ray
        origins in vehicle frame (returned FIRST).
      range_image_points: [B, H, W, 3] with {x, y, z} as inner dims in vehicle
        frame (returned SECOND).
    """
    with tf.compat.v1.name_scope(
        scope,
        "ExtractPointCloudFromRangeImage",
        [range_image, extrinsic, inclination, pixel_pose, frame_pose],
    ):
        # Range image -> (azimuth, inclination, range) polar representation.
        range_image_polar = range_image_utils.compute_range_image_polar(
            range_image, extrinsic, inclination, dtype=dtype
        )
        (
            range_image_points_cartesian,
            range_image_origins_cartesian,
        ) = compute_range_image_cartesian(
            range_image_polar,
            extrinsic,
            pixel_pose=pixel_pose,
            frame_pose=frame_pose,
            dtype=dtype,
        )
        # Note the swap: origins come back first here, matching the caller's
        # `(origins_cartesian, points_cartesian) = ...` unpacking.
        return range_image_origins_cartesian, range_image_points_cartesian
def parse_range_image_flow_and_camera_projection(frame):
    """Decode the scene-flow range images and camera projections of a frame.

    Mirrors the devkit's `parse_range_image_and_camera_projection`, but reads
    the `range_image_flow_compressed` field instead of the plain range image.

    Args:
        frame: Open dataset Frame proto.

    Returns:
        Tuple of ({laser_name: [ri_return1, ri_return2]} flow range images,
        {laser_name: [cp_return1, cp_return2]} camera projections, and the
        TOP-lidar per-pixel pose MatrixFloat, or None if absent).
    """
    range_images = {}
    camera_projections = {}
    range_image_top_pose = None

    def _decode(compressed_bytes, proto_cls):
        # ZLIB-decompress the serialized matrix and parse it into `proto_cls`.
        raw = tf.io.decode_compressed(compressed_bytes, "ZLIB")
        msg = proto_cls()
        msg.ParseFromString(bytearray(raw.numpy()))
        return msg

    for laser in frame.lasers:
        first, second = laser.ri_return1, laser.ri_return2
        if len(first.range_image_flow_compressed) > 0:  # pylint: disable=g-explicit-length-test
            range_images[laser.name] = [
                _decode(first.range_image_flow_compressed, dataset_pb2.MatrixFloat)
            ]
            if laser.name == dataset_pb2.LaserName.TOP:
                # Only the TOP lidar carries a per-pixel pose image.
                range_image_top_pose = _decode(
                    first.range_image_pose_compressed, dataset_pb2.MatrixFloat
                )
            camera_projections[laser.name] = [
                _decode(first.camera_projection_compressed, dataset_pb2.MatrixInt32)
            ]
        if len(second.range_image_flow_compressed) > 0:  # pylint: disable=g-explicit-length-test
            range_images[laser.name].append(
                _decode(second.range_image_flow_compressed, dataset_pb2.MatrixFloat)
            )
            camera_projections[laser.name].append(
                _decode(second.camera_projection_compressed, dataset_pb2.MatrixInt32)
            )
    return range_images, camera_projections, range_image_top_pose
def convert_range_image_to_point_cloud_flow(
    frame,
    range_images,
    range_images_flow,
    camera_projections,
    range_image_top_pose,
    ri_index=0,
):
    """
    Modified from the codes of Waymo Open Dataset.
    Convert range images to point cloud.
    Convert range images flow to scene flow.

    Args:
        frame: open dataset frame
        range_images: A dict of {laser_name, [range_image_first_return, range_image_second_return]}.
        range_images_flow: A dict similar to range_images.
        camera_projections: A dict of {laser_name,
            [camera_projection_from_first_return, camera_projection_from_second_return]}.
        range_image_top_pose: range image pixel pose for top lidar.
        ri_index: 0 for the first return, 1 for the second return.

    Returns:
        origins: {[N, 3]} list of per-point sensor origins, one entry per lidar.
        points: {[N, 3]} list of 3d lidar points of length 5 (number of lidars).
        points_flow: {[N, 4]} list of (flow_x, flow_y, flow_z, flow_class) per point.
        cp_points: NOTE — never populated in this modified version; always
            returned as an empty list.
        points_intensity: {[N]} list of per-point intensity.
        points_elongation: {[N]} list of per-point elongation.
        laser_ids: {[N]} list of zero-based laser ids per point.
    """
    # Iterate lasers in a deterministic order (sorted by proto laser name).
    calibrations = sorted(frame.context.laser_calibrations, key=lambda c: c.name)
    origins, points, cp_points = [], [], []
    points_intensity = []
    points_elongation = []
    points_flow = []
    laser_ids = []
    frame_pose = tf.convert_to_tensor(np.reshape(np.array(frame.pose.transform), [4, 4]))
    # [H, W, 6] — per-pixel (roll, pitch, yaw, x, y, z) pose of the TOP lidar.
    range_image_top_pose_tensor = tf.reshape(
        tf.convert_to_tensor(range_image_top_pose.data), range_image_top_pose.shape.dims
    )
    # [H, W, 3, 3]
    range_image_top_pose_tensor_rotation = transform_utils.get_rotation_matrix(
        range_image_top_pose_tensor[..., 0],
        range_image_top_pose_tensor[..., 1],
        range_image_top_pose_tensor[..., 2],
    )
    range_image_top_pose_tensor_translation = range_image_top_pose_tensor[..., 3:]
    range_image_top_pose_tensor = transform_utils.get_transform(
        range_image_top_pose_tensor_rotation, range_image_top_pose_tensor_translation
    )
    for c in calibrations:
        range_image = range_images[c.name][ri_index]
        range_image_flow = range_images_flow[c.name][ri_index]
        if len(c.beam_inclinations) == 0:  # pylint: disable=g-explicit-length-test
            # No explicit beam table: derive inclinations from the min/max span.
            beam_inclinations = range_image_utils.compute_inclination(
                tf.constant([c.beam_inclination_min, c.beam_inclination_max]),
                height=range_image.shape.dims[0],
            )
        else:
            beam_inclinations = tf.constant(c.beam_inclinations)

        # Row 0 of the range image corresponds to the highest inclination.
        beam_inclinations = tf.reverse(beam_inclinations, axis=[-1])
        extrinsic = np.reshape(np.array(c.extrinsic.transform), [4, 4])

        range_image_tensor = tf.reshape(
            tf.convert_to_tensor(range_image.data), range_image.shape.dims
        )
        range_image_flow_tensor = tf.reshape(
            tf.convert_to_tensor(range_image_flow.data), range_image_flow.shape.dims
        )
        pixel_pose_local = None
        frame_pose_local = None
        if c.name == dataset_pb2.LaserName.TOP:
            # Only TOP has per-pixel poses; add a batch dim for the helper.
            pixel_pose_local = range_image_top_pose_tensor
            pixel_pose_local = tf.expand_dims(pixel_pose_local, axis=0)
            frame_pose_local = tf.expand_dims(frame_pose, axis=0)

        # Channel 0 is range; non-positive range means "no return".
        range_image_mask = range_image_tensor[..., 0] > 0
        range_image_intensity = range_image_tensor[..., 1]
        range_image_elongation = range_image_tensor[..., 2]
        flow_x = range_image_flow_tensor[..., 0]
        flow_y = range_image_flow_tensor[..., 1]
        flow_z = range_image_flow_tensor[..., 2]
        flow_class = range_image_flow_tensor[..., 3]

        mask_index = tf.where(range_image_mask)

        (origins_cartesian, points_cartesian,) = extract_point_cloud_from_range_image(
            tf.expand_dims(range_image_tensor[..., 0], axis=0),
            tf.expand_dims(extrinsic, axis=0),
            tf.expand_dims(tf.convert_to_tensor(beam_inclinations), axis=0),
            pixel_pose=pixel_pose_local,
            frame_pose=frame_pose_local,
        )
        origins_cartesian = tf.squeeze(origins_cartesian, axis=0)
        points_cartesian = tf.squeeze(points_cartesian, axis=0)

        # Keep only pixels with a valid return.
        origins_tensor = tf.gather_nd(origins_cartesian, mask_index)
        points_tensor = tf.gather_nd(points_cartesian, mask_index)

        points_intensity_tensor = tf.gather_nd(range_image_intensity, mask_index)
        points_elongation_tensor = tf.gather_nd(range_image_elongation, mask_index)

        points_flow_x_tensor = tf.expand_dims(tf.gather_nd(flow_x, mask_index), axis=1)
        points_flow_y_tensor = tf.expand_dims(tf.gather_nd(flow_y, mask_index), axis=1)
        points_flow_z_tensor = tf.expand_dims(tf.gather_nd(flow_z, mask_index), axis=1)
        points_flow_class_tensor = tf.expand_dims(tf.gather_nd(flow_class, mask_index), axis=1)

        origins.append(origins_tensor.numpy())
        points.append(points_tensor.numpy())
        points_intensity.append(points_intensity_tensor.numpy())
        points_elongation.append(points_elongation_tensor.numpy())
        # Zero-based laser id (proto names start at 1).
        laser_ids.append(np.full_like(points_intensity_tensor.numpy(), c.name - 1))

        points_flow.append(
            tf.concat(
                [
                    points_flow_x_tensor,
                    points_flow_y_tensor,
                    points_flow_z_tensor,
                    points_flow_class_tensor,
                ],
                axis=-1,
            ).numpy()
        )

    return (
        origins,
        points,
        points_flow,
        cp_points,
        points_intensity,
        points_elongation,
        laser_ids,
    )
class WaymoProcessor:
def __init__(
    self,
    load_dir,
    save_dir,
    scene_lists,
    prefix,
    downsample_factors=(4,),
    process_keys=("images", "lidar", "calib", "pose", "ground", "dynamic_masks"),
    json_folder_to_save=None,
    num_workers=64,
    overwrite=False,
):
    """Prepare a Waymo tfrecord -> preprocessed-scene converter.

    Args:
        load_dir (str): Root directory holding the raw tfrecords; `prefix`
            is appended as the split subdirectory.
        save_dir (str): Root directory to write converted scenes into.
        scene_lists (list): (scene_id, scene_name) pairs to process.
        prefix (str): Split name, e.g. "training"/"validation".
        downsample_factors (iterable[int]): Image downsample factors to export
            (factor 1 means full resolution).
        process_keys (iterable[str]): Which per-frame artifacts to extract.
        json_folder_to_save (str): Folder for per-scene metadata JSONs.
        num_workers (int): Parallelism used by `convert`.
        overwrite (bool): Re-generate outputs that already exist on disk.
    """
    self.overwrite = overwrite
    # Defaults are immutable tuples (the original used mutable list defaults,
    # a classic Python pitfall); copy to lists so callers' sequences are
    # never aliased.
    self.process_keys = list(process_keys)
    self.json_folder_to_save = f"{json_folder_to_save}/{prefix}"
    os.makedirs(self.json_folder_to_save, exist_ok=True)
    print("will process keys: ", self.process_keys)
    self.load_dir = f"{load_dir}/{prefix}"
    self.save_dir = f"{save_dir}/{prefix}"
    self.prefix = prefix
    self.downsample_factors = list(downsample_factors)
    self.num_workers = int(num_workers)
    self.scene_ids = [s[0] for s in scene_lists]
    self.scene_names = [s[1] for s in scene_lists]
    self.create_folder()
def convert(self):
    """Convert action.

    Runs `convert_one` over every scene index using `self.num_workers`
    parallel workers (via the project's track_parallel_progress helper).
    """
    print("Start converting ...")
    track_parallel_progress(self.convert_one, range(len(self.scene_ids)), self.num_workers)
    print("\nFinished ...")
def convert_one(self, file_id):
    """Convert action for single file.

    Extracts all requested artifacts frame-by-frame from the scene's tfrecord,
    writes the scene metadata JSON, and renders a preview video that stacks
    RGB (with ground mask overlay), depth, and scene-flow panels for the five
    cameras.

    Args:
        file_id (int): Index of the file to be converted.
    """
    scene_id = self.scene_ids[file_id]
    scene_name = self.scene_names[file_id]
    tfrecord_path = f"{self.load_dir}/{scene_name}.tfrecord"
    dataset = tf.data.TFRecordDataset(tfrecord_path, compression_type="")
    # First pass just to count frames (tf datasets have no len()).
    num_frames = sum(1 for _ in dataset)
    for frame_id, data in enumerate(
        tqdm(dataset, desc=f"File {file_id}", total=num_frames, dynamic_ncols=True)
    ):
        frame = dataset_pb2.Frame()
        frame.ParseFromString(bytearray(data.numpy()))
        if "images" in self.process_keys:
            self.save_image(frame, file_id, frame_id)
        if "calib" in self.process_keys:
            self.save_calib(frame, file_id, frame_id)
        if "pose" in self.process_keys:
            self.save_pose(frame, file_id, frame_id)
        # "lidar" and "depth" are mutually exclusive processing modes.
        if "lidar" in self.process_keys and "depth" not in self.process_keys:
            self.save_lidar(frame, file_id, frame_id)
        if "depth" in self.process_keys and "lidar" not in self.process_keys:
            # NOTE(review): save_depth is not defined in this portion of the
            # file — presumably implemented alongside the other save_* methods.
            self.save_depth(frame, file_id, frame_id)
        if "dynamic_masks" in self.process_keys:
            self.save_dynamic_mask(frame, file_id, frame_id)
        if "ground" in self.process_keys:
            self.save_ground(frame, file_id, frame_id)
    self.make_json(file_id)
    # save a video
    # Preview uses the smallest resolution (last downsample factor).
    downsample_factor = self.downsample_factors[-1]
    scene_id = f"{scene_id:03d}"
    video_path = f"{self.json_folder_to_save.replace('annotations', 'video_preview')}/{scene_id}-{self.scene_names[file_id].split('segment-')[1].split('_')[0]}.mp4"
    os.makedirs(os.path.dirname(video_path), exist_ok=True)
    scene_path = f"{self.save_dir}/{scene_id}"
    video_frames = []
    for frame_id in trange(num_frames, desc=f"Making video {scene_id}", dynamic_ncols=True):
        # `frame` is reused here as the list of per-camera panels for this step.
        frame = []
        # Camera order: front-left, left, front, right, front-right.
        for cam_id in [3, 1, 0, 2, 4]:
            image_path = (
                f"{scene_path}/images_{downsample_factor}/"
                + f"{str(frame_id).zfill(3)}_{str(cam_id)}.jpg"
            )
            image = Image.open(image_path).convert("RGB")
            depth_flows_path = (
                f"{scene_path}/depth_flows_{downsample_factor}/"
                + f"{str(frame_id).zfill(3)}_{str(cam_id)}.npy"
            )
            ground_path = image_path.replace("images", "ground_label").replace("jpg", "png")
            ground_mask = np.array(Image.open(ground_path))
            depth_flows = np.load(depth_flows_path)
            # Channel 0 is depth, channels 1:4 are the scene-flow vector.
            depth_image = depth_flows[:, :, 0]
            flow_image = depth_flows[:, :, 1:4]
            depth_img = depth_visualizer(depth_image, depth_image > 0)
            flow_img = scene_flow_to_rgb(flow_image, flow_max_radius=15)
            img_array = np.array(image) / 255.0
            # Prepare the ground mask and image for blending
            ground_mask = (
                np.expand_dims(ground_mask, axis=-1) / 255.0
            )  # Normalize mask to [0, 1]
            # Ensure ground_mask is binary (0 or 1), just in case the values are not exactly 0 or 255
            ground_mask = np.where(ground_mask > 0.5, 1, 0)  # Threshold the mask at 0.5
            # Blending the image using the ground mask
            # If ground_mask == 1 -> blend with 30% gray
            # If ground_mask == 0 -> keep the original image
            gray_value = 0.3  # Gray color value (30%)
            img_array = img_array * (1 - ground_mask) + ground_mask * gray_value
            depth_img = np.array(depth_img)
            flow_img = np.array(flow_img)
            if img_array.shape[0] != 160:
                # Pad the shorter side cameras (886-px originals) to a common
                # 160x240 panel, anchored at the bottom.
                _img_array = np.zeros((160, 240, 3))
                _img_array[50:] = img_array
                img_array = _img_array
                _depth_img = np.zeros((160, 240, 3))
                _depth_img[50:] = depth_img
                depth_img = _depth_img
                _flow_img = np.zeros((160, 240, 3))
                _flow_img[50:] = flow_img
                flow_img = _flow_img
            # Stack RGB / depth / flow vertically per camera...
            img_array = np.concatenate([img_array, depth_img, flow_img], axis=0)
            frame.append((img_array * 255).astype(np.uint8))
        # ...and the five cameras horizontally.
        frame = np.concatenate(frame, axis=1)
        video_frames.append(frame)
    video_frames = np.stack(video_frames)
    imageio.mimwrite(video_path, video_frames, fps=10, codec="libx264", quality=4)
    print(f"Scene {scene_id} video saved to {video_path}")
def make_json(self, file_id):
    """Aggregate a scene's on-disk calibration/pose files into one JSON.

    Reads the per-frame ego_to_world / cam_to_world files and the per-camera
    intrinsics/extrinsics previously written by save_pose/save_calib, and
    writes `{json_folder_to_save}/{scene_name}.json`. Skipped if the JSON
    already exists and `self.overwrite` is False.

    Args:
        file_id (int): Index of the scene to summarize.
    """
    scene_id = self.scene_ids[file_id]
    scene_name = self.scene_names[file_id]
    file_folder = os.path.join(self.save_dir, f"{scene_id:03d}")
    if os.path.exists(f"{self.json_folder_to_save}/{scene_name}.json") and not self.overwrite:
        print(
            f"Scene {scene_id} json already exists at {self.json_folder_to_save}/{scene_name}.json"
        )
        return
    else:
        print("start overwriting")
    # One pose file per timestep determines the sequence length.
    num_timesteps = len(os.listdir(f"{file_folder}/ego_to_world"))
    camera_list = ["0", "1", "2", "3", "4"]
    scene_dict = {
        "dataset": "waymo",
        "scene_id": int(scene_id),
        "scene_name": scene_name,
        "num_timesteps": num_timesteps,
        # 0: front, 1: left, 2: right, 3: front_left, 4: front_right
        "camera_list": camera_list,
        # list for synchronized timestamps, dict for unsynchronized, measured in seconds
        "normalized_time": [],
        # camera_name: [fx, fy, cx, cy] (real_fx = fx * width, real_fy = fy * height)
        "normalized_intrinsics": {cam: [] for cam in camera_list},
        # camera_name: 3x4 matrix
        "camera_to_ego": {cam: [] for cam in camera_list},
        # list of 4x4 matrices
        "ego_to_world": [],
        "camera_to_world": {cam: [] for cam in camera_list},
        # camera_name: [height, width]
        "original_image_size": {
            "0": [1280, 1920],
            "1": [1280, 1920],
            "2": [1280, 1920],
            "3": [886, 1920],
            "4": [886, 1920],
        },
        # camera_name: relative path to the image
        "relative_image_path": {},
        "fps": 10,  # assume all cameras have the same fps
    }
    for t in range(num_timesteps):
        ego_pose = np.loadtxt(f"{file_folder}/ego_to_world/{t:03d}.txt")
        scene_dict["ego_to_world"].append(ego_pose.tolist())
        # Timestamps are synthesized from the fixed frame rate.
        scene_dict["normalized_time"].append(t / scene_dict["fps"])
        for cam_id in range(5):
            cam_to_world = np.loadtxt(f"{file_folder}/cam_to_world/{t:03d}_{cam_id}.txt")
            scene_dict["camera_to_world"][str(cam_id)].append(cam_to_world.tolist())
    for cam_id in range(5):
        fx, fy, cx, cy = np.loadtxt(f"{file_folder}/intrinsics/{cam_id}.txt")[:4]
        extrinsics = np.loadtxt(f"{file_folder}/cam_to_ego/{cam_id}.txt")
        height = scene_dict["original_image_size"][str(cam_id)][0]
        width = scene_dict["original_image_size"][str(cam_id)][1]
        # Normalize intrinsics by image size so they are resolution-independent.
        normalized_fx = fx / width
        normalized_fy = fy / height
        normalized_cx = cx / width
        normalized_cy = cy / height
        scene_dict["normalized_intrinsics"][str(cam_id)] = [
            normalized_fx,
            normalized_fy,
            normalized_cx,
            normalized_cy,
        ]
        scene_dict["camera_to_ego"][str(cam_id)] = extrinsics.tolist()
    for cam_id in range(5):
        scene_dict["relative_image_path"][str(cam_id)] = []
        for t in range(num_timesteps):
            scene_dict["relative_image_path"][str(cam_id)].append(
                f"{self.prefix}/{scene_id:03d}/images/{t:03d}_{cam_id}.jpg"
            )
    with open(f"{self.json_folder_to_save}/{scene_name}.json", "w") as f:
        json.dump(scene_dict, f)
def __len__(self):
    """Return the number of scenes this processor will convert."""
    return len(self.scene_ids)
def save_image(self, frame, file_id, frame_id):
    """Parse and save the images in jpg format.

    Writes the full-resolution JPEG for each camera, plus one resized copy
    per downsample factor (skipping factor 1, which is the original).

    Args:
        frame (:obj:`Frame`): Open dataset frame proto.
        file_id (int): Current file index.
        frame_id (int): Current frame index.
    """
    scene_id = self.scene_ids[file_id]
    for img in frame.images:
        # Camera ids are stored zero-based on disk (proto names start at 1).
        img_path = f"{self.save_dir}/{str(scene_id).zfill(3)}/images/{str(frame_id).zfill(3)}_{img.name - 1}.jpg"
        if not os.path.exists(img_path) or self.overwrite:
            # img.image is already JPEG-encoded; write the bytes directly.
            with open(img_path, "wb") as f:
                f.write(img.image)
        for downsample_factor in self.downsample_factors:
            if downsample_factor == 1:
                continue
            else:
                postfix = f"_{downsample_factor}"
            downsampled_img_path = (
                f"{self.save_dir}/{str(scene_id).zfill(3)}/images{postfix}/"
                + f"{str(frame_id).zfill(3)}_{img.name - 1}.jpg"
            )
            if not os.path.exists(downsampled_img_path) or self.overwrite:
                # Re-open the just-written full-res file and shrink it.
                image = Image.open(img_path)
                new_size = (
                    image.width // downsample_factor,
                    image.height // downsample_factor,
                )
                downsampled_image = image.resize(new_size)
                downsampled_image.save(downsampled_img_path, format="JPEG")
def save_calib(self, frame, file_id, frame_id):
"""Parse and save the calibration data.
Args:
file_id (int): Current file index.
frame_id (int): Current frame index.
"""
scene_id = self.scene_ids[file_id]
if frame_id != 0:
# only save the calibration data for the first frame
# because the calibration data is the same for all frames
return
extrinsics = []
intrinsics = []
for camera in frame.context.camera_calibrations:
# extrinsic parameters
extrinsic = np.array(camera.extrinsic.transform).reshape(4, 4)
intrinsic = list(camera.intrinsic)
extrinsics.append(extrinsic)
intrinsics.append(intrinsic)
# all camera ids are saved as id-1 in the result because
# camera 0 is unknown in the proto
for i in range(5):
np.savetxt(
f"{self.save_dir}/{str(scene_id).zfill(3)}/cam_to_ego/" + f"{str(i)}.txt",
extrinsics[i],
)
np.savetxt(
f"{self.save_dir}/{str(scene_id).zfill(3)}/intrinsics/" + f"{str(i)}.txt",
intrinsics[i],
)
def save_lidar(self, frame, file_id, frame_id):
    """Extract lidar points + scene flow, then rasterize per-camera depth/flow maps.

    First writes (or reloads) the fused point cloud as a [N, 10] float32 .bin
    (origin xyz, point xyz, flow xyz, flow class). Then, for each camera and
    each downsample factor (except 1), projects the points into the image and
    saves an HxWx4 .npy of (depth, flow_x, flow_y, flow_z), averaging values
    that land on the same pixel.

    Args:
        frame (:obj:`Frame`): Open dataset frame proto.
        file_id (int): Current file index.
        frame_id (int): Current frame index.
    """
    scene_id = self.scene_ids[file_id]
    scene_path = f"{self.save_dir}/{str(scene_id).zfill(3)}"
    pc_path = f"{scene_path}/lidar/{str(frame_id).zfill(3)}.bin"
    if not os.path.exists(pc_path) or self.overwrite:
        (
            range_images,
            camera_projections,
            seg_labels,
            range_image_top_pose,
        ) = parse_range_image_and_camera_projection(frame)
        # https://github.com/waymo-research/waymo-open-dataset/blob/master/src/waymo_open_dataset/protos/segmentation.proto
        if range_image_top_pose is None:
            # the camera only split doesn't contain lidar points.
            return
        # collect first return only
        range_images_flow, _, _ = parse_range_image_flow_and_camera_projection(frame)
        (
            origins,
            points,
            flows,
            cp_points,
            intensity,
            elongation,
            laser_ids,
        ) = convert_range_image_to_point_cloud_flow(
            frame,
            range_images,
            range_images_flow,
            camera_projections,
            range_image_top_pose,
            ri_index=0,
        )
        origins = np.concatenate(origins, axis=0)
        points = np.concatenate(points, axis=0)
        # Flow class labels:
        # -1: no-flow-label, the point has no flow information.
        #  0: unlabeled or "background,", i.e., the point is not contained in a
        #     bounding box.
        #  1: vehicle, i.e., the point corresponds to a vehicle label box.
        #  2: pedestrian, i.e., the point corresponds to a pedestrian label box.
        #  3: sign, i.e., the point corresponds to a sign label box.
        #  4: cyclist, i.e., the point corresponds to a cyclist label box.
        flows = np.concatenate(flows, axis=0)
        point_cloud = np.column_stack((origins, points, flows))
        point_cloud.astype(np.float32).tofile(pc_path)
    else:
        # Reuse the cached cloud; 10 columns: origin(3) + point(3) + flow(4).
        point_cloud = np.fromfile(pc_path, dtype=np.float32).reshape(-1, 10)
    for cam_id in range(5):
        world_to_cam = None
        for downsample_factor in self.downsample_factors:
            if downsample_factor == 1:
                continue
            scale_postfix = f"_{downsample_factor}"
            depth_path = f"{scene_path}/depth_flows{scale_postfix}/{str(frame_id).zfill(3)}_{str(cam_id)}.npy"
            if not os.path.exists(depth_path) or self.overwrite:
                points = point_cloud[:, 3:6].astype(np.float32)
                flows = point_cloud[:, 6:9].astype(np.float32)
                flow_class = point_cloud[:, 9].astype(np.int32)
                if world_to_cam is None:
                    # Build the (ego->camera) matrix once per camera; it is
                    # shared across downsample factors.
                    intrinsics = np.loadtxt(
                        f"{scene_path}/intrinsics/{str(cam_id)}.txt",
                        dtype=np.float32,
                    )
                    fx, fy, cx, cy = intrinsics[:4]
                    intrinsics = np.array(
                        [
                            [fx, 0, cx, 0],
                            [0, fy, cy, 0],
                            [0, 0, 1, 0],
                            [0, 0, 0, 1],
                        ],
                        dtype=np.float32,
                    )
                    extrinsic = np.loadtxt(
                        f"{scene_path}/cam_to_ego/{str(cam_id)}.txt",
                        dtype=np.float32,
                    )
                    # Convert dataset camera axes to OpenCV before inverting.
                    world_to_cam = np.linalg.inv(extrinsic @ OPENCV2DATASET)
                # target_size is (height, width).
                target_size = (
                    ORIGINAL_SIZE[str(cam_id)][0] // downsample_factor,
                    ORIGINAL_SIZE[str(cam_id)][1] // downsample_factor,
                )
                # Scale the intrinsics to the downsampled resolution.
                _intrinsics = intrinsics.copy()
                _intrinsics[0, 0] /= ORIGINAL_SIZE[str(cam_id)][1] / target_size[1]
                _intrinsics[1, 1] /= ORIGINAL_SIZE[str(cam_id)][0] / target_size[0]
                _intrinsics[0, 2] /= ORIGINAL_SIZE[str(cam_id)][1] / target_size[1]
                _intrinsics[1, 2] /= ORIGINAL_SIZE[str(cam_id)][0] / target_size[0]
                lidar2img = _intrinsics @ world_to_cam
                points_2d = (np.dot(lidar2img[:3, :3], points.T) + lidar2img[:3, 3:4]).T
                depth_2d = points_2d[:, 2]
                # Perspective divide (epsilon avoids division by zero).
                cam_coords = points_2d[:, :2] / (depth_2d[:, None] + 1e-6)
                # Keep points inside the image and in front of the camera.
                valid_mask = (
                    (cam_coords[:, 0] >= 0)
                    & (cam_coords[:, 0] < target_size[1])
                    & (cam_coords[:, 1] >= 0)
                    & (cam_coords[:, 1] < target_size[0])
                    & (depth_2d > 0)
                )
                # Get valid depth points and corresponding coordinates
                valid_depth_points = depth_2d[valid_mask]
                valid_cam_coords = cam_coords[valid_mask]
                # Convert coordinates to integer indices
                x_indices = valid_cam_coords[:, 0].astype(np.int32)
                y_indices = valid_cam_coords[:, 1].astype(np.int32)
                # Initialize arrays to accumulate depth sums and counts;
                # np.add.at handles repeated pixel indices correctly, so the
                # result is the mean depth of all points hitting a pixel.
                depth_sums = np.zeros(target_size)
                depth_counts = np.zeros(target_size)
                np.add.at(depth_sums, (y_indices, x_indices), valid_depth_points)
                np.add.at(depth_counts, (y_indices, x_indices), 1)
                depth_image = np.divide(depth_sums, depth_counts, where=depth_counts > 0)
                depth_image[depth_counts == 0] = 0
                # Zero out flow for points without a flow label (class < 0).
                valid_flow = flows[valid_mask] * (flow_class[valid_mask][:, None] >= 0)
                flow_sums = np.zeros((target_size[0], target_size[1], 3))
                flow_counts = np.zeros((target_size[0], target_size[1], 3))
                np.add.at(flow_sums, (y_indices, x_indices), valid_flow)
                np.add.at(flow_counts, (y_indices, x_indices), 1)
                flow_image = np.divide(flow_sums, flow_counts, where=flow_counts > 0)
                flow_image[flow_counts == 0] = 0
                # Suppress tiny residual flow (likely noise) below 0.5 m/frame.
                flow_image[np.linalg.norm(flow_image, axis=-1) < 0.5] = 0
                concate_image = np.concatenate(
                    [depth_image[:, :, None], flow_image], axis=-1
                ).astype(np.float32)
                path = f"{scene_path}/depth_flows{scale_postfix}/{str(frame_id).zfill(3)}_{str(cam_id)}.npy"
                os.makedirs(os.path.dirname(path), exist_ok=True)
                np.save(path, concate_image)
def save_ground(self, frame, file_id, frame_id):
    """Project ground-labeled lidar points into each camera as a binary mask.

    Loads the frame's cached point cloud (written by save_lidar), labels
    ground points via plane fitting (get_ground_np), and writes one PNG mask
    per camera and downsample factor, with 255 where a projected lidar point
    was classified as ground.

    Args:
        frame (:obj:`Frame`): Open dataset frame proto (unused; the cached
            lidar .bin is read instead).
        file_id (int): Current file index.
        frame_id (int): Current frame index.
    """
    scene_id = self.scene_ids[file_id]
    scene_path = f"{self.save_dir}/{str(scene_id).zfill(3)}"
    pc_path = f"{scene_path}/lidar/{str(frame_id).zfill(3)}.bin"
    # [N, 10]: origin(3) + point(3) + flow(4), written by save_lidar.
    point_cloud = np.fromfile(pc_path, dtype=np.float32).reshape(-1, 10)
    for cam_id in range(5):
        world_to_cam = None
        for downsample_factor in self.downsample_factors:
            if downsample_factor == 1:
                continue
            scale_postfix = f"_{downsample_factor}"
            ground_label_path = f"{scene_path}/ground_label{scale_postfix}/{str(frame_id).zfill(3)}_{str(cam_id)}.png"
            if not os.path.exists(ground_label_path) or self.overwrite:
                points = point_cloud[:, 3:6].astype(np.float32)
                ground_label = get_ground_np(points).reshape(-1)
                if world_to_cam is None:
                    # Build the (ego->camera) matrix once per camera.
                    intrinsics = np.loadtxt(
                        f"{scene_path}/intrinsics/{str(cam_id)}.txt",
                        dtype=np.float32,
                    )
                    fx, fy, cx, cy = intrinsics[:4]
                    intrinsics = np.array(
                        [
                            [fx, 0, cx, 0],
                            [0, fy, cy, 0],
                            [0, 0, 1, 0],
                            [0, 0, 0, 1],
                        ],
                        dtype=np.float32,
                    )
                    extrinsic = np.loadtxt(
                        f"{scene_path}/cam_to_ego/{str(cam_id)}.txt",
                        dtype=np.float32,
                    )
                    world_to_cam = np.linalg.inv(extrinsic @ OPENCV2DATASET)
                # target_size is (height, width) at this downsample factor.
                target_size = (
                    ORIGINAL_SIZE[str(cam_id)][0] // downsample_factor,
                    ORIGINAL_SIZE[str(cam_id)][1] // downsample_factor,
                )
                # Scale intrinsics to the downsampled resolution.
                _intrinsics = intrinsics.copy()
                _intrinsics[0, 0] /= ORIGINAL_SIZE[str(cam_id)][1] / target_size[1]
                _intrinsics[1, 1] /= ORIGINAL_SIZE[str(cam_id)][0] / target_size[0]
                _intrinsics[0, 2] /= ORIGINAL_SIZE[str(cam_id)][1] / target_size[1]
                _intrinsics[1, 2] /= ORIGINAL_SIZE[str(cam_id)][0] / target_size[0]
                lidar2img = _intrinsics @ world_to_cam
                points_2d = (np.dot(lidar2img[:3, :3], points.T) + lidar2img[:3, 3:4]).T
                depth_2d = points_2d[:, 2]
                cam_coords = points_2d[:, :2] / (depth_2d[:, None] + 1e-6)
                # Keep points inside the image and in front of the camera.
                valid_mask = (
                    (cam_coords[:, 0] >= 0)
                    & (cam_coords[:, 0] < target_size[1])
                    & (cam_coords[:, 1] >= 0)
                    & (cam_coords[:, 1] < target_size[0])
                    & (depth_2d > 0)
                )
                # Get valid depth points and corresponding coordinates
                valid_cam_coords = cam_coords[valid_mask]
                # Convert coordinates to integer indices
                x_indices = valid_cam_coords[:, 0].astype(np.int32)
                y_indices = valid_cam_coords[:, 1].astype(np.int32)
                # Scatter the per-point ground labels into the image plane
                # (later points overwrite earlier ones on pixel collisions).
                depth_image = np.zeros(target_size)
                depth_image[y_indices, x_indices] = ground_label[valid_mask]
                imageio.imwrite(ground_label_path, (depth_image * 255).astype(np.uint8))
def save_pose(self, frame, file_id, frame_id):
    """Parse and save the ego and per-camera poses for one frame.

    Writes the ego-to-world transform to ``ego_to_world/<frame>.txt`` and,
    for each of the five cameras, composes it with the static camera-to-ego
    extrinsics (saved by the calibration step) to produce
    ``cam_to_world/<frame>_<cam>.txt``. Pose is important for algorithms
    that take advantage of temporal information.

    Args:
        frame (:obj:`Frame`): Open dataset frame proto.
        file_id (int): Current file index.
        frame_id (int): Current frame index.
    """
    scene_id = self.scene_ids[file_id]
    scene_path = f"{self.save_dir}/{str(scene_id).zfill(3)}"
    # frame.pose.transform is a flattened row-major 4x4 ego-to-world matrix.
    ego_to_world = np.array(frame.pose.transform).reshape(4, 4)
    np.savetxt(f"{scene_path}/ego_to_world/{str(frame_id).zfill(3)}.txt", ego_to_world)
    for cam_id in range(5):
        cam_to_world_path = (
            f"{scene_path}/cam_to_world/" + f"{str(frame_id).zfill(3)}_{str(cam_id)}.txt"
        )
        if not os.path.exists(cam_to_world_path):
            # Camera extrinsics are static per scene, stored once per camera.
            cam_to_ego = np.loadtxt(
                f"{scene_path}/cam_to_ego/" + f"{str(cam_id)}.txt",
                dtype=np.float32,
            )
            cam_to_world = ego_to_world @ cam_to_ego
            np.savetxt(cam_to_world_path, cam_to_world)
def save_dynamic_mask(self, frame, file_id, frame_id):
    """Rasterize per-camera dynamic-object masks from 3D laser labels.

    For every camera image in ``frame``, projects each labeled 3D box onto
    the image plane, fills its axis-aligned 2D bounding rectangle with the
    object's planar speed (max-pooled where boxes overlap), then thresholds
    at 1.0 m/s to save a binary "moving object" mask as
    ``dynamic_masks/<frame>_<cam>.png``.

    Args:
        frame (:obj:`Frame`): Open dataset frame proto.
        file_id (int): Current file index.
        frame_id (int): Current frame index.
    """
    scene_id = self.scene_ids[file_id]
    scene_path = f"{self.save_dir}/{str(scene_id).zfill(3)}"
    for img in frame.images:
        # dynamic_mask
        # img.name is 1-indexed in the Waymo proto; saved files use 0-indexed cam ids.
        img_path = f"{scene_path}/images/" + f"{str(frame_id).zfill(3)}_{str(img.name - 1)}.jpg"
        img_shape = np.array(Image.open(img_path))
        # Single-channel float mask matching the saved image's H x W.
        dynamic_mask = np.zeros_like(img_shape, dtype=np.float32)[..., 0]
        # Prefer the stricter top-lidar point count filter when any label has it.
        filter_available = any(
            [label.num_top_lidar_points_in_box > 0 for label in frame.laser_labels]
        )
        calibration = next(
            cc for cc in frame.context.camera_calibrations if cc.name == img.name
        )
        for label in frame.laser_labels:
            # camera_synced_box is not available for the data with flow.
            # box = label.camera_synced_box
            box = label.box
            meta = label.metadata
            # Planar (x, y) speed magnitude in m/s.
            speed = np.linalg.norm([meta.speed_x, meta.speed_y])
            if not box.ByteSize():
                continue  # Filter out labels that do not have a camera_synced_box.
            if (filter_available and not label.num_top_lidar_points_in_box) or (
                not filter_available and not label.num_lidar_points_in_box
            ):
                continue  # Filter out likely occluded objects.
            # Retrieve upright 3D box corners.
            box_coords = np.array(
                [
                    [
                        box.center_x,
                        box.center_y,
                        box.center_z,
                        box.length,
                        box.width,
                        box.height,
                        box.heading,
                    ]
                ]
            )
            corners = box_utils.get_upright_3d_box_corners(box_coords)[0].numpy()  # [8, 3]
            # Project box corners from vehicle coordinates onto the image.
            projected_corners = project_vehicle_to_image(frame.pose, calibration, corners)
            u, v, ok = projected_corners.transpose()
            ok = ok.astype(bool)
            # Skip object if any corner projection failed. Note that this is very
            # strict and can lead to exclusion of some partially visible objects.
            if not all(ok):
                continue
            u = u[ok]
            v = v[ok]
            # Clip box to image bounds (upper bound == size is fine for slicing).
            u = np.clip(u, 0, calibration.width)
            v = np.clip(v, 0, calibration.height)
            if u.max() - u.min() == 0 or v.max() - v.min() == 0:
                continue
            # Draw projected 2D box onto the image.
            xy = (u.min(), v.min())
            width = u.max() - u.min()
            height = v.max() - v.min()
            # max pooling: keep the fastest object's speed where boxes overlap
            dynamic_mask[
                int(xy[1]) : int(xy[1] + height),
                int(xy[0]) : int(xy[0] + width),
            ] = np.maximum(
                dynamic_mask[
                    int(xy[1]) : int(xy[1] + height),
                    int(xy[0]) : int(xy[0] + width),
                ],
                speed,
            )
        # thresholding, use 1.0 m/s to determine whether the pixel is moving
        dynamic_mask = np.clip((dynamic_mask > 1.0) * 255, 0, 255).astype(np.uint8)
        dynamic_mask = Image.fromarray(dynamic_mask, "L")
        dynamic_mask_path = (
            f"{scene_path}/dynamic_masks/" + f"{str(frame_id).zfill(3)}_{str(img.name - 1)}.png"
        )
        dynamic_mask.save(dynamic_mask_path)
def create_folder(self):
    """Create the per-scene output directory layout for data preprocessing."""
    fixed_dirs = (
        "images",
        "dynamic_masks",
        "ego_to_world",
        "cam_to_world",
        "cam_to_ego",
        "intrinsics",
        "lidar",
    )
    scaled_dirs = ("images", "depth_flows", "ground_label")
    for scene_id in self.scene_ids:
        scene_path = f"{self.save_dir}/{str(scene_id).zfill(3)}"
        for name in fixed_dirs:
            os.makedirs(f"{scene_path}/{name}", exist_ok=True)
        # One extra set of dirs per downsample factor; factor 1 has no suffix.
        for factor in self.downsample_factors:
            postfix = f"_{factor}" if factor != 1 else ""
            for name in scaled_dirs:
                os.makedirs(f"{scene_path}/{name}{postfix}", exist_ok=True)
================================================
FILE: preprocess.py
================================================
import argparse
import os
import numpy as np
if __name__ == "__main__":
    # CLI entry point: parse arguments, resolve the scene list, and dispatch
    # to the dataset-specific processor.
    parser = argparse.ArgumentParser(description="Data converter arg parser")
    parser.add_argument("--data_root", type=str, required=True, help="root path of dataset")
    parser.add_argument("--dataset", type=str, default="waymo", help="dataset name")
    parser.add_argument("--scene_list_file", type=str, default=None)
    parser.add_argument(
        "--split",
        type=str,
        default="training",
        help="split of the dataset, e.g. training, validation, testing, please specify the split name for different dataset",
    )
    parser.add_argument(
        "--target_dir",
        type=str,
        required=True,
        help="output directory of processed data",
    )
    parser.add_argument(
        "--json_folder_to_save",
        type=str,
        required=True,
        help="to save the json files",
    )
    parser.add_argument(
        "--num_workers",
        type=int,
        default=4,
        help="number of threads to be used",
    )
    # priority: scene_ids > start_idx + num_scenes
    parser.add_argument(
        "--scene_ids",
        default=None,
        type=int,
        nargs="+",
        help="scene ids to be processed, a list of integers separated by space. Range: [0, 798] for training, [0, 202] for validation",
    )
    parser.add_argument(
        "--start_idx",
        type=int,
        default=0,
        help="If no scene id is given, use start_idx and num_scenes to generate scene_ids",
    )
    parser.add_argument(
        "--num_scenes",
        type=int,
        default=200,
        help="number of scenes to be processed",
    )
    parser.add_argument(
        "--interpolate_N",
        type=int,
        default=0,
        help="Interpolate to get frames at higher frequency, this is only used for nuscene dataset",
    )
    parser.add_argument(
        "--overwrite",
        action="store_true",
        help="overwrite the existing files",
    )
    parser.add_argument(
        "--process_keys",
        nargs="+",
        default=["images", "lidar", "calib", "pose", "ground", "dynamic_masks"],
    )
    args = parser.parse_args()
    if args.dataset != "nuscenes" and args.interpolate_N > 0:
        parser.error("interpolate_N > 0 is only allowed when dataset is 'nuscenes'")
    os.makedirs(args.target_dir, exist_ok=True)
    # Resolve scene indices: explicit --scene_ids wins over --start_idx/--num_scenes.
    if args.scene_ids is not None:
        scene_ids = args.scene_ids
    else:
        scene_ids = np.arange(args.start_idx, args.start_idx + args.num_scenes)
    if args.dataset == "nuscenes":
        scene_lists = scene_ids
    else:
        if args.scene_list_file is None:
            raise ValueError("scene_list_file is required for non-nuscenes dataset")
        if not os.path.exists(args.scene_list_file):
            raise ValueError(f"scene_list_file {args.scene_list_file} does not exist")
        # Fix: use a context manager so the file handle is always closed
        # (previously opened without closing).
        with open(args.scene_list_file, "r") as f:
            scene_lists = f.read().splitlines()
        # Silently drop out-of-range ids instead of failing on short scene lists.
        if np.max(scene_ids) >= len(scene_lists):
            scene_ids = [scene_id for scene_id in scene_ids if scene_id < len(scene_lists)]
        scene_lists = [(i, scene_lists[i]) for i in scene_ids]
        print(f"scene_lists: {scene_lists}")
    if args.dataset == "waymo":
        from preproc.waymo_preprocess import WaymoProcessor

        dataset_processor = WaymoProcessor(
            load_dir=args.data_root,
            save_dir=args.target_dir,
            scene_lists=scene_lists,
            prefix=args.split,
            process_keys=args.process_keys,
            json_folder_to_save=args.json_folder_to_save,
            num_workers=args.num_workers,
            overwrite=args.overwrite,
        )
    elif args.dataset == "argoverse":
        raise NotImplementedError("ArgoverseProcessor is not implemented yet")
        from preproc.argoverse_preprocess import ArgoVerseProcessor

        scene_ids = [int(scene_id) for scene_id in scene_ids]
        dataset_processor = ArgoVerseProcessor(
            load_dir=args.data_root,
            save_dir=args.target_dir,
            process_keys=args.process_keys,
            scene_lists=scene_lists,
            prefix=args.split,
            num_workers=args.num_workers,
            json_folder_to_save=args.json_folder_to_save,
        )
    elif args.dataset == "nuscenes":
        raise NotImplementedError("NuScenesProcessor is not implemented yet")
        from preproc.nuscenes_preprocess import NuScenesProcessor

        scene_ids = [f"{scene_id:03d}" for scene_id in scene_ids]
        dataset_processor = NuScenesProcessor(
            load_dir=args.data_root,
            save_dir=args.target_dir,
            split=args.split,
            interpolate_N=args.interpolate_N,
            process_keys=args.process_keys,
            scene_lists=scene_ids,
            num_workers=args.num_workers,
            overwrite=args.overwrite,
            json_folder_to_save=args.json_folder_to_save,
        )
    else:
        # Fix: list only the datasets this dispatcher actually recognizes.
        raise ValueError(
            f"Unknown dataset {args.dataset}, please choose from waymo, argoverse, nuscenes"
        )
    # Single-process path allows targeted per-scene conversion.
    if args.scene_ids is not None and args.num_workers <= 1:
        for idx in range(len(args.scene_ids)):
            dataset_processor.convert_one(idx)
    else:
        dataset_processor.convert()
================================================
FILE: requirements.txt
================================================
--extra-index-url https://download.pytorch.org/whl/cu121
torch==2.3.1
torchvision
timm
einops
jaxtyping
opencv-python
matplotlib==3.7.3
imageio
imageio-ffmpeg
gdown
wandb
tensorboard
isort
pre-commit
black==22.6.0
flake8==5.0.4
pylint==2.15.0
pandas
# viewer:
nerfview
viser
splines
loguru
torch_kmeans
pynvml
================================================
FILE: requirements_data_preprocess.txt
================================================
--extra-index-url https://download.pytorch.org/whl/cu112
torch
waymo-open-dataset-tf-2-11-0==1.6.1
tensorflow-gpu==2.11.0
nuscenes-devkit
tqdm
imageio
imageio[ffmpeg]
scipy
gdown
gsutil
================================================
FILE: storm/dataset/constants.py
================================================
import numpy as np

# Image normalization stats used by the model (scales RGB to roughly [-1, 1]).
MEAN = [0.5, 0.5, 0.5]
STD = [0.5, 0.5, 0.5]
# Standard ImageNet normalization stats (for pretrained backbones).
IMGNET_MEAN = [0.485, 0.456, 0.406]
IMGNET_STD = [0.229, 0.224, 0.225]
# Rotation mapping OpenCV camera axes to the Waymo convention
# (assumes Waymo uses x-forward / y-left / z-up — TODO confirm against loader).
opencv2waymo = np.array([[0, 0, 1, 0], [-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 0, 1]])
# Per-dataset coordinate conventions: how to go from OpenCV camera axes to the
# dataset's own frame, and from the canonical frame to front-left-up (FLU).
DATASETS = {
    "waymo": {"opencv2dataset": opencv2waymo, "canonical_to_flu": np.eye(4)},
    "nuscenes": {"opencv2dataset": np.eye(4), "canonical_to_flu": opencv2waymo},
    "argoverse2": {"opencv2dataset": np.eye(4), "canonical_to_flu": opencv2waymo},
}
# Per-dataset loader configuration:
#   size: target image size as [H, W]
#   temporal: whether the dataset provides a temporal (multi-frame) stream
#   num_context_timesteps / num_target_timesteps: frames fed vs. supervised
#   annotation_txt_file_*: scene list files per split
#   camera_list: mapping from requested camera count -> ordered camera ids
#   ref_camera: camera used as the reference view
DATASET_DICT = {
    "waymo": {
        "size": [160, 240],
        "temporal": True,
        "num_context_timesteps": 4,
        "num_target_timesteps": 4,
        "annotation_txt_file_train": "scene_list/waymo_train.txt",
        "annotation_txt_file_val": "scene_list/waymo_val.txt",
        "camera_list": {
            1: ["0"],
            3: ["1", "0", "2"],
            5: ["3", "1", "0", "2", "4"],
            6: ["3", "1", "0", "2", "4"],  # capped at 5
            7: ["3", "1", "0", "2", "4"],
        },
        "ref_camera": "0",
    },
    "nuscenes": {
        "size": [160, 288],
        "temporal": True,
        "num_context_timesteps": 4,
        "num_target_timesteps": 4,
        "annotation_txt_file_train": "scene_list/nuscenes_train.txt",
        "annotation_txt_file_val": "scene_list/nuscenes_val.txt",
        "camera_list": {
            1: ["CAM_FRONT"],
            3: ["CAM_FRONT_LEFT", "CAM_FRONT", "CAM_FRONT_RIGHT"],
            5: [
                "CAM_FRONT_LEFT",
                "CAM_FRONT",
                "CAM_FRONT_RIGHT",
                "CAM_BACK_RIGHT",
                "CAM_BACK",
                "CAM_BACK_LEFT",
            ],
            6: [
                "CAM_FRONT_LEFT",
                "CAM_FRONT",
                "CAM_FRONT_RIGHT",
                "CAM_BACK_RIGHT",
                "CAM_BACK",
                "CAM_BACK_LEFT",
            ],
            7: [
                "CAM_FRONT_LEFT",
                "CAM_FRONT",
                "CAM_FRONT_RIGHT",
                "CAM_BACK_RIGHT",
                "CAM_BACK",
                "CAM_BACK_LEFT",
            ],
        },
        "ref_camera": "CAM_FRONT",
    },
    "argoverse2": {
        "size": [192, 256],
        "temporal": True,
        "num_context_timesteps": 4,
        "num_target_timesteps": 4,
        "annotation_txt_file_train": "scene_list/argoverse2_train.txt",
        "annotation_txt_file_val": "scene_list/argoverse2_val.txt",
        "camera_list": {
            1: ["0"],
            3: ["1", "0", "2"],
            5: ["3", "1", "0", "2", "4"],
            6: ["3", "1", "0", "2", "4"],
            7: ["5", "3", "1", "0", "2", "4", "6"],
        },
        "ref_camera": "0",
    },
    # rel10k / dl3dv are single-camera, non-temporal photo collections;
    # batch_size_scale enlarges batches relative to the driving datasets.
    "rel10k": {
        "size": [160, 296],
        "temporal": False,
        "num_context_timesteps": 2,
        "num_target_timesteps": 8,
        "annotation_txt_file_train": "scene_list/rel10k_train.txt",
        "annotation_txt_file_val": "scene_list/rel10k_val.txt",
        "batch_size_scale": 6,
        "camera_list": {
            1: ["0"],
            3: ["0"],
            5: ["0"],
            6: ["0"],
            7: ["0"],
        },
    },
    "dl3dv": {
        "size": [160, 288],
        "temporal": False,
        "num_context_timesteps": 2,
        "num_target_timesteps": 8,
        "annotation_txt_file_train": "scene_list/dl3dv_train.txt",
        "annotation_txt_file_val": "scene_list/dl3dv_val.txt",
        "batch_size_scale": 6,
        "camera_list": {
            1: ["0"],
            3: ["0"],
            5: ["0"],
            6: ["0"],
            7: ["0"],
        },
    },
}
================================================
FILE: storm/dataset/data_utils.py
================================================
from typing import List, Tuple, Union
import numpy as np
import torch
import torch.nn.functional as F
from scipy import interpolate
from scipy.spatial.transform import Rotation as R
from scipy.spatial.transform import Slerp
def forward_pose(pose: torch.tensor, transform: torch.tensor, inv=False) -> torch.tensor:
    """Apply the rigid transform in ``pose`` to a 3D point (or undo it).

    With ``inv=False`` computes ``R @ transform + t``; with ``inv=True``
    computes ``R^T @ (transform - t)``, where R/t are the rotation and
    translation blocks of the (..., 4, 4) pose matrix.
    """
    rot = pose[..., :3, :3]
    trans = pose[..., :3, 3]
    if not inv:
        # Broadcast-multiply each rotation row by the point, then sum: R @ p.
        return (rot * transform.unsqueeze(-2)).sum(-1) + trans
    # Inverse transform: R^T @ (p - t).
    delta = (transform - trans).unsqueeze(-2)
    return (torch.transpose(rot, -1, -2) * delta).sum(-1)
def get_path_front_left_lift_then_spiral_forward(
    pose_ref: np.ndarray,  # [N0,4,4], Original ego pose; will generate trajectories along this
    num_frames: int,  # Number of frames / waypoints to generate
    duration_frames: int = 100,  # Number of frames per round / cycle
    # Spiral configs
    up_max: float = 0.3,
    up_min: float = -0.3,
    left_max: float = 2.0,
    left_min: float = -2.0,
    elongation: float = 1.0,
    first_with_forward=False,
    # Frontal direction vector of OpenCV
    forward_vec: np.ndarray = np.array([0.0, 0.0, 1.0]),
    # Up direction vector of OpenCV
    up_vec: np.ndarray = np.array([0.0, -1.0, 0.0]),
    # Left direction vector of OpenCV
    left_vec: np.ndarray = np.array([-1.0, 0.0, 0.0]),
) -> np.ndarray:
    """
    First lift ego in the front left direction, then do spiral forward.

    Returns a [num_frames, 4, 4] array of render poses: a short initial
    segment that lifts the camera up/left off the reference track, followed
    by a spiral around the (interpolated) reference trajectory.

    NOTE(review): the ndarray default arguments are shared across calls;
    they are only read here, so this is safe as long as callers never
    mutate them.
    """
    pose_ref = torch.tensor(pose_ref, dtype=torch.float32, device="cpu")
    n_rots = num_frames / duration_frames  # NOTE(review): computed but unused
    track_ref = pose_ref[..., :3, 3]
    rot_ref = pose_ref[..., :3, :3]
    # NOTE: Convert from observer's coords dir to world coords dir;
    # If the pose_ref is the pose of camera, `forward/up/left_vec` can be used as-is;
    # (since the cameras are already OpenCV cameras if dataio's xxx_dataset.py is correctly implemented)
    # otherwise, you should specify the coords dir vector of your given pose_ref.
    forward_vecs_ref = (
        forward_pose(pose_ref, torch.tensor(forward_vec, dtype=torch.float32)) - track_ref
    )
    up_vecs_ref = forward_pose(pose_ref, torch.tensor(up_vec, dtype=torch.float32)) - track_ref
    left_vecs_ref = forward_pose(pose_ref, torch.tensor(left_vec, dtype=torch.float32)) - track_ref
    forward_vecs_ref = forward_vecs_ref.numpy()
    up_vecs_ref = up_vecs_ref.numpy()
    left_vecs_ref = left_vecs_ref.numpy()
    track_ref = track_ref.numpy()
    rot_ref = rot_ref.numpy()
    nvs_seqs = []
    # Spiral radii and center offsets derived from the min/max excursions.
    verti_radius = (up_max - up_min) / 2.0
    up_offset = (up_max + up_min) / 2.0
    horiz_radius = (left_max - left_min) / 2.0
    left_offset = (left_max + left_min) / 2.0
    assert (verti_radius >= -1e-5) and (horiz_radius >= -1e-5)
    # ----------------------------------------
    # ---- First: lift up & left
    # ----------------------------------------
    first_frames = int(0.1 * duration_frames) + 1
    remain_frames = num_frames - first_frames
    # Total forward travel, scaled by elongation.
    pace = np.linalg.norm(track_ref[0] - track_ref[-1]) * elongation
    forward_1st = ((first_frames / num_frames) * pace) if first_with_forward else 0
    track = (
        track_ref[0]
        + np.linspace(0, verti_radius + up_offset, first_frames)[..., None] * up_vecs_ref[0]
        + np.linspace(0, horiz_radius + left_offset, first_frames)[..., None] * left_vecs_ref[0]
        + np.linspace(0, forward_1st, first_frames)[..., None] * forward_vecs_ref[0]
    )
    pose = np.eye(4)[None, ...].repeat(first_frames, 0)
    pose[:, :3, 3] = track
    pose[:, :3, :3] = rot_ref[0]
    nvs_seqs.append(pose)
    # ----------------------------------------
    # ---- Then: Sprial forward
    # ----------------------------------------
    w = np.linspace(0, 1, remain_frames)  # [0->1], data key time
    t = np.arange(len(track_ref)) / (
        len(track_ref) - 1
    )  # [0->1], render key time (could be extended)
    # Linear interpolation (with extrapolation) along the reference track.
    track_interp = interpolate.interp1d(t, track_ref, axis=0, fill_value="extrapolate")
    up_vec_interp = interpolate.interp1d(t, up_vecs_ref, axis=0, fill_value="extrapolate")
    left_vec_interp = interpolate.interp1d(t, left_vecs_ref, axis=0, fill_value="extrapolate")
    up_vecs_all = up_vec_interp(w * elongation)
    left_vecs_all = left_vec_interp(w * elongation)
    # ---- Base: left * [1], up * [1]
    track_base_all = (
        track_interp(w * elongation)
        + (up_offset + verti_radius) * up_vecs_all
        + (left_offset + horiz_radius) * left_vecs_all
        + forward_1st * forward_vecs_ref[None, 0]
    )
    # up_vecs_all = np.percentile(up_vecs_ref, w*100, 0)
    # left_vecs_all = np.percentile(left_vecs_ref, w*100, 0)
    # track_base_all = np.percentile(track_ref, w*100, 0) + (up_max+up_offset) * up_vecs_all + (left_max+left_offset) * left_vecs_all
    # Spherical interpolation of orientations along the reference track.
    key_rots = R.from_matrix(rot_ref)
    rot_slerp = Slerp(t, key_rots)
    if elongation > 1:
        # Slerp cannot extrapolate; clamp orientations past the track end.
        mask = (w * elongation) < 1.0
        rot_base_all = np.eye(3)[None, ...].repeat(remain_frames, 0)
        rot_base_all[mask] = rot_slerp(w[mask]).as_matrix()
        rot_base_all[~mask] = rot_ref[-1]
    else:
        rot_base_all = rot_slerp(w).as_matrix()
    rads = np.linspace(0, remain_frames / duration_frames * np.pi * 2.0, remain_frames)
    # ---- Spiral:
    # left: [0, -1, -2, -1, 0] + base: [1] -> [1, 0, -1, 0, 1]
    # up: [0, 1, 0, -1, 0] + base [1] -> [1, 2, 1, 0, 1]
    track = (
        track_base_all
        + (np.cos(rads) - 1)[..., None] * horiz_radius * left_vecs_all
        + (np.sin(rads))[..., None] * verti_radius * up_vecs_all
    )
    pose = np.eye(4)[None, ...].repeat(remain_frames, 0)
    pose[:, :3, 3] = track
    pose[:, :3, :3] = rot_base_all
    nvs_seqs.append(pose)
    render_pose_all = np.concatenate(nvs_seqs, 0)
    return render_pose_all
def to_tensor(x: Union[np.ndarray, List, Tuple]) -> torch.Tensor:
    """Coerce a list/tuple/ndarray into a float tensor; pass other values through."""
    value = np.array(x) if isinstance(x, (list, tuple)) else x
    if isinstance(value, np.ndarray):
        value = torch.from_numpy(value).float()
    return value
def to_float_tensor(d):
    """Recursively convert tensors/ndarrays inside nested dicts/lists to float tensors.

    Leaves of any other type are returned unchanged.
    """
    if isinstance(d, torch.Tensor):
        return d.float()
    if isinstance(d, np.ndarray):
        return torch.from_numpy(d).float()
    if isinstance(d, dict):
        return {key: to_float_tensor(val) for key, val in d.items()}
    if isinstance(d, list):
        return [to_float_tensor(item) for item in d]
    return d
def to_batch_tensor(d):
    """Recursively prepend a batch dimension to every tensor in nested dicts/lists.

    Non-tensor leaves are returned unchanged.
    """
    if isinstance(d, torch.Tensor):
        return d.unsqueeze(0)
    if isinstance(d, dict):
        return {key: to_batch_tensor(val) for key, val in d.items()}
    if isinstance(d, list):
        return [to_batch_tensor(item) for item in d]
    return d
def resize_depth(depth, target_size):
    """Resize a depth map to ``target_size`` using max-pool + nearest interpolation.

    Max-pooling before interpolation keeps valid (largest) depth values when
    downsampling sparse maps. Returns the input unchanged if already at
    ``target_size``; otherwise returns a squeezed tensor.
    """
    src_h, src_w = depth.shape[-2:]
    if (src_h, src_w) == target_size:
        return depth
    # Promote to NCHW for the pooling/interpolation ops.
    if len(depth.shape) == 2:
        depth = depth[None, None, ...]
    elif len(depth.shape) == 3:
        depth = depth[None, ...]
    tgt_h, tgt_w = target_size
    kh = src_h // tgt_h
    kw = src_w // tgt_w
    if kh > 0 and kw > 0:
        depth = F.max_pool2d(depth, kernel_size=(kh, kw))
    resized = F.interpolate(depth, size=target_size, mode="nearest")
    return resized.squeeze()
def resize_flow(flow, target_size):
    """Resize a flow map of shape (..., H, W, 3) to ``target_size``.

    When downsampling, near-zero flows are temporarily replaced by a large
    negative sentinel so that max-pooling keeps real (signed) flow vectors;
    pixels left at the sentinel afterwards are reset to zero.

    Fix: operates on a clone — the previous implementation wrote the
    sentinel into the caller's tensor in place, zeroing small flows in the
    input.

    Returns the input unchanged if already at ``target_size``; otherwise a
    squeezed resized tensor.
    """
    height, width = flow.shape[-3:-1]
    if (height, width) == target_size:
        return flow
    # Clone so the sentinel write below does not clobber the caller's tensor.
    flow = flow.clone()
    if len(flow.shape) == 3:
        flow = flow[None, ...]
    target_height, target_width = target_size
    kernel_size_h = height // target_height
    kernel_size_w = width // target_width
    # flows have direction, so we can't just use max_pool2d.
    # otherwise the direction will be wrong, e.g., max_pool2d([0, -1]) = [0]
    flow[torch.norm(flow, p=2, dim=-1) < 0.5] = -100000
    if kernel_size_h > 0 and kernel_size_w > 0:
        flow = F.max_pool2d(
            flow.permute(0, 3, 1, 2),
            kernel_size=(kernel_size_h, kernel_size_w),
        )
        flow = F.interpolate(flow, size=target_size, mode="nearest")
    else:
        flow = F.interpolate(flow.permute(0, 3, 1, 2), size=target_size, mode="nearest")
    flow = flow.permute(0, 2, 3, 1)
    # Pixels still at the sentinel had no valid flow in their pooling window.
    flow[torch.norm(flow, p=2, dim=-1) > 1000] = 0
    return flow.squeeze()
def prepare_inputs_and_targets(data_dict, device=torch.device("cuda"), v=3, timespan=2.0):
    """Reshape a collated dataloader batch into model inputs and supervision targets.

    Splits the fused (time * view) axis of every tensor into separate time
    and view axes and moves everything to ``device``.

    Args:
        data_dict: batch with "context"/"target" sub-dicts; images are (b, t*v, c, h, w).
        device: device to move all tensors to.
        v: number of camera views interleaved in the time-view axis.
        timespan: normalization constant for timestamps.

    Returns:
        (input_dict, target_dict) tuple of tensor dicts plus batch metadata.

    Fixes vs. previous version: removed a stray unconditional
    ``context_flow`` assignment that raised when the context had no flow
    (and was dead otherwise), and flow/depth targets now derive their
    spatial dims from their own tensors instead of the context depth.
    """
    assert data_dict["context"]["image"].dim() == 5, "need to be b, tv, c, h, w"
    b, tv, c, h, w = data_dict["context"]["image"].shape
    context_t = tv // v
    target_t = data_dict["target"]["image"].shape[1] // v
    input_dict = {
        "context_image": data_dict["context"]["image"].reshape(b, context_t, v, c, h, w),
        # targets to render
        "context_camtoworlds": data_dict["context"]["camtoworld"].reshape(b, context_t, v, 4, 4),
        "context_intrinsics": data_dict["context"]["intrinsics"].reshape(b, context_t, v, 3, 3),
        "target_camtoworlds": data_dict["target"]["camtoworld"].reshape(b, target_t, v, 4, 4),
        "target_intrinsics": data_dict["target"]["intrinsics"].reshape(b, target_t, v, 3, 3),
    }
    if "depth" in data_dict["context"]:
        depth_h, depth_w = data_dict["context"]["depth"].shape[-2:]
        input_dict["context_depth"] = data_dict["context"]["depth"].reshape(
            b, context_t, v, depth_h, depth_w
        )
    if "flow" in data_dict["context"]:
        flow_h, flow_w = data_dict["context"]["flow"].shape[-3:-1]
        input_dict["context_flow"] = data_dict["context"]["flow"].reshape(
            b, context_t, v, flow_h, flow_w, 3
        )
    if "time" in data_dict["context"]:
        input_dict["context_time"] = (
            data_dict["context"]["time"].reshape(b, context_t, v) / timespan
        )
    if "time" in data_dict["target"]:
        input_dict["target_time"] = data_dict["target"]["time"].reshape(b, target_t, v) / timespan
    if "sky_masks" in data_dict["context"]:
        input_dict["context_sky_masks"] = data_dict["context"]["sky_masks"].reshape(
            b, context_t, v, h, w
        )
    target_dict = {
        "target_image": data_dict["target"]["image"].reshape(b, target_t, v, 3, h, w),
    }
    if "depth" in data_dict["target"]:
        # Use the target depth's own resolution; context depth may be absent.
        t_depth_h, t_depth_w = data_dict["target"]["depth"].shape[-2:]
        target_dict["target_depth"] = data_dict["target"]["depth"].reshape(
            b, target_t, v, t_depth_h, t_depth_w
        )
    if "flow" in data_dict["target"]:
        t_flow_h, t_flow_w = data_dict["target"]["flow"].shape[-3:-1]
        target_dict["target_flow"] = data_dict["target"]["flow"].reshape(
            b, target_t, v, t_flow_h, t_flow_w, 3
        )
    if "flow" in data_dict["context"]:
        target_dict["context_flow"] = data_dict["context"]["flow"].reshape(
            b, context_t, v, flow_h, flow_w, 3
        )
    if "sky_masks" in data_dict["target"]:
        target_dict["target_sky_masks"] = data_dict["target"]["sky_masks"].reshape(
            b, target_t, v, h, w
        )
    if "sky_masks" in data_dict["context"]:
        target_dict["context_sky_masks"] = data_dict["context"]["sky_masks"].reshape(
            b, context_t, v, h, w
        )
    if "dynamic_masks" in data_dict["target"]:
        target_dict["target_dynamic_masks"] = data_dict["target"]["dynamic_masks"].reshape(
            b, target_t, v, h, w
        )
    if "ground_masks" in data_dict["target"]:
        target_dict["target_ground_masks"] = data_dict["target"]["ground_masks"].reshape(
            b, target_t, v, h, w
        )
    input_dict["context_frame_idx"] = data_dict["context"]["frame_idx"]
    target_dict["target_frame_idx"] = data_dict["target"]["frame_idx"]
    # Move all tensors to the target device (loop var renamed so it doesn't
    # shadow the view-count parameter `v`).
    input_dict = {k: val.to(device) for k, val in input_dict.items()}
    target_dict = {k: val.to(device) for k, val in target_dict.items()}
    # Non-tensor metadata is attached after the device move.
    input_dict["timespan"] = timespan
    input_dict["scene_id"] = data_dict["scene_id"]
    input_dict["scene_name"] = data_dict["scene_name"]
    input_dict["height"], input_dict["width"] = h, w
    return input_dict, target_dict
def prepare_inputs_and_targets_novel_view(data_dict, device=torch.device("cpu")):
    """Legacy variant of ``prepare_inputs_and_targets`` that swaps the target
    cameras for a synthesized "lift then spiral forward" render trajectory.

    NOTE(review): the function raises immediately; everything below the
    ``raise`` is dead code kept only as a reference implementation.
    """
    raise NotImplementedError("Legacy code, not tested for a while. But you can use it as a reference.")
    assert data_dict["context"]["image"].dim() == 5, "need to be b, tv, c, h, w"
    b, tv, c, h, w = data_dict["context"]["image"].shape
    v = int(data_dict["context"]["num_views"].flatten()[0].item())
    context_t = tv // v
    target_t = data_dict["target"]["image"].shape[1] // v
    # move the camera to right by 1 meter:
    # freeze time
    # cam_to_worlds = data_dict["target"]["camtoworld"].view(b, target_t, v, 4, 4)
    # cam_to_worlds0 = cam_to_worlds[:, 0:1]
    # cam_to_worlds = cam_to_worlds0.expand(-1, target_t, -1, -1, -1)
    # data_dict["target"]["camtoworld"] = cam_to_worlds.reshape(b, target_t * v, 4, 4)
    ### novel view
    # Assumes exactly 3 views ordered left/center/right — TODO confirm.
    cam_to_worlds = data_dict["target"]["camtoworld"].view(b, target_t, v, 4, 4)
    left_cam_to_worlds = cam_to_worlds[:, :, 0].numpy()
    center_cam_to_worlds = cam_to_worlds[:, :, 1].numpy()
    right_cam_to_worlds = cam_to_worlds[:, :, 2].numpy()
    new_left_cam_to_worlds = []
    new_center_cam_to_worlds = []
    new_right_cam_to_worlds = []
    # Build one spiral path per camera, per batch element.
    for bix in range(left_cam_to_worlds.shape[0]):
        new_path = get_path_front_left_lift_then_spiral_forward(
            left_cam_to_worlds[bix],
            num_frames=target_t,
            duration_frames=20,
        )
        new_left_cam_to_worlds.append(torch.from_numpy(new_path))
        new_path = get_path_front_left_lift_then_spiral_forward(
            center_cam_to_worlds[bix],
            num_frames=target_t,
            duration_frames=20,
        )
        new_center_cam_to_worlds.append(torch.from_numpy(new_path))
        new_path = get_path_front_left_lift_then_spiral_forward(
            right_cam_to_worlds[bix],
            num_frames=target_t,
            duration_frames=20,
        )
        new_right_cam_to_worlds.append(torch.from_numpy(new_path))
    new_left_cam_to_worlds = torch.stack(new_left_cam_to_worlds, dim=0)
    new_center_cam_to_worlds = torch.stack(new_center_cam_to_worlds, dim=0)
    new_right_cam_to_worlds = torch.stack(new_right_cam_to_worlds, dim=0)
    new_cam_to_worlds = torch.stack(
        [new_left_cam_to_worlds, new_center_cam_to_worlds, new_right_cam_to_worlds],
        dim=2,
    )
    data_dict["target"]["camtoworld"] = new_cam_to_worlds.reshape(b, target_t * v, 4, 4).to(
        data_dict["target"]["camtoworld"]
    )
    ### novel view
    input_dict = {
        "context_image": data_dict["context"]["image"].reshape(b, context_t, v, c, h, w),
        # targets to render
        "context_camtoworlds": data_dict["context"]["camtoworld"].reshape(b, context_t, v, 4, 4),
        "context_intrinsics": data_dict["context"]["intrinsics"].reshape(b, context_t, v, 3, 3),
        "target_camtoworlds": data_dict["target"]["camtoworld"].reshape(b, target_t, v, 4, 4),
        "target_intrinsics": data_dict["target"]["intrinsics"].reshape(b, target_t, v, 3, 3),
    }
    if "depth" in data_dict["context"]:
        input_dict["context_depth"] = data_dict["context"]["depth"].reshape(b, context_t, v, h, w)
    if "flow" in data_dict["context"]:
        input_dict["context_flow"] = data_dict["context"]["flow"].reshape(b, context_t, v, h, w, 3)
    if "time" in data_dict["context"]:
        input_dict["context_time"] = data_dict["context"]["time"].reshape(b, context_t)
    if "time" in data_dict["target"]:
        input_dict["target_time"] = data_dict["target"]["time"].reshape(b, target_t)
    if "sky_masks" in data_dict["context"]:
        input_dict["context_sky_masks"] = data_dict["context"]["sky_masks"].reshape(
            b, context_t, v, h, w
        )
    target_dict = {
        "target_image": data_dict["target"]["image"].reshape(b, target_t, v, 3, h, w),
    }
    if "depth" in data_dict["target"]:
        target_dict["target_depth"] = data_dict["target"]["depth"].reshape(b, target_t, v, h, w)
    if "depth" in data_dict["context"]:
        target_dict["context_depth"] = data_dict["context"]["depth"].reshape(b, context_t, v, h, w)
    if "flow" in data_dict["target"]:
        target_dict["target_flow"] = data_dict["target"]["flow"].reshape(b, target_t, v, h, w, 3)
    if "sky_masks" in data_dict["target"]:
        target_dict["target_sky_masks"] = data_dict["target"]["sky_masks"].reshape(
            b, target_t, v, h, w
        )
    if "sky_masks" in data_dict["context"]:
        target_dict["context_sky_masks"] = data_dict["context"]["sky_masks"].reshape(
            b, context_t, v, h, w
        )
    if "dynamic_masks" in data_dict["target"]:
        target_dict["target_dynamic_masks"] = data_dict["target"]["dynamic_masks"].reshape(
            b, target_t, v, h, w
        )
    if "ground_masks" in data_dict["target"]:
        target_dict["target_ground_masks"] = data_dict["target"]["ground_masks"].reshape(
            b, target_t, v, h, w
        )
    target_dict["target_frame_idx"] = data_dict["target"]["frame_idx"]
    input_dict = {k: v.to(device) for k, v in input_dict.items()}
    target_dict = {k: v.to(device) for k, v in target_dict.items()}
    # NOTE(review): bare except silently falls back to the non-batched path;
    # narrow the exception type if this code is ever revived.
    try:
        input_dict["height"] = data_dict["height"][0].item()
        input_dict["width"] = data_dict["width"][0].item()
        if "timespan" in data_dict:
            input_dict["timespan"] = data_dict["timespan"].flatten()[0].item()
    except:
        input_dict["height"] = data_dict["height"]
        input_dict["width"] = data_dict["width"]
        if "timespan" in data_dict:
            input_dict["timespan"] = data_dict["timespan"]
    input_dict["context_frame_idx"] = data_dict["context"]["frame_idx"]
    input_dict["scene_id"] = data_dict["scene_id"]
    input_dict["scene_name"] = data_dict["scene_name"]
    return input_dict, target_dict
================================================
FILE: storm/dataset/samplers.py
================================================
import itertools
from typing import Any, Optional
import torch
from torch.utils.data.sampler import Sampler
import storm.utils.distributed as distributed
def _get_torch_dtype(size: int) -> Any:
    """Pick the smallest integer dtype able to index a permutation of ``size``.

    Indices run up to ``size - 1``, so int32 suffices for size <= 2**31.
    """
    if size <= 2**31:
        return torch.int32
    return torch.int64
def _generate_randperm_indices(*, size: int, generator: torch.Generator):
    """
    Lazily yield the entries of a random permutation of ``range(size)``.

    Mirrors PyTorch's CPU Fisher-Yates implementation so the result matches
    ``torch.randperm`` for the same generator state.
    See: https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/native/TensorFactories.cpp#L900-L921
    """
    perm = torch.arange(size, dtype=_get_torch_dtype(size))
    for i in range(size):
        # Draw a partner index in [i, size) and swap — even when j == i,
        # to keep the RNG stream aligned with the reference implementation.
        j = torch.randint(i, size, size=(1,), generator=generator).item()
        value = perm[j].item()
        perm[j] = perm[i].item()
        perm[i] = value
        yield value
class InfiniteSampler(Sampler):
    """Endlessly yields dataset indices, optionally shuffled per epoch cycle.

    Each rank draws the strided slice (start, start + step, ...) of either
    the sequential order or a seeded random permutation, repeated forever.
    """

    def __init__(
        self,
        sample_count: int,
        shuffle: bool = False,
        seed: int = 0,
        start: Optional[int] = None,
        step: Optional[int] = None,
        advance: int = 0,
    ):
        """
        A sampler that infinitely yields indices for a dataset.

        Args:
            sample_count (int): Number of samples in the dataset.
            shuffle (bool): Whether to shuffle indices.
            seed (int): Seed for random generator.
            start (Optional[int]): Starting index for sampling; defaults to
                this process's global rank.
            step (Optional[int]): Step size for sampling; defaults to the
                world size.
            advance (int): Number of indices to skip at the start.
        """
        self._sample_count = sample_count
        self._seed = seed
        self._shuffle = shuffle
        # Default to rank/world-size striding for distributed training.
        self._start = start if start is not None else distributed.get_global_rank()
        self._step = step if step is not None else distributed.get_world_size()
        self._advance = advance

    def __iter__(self):
        """Yield this rank's slice of the (possibly shuffled) index stream."""
        if self._shuffle:
            source = self._shuffled_iterator()
        else:
            source = self._iterator()
        yield from itertools.islice(source, self._advance, None)

    def _iterator(self):
        """Sequential index stream, restarted forever."""
        assert not self._shuffle
        while True:
            yield from itertools.islice(range(self._sample_count), self._start, None, self._step)

    def _shuffled_iterator(self):
        """Shuffled index stream; permutations continue from one generator."""
        assert self._shuffle
        rng = torch.Generator().manual_seed(self._seed)
        while True:
            perm = _generate_randperm_indices(size=self._sample_count, generator=rng)
            yield from itertools.islice(perm, self._start, None, self._step)
class NoPaddingDistributedSampler(Sampler):
    """Distributed sampler that covers the whole dataset without padding.

    Used for distributed evaluation when the dataset size is not divisible
    by the number of replicas: every rank gets ``len(dataset) // replicas``
    samples, and the last rank additionally receives the remainder, so no
    sample is duplicated or dropped. ``__len__`` reports the truncated
    per-rank count.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=False):
        """
        Args:
            dataset: The dataset to sample from.
            num_replicas (int): Number of replicas; defaults to world size.
            rank (int): Rank of the current process; defaults to global rank.
            shuffle (bool): Whether to shuffle indices (fixed seed 0).
        """
        self.dataset = dataset
        self.num_replicas = (
            num_replicas if num_replicas is not None else distributed.get_world_size()
        )
        self.rank = rank if rank is not None else distributed.get_global_rank()
        self.shuffle = shuffle
        self.num_samples = len(self.dataset) // self.num_replicas
        self.total_size = self.num_samples * self.num_replicas
        self.rank_start = self.rank * self.num_samples
        # Last rank absorbs the remainder so the full dataset is covered.
        if self.rank < self.num_replicas - 1:
            self.rank_end = (self.rank + 1) * self.num_samples
        else:
            self.rank_end = len(self.dataset)

    def __iter__(self):
        """Yield this rank's contiguous slice of the (optionally shuffled) order."""
        order = list(range(len(self.dataset)))
        if self.shuffle:
            gen = torch.Generator()
            gen.manual_seed(0)
            order = torch.randperm(len(order), generator=gen).tolist()
        return iter(order[self.rank_start : self.rank_end])

    def __len__(self):
        """Return the truncated number of samples for this rank."""
        return self.num_samples
================================================
FILE: storm/dataset/storm_dataset.py
================================================
import json
import logging
import os
from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union
import numpy as np
import torch
import torchvision.transforms as transforms
from PIL import Image
from torch.utils.data import Dataset
from torch.utils.data.dataloader import default_collate
from tqdm import trange
from .constants import DATASET_DICT, DATASETS, MEAN, STD
from .data_utils import resize_depth, resize_flow, to_float_tensor, to_tensor
logger = logging.getLogger("STORM")
class STORMDataset(Dataset):
    """Multi-camera driving-scene dataset for STORM training.

    Each sample pairs a set of "context" frames (model inputs) with a set of
    "target" frames (supervision), both drawn from a window of ``timespan``
    seconds inside one scene. All camera poses are expressed relative to the
    reference camera's pose at the first context frame.
    """

    def __init__(
        self,
        data_root: str,
        annotation_txt_file_list: Union[str, List[str]],
        target_size: Tuple[int, int] = (160, 240),
        num_context_timesteps: int = 4,
        num_target_timesteps: int = 4,
        num_max_cams: Literal[1, 3, 5, 6, 7] = 3,
        timespan: float = 2.0,  # 2.0 seconds
        subset_indices: Optional[List[int]] = None,
        num_replicas: int = 1,
        equispaced: bool = True,
        load_depth: bool = True,
        load_flow: bool = False,
        load_dynamic_mask: bool = False,
        load_ground_label: bool = False,
        return_context_as_target: bool = False,
        skip_sky_mask: bool = False,
    ):
        """
        Args:
            data_root: Root directory that contains ``datasets/<name>/...``.
            annotation_txt_file_list: Path(s) to txt file(s); each line is a
                per-scene annotation JSON path relative to ``data_root``.
            target_size: Output (height, width) for images and masks.
            num_context_timesteps: Number of context (input) timesteps.
            num_target_timesteps: Number of target (supervision) timesteps.
            num_max_cams: How many cameras to load per timestep.
            timespan: Length in seconds of the sampling window.
            subset_indices: Optional indices to select a subset of scenes.
            num_replicas: Repeat the annotation list this many times
                (lengthens an epoch without changing its content).
            equispaced: If True, context frames are equally spaced across the
                window; otherwise they are sampled uniformly at random.
            load_depth: Load per-pixel lidar depth supervision.
            load_flow: Load per-pixel scene-flow supervision (waymo only).
            load_dynamic_mask: Load dynamic-region masks (waymo only).
            load_ground_label: Load ground-point masks (waymo only).
            return_context_as_target: Reuse context frames as targets.
            skip_sky_mask: Return all-zero sky masks instead of reading files.
        """
        super().__init__()
        self.data_root = data_root
        self.target_size = target_size
        self.num_context_timesteps = num_context_timesteps
        self.num_target_timesteps = num_target_timesteps
        self.num_max_cams = num_max_cams
        self.timespan = timespan
        self.load_depth = load_depth
        self.load_flow = load_flow
        self.load_dynamic_mask = load_dynamic_mask
        self.load_ground_label = load_ground_label
        self.skip_sky_mask = skip_sky_mask
        if isinstance(annotation_txt_file_list, str):
            annotation_txt_file_list = [annotation_txt_file_list]
        scene_list = []
        for annotation_txt_file in annotation_txt_file_list:
            with open(annotation_txt_file, "r") as f:
                scene_list += f.readlines()
        annotation_paths = [line.strip() for line in scene_list]
        if subset_indices is not None:
            annotation_paths = [annotation_paths[i] for i in subset_indices]
        self.annotations = []
        for annotation_path in annotation_paths:
            with open(os.path.join(data_root, annotation_path), "r") as f:
                self.annotations.append(json.load(f))
        logger.info(f"Loaded {len(self.annotations)} annotations.")
        self.num_replicas = num_replicas
        if self.num_replicas > 1:
            # Duplicate the scene list to lengthen an "epoch".
            self.annotations *= self.num_replicas
        self.equispaced = equispaced
        self.return_context_as_target = return_context_as_target
        self.img_transformation = transforms.Compose(
            [
                transforms.Resize(target_size, interpolation=Image.BICUBIC, antialias=True),
                transforms.ToTensor(),
                transforms.Normalize(mean=MEAN, std=STD),
            ]
        )

    def __len__(self) -> int:
        # One item per (possibly replicated) scene annotation.
        return len(self.annotations)

    def get_frame(
        self,
        scene_json: Dict[str, Any],
        frame_idx: int,
        source_frame_idx: int = -1,
    ) -> Dict[str, Any]:
        """Retrieve a single frame from the dataset.

        Loads images (and the enabled auxiliary signals) for every camera at
        timestep ``frame_idx``, with poses mapped into the canonical frame
        defined by the reference camera at ``source_frame_idx``.

        Returns a dict with per-camera stacked tensors; keys whose signal was
        not loaded are omitted.
        """
        normalized_intrinsics = scene_json["normalized_intrinsics"]
        dataset_name = scene_json["dataset"]
        cam_to_world = scene_json["camera_to_world"]
        images, depths, sky_masks, flows = [], [], [], []
        camtoworlds, intrinsics = [], []
        dynamic_masks, ground_masks = [], []
        if source_frame_idx < 0:
            # Default: use this very frame as the canonical reference.
            source_frame_idx = frame_idx
        camera_list = DATASET_DICT[dataset_name]["camera_list"][self.num_max_cams]
        ref_camera_name = DATASET_DICT[dataset_name]["ref_camera"]
        # World -> reference-camera-at-source-frame transform.
        world_to_canonical = np.linalg.inv(cam_to_world[ref_camera_name][source_frame_idx])
        for camera in camera_list:
            img_relative_path = scene_json["relative_image_path"][camera][frame_idx]
            if dataset_name in ["waymo", "nuscenes", "argoverse2", "argoverse"]:
                # Redirect to the 4x-downsampled image folders.
                img_relative_path = img_relative_path.replace("images", f"images_4")
                img_relative_path = img_relative_path.replace("sweeps", f"sweeps_4")
                img_relative_path = img_relative_path.replace("samples", f"samples_4")
            # Get RGB
            img_path = os.path.join(self.data_root, "datasets", dataset_name, img_relative_path)
            img = Image.open(img_path).convert("RGB")
            img = self.img_transformation(img)
            images.append(img)
            # Get sky mask
            if dataset_name in ["waymo", "nuscenes", "argoverse2"]:
                # if dataset_name in ["waymo", "nuscenes"]:
                if dataset_name == "nuscenes":
                    sky_path = img_path.replace("samples", "samples_sky_mask")
                    sky_path = sky_path.replace("sweeps", "sweeps_sky_mask")
                elif dataset_name == "waymo":
                    sky_path = img_path.replace("images", "sky_masks")
                elif dataset_name == "argoverse2":
                    sky_path = img_path.replace("images_4", "sky_masks_512")
                sky_path = sky_path.replace("jpg", "png")
                if self.skip_sky_mask:
                    sky = torch.zeros(self.target_size[0], self.target_size[1]).float()
                else:
                    try:
                        # Prefer masks stored under the alternate root, fall
                        # back to the original path if absent.
                        new_sky_path = sky_path.replace("STORM2", "STORM_masks")
                        sky = Image.open(new_sky_path).convert("L").resize(self.target_size[::-1])
                    except FileNotFoundError:
                        sky = Image.open(sky_path).convert("L").resize(self.target_size[::-1])
                    sky = to_tensor(np.array(sky) > 0).float()
                sky_masks.append(sky)
            # Get dynamic mask, this is for dynamic region evaluation.
            if dataset_name in ["waymo"] and self.load_dynamic_mask:
                dynamic_path = img_path.replace("images_8", "dynamic_masks/all")
                dynamic_path = dynamic_path.replace("images_4", "dynamic_masks/all")
                dynamic_path = dynamic_path.replace("jpg", "png")
                if not os.path.exists(dynamic_path):
                    dynamic_path = dynamic_path.replace("STORM2", "STORM")
                dynamic_mask = Image.open(dynamic_path).convert("L").resize(self.target_size[::-1])
                dynamic_mask = to_tensor(np.array(dynamic_mask) > 0).float()
                dynamic_masks.append(dynamic_mask)
            # Get ground label, this is for flow evaluation, i.e., we use this to exclude the ground lidar points.
            if dataset_name in ["waymo"] and self.load_ground_label:
                ground_path = img_path.replace("images", "ground_label")
                ground_path = ground_path.replace("jpg", "png")
                ground = Image.open(ground_path).convert("L").resize(self.target_size[::-1])
                ground = to_tensor(np.array(ground) > 0).float()
                ground_masks.append(ground)
            # Compose: dataset-specific axis fix, canonicalization, raw pose,
            # and OpenCV-convention conversion.
            camtoworld = (
                DATASETS[dataset_name]["canonical_to_flu"]
                @ world_to_canonical
                @ cam_to_world[camera][frame_idx]
                @ DATASETS[dataset_name]["opencv2dataset"]
            )
            camtoworld = to_tensor(camtoworld)
            camtoworlds.append(camtoworld)
            # intrinsics: stored normalized, rescale to pixel units of
            # target_size (width scales fx/cx, height scales fy/cy).
            fx, fy, cx, cy = np.array(normalized_intrinsics[camera])
            fx = fx * self.target_size[1]
            fy = fy * self.target_size[0]
            cx = cx * self.target_size[1]
            cy = cy * self.target_size[0]
            intrinsics.append(
                torch.tensor(
                    [
                        [fx, 0.0, cx],
                        [0.0, fy, cy],
                        [0.0, 0.0, 1.0],
                    ]
                ).float()
            )
            if self.load_depth or self.load_flow:
                if dataset_name == "waymo":
                    # waymo stores depth and flow in a single npy:
                    # channel 0 = depth, channels 1: = flow.
                    depth_path = img_path.replace("images", "depth_flows").replace("jpg", "npy")
                    depth_and_flow = np.load(depth_path)
                    if self.load_depth:
                        depth = depth_and_flow[..., 0]
                        depth = torch.tensor(depth).float()
                        depth = resize_depth(depth, self.target_size)
                        depths.append(depth)
                    if self.load_flow:
                        flow = depth_and_flow[..., 1:]
                        flow = torch.tensor(flow).float()
                        flow = resize_flow(flow, self.target_size)
                        # there must be a better way to do this: rotate the flow to the canonical view
                        flow = (
                            flow
                            @ torch.tensor(
                                (
                                    world_to_canonical
                                    @ cam_to_world[camera][frame_idx]
                                    @ np.linalg.inv(scene_json["camera_to_ego"][camera])
                                )
                            )
                            .float()[:3, :3]
                            .T
                        )
                        flows.append(flow)
                if dataset_name == "nuscenes":
                    depth_path = img_path.replace("samples", "samples_depth")
                    depth_path = depth_path.replace("sweeps", "sweeps_depth")
                    depth_path = depth_path.replace("jpg", "npy")
                    depth = np.load(depth_path)
                    depth = torch.tensor(depth).float()
                    depth = resize_depth(depth, self.target_size)
                    depths.append(depth)
                if dataset_name == "argoverse2":
                    depth_path = img_path.replace("images", "depths")
                    depth_path = depth_path.replace("jpg", "npy")
                    depth = np.load(depth_path)
                    depth = torch.tensor(depth).float()
                    depth = resize_depth(depth, self.target_size)
                    depths.append(depth)
        # Stack per-camera lists; signals that were never collected become
        # None and are dropped from the returned dict below.
        frame_images = torch.stack(images)
        frame_depths = torch.stack(depths) if len(depths) > 0 else None
        frame_sky_masks = torch.stack(sky_masks) if len(sky_masks) > 0 else None
        frame_flows = torch.stack(flows) if len(flows) > 0 else None
        frame_dynamic_masks = torch.stack(dynamic_masks) if len(dynamic_masks) > 0 else None
        frame_camtoworlds = torch.stack(camtoworlds)
        frame_intrinsics = torch.stack(intrinsics)
        ground_masks = torch.stack(ground_masks) if len(ground_masks) > 0 else None
        data_dict = {
            "image": frame_images,
            "camtoworld": frame_camtoworlds,
            "intrinsics": frame_intrinsics,
            "frame_idx": frame_idx,
            "depth": frame_depths,
            "sky_masks": frame_sky_masks,
            "flow": frame_flows,
            "dynamic_masks": frame_dynamic_masks,
            "ground_masks": ground_masks,
        }
        return {k: v for k, v in data_dict.items() if v is not None}

    def __getitem__(
        self, index: int, context_frame_idx: int = -1, return_all=False
    ) -> Dict[str, Any]:
        """Build one training sample.

        Args:
            index: Scene index (wrapped modulo the annotation count).
            context_frame_idx: First context frame; negative means random.
            return_all: If True, targets are every frame in the window rather
                than a random subset of ``num_target_timesteps``.
        """
        try:
            scene_json = self.annotations[index % len(self.annotations)]
            scene_id = scene_json["scene_id"]
            num_timesteps = scene_json["num_timesteps"]
            fps = scene_json["fps"]
            num_max_future_frames = int(self.timespan * fps)
            if num_max_future_frames > num_timesteps:
                num_max_future_frames = int(fps)  # make it 1 seconds
            time_in_seconds = scene_json["normalized_time"]
            if context_frame_idx < 0:
                context_frame_idx = np.random.randint(0, num_timesteps - num_max_future_frames)
            if context_frame_idx + num_max_future_frames >= num_timesteps:
                # Requested start would run past the scene; resample.
                context_frame_idx = np.random.randint(0, num_timesteps - num_max_future_frames)
            assert (
                context_frame_idx + num_max_future_frames < num_timesteps
            ), f"scene_id: {scene_id}, context_frame_idx: {context_frame_idx}, num_timesteps: {num_timesteps}, num_max_future_frames: {num_max_future_frames}"
            if self.equispaced:
                # Evenly spaced context frames across the window.
                context_frame_idx = np.arange(
                    context_frame_idx,
                    context_frame_idx + num_max_future_frames,
                    num_max_future_frames // self.num_context_timesteps,
                )
            else:
                # Randomly placed context frames within the window.
                context_frame_idx = np.random.choice(
                    np.arange(
                        context_frame_idx,
                        context_frame_idx + num_max_future_frames,
                    ),
                    size=self.num_context_timesteps,
                    replace=False,
                )
            context_frame_idx = sorted(context_frame_idx)
            if return_all:
                # return all frames between context_frame_idx and context_frame_idx + num_max_future_frames
                target_frame_idx = np.arange(
                    context_frame_idx[0],
                    context_frame_idx[0] + num_max_future_frames,
                )
            else:
                # randomly sample "num_target_timesteps" frames
                target_frame_idx = np.random.choice(
                    np.arange(
                        context_frame_idx[0],
                        context_frame_idx[0] + num_max_future_frames,
                    ),
                    self.num_target_timesteps,
                    replace=False,
                )
            target_frame_idx = [min(idx, num_timesteps - 1) for idx in target_frame_idx]
            target_frame_idx = sorted(target_frame_idx)
            # get context
            context_dict_list = []
            for ctx_id in context_frame_idx:
                context_dict = self.get_frame(
                    scene_json=scene_json,
                    frame_idx=ctx_id,
                    source_frame_idx=context_frame_idx[0],
                )
                # Per-camera relative timestamps (seconds since first context
                # frame), replicated once per camera.
                context_dict["time"] = torch.tensor(
                    [time_in_seconds[ctx_id] - time_in_seconds[context_frame_idx[0]]]
                    * self.num_max_cams
                )
                context_dict_list.append(context_dict)
            if self.return_context_as_target:
                target_frame_idx = context_frame_idx
            target_dict_list = []
            for target_id in target_frame_idx:
                target_dict = self.get_frame(
                    scene_json=scene_json,
                    frame_idx=target_id,
                    source_frame_idx=context_frame_idx[0],
                )
                target_dict["time"] = torch.tensor(
                    [time_in_seconds[target_id] - time_in_seconds[context_frame_idx[0]]]
                    * self.num_max_cams
                )
                target_dict_list.append(target_dict)
            context_dict = default_collate(context_dict_list)
            target_dict = default_collate(target_dict_list)
            # Flatten the leading (time, camera) stacking produced by collate
            # into a single leading dimension.
            for k, v in context_dict.items():
                if isinstance(v, torch.Tensor) and len(v.shape) >= 2:
                    context_dict[k] = torch.cat([d for d in v], dim=0)
            for k, v in target_dict.items():
                if isinstance(v, torch.Tensor) and len(v.shape) >= 2:
                    target_dict[k] = torch.cat([d for d in v], dim=0)
            sample = {
                "context": context_dict,
                "target": target_dict,
                "scene_id": scene_id,
                "scene_name": scene_json["scene_name"],
                "width": self.target_size[1],
                "height": self.target_size[0],
                "fps": fps,
                "timespan": self.timespan,
            }
            return to_float_tensor(sample)
        except Exception as e:
            # Best-effort fallback: log and retry with the next scene.
            # NOTE(review): if the failure occurred before `scene_json` /
            # `scene_id` were bound, this f-string itself raises NameError;
            # also the recursive retries do not terminate if every scene
            # fails — confirm whether that is acceptable.
            logger.info(
                f"Error in scene_id: {scene_id}, context_frame_idx: {context_frame_idx}, scene_name: {scene_json['scene_name']}"
            )
            logger.info(e)
            try:
                return self.__getitem__(index + 1, 0, return_all)
            except Exception as e:
                logger.info(e)
                return self.__getitem__(index + 1)
class STORMDatasetEval(STORMDataset):
    """Evaluation variant of :class:`STORMDataset`.

    Instead of sampling scenes and windows at random, it enumerates a fixed
    list of ``(scene_id, start_frame)`` pairs — every 20th frame within the
    first 200 frames of each scene — and always returns all frames in the
    window (``return_all=True``).
    """

    def __init__(
        self,
        data_root: str,
        annotation_txt_file_list: Union[str, List[str]],
        target_size: Tuple[int, int] = (225, 400),
        num_context_timesteps: int = 4,
        num_target_timesteps: int = 4,
        num_max_cams: Literal[1, 3, 5, 6, 7] = 3,
        timespan: float = 2.0,  # how many seconds
        subset_indices: Optional[List[int]] = None,
        num_replicas: int = 1,
        equispaced: bool = True,
        load_depth: bool = True,
        load_flow: bool = False,
        load_dynamic_mask: bool = False,
        load_ground_label: bool = False,
        return_context_as_target: bool = False,
        skip_sky_mask: bool = False,
        scene_id_list: Optional[List[int]] = None,
    ):
        super().__init__(
            data_root=data_root,
            annotation_txt_file_list=annotation_txt_file_list,
            target_size=target_size,
            num_context_timesteps=num_context_timesteps,
            num_target_timesteps=num_target_timesteps,
            num_max_cams=num_max_cams,
            timespan=timespan,
            subset_indices=subset_indices,
            num_replicas=num_replicas,
            equispaced=equispaced,
            load_depth=load_depth,
            load_flow=load_flow,
            load_dynamic_mask=load_dynamic_mask,
            load_ground_label=load_ground_label,
            return_context_as_target=return_context_as_target,
            skip_sky_mask=skip_sky_mask,
        )
        if scene_id_list is None:
            scene_id_list = list(range(len(self.annotations)))
        # (scene 63, start 0) is excluded — presumably a known-bad sample;
        # TODO confirm with the original authors.
        self.val_sample_list = [
            (scene_idx, start_idx)
            for scene_idx in scene_id_list
            for start_idx in range(0, 200, 20)
            if not (scene_idx == 63 and start_idx == 0)
        ]

    def __len__(self) -> int:
        """Number of fixed (scene, start-frame) evaluation samples."""
        return len(self.val_sample_list)

    def __getitem__(self, index: int):
        """Return the full-window sample for the index-th evaluation pair."""
        scene_idx, start_frame = self.val_sample_list[index]
        return super().__getitem__(scene_idx, start_frame, return_all=True)
class SingleSequenceDataset(Dataset):
    """Dataset that walks a whole scene as consecutive segments.

    ``__getitem__`` returns a list of full-window segments, one every 20
    frames, each canonicalized to the reference camera at that segment's
    start and annotated with the transform back to the very first segment
    (``segment_to_ref``). Mirrors :class:`STORMDataset`'s frame loading.
    """

    def __init__(
        self,
        data_root: str,
        annotation_txt_file_list: Union[str, List[str]],
        target_size: Tuple[int, int] = (225, 400),
        num_context_timesteps: int = 4,
        num_target_timesteps: int = 4,
        num_max_cams: Literal[1, 3, 5, 6, 7] = 3,
        timespan: float = 2.0,  # how many seconds
        subset_indices: Optional[List[int]] = None,
        num_replicas: int = 1,
        equispaced: bool = True,
        load_depth: bool = True,
        load_flow: bool = False,
        load_dynamic_mask: bool = False,
        load_ground_label: bool = False,
        load_scale="4",
    ):
        """
        Args are as in :class:`STORMDataset`, plus:
            load_scale: Which downsampled image folder to read; "4" redirects
                paths to the ``*_4`` directories, anything else loads the
                original-resolution paths.
        """
        super().__init__()
        self.data_root = data_root
        self.target_size = target_size
        self.timespan = timespan
        self.num_context_timesteps = num_context_timesteps
        self.num_target_timesteps = num_target_timesteps
        self.num_max_cams = num_max_cams
        self.load_depth = load_depth
        self.load_flow = load_flow
        self.load_dynamic_mask = load_dynamic_mask
        self.load_ground_label = load_ground_label
        self.load_scale = load_scale
        if isinstance(annotation_txt_file_list, str):
            annotation_txt_file_list = [annotation_txt_file_list]
        scene_list = []
        for annotation_txt_file in annotation_txt_file_list:
            with open(annotation_txt_file, "r") as f:
                scene_list += f.readlines()
        annotation_paths = [line.strip() for line in scene_list]
        if subset_indices is not None:
            annotation_paths = [annotation_paths[i] for i in subset_indices]
        self.annotations = []
        for annotation_path in annotation_paths:
            with open(os.path.join(data_root, annotation_path), "r") as f:
                self.annotations.append(json.load(f))
        logger.info(f"Loaded {len(self.annotations)} annotations.")
        self.num_replicas = num_replicas
        if self.num_replicas > 1:
            # Duplicate the scene list to lengthen an "epoch".
            self.annotations *= self.num_replicas
        self.equispaced = equispaced
        self.img_transformation = transforms.Compose(
            [
                transforms.Resize(target_size, interpolation=Image.BICUBIC, antialias=True),
                transforms.ToTensor(),
                transforms.Normalize(mean=MEAN, std=STD),
            ]
        )

    def __len__(self) -> int:
        # One item per (possibly replicated) scene annotation.
        return len(self.annotations)

    def get_frame(
        self,
        scene_json: Dict[str, Any],
        frame_idx: int,
        source_frame_idx: int = -1,
    ) -> Dict[str, Any]:
        """Load one timestep for all cameras, canonicalized to the reference
        camera pose at ``source_frame_idx`` (defaults to ``frame_idx``).

        Returns a dict of stacked per-camera tensors; keys for signals that
        were not loaded are omitted.
        """
        normalized_intrinsics = scene_json["normalized_intrinsics"]
        dataset_name = scene_json["dataset"]
        cam_to_world = scene_json["camera_to_world"]
        images, depths, sky_masks, flows = [], [], [], []
        camtoworlds, intrinsics = [], []
        dynamic_masks, ground_masks = [], []
        if source_frame_idx < 0:
            source_frame_idx = frame_idx
        camera_list = DATASET_DICT[dataset_name]["camera_list"][self.num_max_cams]
        ref_camera_name = DATASET_DICT[dataset_name]["ref_camera"]
        # World -> reference-camera-at-source-frame transform.
        world_to_canonical = np.linalg.inv(cam_to_world[ref_camera_name][source_frame_idx])
        for camera in camera_list:
            img_relative_path = scene_json["relative_image_path"][camera][frame_idx]
            if dataset_name in ["waymo", "nuscenes", "argoverse2", "argoverse"]:
                if self.load_scale == "4":
                    # Redirect to the 4x-downsampled image folders.
                    img_relative_path = img_relative_path.replace("images", f"images_4")
                    img_relative_path = img_relative_path.replace("sweeps", f"sweeps_4")
                    img_relative_path = img_relative_path.replace("samples", f"samples_4")
            # Get RGB
            img_path = os.path.join(self.data_root, "datasets", dataset_name, img_relative_path)
            img = Image.open(img_path).convert("RGB")
            img = self.img_transformation(img)
            images.append(img)
            # Get sky mask
            if dataset_name in ["waymo", "nuscenes", "argoverse2"]:
                # if dataset_name in ["waymo", "nuscenes"]:
                if dataset_name == "nuscenes":
                    sky_path = img_path.replace("samples", "samples_sky_mask")
                    sky_path = sky_path.replace("sweeps", "sweeps_sky_mask")
                elif dataset_name == "waymo":
                    sky_path = img_path.replace("images", "sky_masks")
                elif dataset_name == "argoverse2":
                    sky_path = img_path.replace("images_4", "sky_masks_512")
                sky_path = sky_path.replace("jpg", "png")
                try:
                    # Prefer masks stored under the alternate root, fall back
                    # to the original path if absent.
                    new_sky_path = sky_path.replace("STORM2", "STORM_masks")
                    sky = Image.open(new_sky_path).convert("L").resize(self.target_size[::-1])
                except FileNotFoundError:
                    sky = Image.open(sky_path).convert("L").resize(self.target_size[::-1])
                sky = to_tensor(np.array(sky) > 0).float()
                sky_masks.append(sky)
            # Get dynamic mask, this is for dynamic region evaluation.
            if dataset_name in ["waymo"] and self.load_dynamic_mask:
                # Bug fix: this previously read `dynamic_path` before it was
                # ever assigned (guaranteed NameError); derive it from
                # `img_path` as STORMDataset.get_frame does.
                dynamic_path = img_path.replace("images_4", "dynamic_masks/all")
                dynamic_path = dynamic_path.replace("jpg", "png")
                if not os.path.exists(dynamic_path):
                    dynamic_path = dynamic_path.replace("STORM2", "STORM")
                dynamic_mask = Image.open(dynamic_path).convert("L").resize(self.target_size[::-1])
                dynamic_mask = to_tensor(np.array(dynamic_mask) > 0).float()
                dynamic_masks.append(dynamic_mask)
            # Get ground label, this is for flow evaluation, i.e., we use this to exclude the ground lidar points.
            if dataset_name in ["waymo"] and self.load_ground_label:
                ground_path = img_path.replace("images", "ground_label")
                ground_path = ground_path.replace("jpg", "png")
                ground = Image.open(ground_path).convert("L").resize(self.target_size[::-1])
                ground = to_tensor(np.array(ground) > 0).float()
                ground_masks.append(ground)
            # Compose: dataset-specific axis fix, canonicalization, raw pose,
            # and OpenCV-convention conversion.
            camtoworld = (
                DATASETS[dataset_name]["canonical_to_flu"]
                @ world_to_canonical
                @ cam_to_world[camera][frame_idx]
                @ DATASETS[dataset_name]["opencv2dataset"]
            )
            camtoworld = to_tensor(camtoworld)
            camtoworlds.append(camtoworld)
            # intrinsics: stored normalized, rescale to target_size pixels.
            fx, fy, cx, cy = np.array(normalized_intrinsics[camera])
            fx = fx * self.target_size[1]
            fy = fy * self.target_size[0]
            cx = cx * self.target_size[1]
            cy = cy * self.target_size[0]
            intrinsics.append(
                torch.tensor(
                    [
                        [fx, 0.0, cx],
                        [0.0, fy, cy],
                        [0.0, 0.0, 1.0],
                    ]
                ).float()
            )
            if self.load_depth or self.load_flow:
                if dataset_name == "waymo":
                    # waymo stores depth (channel 0) and flow (channels 1:)
                    # in a single npy per image.
                    depth_path = img_path.replace("images", "depth_flows").replace("jpg", "npy")
                    depth_and_flow = np.load(depth_path)
                    if self.load_depth:
                        depth = depth_and_flow[..., 0]
                        depth = torch.tensor(depth).float()
                        depth = resize_depth(depth, self.target_size)
                        depths.append(depth)
                    if self.load_flow:
                        flow = depth_and_flow[..., 1:]
                        flow = torch.tensor(flow).float()
                        flow = resize_flow(flow, self.target_size)
                        # there must be a better way to do this: rotate the flow to the canonical view
                        flow = (
                            flow
                            @ torch.tensor(
                                (
                                    world_to_canonical
                                    @ cam_to_world[camera][frame_idx]
                                    @ np.linalg.inv(scene_json["camera_to_ego"][camera])
                                )
                            )
                            .float()[:3, :3]
                            .T
                        )
                        flows.append(flow)
                if dataset_name == "nuscenes":
                    depth_path = img_path.replace("samples", "samples_depth")
                    depth_path = depth_path.replace("sweeps", "sweeps_depth")
                    depth_path = depth_path.replace("jpg", "npy")
                    depth = np.load(depth_path)
                    depth = torch.tensor(depth).float()
                    depth = resize_depth(depth, self.target_size)
                    depths.append(depth)
                if dataset_name == "argoverse2":
                    depth_path = img_path.replace("images", "depths")
                    depth_path = depth_path.replace("jpg", "npy")
                    depth = np.load(depth_path)
                    depth = torch.tensor(depth).float()
                    depth = resize_depth(depth, self.target_size)
                    depths.append(depth)
        # Stack per-camera lists; never-collected signals stay None and are
        # filtered out of the returned dict.
        frame_images = torch.stack(images)
        frame_depths = torch.stack(depths) if len(depths) > 0 else None
        frame_sky_masks = torch.stack(sky_masks) if len(sky_masks) > 0 else None
        frame_flows = torch.stack(flows) if len(flows) > 0 else None
        frame_dynamic_masks = torch.stack(dynamic_masks) if len(dynamic_masks) > 0 else None
        frame_camtoworlds = torch.stack(camtoworlds)
        frame_intrinsics = torch.stack(intrinsics)
        ground_masks = torch.stack(ground_masks) if len(ground_masks) > 0 else None
        data_dict = {
            "image": frame_images,
            "camtoworld": frame_camtoworlds,
            "intrinsics": frame_intrinsics,
            "frame_idx": frame_idx,
            "depth": frame_depths,
            "sky_masks": frame_sky_masks,
            "flow": frame_flows,
            "dynamic_masks": frame_dynamic_masks,
            "ground_masks": ground_masks,
        }
        return {k: v for k, v in data_dict.items() if v is not None}

    def get_segment(self, index: int, context_frame_idx: int = -1, return_all=False):
        """Build one context/target sample starting at ``context_frame_idx``.

        Same sampling logic as ``STORMDataset.__getitem__`` (without the
        error-retry wrapper and without ``return_context_as_target``).
        """
        scene_json = self.annotations[index % len(self.annotations)]
        scene_id = scene_json["scene_id"]
        num_timesteps = scene_json["num_timesteps"]
        fps = scene_json["fps"]
        num_max_future_frames = int(self.timespan * fps)
        if num_max_future_frames > num_timesteps:
            num_max_future_frames = int(fps)  # make it 1 seconds
        time_in_seconds = scene_json["normalized_time"]
        if context_frame_idx < 0:
            context_frame_idx = np.random.randint(0, num_timesteps - num_max_future_frames)
        if context_frame_idx + num_max_future_frames >= num_timesteps:
            # Requested start would run past the scene; resample.
            context_frame_idx = np.random.randint(0, num_timesteps - num_max_future_frames)
        assert (
            context_frame_idx + num_max_future_frames < num_timesteps
        ), f"scene_id: {scene_id}, context_frame_idx: {context_frame_idx}, num_timesteps: {num_timesteps}, num_max_future_frames: {num_max_future_frames}"
        if self.equispaced:
            # Evenly spaced context frames across the window.
            context_frame_idx = np.arange(
                context_frame_idx,
                context_frame_idx + num_max_future_frames,
                num_max_future_frames // self.num_context_timesteps,
            )
        else:
            # Randomly placed context frames within the window.
            context_frame_idx = np.random.choice(
                np.arange(
                    context_frame_idx,
                    context_frame_idx + num_max_future_frames,
                ),
                size=self.num_context_timesteps,
                replace=False,
            )
        context_frame_idx = sorted(context_frame_idx)
        if return_all:
            # return all frames between context_frame_idx and context_frame_idx + num_max_future_frames
            target_frame_idx = np.arange(
                context_frame_idx[0],
                context_frame_idx[0] + num_max_future_frames,
            )
        else:
            # randomly sample "num_target_timesteps" frames
            target_frame_idx = np.random.choice(
                np.arange(
                    context_frame_idx[0],
                    context_frame_idx[0] + num_max_future_frames,
                ),
                self.num_target_timesteps,
                replace=False,
            )
        target_frame_idx = [min(idx, num_timesteps - 1) for idx in target_frame_idx]
        target_frame_idx = sorted(target_frame_idx)
        # get context
        context_dict_list = []
        for ctx_id in context_frame_idx:
            context_dict = self.get_frame(
                scene_json=scene_json,
                frame_idx=ctx_id,
                source_frame_idx=context_frame_idx[0],
            )
            # Relative timestamps (seconds since the first context frame),
            # replicated once per camera.
            context_dict["time"] = torch.tensor(
                [time_in_seconds[ctx_id] - time_in_seconds[context_frame_idx[0]]]
                * self.num_max_cams
            )
            context_dict_list.append(context_dict)
        target_dict_list = []
        for target_id in target_frame_idx:
            target_dict = self.get_frame(
                scene_json=scene_json,
                frame_idx=target_id,
                source_frame_idx=context_frame_idx[0],
            )
            target_dict["time"] = torch.tensor(
                [time_in_seconds[target_id] - time_in_seconds[context_frame_idx[0]]]
                * self.num_max_cams
            )
            target_dict_list.append(target_dict)
        context_dict = default_collate(context_dict_list)
        target_dict = default_collate(target_dict_list)
        # Flatten the leading (time, camera) stacking produced by collate.
        for k, v in context_dict.items():
            if isinstance(v, torch.Tensor) and len(v.shape) >= 2:
                context_dict[k] = torch.cat([d for d in v], dim=0)
        for k, v in target_dict.items():
            if isinstance(v, torch.Tensor) and len(v.shape) >= 2:
                target_dict[k] = torch.cat([d for d in v], dim=0)
        sample = {
            "context": context_dict,
            "target": target_dict,
            "scene_id": scene_id,
            "scene_name": scene_json["scene_name"],
            "width": self.target_size[1],
            "height": self.target_size[0],
            "fps": fps,
            "timespan": self.timespan,
        }
        return to_float_tensor(sample)

    def __getitem__(self, index: int, start_index: int = 0, end_index: int = -1) -> Dict[str, Any]:
        """Return a list of segments covering frames [start_index, end_index).

        One segment is created every 20 frames; each carries
        ``segment_to_ref``, the transform from that segment's canonical frame
        back to the canonical frame at ``start_index``.
        """
        scene_json = self.annotations[index % len(self.annotations)]
        num_timesteps = scene_json["num_timesteps"]
        if end_index < 0:
            end_index = num_timesteps
        segment_data = []
        cam_to_world = scene_json["camera_to_world"]
        ref_camera_name = DATASET_DICT[scene_json["dataset"]]["ref_camera"]
        world_to_canonical = np.linalg.inv(cam_to_world[ref_camera_name][start_index])
        for start_id in trange(start_index, end_index, 20):
            segment_dict = self.get_segment(
                index=index, context_frame_idx=start_id, return_all=True
            )
            # compute the relative transformation from a start_id to the first frame
            current_world = cam_to_world[ref_camera_name][start_id]
            segment_to_ref = world_to_canonical @ current_world
            segment_dict["segment_to_ref"] = to_float_tensor(segment_to_ref)
            segment_data.append(segment_dict)
        return segment_data
================================================
FILE: storm/models/__init__.py
================================================
from .storm import STORM_models
from .vit import ViT_models
================================================
FILE: storm/models/decoder.py
================================================
import logging
from typing import Dict
import torch
import torch.nn as nn
from einops import rearrange, repeat
from torch import Tensor
from torch.utils.checkpoint import checkpoint
from .layers import GroupNorm, NonLocalBlock, ResidualBlock, Swish, UpSampleBlock, modulate
logger = logging.getLogger("STORM")
def check_results(result_dict) -> bool:
    """Assert that every key the render/decoder pipeline contracts on is present.

    Args:
        result_dict: Mapping produced by rendering + decoding.

    Returns:
        True when all required keys are present.

    Raises:
        AssertionError: naming the first missing key.
    """
    required_keys = (
        "rgb_key",
        "depth_key",
        "alpha_key",
        "flow_key",
        "decoder_depth_key",
        "decoder_alpha_key",
        "decoder_flow_key",
    )
    for key in required_keys:
        assert key in result_dict, f"{key} not found in result_dict"
    return True
class Decoder(nn.Module):
    """Convolutional decoder that maps rendered latent feature maps to RGB-D.

    A stem (conv + residual + non-local attention + residual) is followed by
    per-stage residual blocks with upsampling between stages, and a final
    conv producing 4 channels: RGB (3) + depth (1).
    """

    def __init__(
        self,
        latent_dim,
        channels=[512, 256, 256, 128, 128],
        num_res_blocks=3,
    ):
        super().__init__()
        width = channels[0]
        blocks = [
            nn.Conv2d(latent_dim, width, 3, 1, 1),
            ResidualBlock(width, width),
            NonLocalBlock(width),
            ResidualBlock(width, width),
        ]
        for stage, stage_width in enumerate(channels):
            for _ in range(num_res_blocks):
                blocks.append(ResidualBlock(width, stage_width))
                width = stage_width
            # No upsampling before the first stage.
            if stage != 0:
                blocks.append(UpSampleBlock(width))
        blocks.append(GroupNorm(width))
        blocks.append(Swish())
        # rgb-d head
        blocks.append(nn.Conv2d(width, 4, 3, 1, 1))
        self.model = nn.Sequential(*blocks)

    def forward(self, render_results):
        """Decode ``rendered_image`` and register decoded outputs.

        Flattens (batch, time, view) into one batch axis for the conv stack,
        then restores it and splits channels into image (3) and depth (1).
        """
        latents = render_results["rendered_image"]
        b, t, v, h, w, c = latents.shape
        flat = rearrange(latents, "b t v h w c -> (b t v) c h w")
        decoded = self.model(flat)
        decoded = rearrange(decoded, "(b t v) c h w -> b t v h w c", b=b, t=t, v=v)
        render_results.update(
            {
                "decoded_image": decoded[..., :3],
                "decoded_depth": decoded[..., 3],
                "rgb_key": "decoded_image",
                "decoder_depth_key": "decoded_depth",
            }
        )
        if not check_results(render_results):
            raise ValueError("Invalid result dict")
        return render_results
class ConvDecoder(nn.Module):
    """Convolutional decoder with opacity-aware mask-token infilling.

    Low-opacity pixels of the projected latent map are blended with a learned
    mask token before decoding, so unobserved regions are filled from a
    learned prior. Optionally applies gradient checkpointing per layer.
    """

    def __init__(
        self,
        latent_dim,
        out_channels=3,
        channels=[512, 256, 256, 128, 128],
        num_res_blocks=3,
        grad_checkpointing=False,
    ):
        super().__init__()
        width = channels[0]
        self.input_projection_layer = nn.Conv2d(latent_dim, width, 3, 1, 1)
        blocks = [
            ResidualBlock(width, width),
            NonLocalBlock(width),
            ResidualBlock(width, width),
        ]
        for stage, stage_width in enumerate(channels):
            for _ in range(num_res_blocks):
                blocks.append(ResidualBlock(width, stage_width))
                width = stage_width
            # No upsampling before the first stage.
            if stage != 0:
                blocks.append(UpSampleBlock(width))
        blocks.append(GroupNorm(width))
        blocks.append(Swish())
        # rgb-d head (4 channels; only the first `out_channels` are used).
        blocks.append(nn.Conv2d(width, 4, 3, 1, 1))
        self.layers = nn.ModuleList(blocks)
        self.out_channels = out_channels
        self.grad_checkpointing = grad_checkpointing
        logger.info(f"ConvDecoder: grad_checkpointing: {grad_checkpointing}")
        self.mask_token = nn.Parameter(torch.randn(channels[0]) * 0.02)

    def forward(self, render_results) -> Dict[str, Tensor]:
        """Decode rendered latents, infilling low-opacity pixels with the mask token."""
        feats = render_results["rendered_image"]
        opacity = render_results["rendered_alpha"]
        b, t, v, h, w, c = feats.shape
        feats = rearrange(feats, "b t v h w c -> (b t v) c h w")
        opacity = rearrange(opacity, "b t v h w -> (b t v) h w")
        feats = self.input_projection_layer(feats)
        infill = repeat(
            self.mask_token,
            "d -> b d h w",
            b=feats.shape[0],
            h=feats.shape[-2],
            w=feats.shape[-1],
        )
        # Alpha-blend projected features with the learned mask token.
        alpha = opacity.unsqueeze(1)
        x = feats * alpha + infill * (1 - alpha)
        # Run layers one by one so each can be gradient-checkpointed.
        for layer in self.layers:
            if self.grad_checkpointing and not torch.jit.is_scripting():
                x = checkpoint(layer, x)
            else:
                x = layer(x)
        x = rearrange(x, "(b t v) c h w -> b t v h w c", b=b, t=t, v=v)
        outputs = {}
        if self.out_channels == 3:
            outputs["decoded_image"] = x[..., :3]
            outputs["rgb_key"] = "decoded_image"
        elif self.out_channels == 4:
            outputs["decoded_image"] = x[..., :3]
            outputs["decoded_depth"] = x[..., 3]
            outputs["rgb_key"] = "decoded_image"
            outputs["decoder_depth_key"] = "decoded_depth"
        else:
            # TODO: support other channel layouts.
            raise ValueError("Invalid out_channels")
        render_results.update(outputs)
        if not check_results(render_results):
            raise ValueError("Invalid result dict")
        return render_results
class ModulatedLinearLayer(nn.Module):
    """Single linear layer with adaLN-style conditioning.

    The input is projected to a hidden width, layer-normalized (without
    learned affine), shifted/scaled by parameters regressed from the
    condition vector, and projected to the output width.
    """

    def __init__(self, in_channels, hidden_channels=64, condition_channels=768, out_channels=3):
        super().__init__()
        self.linear = nn.Linear(in_channels, hidden_channels)
        self.norm = nn.LayerNorm(hidden_channels, elementwise_affine=False, eps=1e-6)
        self.adaLN_modulation = nn.Sequential(
            nn.SiLU(), nn.Linear(hidden_channels, 2 * hidden_channels, bias=True)
        )
        self.condition_mapping = nn.Linear(condition_channels, hidden_channels)
        self.output = nn.Linear(hidden_channels, out_channels)

    def forward(self, x, c):
        """Apply the conditioned projection; preserves x's leading shape."""
        hidden = self.linear(x)
        cond = self.condition_mapping(c.squeeze(1))
        shift, scale = self.adaLN_modulation(cond).chunk(2, dim=-1)
        # Flatten all middle dims so modulation broadcasts per batch element.
        hidden_shape = hidden.shape
        flat = hidden.reshape(hidden_shape[0], -1, hidden.shape[-1])
        modulated = modulate(self.norm(flat), shift, scale)
        out = self.output(modulated)
        return out.reshape(*hidden_shape[:-1], -1)
class ModulatedMLP(nn.Module):
    """MLP head with adaLN-style conditioning.

    Identical to :class:`ModulatedLinearLayer` except the output projection
    is an MLP of ``num_layers`` linear layers with SiLU between them.
    """

    def __init__(
        self,
        in_channels,
        hidden_channels=64,
        condition_channels=768,
        out_channels=3,
        num_layers=1,
    ):
        super().__init__()
        self.linear = nn.Linear(in_channels, hidden_channels)
        self.norm = nn.LayerNorm(hidden_channels, elementwise_affine=False, eps=1e-6)
        self.adaLN_modulation = nn.Sequential(
            nn.SiLU(), nn.Linear(hidden_channels, 2 * hidden_channels, bias=True)
        )
        self.condition_mapping = nn.Linear(condition_channels, hidden_channels)
        # Hidden layers with SiLU, then a final projection to out_channels.
        output_layers = []
        for _ in range(num_layers - 1):
            output_layers.append(nn.Linear(hidden_channels, hidden_channels))
            output_layers.append(nn.SiLU())
        output_layers.append(nn.Linear(hidden_channels, out_channels))
        self.output = nn.Sequential(*output_layers)

    def forward(self, x, c):
        """Apply the conditioned MLP; preserves x's leading shape."""
        hidden = self.linear(x)
        cond = self.condition_mapping(c.squeeze(1))
        shift, scale = self.adaLN_modulation(cond).chunk(2, dim=-1)
        # Flatten all middle dims so modulation broadcasts per batch element.
        hidden_shape = hidden.shape
        flat = hidden.reshape(hidden_shape[0], -1, hidden.shape[-1])
        modulated = modulate(self.norm(flat), shift, scale)
        out = self.output(modulated)
        return out.reshape(*hidden_shape[:-1], -1)
class DummyDecoder(nn.Module):
    """No-op decoder: validates the render results and passes them through."""

    def __init__(self, **kwargs):
        # Accepts (and ignores) any decoder kwargs for config compatibility.
        super().__init__()

    def forward(self, render_results):
        if not check_results(render_results):
            raise ValueError("Invalid result dict")
        return render_results
================================================
FILE: storm/models/embedders.py
================================================
# --------------------------------------------------------
# References:
# timm: https://github.com/rwightman/pytorch-image-models/tree/master/timm
# --------------------------------------------------------
import math
from typing import Callable, Literal, Optional, Tuple, Union
import torch
import torch.nn as nn
import torch.nn.functional as F
from einops import rearrange
from torch import Tensor
from storm.utils.misc import to_2tuple
class PatchEmbed(nn.Module):
    """2D Image to Patch Embedding.

    Projects an image into a grid of patch tokens with a single strided
    Conv2d, optionally right/bottom padding so any input size divides
    evenly (``dynamic_img_pad``) and optionally reformatting the output
    (``output_fmt``).
    """

    # TorchScript: declared Final so scripted code can branch on it.
    dynamic_img_pad: torch.jit.Final[bool]

    def __init__(
        self,
        img_size: Optional[int] = 224,
        patch_size: int = 16,
        in_chans: int = 3,
        embed_dim: int = 768,
        norm_layer: Optional[Callable] = None,
        output_fmt: Literal["NCHW", "NHWC", "NLC", "NCL"] = "NCHW",
        bias: bool = True,
        dynamic_img_pad: bool = False,
    ):
        super().__init__()
        self.patch_size = to_2tuple(patch_size)
        if img_size is not None:
            self.img_size = to_2tuple(img_size)
            # Patches per side and in total, for the nominal image size.
            self.grid_size = tuple([s // p for s, p in zip(self.img_size, self.patch_size)])
            self.num_patches = self.grid_size[0] * self.grid_size[1]
        else:
            # Image size unknown up-front; grid size is computed per call.
            self.img_size = None
            self.grid_size = None
            self.num_patches = None
        self.output_fmt = output_fmt
        self.dynamic_img_pad = dynamic_img_pad
        # Patchify + embed in one strided convolution.
        self.proj = nn.Conv2d(in_chans, embed_dim, patch_size, stride=patch_size, bias=bias)
        self.norm = norm_layer(embed_dim) if norm_layer else None

    def feat_ratio(self, as_scalar=True) -> Union[Tuple[int, int], int]:
        """Pixel-to-feature downsampling ratio (max side when scalar)."""
        if as_scalar:
            return max(self.patch_size)
        else:
            return self.patch_size

    def dynamic_feat_size(self, img_size: Tuple[int, int]) -> Tuple[int, int]:
        """Get grid (feature) size for given image size taking account of dynamic padding.
        NOTE: must be torchscript compatible so using fixed tuple indexing
        """
        if self.dynamic_img_pad:
            return math.ceil(img_size[0] / self.patch_size[0]), math.ceil(
                img_size[1] / self.patch_size[1]
            )
        else:
            return img_size[0] // self.patch_size[0], img_size[1] // self.patch_size[1]

    def forward(self, x: Tensor) -> Tensor:
        B, C, H, W = x.shape
        if self.dynamic_img_pad:
            # Right/bottom-pad so H and W become multiples of the patch size.
            pad_h = (self.patch_size[0] - H % self.patch_size[0]) % self.patch_size[0]
            pad_w = (self.patch_size[1] - W % self.patch_size[1]) % self.patch_size[1]
            x = F.pad(x, (0, pad_w, 0, pad_h))
        # default output format is NCHW
        x = self.proj(x)
        if self.output_fmt == "NHWC":
            x = rearrange(x, "B C H W -> B H W C")
        elif self.output_fmt == "NLC":
            x = rearrange(x, "B C H W -> B (H W) C")
        elif self.output_fmt == "NCL":
            x = rearrange(x, "B C H W -> B C (H W)")
        if self.norm:
            x = self.norm(x)
        return x
class PluckerEmbedder(nn.Module):
    """
    Convert rays to plucker embedding.

    Precomputes a pixel-center grid for ``img_size`` and, in ``forward``,
    builds per-pixel ray origins/directions for each camera plus their
    Plucker coordinates (cross(origin, viewdir) concatenated with viewdir).
    """

    def __init__(
        self,
        img_size: Optional[int] = 224,
        patch_size: int = 1,
    ):
        super().__init__()
        self.patch_size = to_2tuple(patch_size)
        self.img_size = to_2tuple(img_size)
        self.grid_size = tuple([s // p for s, p in zip(self.img_size, self.patch_size)])
        # Pixel-center coordinates (+0.5), flattened; registered as buffers so
        # they follow the module across devices.
        x, y = torch.meshgrid(
            torch.arange(self.grid_size[1]),
            torch.arange(self.grid_size[0]),
            indexing="xy",
        )
        x = x.float().reshape(1, -1) + 0.5
        y = y.float().reshape(1, -1) + 0.5
        self.register_buffer("x", x)
        self.register_buffer("y", y)

    def forward(
        self,
        intrinsics: Tensor,
        camtoworlds: Tensor,
        image_size: Optional[Union[int, Tuple[int, int]]] = None,
        patch_size: Optional[Union[int, Tuple[int, int]]] = None,
    ) -> dict:
        """Compute per-pixel ray quantities for a batch of cameras.

        Args:
            intrinsics: (..., 3, 3) camera intrinsics.
            camtoworlds: (..., 4, 4) camera-to-world transforms.
            image_size / patch_size: optional overrides of the constructor
                values; when the resulting grid differs from the cached one,
                the pixel grid is rebuilt and the intrinsics rescaled.

        Returns:
            dict with "origins", "viewdirs", "dirs" of shape (..., H, W, 3)
            and "plucker" of shape (..., H, W, 6).
        """
        assert intrinsics.shape[-2:] == (3, 3), "intrinsics should be (B, 3, 3)"
        assert camtoworlds.shape[-2:] == (4, 4), "camtoworlds should be (B, 4, 4)"
        intrinsics_shape = intrinsics.shape
        # Flatten all leading dims into a single batch dim.
        intrinsics = intrinsics.reshape(-1, 3, 3)
        camtoworlds = camtoworlds.reshape(-1, 4, 4)
        if image_size is not None:
            image_size = to_2tuple(image_size)
        else:
            image_size = self.img_size
        if patch_size is not None:
            patch_size = to_2tuple(patch_size)
        else:
            patch_size = self.patch_size
        grid_size = tuple([s // p for s, p in zip(image_size, patch_size)])
        if grid_size != self.grid_size:
            # Requested grid differs from the cached one: rebuild pixel centers.
            grid_size = tuple([s // p for s, p in zip(image_size, patch_size)])
            x, y = torch.meshgrid(
                torch.arange(grid_size[1]),
                torch.arange(grid_size[0]),
                indexing="xy",
            )
            x = x.float().reshape(1, -1) + 0.5
            y = y.float().reshape(1, -1) + 0.5
            x = x.to(intrinsics.device)
            y = y.to(intrinsics.device)
            # Clone before scaling so the caller's intrinsics are untouched.
            intrinsics = intrinsics.clone()
            # intrinsics should be scaled to the grid size
            intrinsics[..., 0, 0] = intrinsics[..., 0, 0] / patch_size[1]
            intrinsics[..., 0, 2] = intrinsics[..., 0, 2] / patch_size[1]
            intrinsics[..., 1, 1] = intrinsics[..., 1, 1] / patch_size[0]
            intrinsics[..., 1, 2] = intrinsics[..., 1, 2] / patch_size[0]
        else:
            x, y = self.x, self.y
        x = x.repeat(intrinsics.size(0), 1)
        y = y.repeat(intrinsics.size(0), 1)
        # Unproject pixel centers to camera-space directions (z padded to 1).
        camera_dirs = torch.nn.functional.pad(
            torch.stack(
                [
                    (x - intrinsics[:, 0, 2][..., None] + 0.5) / intrinsics[:, 0, 0][..., None],
                    (y - intrinsics[:, 1, 2][..., None] + 0.5) / intrinsics[:, 1, 1][..., None],
                ],
                dim=-1,
            ),
            (0, 1),
            value=1.0,
        )
        # Rotate into world space; origins are the camera centers, broadcast per pixel.
        directions = torch.sum(camera_dirs[:, :, None, :] * camtoworlds[:, None, :3, :3], dim=-1)
        origins = torch.broadcast_to(camtoworlds[:, :3, -1].unsqueeze(1), directions.shape)
        direction_norm = torch.linalg.norm(directions, dim=-1, keepdims=True)
        viewdirs = directions / (direction_norm + 1e-8)
        # Plucker coordinates: (origin x viewdir, viewdir).
        cross_prod = torch.cross(origins, viewdirs, dim=-1)
        plucker = torch.cat((cross_prod, viewdirs), dim=-1)
        origins = rearrange(origins, "b (h w) c -> b h w c", h=grid_size[0])
        viewdirs = rearrange(viewdirs, "b (h w) c -> b h w c", h=grid_size[0])
        directions = rearrange(directions, "b (h w) c -> b h w c", h=grid_size[0])
        plucker = rearrange(plucker, "b (h w) c -> b h w c", h=grid_size[0])
        return {
            "origins": origins.view(*intrinsics_shape[:-2], *grid_size, 3),
            "viewdirs": viewdirs.view(*intrinsics_shape[:-2], *grid_size, 3),
            "dirs": directions.view(*intrinsics_shape[:-2], *grid_size, 3),
            "plucker": plucker.view(*intrinsics_shape[:-2], *grid_size, 6),
        }
class TimestepEmbedder(nn.Module):
    """
    From DiT.
    Maps scalar timesteps to vector embeddings: sinusoidal features
    followed by a two-layer SiLU MLP.
    """

    def __init__(self, hidden_size, frequency_embedding_size=256):
        super().__init__()
        self.mlp = nn.Sequential(
            nn.Linear(frequency_embedding_size, hidden_size, bias=True),
            nn.SiLU(inplace=True),
            nn.Linear(hidden_size, hidden_size, bias=True),
        )
        self.frequency_embedding_size = frequency_embedding_size

    @staticmethod
    def timestep_embedding(t, dim, max_period=10000):
        """
        Create sinusoidal timestep embeddings.
        :param t: a 1-D Tensor of N indices, one per batch element.
                  These may be fractional.
        :param dim: the dimension of the output.
        :param max_period: controls the minimum frequency of the embeddings.
        :return: an (N, D) Tensor of positional embeddings.
        """
        # https://github.com/openai/glide-text2im/blob/main/glide_text2im/nn.py
        half = dim // 2
        exponents = torch.arange(start=0, end=half, dtype=torch.float32) / half
        freqs = torch.exp(-math.log(max_period) * exponents).to(device=t.device)
        angles = t[:, None].float() * freqs[None]
        embedding = torch.cat([torch.cos(angles), torch.sin(angles)], dim=-1)
        if dim % 2 == 1:
            # Odd dim: pad one zero channel so the output width is exactly `dim`.
            embedding = torch.cat([embedding, torch.zeros_like(embedding[:, :1])], dim=-1)
        return embedding

    def forward(self, t):
        return self.mlp(self.timestep_embedding(t, self.frequency_embedding_size))
class NeRFPosEmbedder(nn.Module):
    """NeRF-style sinusoidal positional encoding over a log-spaced frequency bank."""

    def __init__(
        self,
        in_dim: int,
        num_frequencies: int,
        min_freq_exp: int = 0,
        max_freq_exp: int = 8,
        include_input: bool = False,
    ) -> None:
        super().__init__()
        self.in_dim = in_dim
        self.num_frequencies = num_frequencies
        self.min_freq = min_freq_exp
        self.max_freq = max_freq_exp
        self.include_input = include_input
        # 2^min_freq .. 2^max_freq, `num_frequencies` values; a buffer so the
        # bank moves with the module across devices.
        self.register_buffer(
            "freqs", 2 ** torch.linspace(self.min_freq, self.max_freq, self.num_frequencies)
        )

    def get_out_dim(self) -> int:
        # sin + cos per (input dim, frequency), plus the raw input if requested.
        extra = self.in_dim if self.include_input else 0
        return 2 * self.in_dim * self.num_frequencies + extra

    def forward(self, in_tensor: Tensor) -> Tensor:
        # Map to [0, 2*pi], then scale by each frequency and flatten the
        # (dim, frequency) axes into one feature axis.
        scaled = (2 * torch.pi * in_tensor)[..., None] * self.freqs
        scaled = scaled.view(*scaled.shape[:-2], -1)
        # sin(x + pi/2) == cos(x): a single sin call yields both halves.
        encoded = torch.sin(torch.cat([scaled, scaled + torch.pi / 2.0], dim=-1))
        if self.include_input:
            encoded = torch.cat([encoded, in_tensor], dim=-1)
        return encoded
================================================
FILE: storm/models/layers.py
================================================
import logging
import math
from functools import partial
from typing import List, Optional
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from einops import rearrange
from torch import Tensor
from torch.utils.checkpoint import checkpoint
logger = logging.getLogger("STORM")
def modulate(x, shift=None, scale=None):
    """Apply adaLN-style affine modulation: ``x * (1 + scale) + shift``.

    `shift` and `scale` are per-sample vectors (B, C), broadcast over the
    token dimension of `x` (B, N, C) via ``unsqueeze(1)``. Either may be
    None independently, in which case that half of the modulation is a
    no-op. (The original guard only handled *both* being None and raised
    AttributeError when exactly one was provided.)
    """
    if scale is not None:
        x = x * (1 + scale.unsqueeze(1))
    if shift is not None:
        x = x + shift.unsqueeze(1)
    return x
class Mlp(nn.Module):
    """Two-layer perceptron: fc1 -> activation -> fc2.

    Hidden and output widths default to `in_dim` when not given.
    """

    def __init__(self, in_dim: int, hidden_dim=None, out_dim=None, act_layer=nn.GELU):
        super().__init__()
        hidden = hidden_dim or in_dim
        self.fc1 = nn.Linear(in_dim, hidden)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden, out_dim or in_dim)

    def forward(self, x):
        h = self.act(self.fc1(x))
        return self.fc2(h)
class Attention(nn.Module):
    """Multi-head self- or cross-attention with optional per-head QK-norm."""

    _logged = False  # emit the fused-attention notice only once per process

    def __init__(
        self,
        dim: int,
        num_heads: int = 8,
        qk_norm: bool = False,
        norm_layer: nn.Module = nn.LayerNorm,
        is_cross_attn: bool = False,
    ) -> None:
        super().__init__()
        assert dim % num_heads == 0, f"dim % num_heads !=0, got {dim} and {num_heads}"
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        self.scale = self.head_dim**-0.5
        self.is_cross_attn = is_cross_attn
        # Prefer PyTorch's fused SDPA kernel when the runtime provides it.
        self.fused_attn = hasattr(torch.nn.functional, "scaled_dot_product_attention")
        if self.fused_attn and not Attention._logged:
            Attention._logged = True
            logger.info(f"[Attention]: Using {torch.__version__} Fused Attention")
        if is_cross_attn:
            self.c_q = nn.Linear(dim, dim)  # context to q
            self.c_kv = nn.Linear(dim, dim * 2)  # context to kv
        else:
            self.qkv = nn.Linear(dim, dim * 3)
        self.q_norm = norm_layer(self.head_dim) if qk_norm else nn.Identity()
        self.k_norm = norm_layer(self.head_dim) if qk_norm else nn.Identity()
        self.proj = nn.Linear(dim, dim)

    def forward(self, x: Tensor, data: Tensor = None) -> Tensor:
        batch, n_tokens, channels = x.shape
        if self.is_cross_attn:
            assert data is not None, "data should not be None for cross attn"
            # Queries come from x; keys/values from the data stream.
            q = (
                self.c_q(x)
                .view(batch, n_tokens, self.num_heads, self.head_dim)
                .permute(0, 2, 1, 3)
            )
            kv = self.c_kv(data)
            kv = kv.view(batch, kv.shape[1], 2, self.num_heads, self.head_dim)
            k, v = kv.permute(2, 0, 3, 1, 4).unbind(dim=0)
        else:
            qkv = self.qkv(x).reshape(batch, n_tokens, 3, self.num_heads, self.head_dim)
            q, k, v = qkv.permute(2, 0, 3, 1, 4).unbind(dim=0)
        q, k = self.q_norm(q), self.k_norm(k)
        if self.fused_attn:
            out = F.scaled_dot_product_attention(q, k, v)
        else:
            # Manual fallback: scaled dot-product + softmax.
            weights = ((q * self.scale) @ k.transpose(-2, -1)).softmax(dim=-1)
            out = weights @ v
        out = out.transpose(1, 2).reshape(batch, n_tokens, channels)
        return self.proj(out)
class Block(nn.Module):
    """Pre-norm transformer block: (self- or cross-)attention then MLP,
    each with a residual connection."""

    _logged = False  # kept for parity with Attention; not used here

    def __init__(
        self,
        dim: int,
        num_heads: int,
        mlp_ratio: float = 4.0,
        qk_norm: bool = False,
        act_layer: nn.Module = nn.GELU,
        norm_layer: nn.Module = partial(nn.LayerNorm, eps=1e-6),
        use_cross_attn: bool = False,
    ) -> None:
        super().__init__()
        self.use_cross_attn = use_cross_attn
        self.norm1 = norm_layer(dim)
        self.attn = Attention(
            dim,
            num_heads=num_heads,
            qk_norm=qk_norm,
            norm_layer=norm_layer,
            is_cross_attn=use_cross_attn,
        )
        # The cross-attended data stream gets its own norm; absent otherwise.
        self.data_norm = norm_layer(dim) if use_cross_attn else None
        self.norm2 = norm_layer(dim)
        self.mlp = Mlp(in_dim=dim, hidden_dim=int(dim * mlp_ratio), act_layer=act_layer)

    def forward(self, x: Tensor, data: Tensor = None) -> Tensor:
        if self.use_cross_attn:
            attended = self.attn(self.norm1(x), self.data_norm(data))
        else:
            attended = self.attn(self.norm1(x))
        x = x + attended
        return x + self.mlp(self.norm2(x))
class Transformer(nn.Module):
    """A stack of `depth` pre-norm self-attention Blocks, with optional
    activation checkpointing during training."""

    def __init__(
        self,
        embed_dim: int = 768,
        depth: int = 12,
        num_heads: int = 12,
        mlp_ratio: float = 4.0,
        qk_norm: bool = False,
        norm_layer: nn.Module = partial(nn.LayerNorm, eps=1e-6),
        grad_checkpointing: bool = False,
    ):
        super().__init__()
        self.embed_dim = embed_dim
        self.depth = depth
        self.blocks = nn.ModuleList(
            Block(
                dim=embed_dim,
                num_heads=num_heads,
                mlp_ratio=mlp_ratio,
                qk_norm=qk_norm,
                norm_layer=norm_layer,
            )
            for _ in range(depth)
        )
        self.grad_checkpointing = grad_checkpointing
        logger.info(f"[Transformer]: grad_checkpointing={grad_checkpointing}")

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Checkpoint each block while training to trade compute for memory.
        use_checkpoint = self.grad_checkpointing and self.training
        for block in self.blocks:
            x = checkpoint(block, x) if use_checkpoint else block(x)
        return x
######## Conv Layers ########
class GroupNorm(nn.Module):
    """Thin wrapper around nn.GroupNorm pinned to 32 groups and eps=1e-6."""

    def __init__(self, channels):
        super().__init__()
        self.gn = nn.GroupNorm(num_groups=32, num_channels=channels, eps=1e-6, affine=True)

    def forward(self, x):
        return self.gn(x)
class Swish(nn.Module):
    """Swish / SiLU activation: x * sigmoid(x)."""

    def forward(self, x):
        return torch.sigmoid(x) * x
class ResidualBlock(nn.Module):
    """Two (GroupNorm -> Swish -> 3x3 Conv) stages with a residual connection.

    When the channel counts differ, a 1x1 conv (`channel_up`) projects the
    input so the residual addition is well-defined.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.block = nn.Sequential(
            GroupNorm(in_channels),
            Swish(),
            nn.Conv2d(in_channels, out_channels, 3, 1, 1),
            GroupNorm(out_channels),
            Swish(),
            nn.Conv2d(out_channels, out_channels, 3, 1, 1),
        )
        if in_channels != out_channels:
            self.channel_up = nn.Conv2d(in_channels, out_channels, 1, 1, 0)

    def forward(self, x):
        residual = x if self.in_channels == self.out_channels else self.channel_up(x)
        return residual + self.block(x)
class UpSampleBlock(nn.Module):
    """2x spatial upsampling (interpolation) followed by a 3x3 convolution."""

    def __init__(self, channels):
        super().__init__()
        self.conv = nn.Conv2d(channels, channels, 3, 1, 1)

    def forward(self, x):
        upsampled = F.interpolate(x, scale_factor=2.0)
        return self.conv(upsampled)
class NonLocalBlock(nn.Module):
    """Spatial self-attention over a feature map, added back residually."""

    def __init__(self, channels):
        super().__init__()
        self.in_channels = channels
        assert channels % 8 == 0, "channels must be divisible by 8"
        self.gn = GroupNorm(channels)
        self.attention = Attention(dim=channels, num_heads=8)

    def forward(self, x):
        height, width = x.shape[-2], x.shape[-1]
        # Normalize, flatten to tokens, attend, then fold back to a map.
        attended = rearrange(self.gn(x), "b c h w -> b (h w) c")
        attended = self.attention(attended)
        attended = rearrange(attended, "b (h w) c -> b c h w", h=height, w=width)
        return x + attended
class LayerNorm2d(nn.Module):
    """LayerNorm over the channel dimension of NCHW tensors."""

    def __init__(self, num_channels: int, eps: float = 1e-6) -> None:
        super().__init__()
        self.weight = nn.Parameter(torch.ones(num_channels))
        self.bias = nn.Parameter(torch.zeros(num_channels))
        self.eps = eps

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Normalize each spatial position across channels (dim 1), then apply
        # the learned per-channel affine.
        mean = x.mean(1, keepdim=True)
        var = (x - mean).pow(2).mean(1, keepdim=True)
        normed = (x - mean) / torch.sqrt(var + self.eps)
        return self.weight[:, None, None] * normed + self.bias[:, None, None]
def pos_enc(x, min_deg=0, max_deg=10, append_identity=True):
    """The positional encoding used by the original NeRF paper.

    Frequencies are 2^min_deg .. 2^(max_deg-1); when `append_identity` is
    True the raw input is prepended to the sinusoidal features.
    """
    scales = (2 ** torch.arange(min_deg, max_deg).float()).to(x.device)
    # (..., d) -> (..., num_scales * d)
    scaled_x = (x[..., None, :] * scales[:, None]).reshape(*x.shape[:-1], -1)
    # sin(y + pi/2) == cos(y); note we're not using safe_sin, unlike IPE.
    four_feat = torch.sin(torch.concat([scaled_x, scaled_x + 0.5 * np.pi], dim=-1))
    return torch.concat([x, four_feat], dim=-1) if append_identity else four_feat
def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False, num_extra_tokens=1):
    """
    grid_size: int of the grid height and width (or an [h, w] pair)
    return:
    pos_embed: [grid_size*grid_size, embed_dim] or
               [num_extra_tokens+grid_size*grid_size, embed_dim]
               (without / with cls_token)
    """
    if isinstance(grid_size, int):
        grid_size = [grid_size, grid_size]
    grid_h = np.arange(grid_size[0], dtype=np.float32)
    grid_w = np.arange(grid_size[1], dtype=np.float32)
    # here w goes first (meshgrid 'xy' convention)
    grid = np.stack(np.meshgrid(grid_w, grid_h), axis=0)
    grid = grid.reshape([2, 1, grid_size[0], grid_size[1]])
    pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)
    if cls_token and num_extra_tokens > 0:
        # Zero embeddings for cls/extra tokens, prepended.
        prefix = np.zeros([num_extra_tokens, embed_dim])
        pos_embed = np.concatenate([prefix, pos_embed], axis=0)
    return pos_embed
def get_2d_sincos_pos_embed_from_grid(embed_dim, grid):
    """Split the embedding dim in half: the first half encodes the h
    coordinates, the second half the w coordinates."""
    assert embed_dim % 2 == 0
    half = embed_dim // 2
    emb_h = get_1d_sincos_pos_embed_from_grid(half, grid[0])  # (H*W, D/2)
    emb_w = get_1d_sincos_pos_embed_from_grid(half, grid[1])  # (H*W, D/2)
    return np.concatenate([emb_h, emb_w], axis=1)  # (H*W, D)
def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
    """
    embed_dim: output dimension for each position
    pos: a list of positions to be encoded: size (M,)
    out: (M, D)
    """
    assert embed_dim % 2 == 0
    half = embed_dim // 2
    # Geometric frequency ladder: 1 .. 1/10000, D/2 values.
    omega = 1.0 / 10000 ** (np.arange(half, dtype=np.float64) / half)  # (D/2,)
    angles = np.einsum("m,d->md", pos.reshape(-1), omega)  # (M, D/2), outer product
    return np.concatenate([np.sin(angles), np.cos(angles)], axis=1)  # (M, D)
def resample_abs_pos_embed(
    posemb,
    new_size: List[int],
    old_size: Optional[List[int]] = None,
    n_prefix_tokens: int = 1,
    interpolation: str = "bicubic",  # bicubic is better.
    antialias: bool = True,  # antialias is important.
    verbose: bool = False,
):
    """Resample an absolute position embedding (1, N, C) to a new grid size,
    keeping any prefix (cls/extra) tokens untouched."""
    num_pos_tokens = posemb.shape[1]
    num_new_tokens = new_size[0] * new_size[1] + n_prefix_tokens
    # Fast path: token count already matches a square target grid.
    if num_new_tokens == num_pos_tokens and new_size[0] == new_size[1]:
        return posemb
    if old_size is None:
        # Assume the stored grid is square.
        side = int(math.sqrt(num_pos_tokens - n_prefix_tokens))
        old_size = [side, side]
    # Split off prefix tokens — they are not spatial and must not be resampled.
    if n_prefix_tokens:
        posemb_prefix = posemb[:, :n_prefix_tokens]
        posemb = posemb[:, n_prefix_tokens:]
    else:
        posemb_prefix = None
    embed_dim = posemb.shape[-1]
    orig_dtype = posemb.dtype
    # interpolate needs float32; reshape to (1, C, H, W) for 2D interpolation.
    grid = posemb.float().reshape(1, old_size[0], old_size[1], -1).permute(0, 3, 1, 2)
    grid = F.interpolate(grid, size=new_size, mode=interpolation, antialias=antialias)
    posemb = grid.permute(0, 2, 3, 1).reshape(1, -1, embed_dim).to(orig_dtype)
    # Reattach the prefix tokens in front.
    if posemb_prefix is not None:
        posemb = torch.cat([posemb_prefix, posemb], dim=1)
    if not torch.jit.is_scripting() and verbose:
        logger.info(f"Resized position embedding: {old_size} to {new_size}.")
    return posemb
================================================
FILE: storm/models/storm.py
================================================
import torch
import torch.nn as nn
from einops import rearrange, repeat
from gsplat.rendering import rasterization
from torch import Tensor
from torch.utils.checkpoint import checkpoint
from .decoder import ConvDecoder, DummyDecoder, ModulatedLinearLayer
from .embedders import PluckerEmbedder, TimestepEmbedder
from .layers import LayerNorm2d, Mlp
from .vit import VisionTransformer as ViT
class STORM(ViT):
def __init__(
    self,
    img_size=224,
    in_chans=9,
    gs_dim=3,
    decoder_type="dummy",
    near=0.2,
    far=400,
    scale_offset=-2.3,
    opacity_offset=-2.0,
    num_cams=3,  # to ablate
    max_scale=0.5,
    disable_pos_embed=False,
    use_sky_token=True,
    use_affine_token=True,
    num_motion_tokens=32,
    tau=0.5,
    projected_motion_dim=32,
    # ViT parameters
    patch_size=16,
    embed_dim=768,
    depth=12,
    num_heads=12,
    grad_checkpointing=True,
    use_latest_gsplat=False,
    sigmoid_rgb=False,  # a legacy oversight: the sigmoid was accidentally omitted in the earlier implementation
    **kwargs,
):
    """Build the STORM model: a ViT backbone plus heads that predict
    per-pixel Gaussian parameters, per-Gaussian velocities, and optional
    sky / affine auxiliary outputs.

    Selected args:
        gs_dim: per-Gaussian color/feature dimensionality (3 for RGB;
            larger for Latent-STORM features).
        decoder_type: "dummy" renders gs_dim channels directly;
            "conv" decodes a latent rendering with a ConvDecoder.
        near/far: depth activation range (meters — presumably; the values
            are only used inside `depth_act_fn`).
        scale_offset/opacity_offset: biases applied before the scale /
            opacity activations.
        num_motion_tokens: number of learned motion-basis tokens; 0 means
            per-pixel velocity is regressed directly.
        tau: softmax temperature for motion-weight assignment.
        sigmoid_rgb: if True, squash predicted colors to [-1, 1].
    """
    super(STORM, self).__init__(
        img_size=img_size,
        patch_size=patch_size,
        in_chans=in_chans,
        embed_dim=embed_dim,
        depth=depth,
        num_heads=num_heads,
        grad_checkpointing=grad_checkpointing,
    )
    # basic attributes
    self.disable_pos_embed = disable_pos_embed
    self.gs_dim = gs_dim
    # 9 non-color channels: depth(1) + scales(3) + quats(4) + opacity(1)
    # (see the split in forward_gs_predictor).
    self.out_channels = gs_dim + 9
    self.num_cams = num_cams
    self.grad_checkpointing = grad_checkpointing
    self.use_latest_gsplat = use_latest_gsplat
    # ------- STORM v.s. Latent-STORM -------
    self.decoder_type = decoder_type
    # self.patch_size comes from the ViT parent initialized above.
    self.decoder_upsample_ratio = decoder_upsample_ratio = self.patch_size
    # ------- motion predictor -------
    self.num_motion_tokens = num_motion_tokens
    self.tau = tau
    num_velocity_channels = 3
    # ------- embedders -------
    self.plucker_embedder = PluckerEmbedder(img_size=img_size)
    self.time_embedder = TimestepEmbedder(embed_dim)
    # ------- auxiliary tokens -------
    self.use_sky_token = use_sky_token
    self.use_affine_token = use_affine_token
    if self.use_sky_token:
        # One learned token summarizing the sky; decoded per view direction.
        self.sky_token = nn.Parameter(torch.randn(1, 1, embed_dim) * 0.02)
        self.sky_head = ModulatedLinearLayer(
            3,
            hidden_channels=512,
            condition_channels=embed_dim,
            out_channels=self.gs_dim,
        )
    if self.use_affine_token:
        # One token per camera; decoded to a (gs_dim x gs_dim) matrix + bias.
        self.affine_token = nn.Parameter(torch.randn(1, self.num_cams, embed_dim) * 0.02)
        self.affine_linear = nn.Linear(embed_dim, self.gs_dim * (self.gs_dim + 1))
    # ------- gs predictor and mask decoder -------
    if decoder_type == "dummy":
        # Predict all Gaussian parameters for every pixel in a patch at once.
        self.gs_pred = nn.Linear(embed_dim, decoder_upsample_ratio**2 * self.out_channels)
        self.decoder = DummyDecoder()
        self.unpatch_size = decoder_upsample_ratio
        if self.decoder_upsample_ratio == 8:
            # used for upscaling the low-resolution image features to the pixel-resolution
            # very handcrafted and never tuned
            self.output_upscaling = nn.Sequential(
                nn.ConvTranspose2d(embed_dim, 512, kernel_size=2, stride=2),
                LayerNorm2d(512),
                nn.GELU(),
                nn.ConvTranspose2d(512, 256, kernel_size=2, stride=2),
                LayerNorm2d(256),
                nn.GELU(),
                nn.ConvTranspose2d(256, 128, kernel_size=2, stride=2),
                LayerNorm2d(128),
                nn.GELU(),
            )
        elif self.decoder_upsample_ratio == 16:
            # used for upscaling the low-resolution image features to the pixel-resolution
            # very handcrafted and never tuned
            self.output_upscaling = nn.Sequential(
                nn.ConvTranspose2d(embed_dim, 512, kernel_size=2, stride=2),
                LayerNorm2d(512),
                nn.GELU(),
                nn.ConvTranspose2d(512, 256, kernel_size=2, stride=2),
                LayerNorm2d(256),
                nn.GELU(),
                nn.ConvTranspose2d(256, 128, kernel_size=2, stride=2),
                LayerNorm2d(128),
                nn.GELU(),
                nn.ConvTranspose2d(128, 128, kernel_size=2, stride=2),
                LayerNorm2d(128),
                nn.GELU(),
            )
    elif decoder_type == "conv":
        self.gs_pred = nn.Linear(embed_dim, self.out_channels)
        # latent-STORM decoder
        self.decoder = ConvDecoder(
            latent_dim=self.gs_dim,
            out_channels=4,  # 3 for RGB, 1 for depth
            num_res_blocks=3,
            channels=[512, 256, 256, 128],  # 8 times upsample
            grad_checkpointing=grad_checkpointing,
        )
        self.unpatch_size = 1
        # upscaling the low-resolution image features to the pixel-resolution
        # the "pixel" resolution here is essentially the feature map resolution
        # which is 1/patch_size of the image resolution
        self.output_upscaling = nn.Sequential(
            nn.Conv2d(embed_dim, 512, kernel_size=1),
            LayerNorm2d(512),
            nn.GELU(),
            nn.Conv2d(512, 256, kernel_size=1),
            LayerNorm2d(256),
            nn.GELU(),
            nn.Conv2d(256, 128, kernel_size=1),
            LayerNorm2d(128),
            nn.GELU(),
        )
    # ------- activation functions for gs parameters -------
    # Non-trainable parameter so `max_scale` is saved with the state dict.
    self.max_scale = nn.Parameter(torch.tensor([float(max_scale)]), requires_grad=False)
    self.scale_act_fn = lambda x: torch.minimum(torch.exp(x + scale_offset), self.max_scale)
    self.opacity_act_fn = lambda x: torch.sigmoid(x + opacity_offset)
    self.depth_act_fn = lambda x: near + torch.sigmoid(x) * (far - near)
    self.rgb_act_fn = lambda x: torch.sigmoid(x) * 2 - 1 if sigmoid_rgb else x
    self.near, self.far = near, far
    # ------- motion predictor -------
    # Projects upscaled (128-ch) image features to per-pixel motion keys.
    self.motion_key_head = Mlp(128, 256, projected_motion_dim)
    if self.num_motion_tokens > 0:
        self.motion_tokens = nn.Parameter(torch.randn(1, num_motion_tokens, embed_dim) * 0.02)
        # One query head per motion token.
        self.motion_query_heads = nn.ModuleList(
            [
                Mlp(embed_dim, embed_dim, projected_motion_dim)
                for _ in range(self.num_motion_tokens)
            ]
        )
        self.motion_basis_decoder = Mlp(embed_dim, 256, num_velocity_channels)
    else:
        self.motion_tokens = None
        # No tokens: decode velocity directly from the per-pixel keys.
        self.motion_basis_decoder = Mlp(projected_motion_dim, 256, num_velocity_channels)
    self.init_weights()
    if disable_pos_embed:  # remove the default pos_embed in vit
        del self.pos_embed
        self.pos_embed = None
def _pos_embed(self, x: Tensor) -> Tensor:
    # With positional embeddings disabled, just flatten the spatial grid into
    # a token sequence; otherwise defer to the ViT implementation.
    if self.disable_pos_embed:
        return rearrange(x, "b h w c -> b (h w) c")
    return super()._pos_embed(x)
def _time_embed(self, x: Tensor, time: Tensor, num_views=1) -> Tensor:
    """Add a per-frame time embedding to every token.

    `x` is a flattened (b*t*v, n, c) token tensor. `time` is either
    (b, t, v) with one timestamp per view, or (b, t) with one timestamp
    per frame that is shared across `num_views` views.
    """
    if time.ndim == 3:
        b, t, v = time.shape
        time_embedding = (
            self.time_embedder(time.flatten())  # (btv, c)
            .view(b, t, v, -1)  # (b, t, v, c)
            .view(-1, 1, self.embed_dim)  # (btv, 1, c)
            .repeat(1, x.shape[1], 1)  # (btv, n, c)
        )
    else:
        time_embedding = (
            self.time_embedder(time.flatten())  # (bt, c)
            .view(time.shape[0], time.shape[1], 1, -1)  # (b, t, 1, c)
            .repeat(1, 1, num_views, 1)  # (b, t, v, c)
            .view(-1, 1, self.embed_dim)  # (btv, 1, c)
            .repeat(1, x.shape[1], 1)  # (btv, n, c)
        )
    return x + time_embedding
def forward_decoder(self, render_results):
    """Run the (possibly dummy) decoder and re-activate any decoded depth."""
    # Tell the decoder which dict entries hold the rasterized maps, and
    # initialize the slots it may fill in.
    for key, value in (
        ("rgb_key", "rendered_image"),
        ("depth_key", "rendered_depth"),
        ("alpha_key", "rendered_alpha"),
        ("flow_key", "rendered_flow"),
        ("decoder_depth_key", None),
        ("decoder_alpha_key", None),
        ("decoder_flow_key", None),
    ):
        render_results[key] = value
    render_results = self.decoder(render_results)
    # If the decoder produced a raw depth map, map it into [near, far].
    decoded_depth_key = render_results["decoder_depth_key"]
    if decoded_depth_key is not None:
        render_results[decoded_depth_key] = self.depth_act_fn(
            render_results[decoded_depth_key]
        )
    return render_results
def forward_features(self, x, plucker_embeds, time):
    """Encode context images (+ Plucker rays + time) into transformer features.

    Args:
        x: (b, t, v, c, h, w) context images.
        plucker_embeds: (b, t, v, h, w, c') per-pixel Plucker coordinates.
        time: (b, t) or (b, t, v) frame times.

    Returns:
        (b, n_extra + t*v*hw, c) features where the leading tokens are,
        in order, the sky token, the affine tokens, and the motion tokens
        (each present only when enabled).
    """
    b, t, v, c, h, w = x.size()
    x = rearrange(x, "b t v c h w -> (b t v) c h w")
    plucker_embeds = rearrange(plucker_embeds, "b t v h w c-> (b t v) c h w")
    # Concatenate rays with the images along channels before patchifying.
    x = torch.cat([x, plucker_embeds], dim=1)
    x = self.patch_embed(x)  # (b t v) h w c2
    x = self._pos_embed(x)  # (b t v) (h w) c2
    x = self._time_embed(x, time, num_views=v)
    x = rearrange(x, "(b t v) hw c -> b (t v hw) c", t=t, v=v)
    # Prepend the learned tokens; final order is [sky, affine, motion, patches].
    if self.num_motion_tokens > 0:
        motion_tokens = repeat(self.motion_tokens, "1 k d -> b k d", b=x.shape[0])
        x = torch.cat([motion_tokens, x], dim=-2)
    if self.use_affine_token:
        affine_token = repeat(self.affine_token, "1 k d -> b k d", b=b)
        x = torch.cat([affine_token, x], dim=-2)
    if self.use_sky_token:
        sky_token = repeat(self.sky_token, "1 1 d -> b 1 d", b=x.shape[0])
        x = torch.cat([sky_token, x], dim=-2)
    x = self.transformer(x)
    x = self.norm(x)
    return x
def forward_motion_predictor(self, x, motion_tokens=None, gs_params=None):
    """Predict a per-Gaussian forward velocity field.

    Upscales the transformer patch features back toward pixel resolution,
    projects them to per-pixel motion keys, and either
    (a) num_motion_tokens > 0: softmax-attends the keys against queries
        derived from the motion tokens to blend the predicted motion
        bases, or
    (b) otherwise: regresses the velocity directly from the keys.

    Adds "forward_flow" — and, in case (a), "motion_weights" and
    "motion_bases" — to `gs_params`; entries whose value is None are
    dropped from the returned dict.
    """
    b, t, v, h, w, _ = gs_params["means"].shape
    # Back to a (b*t*v, c, h', w') feature map at the patch-grid resolution.
    img_embeds = self.unpatchify(
        rearrange(x, "b (t v hw) c -> (b t v) hw c", t=t, v=v),
        hw=(h // self.unpatch_size, w // self.unpatch_size),
        patch_size=1,
    )
    if self.grad_checkpointing:
        img_embeds = checkpoint(self.output_upscaling, img_embeds)
    else:
        img_embeds = self.output_upscaling(img_embeds)
    img_embeds = rearrange(img_embeds, "(b t v) c h w -> b t v h w c", t=t, v=v)
    img_keys = self.motion_key_head(img_embeds)
    if self.num_motion_tokens > 0:
        # One query head per motion token (hypernetwork-style).
        hyper_in_list = []
        for i in range(self.num_motion_tokens):
            hyper_in = self.motion_query_heads[i](motion_tokens[:, i])
            hyper_in_list.append(hyper_in)
        motion_token_queries = torch.stack(hyper_in_list, dim=1)
        motion_bases = self.motion_basis_decoder(motion_tokens)
        dot_product_similarity = torch.einsum(
            "b k c, b t v h w c -> b t v h w k",
            motion_token_queries,
            img_keys,
        )
        # Temperature-controlled soft assignment of each pixel to the K bases.
        motion_weights = torch.softmax(dot_product_similarity / self.tau, dim=-1)
        forward_flow = torch.einsum(
            "b t v h w k, b k c -> b t v h w c", motion_weights, motion_bases
        )
        gs_params["motion_weights"] = motion_weights
        gs_params["motion_bases"] = motion_bases
    else:
        # if there's no motion token, directly predict the velocity from the upsampled image features
        forward_flow = self.motion_basis_decoder(img_keys)
    gs_params["forward_flow"] = forward_flow
    return {k: v for k, v in gs_params.items() if v is not None}
def forward_gs_predictor(self, x, origins, directions):
    """Decode per-pixel Gaussian parameters from transformer features.

    Args:
        x: (b, t*v*hw, c) patch features (auxiliary tokens already removed).
        origins / directions: (b, t, v, h, w, 3) per-pixel ray origins and
            directions.

    Returns:
        dict of per-pixel Gaussian parameters; "means" are obtained by
        marching each ray by its predicted (activated) depth.
    """
    b, t, v, h, w, _ = origins.shape
    x = rearrange(x, "b (t v hw) c -> (b t v) hw c", t=t, v=v)
    gs_params = self.gs_pred(x)
    gs_params = self.unpatchify(gs_params, hw=(h, w), patch_size=self.unpatch_size)
    gs_params = rearrange(gs_params, "(b t v) c h w -> b t v h w c", t=t, v=v)
    # Channel layout: [depth(1) | scales(3) | quats(4) | opacity(1) | color(gs_dim)].
    depth, scales, quats, opacitys, colors = gs_params.split([1, 3, 4, 1, self.gs_dim], dim=-1)
    scales = self.scale_act_fn(scales)
    opacitys = self.opacity_act_fn(opacitys)
    depths = self.depth_act_fn(depth)
    colors = self.rgb_act_fn(colors)
    means = origins + directions * depths
    return {
        "means": means,
        "scales": scales,
        "quats": quats,
        "opacities": opacitys.squeeze(-1),
        "colors": colors,
        "depths": depths.squeeze(-1),
    }
def forward_renderer(self, gs_params, data_dict, render_motion_seg=True, radius_clip=0.0):
    """Advect the predicted Gaussians to each target timestamp and rasterize them.

    Args:
        gs_params: per-pixel Gaussian attributes ("means", "scales", "quats",
            "opacities", "colors", "forward_flow", optionally "motion_weights"),
            each shaped (b, t, v, h, w, ...).
        data_dict: batch dict providing target cameras/intrinsics/times plus the
            render "height"/"width".
        render_motion_seg: at eval time, additionally rasterize the motion-token
            weights into a per-pixel motion segmentation map.
        radius_clip: gsplat radius_clip; projected Gaussians smaller than this
            (in pixels) are skipped.

    Returns:
        dict with "rendered_image", "rendered_depth", "rendered_alpha",
        "rendered_flow", "means_batched", and optionally "rendered_motion_seg".
    """
    b, t, v, h, w, _ = gs_params["means"].shape
    tgt_h, tgt_w = data_dict["height"], data_dict["width"]
    tgt_t, tgt_v = data_dict["target_camtoworlds"].shape[1:3]
    # Flatten the (time, view, pixel) grid into one Gaussian list per sample.
    means = rearrange(gs_params["means"], "b t v h w c -> b (t v h w) c")
    scales = rearrange(gs_params["scales"], "b t v h w c -> b (t v h w) c")
    quats = rearrange(gs_params["quats"], "b t v h w c -> b (t v h w) c")
    opacities = rearrange(gs_params["opacities"], "b t v h w -> b (t v h w)")
    colors = rearrange(gs_params["colors"], "b t v h w c -> b (t v h w) c")
    forward_v = rearrange(gs_params["forward_flow"], "b t v h w c -> b (t v h w) c")
    # Replicate the Gaussians once per target timestamp: batch dim becomes b * tgt_t.
    means_batched = means.repeat_interleave(tgt_t, dim=0)
    scales_batched = scales.repeat_interleave(tgt_t, dim=0)
    quats_batched = quats.repeat_interleave(tgt_t, dim=0)
    opacities_batched = opacities.repeat_interleave(tgt_t, dim=0)
    color_batched = colors.repeat_interleave(tgt_t, dim=0)
    forward_v_batched = forward_v.repeat_interleave(tgt_t, dim=0)
    # Rescale normalized times by the clip's timespan before taking differences.
    ctx_time = data_dict["context_time"] * data_dict["timespan"]
    tgt_time = data_dict["target_time"] * data_dict["timespan"]
    if tgt_time.ndim == 3:
        # Per-view times: broadcast target (b, tgt_t, 1, v) against context (b, 1, t, v).
        tdiff_forward = tgt_time.unsqueeze(2) - ctx_time.unsqueeze(1)
        tdiff_forward = tdiff_forward.view(b * tgt_t, t * v, 1)
        tdiff_forward_batched = tdiff_forward.repeat_interleave(h * w, dim=1)
    else:
        # One shared time per frame: target (b, tgt_t, 1) minus context (b, 1, t).
        tdiff_forward = tgt_time.unsqueeze(-1) - ctx_time.unsqueeze(-2)
        tdiff_forward = tdiff_forward.view(b * tgt_t, t, 1)
        tdiff_forward_batched = tdiff_forward.repeat_interleave(v * h * w, dim=1)
    # Constant-velocity advection: shift each Gaussian by velocity * time gap.
    forward_translation = forward_v_batched * tdiff_forward_batched
    means_batched = means_batched + forward_translation
    if not self.training:  # mask out some noisy flow
        # Only the flow rendered as "color" below is masked; the means above were
        # already advected with the unmasked velocities.
        forward_v[forward_v.norm(dim=-1) < 1.0] = 0.0
        forward_v_batched = forward_v.repeat_interleave(tgt_t, dim=0)
    if not self.training and self.num_motion_tokens > 0 and render_motion_seg:
        # render the motion segmentation map
        motion_weights = rearrange(gs_params["motion_weights"], "b t v h w k -> b (t v h w) k")
        weights_batched = motion_weights.repeat_interleave(tgt_t, dim=0)
        # Pack [color | flow | motion weights] into the rasterizer's color channels.
        colors_batched = torch.cat([color_batched, forward_v_batched, weights_batched], dim=-1)
    else:
        colors_batched = torch.cat([color_batched, forward_v_batched], dim=-1)
    camtoworlds_batched = data_dict["target_camtoworlds"].view(b * tgt_t, -1, 4, 4)
    viewmats_batched = torch.linalg.inv(camtoworlds_batched.float())
    Ks_batched = data_dict["target_intrinsics"].view(b * tgt_t, -1, 3, 3)
    motion_seg = None
    if self.use_latest_gsplat:
        # Newer gsplat API: rasterize one (sample, timestamp) element at a time.
        means_batched = means_batched.float()
        quats_batched = quats_batched.float()
        scales_batched = scales_batched.float()
        opacities_batched = opacities_batched.float()
        colors_batched = colors_batched.float()
        viewmats_batched = viewmats_batched.float()
        Ks_batched = Ks_batched.float()
        if not self.training:
            rendered_colors, rendered_alphas, rendered_flow, motion_seg = [], [], [], []
            rendered_depths = []
            with torch.autocast("cuda", enabled=False):
                for bid in range(means_batched.size(0)):
                    renderings, alpha, _ = rasterization(
                        means=means_batched[bid],
                        quats=quats_batched[bid],
                        scales=scales_batched[bid],
                        opacities=opacities_batched[bid],
                        colors=colors_batched[bid],
                        viewmats=viewmats_batched[bid],
                        Ks=Ks_batched[bid],
                        width=data_dict["width"],
                        height=data_dict["height"],
                        render_mode="RGB+ED",
                        near_plane=self.near,
                        far_plane=self.far,
                        packed=False,
                        radius_clip=radius_clip,
                    )
                    # "RGB+ED" appends an expected-depth channel after the colors.
                    # NOTE(review): when render_motion_seg is False but
                    # num_motion_tokens > 0, colors_batched was packed WITHOUT the
                    # weight channels, so this split width looks inconsistent —
                    # confirm this path is only reached with render_motion_seg=True.
                    color, forward_flow, weights, depth = renderings.split(
                        [self.gs_dim, 3, self.num_motion_tokens, 1], dim=-1
                    )
                    rendered_colors.append(color)
                    rendered_alphas.append(alpha)
                    rendered_flow.append(forward_flow)
                    motion_seg.append(weights)
                    rendered_depths.append(depth)
            color = torch.stack(rendered_colors, dim=0)
            rendered_alpha = torch.stack(rendered_alphas, dim=0)
            forward_flow = torch.stack(rendered_flow, dim=0)
            depth = torch.stack(rendered_depths, dim=0)
            motion_seg = torch.stack(motion_seg, dim=0)
            if motion_seg.numel() > 0:
                # Hard-assign each pixel to its highest-weight motion token.
                # NOTE(review): reshape uses context dims (v, h, w); assumes they
                # match the rendered target dims in this branch — confirm.
                motion_seg = motion_seg.reshape(b, tgt_t, v, h, w, -1).argmax(dim=-1)
            else:
                motion_seg = None
        else:
            rendered_colors, rendered_alphas, rendered_flow, rendered_depths = [], [], [], []
            with torch.autocast("cuda", enabled=False):
                for bid in range(means_batched.size(0)):
                    renderings, alpha, _ = rasterization(
                        means=means_batched[bid],
                        quats=quats_batched[bid],
                        scales=scales_batched[bid],
                        opacities=opacities_batched[bid],
                        colors=colors_batched[bid],
                        viewmats=viewmats_batched[bid],
                        Ks=Ks_batched[bid],
                        width=data_dict["width"],
                        height=data_dict["height"],
                        render_mode="RGB+ED",
                        near_plane=self.near,
                        far_plane=self.far,
                        packed=False,
                        radius_clip=radius_clip,
                    )
                    color, forward_flow, depth = renderings.split([self.gs_dim, 3, 1], dim=-1)
                    rendered_colors.append(color)
                    rendered_alphas.append(alpha)
                    rendered_flow.append(forward_flow)
                    rendered_depths.append(depth)
            color = torch.stack(rendered_colors, dim=0)
            rendered_alpha = torch.stack(rendered_alphas, dim=0)
            forward_flow = torch.stack(rendered_flow, dim=0)
            depth = torch.stack(rendered_depths, dim=0)
    else:
        if not self.training:
            with torch.autocast("cuda", enabled=False):
                # Rasterize color + flow (weight channels are stripped here and
                # rendered separately below).
                rendered_color, rendered_alpha, _ = rasterization(
                    means=means_batched.float(),
                    quats=quats_batched.float(),
                    scales=scales_batched.float(),
                    opacities=opacities_batched.float(),
                    colors=(
                        colors_batched[..., : -self.num_motion_tokens].float()
                        if self.num_motion_tokens > 0 and render_motion_seg
                        else colors_batched.float()
                    ),
                    viewmats=viewmats_batched,
                    Ks=Ks_batched,
                    width=tgt_w,
                    height=tgt_h,
                    render_mode="RGB+ED",
                    near_plane=self.near,
                    far_plane=self.far,
                    packed=False,
                    radius_clip=radius_clip,
                )
                color, forward_flow, depth = rendered_color.split([self.gs_dim, 3, 1], dim=-1)
                if self.num_motion_tokens > 0 and render_motion_seg:
                    # Rasterize the motion-token weights 32 channels at a time.
                    chunksize = 32
                    assignment_map = []
                    rendered_colors = colors_batched[..., -self.num_motion_tokens :]
                    for i in range(0, self.num_motion_tokens, chunksize):
                        weights, _, _ = rasterization(
                            means=means_batched.float(),
                            quats=quats_batched.float(),
                            scales=scales_batched.float(),
                            opacities=opacities_batched.float(),
                            colors=rendered_colors[..., i : i + chunksize],
                            viewmats=viewmats_batched,
                            Ks=Ks_batched,
                            width=tgt_w,
                            height=tgt_h,
                            render_mode="RGB+ED",
                            near_plane=self.near,
                            far_plane=self.far,
                            packed=False,
                            radius_clip=radius_clip,
                        )
                        # Drop the trailing expected-depth channel from "RGB+ED".
                        weights = weights.split([weights.size(-1) - 1, 1], dim=-1)[0]
                        assignment_map.append(weights)
                    motion_seg = torch.cat(assignment_map, dim=-1)
                    # Hard-assign each pixel to its highest-weight motion token.
                    motion_seg = motion_seg.reshape(b, tgt_t, tgt_v, tgt_h, tgt_w, -1).argmax(
                        dim=-1
                    )
        else:
            with torch.autocast("cuda", enabled=False):
                rendered_color, rendered_alpha, _ = rasterization(
                    means=means_batched.float(),
                    quats=quats_batched.float(),
                    scales=scales_batched.float(),
                    opacities=opacities_batched.float(),
                    colors=colors_batched.float(),
                    viewmats=viewmats_batched,
                    Ks=Ks_batched,
                    width=tgt_w,
                    height=tgt_h,
                    render_mode="RGB+ED",
                    near_plane=self.near,
                    far_plane=self.far,
                    packed=False,
                    radius_clip=radius_clip,
                )
                color, forward_flow, depth = rendered_color.split([self.gs_dim, 3, 1], dim=-1)
    output_dict = {
        "rendered_image": color.view(b, tgt_t, tgt_v, tgt_h, tgt_w, -1),
        "rendered_depth": depth.view(b, tgt_t, tgt_v, tgt_h, tgt_w),
        "rendered_alpha": rendered_alpha.view(b, tgt_t, tgt_v, tgt_h, tgt_w),
        "rendered_flow": forward_flow.view(b, tgt_t, tgt_v, tgt_h, tgt_w, -1),
        "means_batched": means_batched,
    }
    if motion_seg is not None:
        output_dict["rendered_motion_seg"] = motion_seg.squeeze(-1)
    return output_dict
def get_ray_dict(self, data_dict):
    """Compute Plucker ray embeddings for the context views.

    When a non-dummy decoder later upsamples the rendered output, rendering
    happens at the feature resolution: ray origins/directions are recomputed
    at 1/patch_size scale and the target intrinsics and render size inside
    `data_dict` are scaled down to match. NOTE: mutates `data_dict` (and the
    target intrinsics tensor) in place.
    """
    ray_dict = self.plucker_embedder(
        data_dict["context_intrinsics"],
        data_dict["context_camtoworlds"],
        image_size=data_dict["context_image"].shape[-2:],
    )
    if self.decoder_type != "dummy":
        # Rays at patch (feature) resolution for placing the Gaussians.
        feat_ray_dict = self.plucker_embedder(
            data_dict["context_intrinsics"],
            data_dict["context_camtoworlds"],
            image_size=data_dict["context_image"].shape[-2:],
            patch_size=self.patch_size,
        )
        ray_dict["origins"] = feat_ray_dict["origins"]
        ray_dict["dirs"] = feat_ray_dict["dirs"]
        # Scale focal lengths and principal point to the downsampled render size.
        tgt_intrinsics = data_dict["target_intrinsics"]
        tgt_intrinsics[..., 0, 0] = tgt_intrinsics[..., 0, 0] / self.patch_size
        tgt_intrinsics[..., 1, 1] = tgt_intrinsics[..., 1, 1] / self.patch_size
        tgt_intrinsics[..., 0, 2] = tgt_intrinsics[..., 0, 2] / self.patch_size
        tgt_intrinsics[..., 1, 2] = tgt_intrinsics[..., 1, 2] / self.patch_size
        data_dict["target_intrinsics"] = tgt_intrinsics
        data_dict["width"] //= self.patch_size
        data_dict["height"] //= self.patch_size
    return data_dict, ray_dict
def forward(self, data_dict):
    """Full STORM forward pass.

    Encodes the context views, peels off the special tokens, predicts the
    per-pixel Gaussians and their motion, renders the target views (chunked
    when there are many), then composites the sky color and applies the
    per-camera affine color correction.

    Returns:
        dict with "ray_dict", "gs_params", and "render_results".
    """
    x = data_dict["context_image"]
    b, t, v, c, h, w = x.size()
    data_dict, ray_dict = self.get_ray_dict(data_dict)
    x = self.forward_features(x, ray_dict["plucker"], data_dict["context_time"])
    # Special tokens sit at the front of the sequence in this order:
    # [sky (1) | affine (num_cams) | motion (num_motion_tokens)].
    sky_token, affine_tokens, motion_tokens = None, None, None
    if self.use_sky_token:
        sky_token = x[:, :1]
        x = x[:, 1:]
    if self.use_affine_token:
        affine_tokens = x[:, : self.num_cams]
        x = x[:, self.num_cams :]
    if self.num_motion_tokens > 0:
        motion_tokens = x[:, : self.num_motion_tokens]
        x = x[:, self.num_motion_tokens :]
    gs_params = self.forward_gs_predictor(x, ray_dict["origins"], ray_dict["dirs"])
    gs_params = self.forward_motion_predictor(x, motion_tokens, gs_params)
    # sometimes the number of views is too large, so we split the rendering into chunks
    step = 20
    if data_dict["target_camtoworlds"].shape[1] <= step:
        render_results = self.forward_renderer(gs_params, data_dict)
    else:
        # Shallow copy: only the target entries are swapped out per chunk.
        chunk_data_dict = data_dict.copy()
        for chunk_start in range(0, data_dict["target_camtoworlds"].shape[1], step):
            chunk_end = min(chunk_start + step, data_dict["target_camtoworlds"].shape[1])
            chunk_data_dict["target_camtoworlds"] = data_dict["target_camtoworlds"][
                :, chunk_start:chunk_end
            ]
            chunk_data_dict["target_intrinsics"] = data_dict["target_intrinsics"][
                :, chunk_start:chunk_end
            ]
            chunk_data_dict["target_time"] = data_dict["target_time"][:, chunk_start:chunk_end]
            chunk_render_results = self.forward_renderer(gs_params, chunk_data_dict)
            if chunk_start == 0:
                render_results = chunk_render_results
            else:
                # Concatenate each chunk along the target-time dimension.
                # NOTE(review): "means_batched" is shaped (b * tgt_t, N, 3), so a
                # dim=1 concat does not stack it by timestamp — confirm downstream
                # consumers tolerate this.
                for k, v in chunk_render_results.items():
                    render_results[k] = torch.cat([render_results[k], v], dim=1)
    images, opacities = render_results["rendered_image"], render_results["rendered_alpha"]
    if self.use_sky_token:
        # Composite a predicted sky color behind the Gaussians wherever alpha < 1.
        target_ray_dict = self.plucker_embedder(
            data_dict["target_intrinsics"],
            data_dict["target_camtoworlds"],
            image_size=(data_dict["height"], data_dict["width"]),
        )
        if data_dict["target_camtoworlds"].shape[1] <= step:
            sky = self.sky_head(target_ray_dict["dirs"], sky_token)
            images = images + (1 - opacities[..., None]) * sky
        else:
            for chunk_start in range(0, data_dict["target_camtoworlds"].shape[1], step):
                dirs = target_ray_dict["dirs"][:, chunk_start : chunk_start + step]
                chunk_sky = self.sky_head(dirs, sky_token)
                images[:, chunk_start : chunk_start + step] += (
                    1 - opacities[:, chunk_start : chunk_start + step][..., None]
                ) * chunk_sky
        gs_params["sky_token"] = sky_token
    if self.use_affine_token:
        affine = self.affine_linear(affine_tokens)  # b v (gs_dim * (gs_dim + 1))
        affine = rearrange(affine, "b v (p q) -> b v p q", p=self.gs_dim)
        # NOTE(review): the output subscript keeps "p", so this sums the affine
        # matrix over q and scales each channel rather than applying a full
        # p -> q linear map — confirm this is intended.
        images = torch.einsum("b t v h w p, b v p q -> b t v h w p", images, affine)
        gs_params["affine"] = affine
    render_results["rendered_image"] = images
    render_results = self.forward_decoder(render_results)
    return {
        "ray_dict": ray_dict,
        "gs_params": gs_params,
        "render_results": render_results,
    }
def from_gs_params_to_output(self, gs_params, target_dict, num_cams=1):
    """Re-render previously computed Gaussians into new target views.

    Used at inference to render novel trajectories without re-running the
    encoder. Applies the same sky compositing and affine color correction as
    `forward` (using the tokens stashed inside `gs_params`).

    Args:
        gs_params: output of `get_gs_params` (may carry "sky_token"/"affine").
        target_dict: dict with target cameras/intrinsics/times and sizes.
        num_cams: when 1, the per-camera affine corrections are averaged into
            a single correction; otherwise they are applied per view.
    """
    render_results = self.forward_renderer(
        gs_params, target_dict, render_motion_seg=False, radius_clip=4.0
    )
    rendered_images = render_results["rendered_image"]
    if self.use_sky_token:
        sky_token = gs_params["sky_token"]
        target_ray_dict = self.plucker_embedder(
            target_dict["target_intrinsics"],
            target_dict["target_camtoworlds"],
            image_size=(target_dict["height"], target_dict["width"]),
        )
        sky = self.sky_head(target_ray_dict["dirs"], sky_token)
        rendered_opacities = render_results["rendered_alpha"]
        # Fill low-alpha pixels with the predicted sky color.
        rendered_images = rendered_images + (1 - rendered_opacities[..., None]) * sky
    if self.use_affine_token:
        if num_cams == 1:
            affine = gs_params["affine"].mean(dim=1)
            # NOTE(review): as in `forward`, the output subscript keeps "p",
            # which sums the affine over q — confirm intended.
            rendered_images = torch.einsum(
                "b t v h w p, b p q -> b t v h w p", rendered_images, affine
            )
        else:
            affine = gs_params["affine"]
            rendered_images = torch.einsum(
                "b t v h w p, b v p q -> b t v h w p", rendered_images, affine
            )
    render_results["rendered_image"] = rendered_images
    render_results = self.forward_decoder(render_results)
    return {"render_results": render_results}
def get_gs_params(self, data_dict):
    """Encode the context views and return the predicted Gaussian parameters.

    Uses the same token layout as `forward`: an optional sky token, optional
    per-camera affine tokens, and optional motion tokens are peeled off the
    front of the sequence before decoding the per-pixel Gaussians. The sky
    token and the affine correction are stashed inside the returned dict.
    """
    data_dict, ray_dict = self.get_ray_dict(data_dict)
    feats = self.forward_features(
        data_dict["context_image"], ray_dict["plucker"], data_dict["context_time"]
    )
    sky_token = affine_tokens = motion_tokens = None
    if self.use_sky_token:
        sky_token, feats = feats[:, :1], feats[:, 1:]
    if self.use_affine_token:
        affine_tokens, feats = feats[:, : self.num_cams], feats[:, self.num_cams :]
    if self.num_motion_tokens > 0:
        motion_tokens = feats[:, : self.num_motion_tokens]
        feats = feats[:, self.num_motion_tokens :]
    gs_params = self.forward_gs_predictor(feats, ray_dict["origins"], ray_dict["dirs"])
    gs_params = self.forward_motion_predictor(feats, motion_tokens, gs_params)
    if self.use_sky_token:
        gs_params["sky_token"] = sky_token
    if self.use_affine_token:
        # b v (gs_dim * (gs_dim + 1)) -> b v gs_dim (gs_dim + 1)
        affine = rearrange(
            self.affine_linear(affine_tokens), "b v (p q) -> b v p q", p=self.gs_dim
        )
        gs_params["affine"] = affine
    return gs_params
def STORM_B_8(**kwargs):
    """STORM-Base with patch size 8 (768-dim, 12 layers, 12 heads)."""
    cfg = dict(patch_size=8, embed_dim=768, depth=12, num_heads=12)
    return STORM(**cfg, **kwargs)
def STORM_L_8(**kwargs):
    """STORM-Large with patch size 8 (1024-dim, 24 layers, 16 heads)."""
    cfg = dict(patch_size=8, embed_dim=1024, depth=24, num_heads=16)
    return STORM(**cfg, **kwargs)
def STORM_B_16(**kwargs):
    """STORM-Base with patch size 16 (768-dim, 12 layers, 12 heads)."""
    cfg = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)
    return STORM(**cfg, **kwargs)
def STORM_L_16(**kwargs):
    """STORM-Large with patch size 16 (1024-dim, 24 layers, 16 heads)."""
    cfg = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16)
    return STORM(**cfg, **kwargs)
def STORM_XL_8(**kwargs):
    """STORM-XL with patch size 8 (1152-dim, 28 layers, 16 heads)."""
    cfg = dict(patch_size=8, embed_dim=1152, depth=28, num_heads=16)
    return STORM(**cfg, **kwargs)
def STORM_H_8(**kwargs):
    """STORM-Huge with patch size 8 (1280-dim, 32 layers, 16 heads)."""
    cfg = dict(patch_size=8, embed_dim=1280, depth=32, num_heads=16)
    return STORM(**cfg, **kwargs)
def STORM_H_16(**kwargs):
    """STORM-Huge with patch size 16 (1280-dim, 32 layers, 16 heads)."""
    cfg = dict(patch_size=16, embed_dim=1280, depth=32, num_heads=16)
    return STORM(**cfg, **kwargs)
# Registry mapping model-size strings ("STORM-<size>/<patch>") to constructors.
STORM_models = {
    "STORM-B/8": STORM_B_8,
    "STORM-L/8": STORM_L_8,
    "STORM-XL/8": STORM_XL_8,
    "STORM-H/8": STORM_H_8,
    "STORM-B/16": STORM_B_16,
    "STORM-L/16": STORM_L_16,
    "STORM-H/16": STORM_H_16,
}
================================================
FILE: storm/models/vit.py
================================================
# --------------------------------------------------------
# References:
# timm: https://github.com/rwightman/pytorch-image-models/tree/master/timm
# --------------------------------------------------------
from functools import partial
from typing import Tuple, Union
import torch
import torch.nn as nn
from einops import rearrange
from .embedders import PatchEmbed
from .layers import Transformer, get_2d_sincos_pos_embed, resample_abs_pos_embed
class VisionTransformer(nn.Module):
    """Vision Transformer.

    Patch-embeds an image into an NHWC token grid, adds positional embeddings
    (learned, or frozen 2D sin-cos when `pos_embed_requires_grad` is False),
    runs a Transformer, and returns the normalized token sequence.
    """

    def __init__(
        self,
        img_size: Union[int, Tuple[int, int]] = 224,
        patch_size: Union[int, Tuple[int, int]] = 16,
        in_chans: int = 3,
        embed_dim: int = 768,
        depth: int = 12,
        num_heads: int = 12,
        qk_norm: bool = False,
        pos_embed_requires_grad: bool = True,
        norm_layer: nn.Module = partial(nn.LayerNorm, eps=1e-6),
        grad_checkpointing: bool = False,
    ) -> None:
        """
        Args:
            img_size: reference image size used to size the positional grid.
            patch_size: side length of each (square) patch.
            in_chans: number of input image channels.
            embed_dim: token dimensionality.
            depth: number of Transformer blocks.
            num_heads: attention heads per block.
            qk_norm: whether to normalize queries/keys inside attention.
            pos_embed_requires_grad: if False, the positional embedding is
                frozen and initialized with 2D sin-cos values (see init_weights).
            norm_layer: normalization constructor for the final norm.
            grad_checkpointing: checkpoint Transformer blocks to save memory.
        """
        super().__init__()
        self.patch_size = patch_size
        self.embed_dim = embed_dim
        self.depth = depth
        self.patch_embed = PatchEmbed(
            img_size=img_size,
            patch_size=patch_size,
            in_chans=in_chans,
            embed_dim=embed_dim,
            output_fmt="NHWC",
        )
        self.num_patches = self.patch_embed.num_patches
        self.img_size = self.patch_embed.img_size
        self.pos_embed = nn.Parameter(
            torch.randn(1, self.num_patches, embed_dim) * 0.02,
            requires_grad=pos_embed_requires_grad,
        )
        self.pos_embed_requires_grad = pos_embed_requires_grad
        self.transformer = Transformer(
            embed_dim=embed_dim,
            depth=depth,
            num_heads=num_heads,
            qk_norm=qk_norm,
            norm_layer=norm_layer,
            grad_checkpointing=grad_checkpointing,
        )
        self.norm = norm_layer(embed_dim)
        self.init_weights()

    def init_weights(self):
        """Truncated-normal init for all Linear layers; if the positional
        embedding is frozen, overwrite it with fixed 2D sin-cos values."""

        def _basic_init(module):
            if isinstance(module, nn.Linear):
                nn.init.trunc_normal_(module.weight, std=0.02)
                if module.bias is not None:
                    nn.init.zeros_(module.bias)

        self.apply(_basic_init)
        if not self.pos_embed_requires_grad:
            pos_embed = get_2d_sincos_pos_embed(
                self.pos_embed.shape[-1], self.patch_embed.grid_size
            )
            self.pos_embed.data.copy_(torch.from_numpy(pos_embed).float().unsqueeze(0))

    def _pos_embed(self, x: torch.Tensor) -> torch.Tensor:
        """
        Compared to timm's implementation, this handles non-square images.
        """
        B, H, W, C = x.shape
        # Resample the stored positional grid to the current token grid size.
        pos_embed = resample_abs_pos_embed(
            posemb=self.pos_embed,
            new_size=(H, W),
            old_size=self.patch_embed.grid_size,
            n_prefix_tokens=0,
        )
        x = x.view(B, -1, C) + pos_embed
        return x

    def unpatchify(self, x, hw=None, channel_first=True, patch_size=None) -> torch.Tensor:
        """Reassemble a token sequence (B, h*w, p*p*c) into an image tensor.

        Args:
            x: patch tokens.
            hw: output image size in pixels; defaults to self.img_size.
            channel_first: return (B, C, H, W) if True, else (B, H, W, C).
            patch_size: optional override for self.patch_size.
        """
        hw = hw or self.img_size
        imgs = rearrange(
            x,
            "b (h w) (p1 p2 c) -> b c (h p1) (w p2)",
            p1=self.patch_size if patch_size is None else patch_size,
            p2=self.patch_size if patch_size is None else patch_size,
            h=hw[0] // (self.patch_size if patch_size is None else patch_size),
            w=hw[1] // (self.patch_size if patch_size is None else patch_size),
        )
        if not channel_first:
            imgs = rearrange(imgs, "b c h w -> b h w c")
        return imgs

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Return normalized patch tokens of shape (B, num_patches, embed_dim)."""
        x = self.patch_embed(x)
        x = self._pos_embed(x)
        x = self.transformer(x)
        x = self.norm(x)
        return x
def ViT_S_16(**kwargs):
    """ViT-Small with patch size 16 (384-dim, 12 layers, 6 heads)."""
    cfg = dict(patch_size=16, embed_dim=384, depth=12, num_heads=6)
    return VisionTransformer(**cfg, **kwargs)
def ViT_B_16(**kwargs):
    """ViT-Base with patch size 16 (768-dim, 12 layers, 12 heads)."""
    cfg = dict(patch_size=16, embed_dim=768, depth=12, num_heads=12)
    return VisionTransformer(**cfg, **kwargs)
def ViT_L_16(**kwargs):
    """ViT-Large with patch size 16 (1024-dim, 24 layers, 16 heads)."""
    cfg = dict(patch_size=16, embed_dim=1024, depth=24, num_heads=16)
    return VisionTransformer(**cfg, **kwargs)
def ViT_H_14(**kwargs):
    """ViT-Huge with patch size 14 (1280-dim, 32 layers, 16 heads)."""
    cfg = dict(patch_size=14, embed_dim=1280, depth=32, num_heads=16)
    return VisionTransformer(**cfg, **kwargs)
# Registry of named ViT configurations ("ViT-<size>/<patch>" -> constructor).
# NOTE(review): ViT_S_16 is defined above but not registered — confirm whether
# that is intentional.
ViT_models = {
    "ViT-B/16": ViT_B_16,
    "ViT-L/16": ViT_L_16,
    "ViT-H/14": ViT_H_14,
}
================================================
FILE: storm/utils/__init__.py
================================================
================================================
FILE: storm/utils/distributed.py
================================================
import logging
import os
import random
import re
import socket
import sys
from typing import Dict, List
import torch
import torch.distributed as dist
# Local (per-node) rank and world size; populated once by `enable()`.
# A value of -1 means distributed mode has not been enabled yet.
_LOCAL_RANK = -1
_LOCAL_WORLD_SIZE = -1

# The full set of environment variables a torch.distributed launcher provides;
# a partially-set environment is treated as an error (see _TorchDistributedEnvironment).
_TORCH_DISTRIBUTED_ENV_VARS = (
    "MASTER_ADDR",
    "MASTER_PORT",
    "RANK",
    "WORLD_SIZE",
    "LOCAL_RANK",
    "LOCAL_WORLD_SIZE",
)

logger = logging.getLogger("STORM")
def is_enabled() -> bool:
    """Return True when torch.distributed is both available and initialized."""
    if not dist.is_available():
        return False
    return dist.is_initialized()
def get_global_rank() -> int:
    """Get the global rank of the current process (0 when not distributed)."""
    if dist.is_available() and dist.is_initialized():
        return dist.get_rank()
    return 0
def get_world_size() -> int:
    """Get the number of processes (1 when not distributed)."""
    if dist.is_available() and dist.is_initialized():
        return dist.get_world_size()
    return 1
def is_main_process() -> bool:
    """Return True on rank 0 (and always when not running distributed)."""
    rank = dist.get_rank() if (dist.is_available() and dist.is_initialized()) else 0
    return rank == 0
def _collect_env_vars() -> Dict[str, str]:
    """Collect the torch.distributed environment variables currently set.

    If WORLD_SIZE is set but LOCAL_WORLD_SIZE is not, assume a single-node
    job and default LOCAL_WORLD_SIZE to WORLD_SIZE.
    """
    collected: Dict[str, str] = {}
    for name in _TORCH_DISTRIBUTED_ENV_VARS:
        value = os.environ.get(name)
        if value is not None:
            collected[name] = value
    if "LOCAL_WORLD_SIZE" not in collected and "WORLD_SIZE" in collected:
        collected["LOCAL_WORLD_SIZE"] = collected["WORLD_SIZE"]
    return collected
def _is_slurm_job_process() -> bool:
    """True when running as a (non-interactive) Slurm-launched process."""
    if "SLURM_JOB_ID" not in os.environ:
        return False
    # Interactive shells (stdout attached to a tty) are not treated as Slurm jobs.
    return not os.isatty(sys.stdout.fileno())
def _parse_slurm_node_list(s: str) -> List[str]:
    """Expand a Slurm node list (e.g. "node[001-003,007]") into hostnames."""
    # Extract "hostname", "hostname[1-2,3,4-5]," substrings
    pattern = re.compile(r"(([^\[]+)(?:\[([^\]]+)\])?),?")
    hostnames: List[str] = []
    for match in pattern.finditer(s):
        prefix = s[match.start(2) : match.end(2)]
        suffixes = s[match.start(3) : match.end(3)]
        for token in suffixes.split(","):
            bounds = token.split("-")
            if len(bounds) == 1:
                # Single suffix (or a bare hostname when the bracket part is empty).
                hostnames.append(prefix + token)
                continue
            # Numeric range: preserve the zero-padding width of the lower bound.
            pad = len(bounds[0])
            lo, hi = int(bounds[0]), int(bounds[1]) + 1
            hostnames.extend(prefix + f"{idx:0{pad}}" for idx in range(lo, hi))
    return hostnames
def _check_env_variable(key: str, new_value: str):
    """Raise if `key` is already set in the environment to a different value."""
    existing = os.environ.get(key)
    if existing is not None and existing != new_value:
        raise RuntimeError(f"Environment variable conflict: {key} is already set")
def _restrict_print_to_main_process() -> None:
    """Monkeypatch builtins.print so only rank 0 prints.

    Any rank can still print by passing `force=True`; the kwarg is stripped
    before delegating to the original print.
    """
    import builtins

    original_print = builtins.print

    def _rank0_print(*args, **kwargs):
        force = kwargs.pop("force", False)
        if is_main_process() or force:
            original_print(*args, **kwargs)

    builtins.print = _rank0_print
def _get_master_port(seed: int = 0) -> int:
    """Return MASTER_PORT from the environment, or a seeded random port.

    The random choice is deterministic per seed (e.g. per Slurm job id), so
    every rank derives the same port.
    """
    MIN_MASTER_PORT, MAX_MASTER_PORT = 20_000, 60_000
    configured = os.environ.get("MASTER_PORT")
    if configured is not None:
        return int(configured)
    return random.Random(seed).randint(MIN_MASTER_PORT, MAX_MASTER_PORT)
def _get_available_port() -> int:
    """Ask the OS for a currently free TCP port by binding to port 0."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # A "" host address means INADDR_ANY i.e. binding to all interfaces.
        # Note this is not compatible with IPv6.
        sock.bind(("", 0))
        return sock.getsockname()[1]
    finally:
        sock.close()
class _TorchDistributedEnvironment:
    """Resolves torch.distributed settings from whichever launcher is in use.

    Resolution order: Slurm job environment -> fully preset torch env vars ->
    single-process local fallback (requires at least one CUDA device). A
    partially preset torch environment is an error.
    """

    def __init__(self):
        # Defaults signal "unresolved" until one of the setters below runs.
        self.master_addr = "127.0.0.1"
        self.master_port = 0
        self.rank = -1
        self.world_size = -1
        self.local_rank = -1
        self.local_world_size = -1
        if _is_slurm_job_process():
            return self._set_from_slurm_env()
        env_vars = _collect_env_vars()
        if not env_vars:
            # Environment is not set
            pass
        elif len(env_vars) == len(_TORCH_DISTRIBUTED_ENV_VARS):
            # Environment is fully set
            return self._set_from_preset_env()
        else:
            # Environment is partially set
            collected_env_vars = ", ".join(env_vars.keys())
            raise RuntimeError(
                f"Partially set environment: {collected_env_vars}."
                f"Unset environment variables: {[env_var for env_var in _TORCH_DISTRIBUTED_ENV_VARS if env_var not in env_vars]}"
            )
        if torch.cuda.device_count() > 0:
            return self._set_from_local()
        raise RuntimeError("Can't initialize PyTorch distributed environment")

    def _set_from_slurm_env(self):
        """Slurm job created with sbatch, submitit, etc..."""
        logger.info("Initializing from Slurm environment")
        job_id = int(os.environ["SLURM_JOB_ID"])
        node_count = int(os.environ["SLURM_JOB_NUM_NODES"])
        nodes = _parse_slurm_node_list(os.environ["SLURM_JOB_NODELIST"])
        assert len(nodes) == node_count, f"SLURM_JOB_NODELIST mismatch: {nodes} vs {node_count}"
        # First node hosts the rendezvous; the port is derived from the job id
        # so every rank agrees without communication.
        self.master_addr = nodes[0]
        self.master_port = _get_master_port(seed=job_id)
        self.rank = int(os.environ["SLURM_PROCID"])
        self.world_size = int(os.environ["SLURM_NTASKS"])
        logger.info(
            f"Master address: {self.master_addr}, Master port: {self.master_port}, Rank: {self.rank}, World size: {self.world_size}"
        )
        assert self.rank < self.world_size
        self.local_rank = int(os.environ["SLURM_LOCALID"])
        # Assumes tasks are spread evenly across nodes.
        self.local_world_size = self.world_size // node_count
        assert self.local_rank < self.local_world_size

    def _set_from_preset_env(self):
        """Adopt a fully preset torch.distributed environment (e.g. torchrun)."""
        logger.info("Initialization from preset environment")
        self.master_addr = os.environ["MASTER_ADDR"]
        self.master_port = os.environ["MASTER_PORT"]
        self.rank = int(os.environ["RANK"])
        self.world_size = int(os.environ["WORLD_SIZE"])
        assert self.rank < self.world_size
        self.local_rank = int(os.environ["LOCAL_RANK"])
        self.local_world_size = (
            int(os.environ["LOCAL_WORLD_SIZE"])
            if "LOCAL_WORLD_SIZE" in os.environ
            else self.world_size
        )
        assert self.local_rank < self.local_world_size

    def _set_from_local(self):
        """Single node and GPU job (i.e. local script run)"""
        logger.info("Initializing from local environment")
        self.master_addr = "127.0.0.1"
        self.master_port = _get_available_port()
        self.rank = 0
        self.world_size = 1
        self.local_rank = 0
        self.local_world_size = 1

    def export(self, *, overwrite: bool) -> "_TorchDistributedEnvironment":
        """Export the environment variables for distributed initialization."""
        env_vars = {
            "MASTER_ADDR": self.master_addr,
            "MASTER_PORT": str(self.master_port),
            "RANK": str(self.rank),
            "WORLD_SIZE": str(self.world_size),
            "LOCAL_RANK": str(self.local_rank),
            "LOCAL_WORLD_SIZE": str(self.local_world_size),
        }
        if not overwrite:
            # Refuse to silently clobber conflicting values already in the env.
            for k, v in env_vars.items():
                _check_env_variable(k, v)
        os.environ.update(env_vars)
        return self
def enable(
    *,
    set_cuda_current_device: bool = True,
    overwrite: bool = False,
    allow_nccl_timeout: bool = False,
):
    """Enable distributed mode
    Args:
        set_cuda_current_device: If True, call torch.cuda.set_device() to set the
            current PyTorch CUDA device to the one matching the local rank.
        overwrite: If True, overwrites already set variables. Else fails.
        allow_nccl_timeout: If True, set NCCL_ASYNC_ERROR_HANDLING=1 so NCCL
            collectives respect torch.distributed timeouts.
    """
    global _LOCAL_RANK, _LOCAL_WORLD_SIZE
    if _LOCAL_RANK >= 0 or _LOCAL_WORLD_SIZE >= 0:
        raise RuntimeError("Distributed mode already enabled")
    # Resolve launcher settings and export them so init_process_group can use the
    # env:// rendezvous.
    torch_env = _TorchDistributedEnvironment()
    torch_env.export(overwrite=overwrite)
    if set_cuda_current_device:
        torch.cuda.set_device(torch_env.local_rank)
    if allow_nccl_timeout:
        # This allows to use torch distributed timeout in a NCCL backend
        key, value = "NCCL_ASYNC_ERROR_HANDLING", "1"
        if not overwrite:
            _check_env_variable(key, value)
        os.environ[key] = value
    dist.init_process_group(backend="nccl")
    dist.barrier()
    # Finalize setup
    _LOCAL_RANK = torch_env.local_rank
    _LOCAL_WORLD_SIZE = torch_env.local_world_size
    _restrict_print_to_main_process()
================================================
FILE: storm/utils/logging.py
================================================
import datetime
import functools
import json
import logging
import os
import sys
import time
from collections import defaultdict, deque
from typing import List, Optional
import torch
import torch.distributed as dist
import wandb
from .distributed import get_global_rank, is_enabled, is_main_process
logger = logging.getLogger("STORM")
class SmoothedValue:
    """Track a stream of scalar values and expose smoothed statistics.

    The last `window_size` values feed the windowed statistics (median, avg,
    max, value), while a running total/count feeds the global average.
    """

    def __init__(self, window_size=20, fmt=None):
        # `fmt` is a str.format template with access to median/avg/global_avg/max/value.
        self.fmt = fmt if fmt else "{median:.4f} ({global_avg:.4f})"
        self.deque = deque(maxlen=window_size)
        self.total = 0.0
        self.count = 0

    def update(self, value, num=1):
        """Record `value`, weighted `num` times in the running totals."""
        self.deque.append(value)
        self.total += value * num
        self.count += num

    def synchronize_between_processes(self):
        """All-reduce count/total across ranks; windowed stats stay rank-local."""
        if not is_enabled():
            return
        stats = torch.tensor([self.count, self.total], dtype=torch.float64, device="cuda")
        dist.barrier()
        dist.all_reduce(stats)
        self.count = int(stats[0].item())
        self.total = stats[1].item()

    @property
    def median(self):
        """Median of the windowed values."""
        return torch.tensor(list(self.deque)).median().item()

    @property
    def avg(self):
        """Mean of the windowed values."""
        return torch.tensor(list(self.deque), dtype=torch.float32).mean().item()

    @property
    def global_avg(self):
        """Running mean over every recorded value (across ranks if synced)."""
        return self.total / self.count

    @property
    def max(self):
        """Maximum of the windowed values."""
        return max(self.deque)

    @property
    def value(self):
        """Most recently recorded value."""
        return self.deque[-1]

    def __str__(self):
        """Render the statistics using the configured format template."""
        stats = {
            "median": self.median,
            "avg": self.avg,
            "global_avg": self.global_avg,
            "max": self.max,
            "value": self.value,
        }
        return self.fmt.format(**stats)
class MetricLogger:
    """Aggregates SmoothedValue meters and periodically logs training progress.

    Attribute access falls through to the named meter, so `ml.loss` returns
    the SmoothedValue registered under "loss".
    """

    def __init__(self, delimiter="\t", output_file=None):
        """
        Args:
            delimiter: string placed between meter entries in log lines.
            output_file: optional path; per-interval medians are appended to it
                as JSON lines (main process only).
        """
        self.meters = defaultdict(SmoothedValue)
        self.delimiter = delimiter
        self.output_file = output_file

    def update(self, **kwargs):
        """Update the named meters; tensors are converted to Python scalars."""
        for k, v in kwargs.items():
            if v is None:
                continue
            if isinstance(v, torch.Tensor):
                v = v.item()
            assert isinstance(v, (float, int))
            self.meters[k].update(v)

    def __getattr__(self, attr):
        # Only invoked when normal attribute lookup fails: fall back to the
        # meters dict first, then to instance attributes.
        if attr in self.meters:
            return self.meters[attr]
        if attr in self.__dict__:
            return self.__dict__[attr]
        raise AttributeError("'{}' object has no attribute '{}'".format(type(self).__name__, attr))

    def __str__(self):
        loss_str = []
        for name, meter in self.meters.items():
            loss_str.append("{}: {}".format(name, str(meter)))
        return self.delimiter.join(loss_str)

    def synchronize_between_processes(self):
        """Synchronize every meter's global statistics across ranks."""
        for meter in self.meters.values():
            meter.synchronize_between_processes()

    def add_meter(self, name, meter):
        """Register a pre-configured meter (e.g. with a custom format string)."""
        self.meters[name] = meter

    def dump_in_output_file(self, iteration, iter_time, data_time):
        """Append one JSON line of meter medians to the output file (rank 0 only)."""
        if self.output_file is None or not is_main_process():
            return
        dict_to_dump = dict(
            iteration=iteration,
            iter_time=iter_time,
            data_time=data_time,
        )
        dict_to_dump.update({k: v.median for k, v in self.meters.items()})
        with open(self.output_file, "a") as f:
            f.write(json.dumps(dict_to_dump) + "\n")

    def log_every(self, iterable, print_freq, header=None, n_iterations=None, start_iteration=0):
        """Yield items from `iterable`, logging progress every `print_freq` steps.

        Tracks per-iteration and data-loading times plus ETA/elapsed time, and
        stops after `n_iterations` (defaults to len(iterable)).
        """
        i = start_iteration
        if not header:
            header = ""
        start_time = time.time()
        end = time.time()
        iter_time, data_time = SmoothedValue(fmt="{avg:.4f}"), SmoothedValue(fmt="{avg:.4f}")
        n_iterations = n_iterations or len(iterable)
        space_fmt = ":" + str(len(str(n_iterations))) + "d"
        log_list = [
            header,
            "[{0" + space_fmt + "}/{1}]",
            "eta: {eta}",
            "elapsed: {elapsed_time_str}",
            "{meters}",
            "time: {time}",
            "data: {data}",
        ]
        if torch.cuda.is_available():
            log_list += ["max mem: {memory:.0f}"]
        log_msg = self.delimiter.join(log_list)
        MB = 1024.0 * 1024.0
        for obj in iterable:
            data_time.update(time.time() - end)
            yield obj
            iter_time.update(time.time() - end)
            if i % print_freq == 0 or i == n_iterations - 1:
                self.dump_in_output_file(
                    iteration=i, iter_time=iter_time.avg, data_time=data_time.avg
                )
                eta_seconds = iter_time.global_avg * (n_iterations - i)
                eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
                elapsed_time = time.time() - start_time
                elapsed_time_str = str(datetime.timedelta(seconds=int(elapsed_time)))
                if torch.cuda.is_available():
                    logger.info(
                        log_msg.format(
                            i,
                            n_iterations,
                            eta=eta_string,
                            elapsed_time_str=elapsed_time_str,
                            meters=str(self),
                            time=str(iter_time),
                            data=str(data_time),
                            memory=torch.cuda.max_memory_allocated() / MB,
                        )
                    )
                else:
                    # Bug fix: the format string always contains
                    # {elapsed_time_str}, so it must be supplied on the CPU-only
                    # path as well (previously raised KeyError here).
                    logger.info(
                        log_msg.format(
                            i,
                            n_iterations,
                            eta=eta_string,
                            elapsed_time_str=elapsed_time_str,
                            meters=str(self),
                            time=str(iter_time),
                            data=str(data_time),
                        )
                    )
            i += 1
            end = time.time()
            if i >= n_iterations:
                break
        total_time = time.time() - start_time
        total_time_str = str(datetime.timedelta(seconds=int(total_time)))
        logger.info(
            "{} Total time: {} ({:.6f} s / it)".format(
                header, total_time_str, total_time / n_iterations
            )
        )
class WandbLogger:
    """Thin wrapper around a Weights & Biases run with error-tolerant logging."""

    def __init__(self, args, resume="must", id=None):
        """
        Args:
            args: config namespace; must provide entity/project/exp_name/log_dir.
            resume: wandb resume policy; see the downgrade note below.
            id: wandb run id to resume, or None to start/attach a fresh run.
        """
        # Without a concrete run id we cannot require resumption: downgrade to
        # "allow" (or "never" when resume is falsy).
        if id is None:
            resume = "allow" if resume else "never"
        wandb.init(
            config=args,
            entity=args.entity,
            project=args.project,
            name=args.exp_name,
            dir=args.log_dir,
            resume=resume,
            id=id,
        )
        self.run_id = wandb.run.id
        self.step = 0
        wandb.run.save()

    @staticmethod
    def wandb_safe_log(*args, **kwargs):
        """Safely logs metrics to Wandb, handling errors."""
        try:
            wandb.log(*args, **kwargs)
        except (wandb.CommError, BrokenPipeError):
            logger.error("Wandb logging failed, skipping...")

    def set_step(self, step=None):
        """Sets the logging step (increments by one when step is None)."""
        self.step = step if step is not None else self.step + 1

    def update(self, metrics):
        """Updates metrics in Weights & Biases."""
        # Drop None entries and convert tensors to plain Python scalars.
        log_dict = {
            k: (v.item() if isinstance(v, torch.Tensor) else v)
            for k, v in metrics.items()
            if v is not None
        }
        self.wandb_safe_log(log_dict, step=self.step)

    def flush(self):
        # No-op: present for interface compatibility with other loggers.
        pass

    def finish(self):
        """Finalize the wandb run, tolerating connection errors."""
        try:
            wandb.finish()
        except (wandb.CommError, BrokenPipeError):
            logger.error("Wandb failed to finish")
@functools.lru_cache()
def _configure_logger(
    name: Optional[str] = None,
    *,
    level: int = logging.DEBUG,
    output: Optional[str] = None,
    time_string: Optional[str] = None,
    rank0_log: bool = True,
) -> logging.Logger:
    """
    Configure a logger with optional file and console outputs.

    Args:
        name: Name of the logger to configure. Defaults to the root logger.
        level: Logging level (e.g., DEBUG, INFO). Default is DEBUG.
        output: Path to save logs. If None, logs are not saved.
            - If it ends with ".txt" or ".log", treated as a file name.
            - Otherwise, logs are saved to `output/logs/log.txt`.
        time_string: Timestamp string to append to log filenames.
        rank0_log: If True, only the main process writes a log file;
            otherwise every rank writes to its own `<file>.rank<k>` file.

    Returns:
        A configured logger instance.
    """
    logger = logging.getLogger(name)
    logger.setLevel(level)
    logger.propagate = False
    # Loosely match Google glog format:
    #   [IWEF]yyyymmdd hh:mm:ss.uuuuuu threadid file:line] msg
    # but use a shorter timestamp and include the logger name:
    #   [IWEF]yyyymmdd hh:mm:ss logger threadid file:line] msg
    fmt_prefix = "%(levelname).1s%(asctime)s %(name)s %(filename)s:%(lineno)s] "
    fmt_message = "%(message)s"
    fmt = fmt_prefix + fmt_message
    datefmt = "%Y%m%d %H:%M:%S"
    formatter = logging.Formatter(fmt=fmt, datefmt=datefmt)
    # stdout logging for main worker only
    if is_main_process():
        handler = logging.StreamHandler(stream=sys.stdout)
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    # File logging
    # Only create a file handler if:
    #   - There is an output path, AND
    #   - (We are rank 0 or rank0_log=False)
    if output and (is_main_process() or not rank0_log):
        if os.path.splitext(output)[-1] in (".txt", ".log"):
            filename = output
        else:
            if time_string is None:
                filename = os.path.join(output, "logs", "log.txt")
            else:
                filename = os.path.join(output, "logs", f"log_{time_string}.txt")
        # If it's not rank 0 but rank0_log=False, append the rank ID.
        # Fix: the previous code overwrote the computed path with a bogus
        # constant string, discarding the directory and basename chosen above;
        # the rank suffix must be appended to `filename` instead.
        if not is_main_process() and not rank0_log:
            global_rank = get_global_rank()
            filename = f"{filename}.rank{global_rank}"
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        # NOTE: the file handle is intentionally left open for the handler's
        # lifetime (same as before).
        handler = logging.StreamHandler(open(filename, "a"))
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger
def setup_logging(
    output: Optional[str] = None,
    *,
    name: Optional[str] = None,
    level: int = logging.DEBUG,
    capture_warnings: bool = True,
    time_string: Optional[str] = None,
) -> None:
    """Entry point for logging setup: warnings capture + handler wiring.

    Args:
        output: Where to write log files; ``None`` disables file logging.
            A ".txt"/".log" suffix is treated as an explicit file name,
            anything else as a directory (logs go to `output/logs/log.txt`).
        name: Logger name to configure; ``None`` means the root logger.
        level: Minimum level for the configured logger (default DEBUG).
        capture_warnings: Route Python `warnings` through logging when True.
        time_string: Optional timestamp suffix for the log file name.
    """
    logging.captureWarnings(capture_warnings)
    _configure_logger(
        name,
        level=level,
        output=output,
        time_string=time_string,
    )
================================================
FILE: storm/utils/losses.py
================================================
import torch
import torch.nn.functional as F
from einops import rearrange
from storm.dataset.constants import MEAN, STD
def compute_depth_loss(pred_depth, gt_depth, max_depth=None):
    """L1 depth loss over valid (gt > 0.01) pixels, normalized by max depth.

    If the (squeezed) prediction resolution differs from the ground truth,
    the prediction is bilinearly resized to the ground-truth resolution.
    Supports 4D ``(b, v, h, w)`` and 5D ``(b, t, v, h, w)`` predictions.

    Fix: the original used a bare ``try/except`` around the 4D unpacking as
    control flow, which also swallowed unrelated errors (including
    KeyboardInterrupt); we branch on ``ndim`` explicitly instead.

    Args:
        pred_depth: predicted depth map(s).
        gt_depth: ground-truth depth; values <= 0.01 are treated as invalid.
        max_depth: normalization constant; defaults to ``gt_depth.max()``.

    Returns:
        Scalar L1 loss tensor computed over valid pixels.
    """
    pred_depth = pred_depth.squeeze()
    gt_depth = gt_depth.squeeze()
    if pred_depth.shape != gt_depth.shape:
        # Resize pred_depth to match the gt resolution.
        gt_h, gt_w = gt_depth.shape[-2:]
        if pred_depth.ndim == 4:
            b, v, h, w = pred_depth.shape
            pred_depth = F.interpolate(
                pred_depth.reshape(b * v, 1, h, w),  # "b v h w -> (b v) 1 h w"
                size=(gt_h, gt_w),
                mode="bilinear",
                align_corners=False,
            ).reshape(b, v, gt_h, gt_w)
        else:
            b, t, v, h, w = pred_depth.shape
            pred_depth = F.interpolate(
                pred_depth.reshape(b * t * v, 1, h, w),  # "b t v h w -> (b t v) 1 h w"
                size=(gt_h, gt_w),
                mode="bilinear",
                align_corners=False,
            ).reshape(b, t, v, gt_h, gt_w)
    valid_mask = gt_depth > 0.01
    if max_depth is None:
        max_depth = gt_depth.max()
    pred_depth = pred_depth[valid_mask] / max_depth
    gt_depth = gt_depth[valid_mask] / max_depth
    return F.l1_loss(pred_depth, gt_depth)
def compute_sky_depth_loss(pred_depth, gt_sky_mask, sky_depth: float = 1e3, flow=None):
    """Push predicted depth toward ``sky_depth`` inside sky pixels.

    Args:
        pred_depth: predicted depth, ``(b, t, v, h, w)`` after squeezing.
        gt_sky_mask: sky mask (>0 means sky), ``(b, t, v, gt_h, gt_w)`` after
            squeezing.
        sky_depth: the depth value sky pixels should converge to.
        flow: optional per-pixel flow ``(b, t, v, h, w, c)``; flow inside the
            sky region is regularized toward zero.

    Returns:
        Tuple ``(sky_depth_loss, sky_flow_reg_loss)`` of scalar tensors.

    Fixes over the original:
      * ``b, t, v`` for the flow resize are now derived from ``flow`` itself;
        previously they were locals that only existed when ``pred_depth``
        needed resizing, raising ``NameError`` when the depth shapes already
        matched.
      * The sky-flow regularizer is now applied whenever ``flow`` is given;
        previously it was silently skipped when the flow was already at the
        target resolution.
    """
    pred_depth = pred_depth.squeeze()
    gt_sky_mask = gt_sky_mask.squeeze()
    gt_h, gt_w = gt_sky_mask.shape[-2:]
    if pred_depth.shape != gt_sky_mask.shape:
        # Resize pred_depth to match the sky-mask resolution.
        b, t, v, h, w = pred_depth.shape
        pred_depth = F.interpolate(
            pred_depth.reshape(b * t * v, 1, h, w),  # "b t v h w -> (b t v) 1 h w"
            size=(gt_h, gt_w),
            mode="bilinear",
            align_corners=False,
        ).reshape(b, t, v, gt_h, gt_w)
    if flow is not None:
        if flow.shape[-3] != gt_h or flow.shape[-2] != gt_w:
            # Derive the batch dims from the flow tensor itself (see Fixes).
            b, t, v, fh, fw, c = flow.shape
            # "b t v h w c -> (b t v) c h w"
            flow = F.interpolate(
                flow.permute(0, 1, 2, 5, 3, 4).reshape(b * t * v, c, fh, fw),
                size=(gt_h, gt_w),
                mode="bilinear",
                align_corners=False,
            )
            # "(b t v) c h w -> b t v h w c"
            flow = flow.reshape(b, t, v, c, gt_h, gt_w).permute(0, 1, 2, 4, 5, 3)
        # penalize flow in sky region
        sky_flow = flow[gt_sky_mask > 0]
        sky_flow_reg_loss = F.mse_loss(sky_flow, torch.zeros_like(sky_flow))
    else:
        sky_flow_reg_loss = torch.tensor(0.0).to(pred_depth.device)
    sky_region = gt_sky_mask > 0
    pred_depth = pred_depth[sky_region]
    return (
        F.mse_loss(pred_depth / sky_depth, torch.ones_like(pred_depth)) * 0.01,
        sky_flow_reg_loss,
    )
def compute_loss(output_dict, target_dict, args=None, lpips_loss=None):
    """Aggregate the training losses into a single dict.

    Args:
        output_dict: model output with "gs_params" (raw Gaussian parameters,
            incl. "forward_flow") and "render_results" (rendered maps; the
            "*_key" entries name the dict keys holding each rendered map).
        target_dict: ground truth with "target_image" (b t v c h w) and
            optionally "target_depth" / "target_sky_masks".
        args: namespace of loss switches/coefficients (enable_depth_loss,
            enable_sky_depth_loss, enable_sky_opacity_loss,
            enable_flow_reg_loss, sky_depth, flow_reg_coeff,
            sky_opacity_loss_coeff).
        lpips_loss: optional RGBLpipsLoss module; when given it supplies the
            RGB (and perceptual) terms, otherwise plain MSE is used.

    Returns:
        dict mapping loss names to scalar tensors.
    """
    gs_params, pred_dict = output_dict["gs_params"], output_dict["render_results"]
    device = pred_dict[pred_dict["rgb_key"]].device
    # Un-normalize predictions and targets back to image space before
    # computing photometric losses.
    mean, std = torch.tensor(MEAN).to(device), torch.tensor(STD).to(device)
    pred_rgb = pred_dict[pred_dict["rgb_key"]] * std + mean
    target_rgb = rearrange(target_dict["target_image"], "b t v c h w -> b t v h w c") * std + mean
    if lpips_loss is not None:
        loss_dict = lpips_loss(pred_rgb, target_rgb)
    else:
        rgb_loss = F.mse_loss(pred_rgb, target_rgb)
        loss_dict = {"rgb_loss": rgb_loss}
    if args.enable_depth_loss and "target_depth" in target_dict:
        pred_depth, target_depth = pred_dict[pred_dict["depth_key"]], target_dict["target_depth"]
        depth_loss = compute_depth_loss(pred_depth, target_depth)
        loss_dict["depth_loss"] = depth_loss
        if pred_dict["decoder_depth_key"] is not None:
            # Also supervise the decoder's depth head when present.
            pred_decoder_depth = pred_dict[pred_dict["decoder_depth_key"]]
            decoded_depth_loss = compute_depth_loss(pred_decoder_depth, target_depth)
            loss_dict["decoded_depth_loss"] = decoded_depth_loss
            if (
                args.enable_sky_depth_loss or args.enable_sky_opacity_loss
            ) and "target_sky_masks" in target_dict:
                sky_decoded_depth_loss, _ = compute_sky_depth_loss(
                    pred_decoder_depth,
                    target_dict["target_sky_masks"],
                    sky_depth=args.sky_depth,
                )
                # NOTE(review): key name has a typo ("decodede"); kept as-is
                # since downstream logging/aggregation may rely on it.
                loss_dict["sky_decodede_depth_loss"] = sky_decoded_depth_loss
    if args.enable_flow_reg_loss and pred_dict["flow_key"] is not None:
        # L2 regularizer pulling per-Gaussian forward flow toward zero.
        pred_flow = gs_params["forward_flow"]
        zero_flow = torch.zeros_like(gs_params["forward_flow"]).to(device)
        forward_flow_reg = F.mse_loss(pred_flow, zero_flow, reduction="none")
        loss_dict["flow_reg_loss"] = args.flow_reg_coeff * forward_flow_reg.mean()
    if args.enable_sky_depth_loss and "target_sky_masks" in target_dict:
        # real gaussian depth
        sky_depth_loss, sky_flow_reg_loss = compute_sky_depth_loss(
            pred_dict[pred_dict["depth_key"]],
            target_dict["target_sky_masks"],
            sky_depth=args.sky_depth,
            flow=(pred_dict[pred_dict["flow_key"]] if pred_dict["flow_key"] is not None else None),
        )
        loss_dict["sky_depth_loss"] = sky_depth_loss
        loss_dict["sky_flow_reg_loss"] = sky_flow_reg_loss
        # Encourage fully-opaque renders when pushing sky to a far depth.
        loss_dict["opacity_loss"] = 0.01 * F.mse_loss(
            pred_dict[pred_dict["alpha_key"]],
            torch.ones_like(pred_dict[pred_dict["alpha_key"]]),
        )
        if pred_dict["decoder_depth_key"] is not None:
            (sky_decoded_depth_loss, sky_decoded_flow_reg_loss,) = compute_sky_depth_loss(
                pred_dict[pred_dict["decoder_depth_key"]],
                target_dict["target_sky_masks"],
                sky_depth=args.sky_depth,
                flow=(
                    pred_dict[pred_dict["decoder_flow_key"]]
                    if pred_dict["decoder_flow_key"] is not None
                    else None
                ),
            )
            # NOTE(review): same "decodede" typo as above; this may overwrite
            # the value set in the depth-loss branch.
            loss_dict["sky_decodede_depth_loss"] = sky_decoded_depth_loss
            loss_dict["sky_decoded_flow_reg_loss"] = sky_decoded_flow_reg_loss
    elif args.enable_sky_opacity_loss and "target_sky_masks" in target_dict:
        # Supervise rendered opacity directly: sky pixels -> 0, others -> 1.
        opacity = pred_dict[pred_dict["alpha_key"]].squeeze(-1)
        b, t, v, h, w = opacity.shape
        gt_h, gt_w = target_dict["target_sky_masks"].shape[-2:]
        if h != gt_h or w != gt_w:
            opacity = F.interpolate(
                rearrange(opacity, "b t v h w -> (b t v) 1 h w"),
                size=(gt_h, gt_w),
                mode="bilinear",
                align_corners=False,
            )
            opacity = rearrange(opacity, "(b t v) 1 h w -> b t v h w", b=b, t=t, v=v)
        sky_opacity_loss = F.l1_loss(opacity, 1 - target_dict["target_sky_masks"])
        loss_dict["sky_opacity_loss"] = sky_opacity_loss * args.sky_opacity_loss_coeff
    return loss_dict
def compute_scene_flow_metrics(pred, labels):
    """
    Computes the scene flow metrics between the predicted and target scene flow values.
    # modified from https://github.com/Lilac-Lee/Neural_Scene_Flow_Prior/blob/0e4f403c73cb3fcd5503294a7c461926a4cdd1ad/utils.py#L12

    Args:
        pred (Tensor): predicted scene flow values, shape (..., 3)
        labels (Tensor): target scene flow values, shape (..., 3)

    Returns:
        dict: scene flow metrics (EPE3D, acc3d_strict, acc3d_relax, outlier,
            angle_error)

    Fix: comparisons such as ``l2_norm < 0.05`` already produce boolean
    tensors; wrapping them in the legacy ``torch.BoolTensor(...)``
    constructor (deprecated) only made redundant copies and has been removed.
    """
    # Per-point endpoint error and relative error.
    l2_norm = torch.sqrt(torch.sum((pred - labels) ** 2, -1)).cpu()
    # Absolute distance error.
    labels_norm = torch.sqrt(torch.sum(labels * labels, -1)).cpu()
    relative_err = l2_norm / (labels_norm + 1e-20)
    EPE3D = torch.mean(l2_norm).item()  # Mean absolute distance error
    # NOTE: Acc_5 — fraction of points within 5cm absolute OR 5% relative error.
    error_lt_5 = l2_norm < 0.05
    relative_err_lt_5 = relative_err < 0.05
    acc3d_strict = torch.mean((error_lt_5 | relative_err_lt_5).float()).item()
    # NOTE: Acc_10 — fraction of points within 10cm absolute OR 10% relative error.
    error_lt_10 = l2_norm < 0.1
    relative_err_lt_10 = relative_err < 0.1
    acc3d_relax = torch.mean((error_lt_10 | relative_err_lt_10).float()).item()
    # NOTE: outliers — worse than 30cm absolute OR 10% relative error.
    l2_norm_gt_3 = l2_norm > 0.3
    relative_err_gt_10 = relative_err > 0.1
    outlier = torch.mean((l2_norm_gt_3 | relative_err_gt_10).float()).item()
    # NOTE: angle error between unit flow directions.
    unit_label = labels / (labels.norm(dim=-1, keepdim=True) + 1e-7)
    unit_pred = pred / (pred.norm(dim=-1, keepdim=True) + 1e-7)
    # it doesn't make sense to compute angle error on zero vectors
    # we use a threshold of 0.1 to avoid noisy gt flow
    non_zero_flow_mask = labels_norm > 0.1
    # Apply the mask to filter out zero vectors
    unit_label = unit_label[non_zero_flow_mask]
    unit_pred = unit_pred[non_zero_flow_mask]
    # Initialize angle_error
    angle_error = 0.0
    # Check if there are any valid vectors to compute the angle error
    if unit_label.numel() > 0:
        eps = 1e-7
        # Compute the dot product and clamp its values to avoid numerical issues with acos
        dot_product = (unit_label * unit_pred).sum(dim=-1).clamp(min=-1 + eps, max=1 - eps)
        # Optionally, handle any remaining NaNs in the dot product
        dot_product = torch.nan_to_num(dot_product, nan=0.0)
        # Compute the angle error in radians and take the mean
        angle_error = torch.acos(dot_product).mean().item()
    # Release cached GPU memory (no-op when CUDA is uninitialized).
    torch.cuda.empty_cache()
    return {
        "EPE3D": EPE3D,
        "acc3d_strict": acc3d_strict,
        "acc3d_relax": acc3d_relax,
        "outlier": outlier,
        "angle_error": angle_error,
    }
================================================
FILE: storm/utils/lpips.py
================================================
"""Stripped version of https://github.com/richzhang/PerceptualSimilarity/tree/master/models"""
import hashlib
import os
from collections import namedtuple
import requests
import torch
import torch.nn as nn
from torchvision import models
from tqdm import tqdm
URL_MAP = {"vgg_lpips": "https://heibox.uni-heidelberg.de/f/607503859c864bc1b30b/?dl=1"}
CKPT_MAP = {"vgg_lpips": "vgg.pth"}
MD5_MAP = {"vgg_lpips": "d507d7349b931f0638a25a48a722f98a"}
def download(url, local_path, chunk_size=1024):
    """Stream *url* to *local_path* with a byte-accurate progress bar.

    Args:
        url: HTTP(S) URL to fetch.
        local_path: destination file path; parent directory is created.
        chunk_size: streaming chunk size in bytes.
    """
    os.makedirs(os.path.split(local_path)[0], exist_ok=True)
    with requests.get(url, stream=True) as r:
        total_size = int(r.headers.get("content-length", 0))
        with tqdm(total=total_size, unit="B", unit_scale=True) as pbar:
            with open(local_path, "wb") as f:
                for data in r.iter_content(chunk_size=chunk_size):
                    if data:
                        f.write(data)
                        # Fix: advance by the bytes actually received — the
                        # final chunk is usually smaller than chunk_size, so
                        # updating by chunk_size over-counted the progress.
                        pbar.update(len(data))
def md5_hash(path):
    """Return the hexadecimal MD5 digest of the file at *path*."""
    with open(path, "rb") as fp:
        return hashlib.md5(fp.read()).hexdigest()
def get_ckpt_path(name, root, check=False):
    """Return the local checkpoint path for *name*, downloading if needed.

    Args:
        name: key into URL_MAP/CKPT_MAP/MD5_MAP.
        root: directory the checkpoint is stored under.
        check: when True, re-download if the existing file's MD5 mismatches.
    """
    assert name in URL_MAP
    path = os.path.join(root, CKPT_MAP[name])
    needs_fetch = not os.path.exists(path) or (check and md5_hash(path) != MD5_MAP[name])
    if needs_fetch:
        print("Downloading {} model from {} to {}".format(name, URL_MAP[name], path))
        download(URL_MAP[name], path)
        md5 = md5_hash(path)
        # Guard against a corrupted/partial download.
        assert md5 == MD5_MAP[name], md5
    return path
class LPIPS(nn.Module):
    # Learned perceptual metric
    """LPIPS perceptual distance on frozen VGG-16 features.

    Five VGG activation stages are channel-normalized, their squared
    differences projected to one channel by learned 1x1 convs
    (``lin0..lin4``), spatially averaged, and summed into a single score.
    All parameters are frozen; the module is inference-only.
    """

    def __init__(self, use_dropout=True):
        super().__init__()
        self.scaling_layer = ScalingLayer()
        self.chns = [64, 128, 256, 512, 512]  # vgg16 feature channels per stage
        self.net = vgg16(pretrained=True, requires_grad=False)
        # One learned 1x1 projection per VGG stage.
        self.lin0 = NetLinLayer(self.chns[0], use_dropout=use_dropout)
        self.lin1 = NetLinLayer(self.chns[1], use_dropout=use_dropout)
        self.lin2 = NetLinLayer(self.chns[2], use_dropout=use_dropout)
        self.lin3 = NetLinLayer(self.chns[3], use_dropout=use_dropout)
        self.lin4 = NetLinLayer(self.chns[4], use_dropout=use_dropout)
        self.load_from_pretrained()
        # Freeze everything: LPIPS is used as a fixed metric/loss.
        for param in self.parameters():
            param.requires_grad = False

    def load_from_pretrained(self, name="vgg_lpips"):
        # Fetch (and cache under ckpts/lpips) the LPIPS linear-head weights.
        ckpt = get_ckpt_path(name, "ckpts/lpips", check=True)
        self.load_state_dict(torch.load(ckpt, map_location=torch.device("cpu")), strict=False)
        print("loaded pretrained LPIPS loss from {}".format(ckpt))

    @classmethod
    def from_pretrained(cls, name="vgg_lpips"):
        # NOTE(review): cls() already runs load_from_pretrained(), and this
        # call to get_ckpt_path omits its required `root` argument — this
        # constructor looks unused/broken; verify before relying on it.
        if name != "vgg_lpips":
            raise NotImplementedError
        model = cls()
        ckpt = get_ckpt_path(name)
        model.load_state_dict(torch.load(ckpt, map_location=torch.device("cpu")), strict=False)
        return model

    def forward(self, input, target):
        """Return the LPIPS distance map between `input` and `target`.

        Both inputs are shifted/scaled to the VGG statistics first; the
        result keeps singleton spatial dims (spatial_average keepdim=True).
        """
        in0_input, in1_input = (self.scaling_layer(input), self.scaling_layer(target))
        outs0, outs1 = self.net(in0_input), self.net(in1_input)
        feats0, feats1, diffs = {}, {}, {}
        lins = [self.lin0, self.lin1, self.lin2, self.lin3, self.lin4]
        # Per-stage: unit-normalize channels, then squared difference.
        for kk in range(len(self.chns)):
            feats0[kk], feats1[kk] = normalize_tensor(outs0[kk]), normalize_tensor(outs1[kk])
            diffs[kk] = (feats0[kk] - feats1[kk]) ** 2
        res = [
            spatial_average(lins[kk].model(diffs[kk]), keepdim=True) for kk in range(len(self.chns))
        ]
        # Sum the per-stage contributions into the final score.
        val = res[0]
        for l in range(1, len(self.chns)):
            val += res[l]
        return val
class ScalingLayer(nn.Module):
    """Shift/scale RGB input to the channel statistics expected by LPIPS."""

    def __init__(self):
        super().__init__()
        shift = torch.Tensor([-0.030, -0.088, -0.188])[None, :, None, None]
        scale = torch.Tensor([0.458, 0.448, 0.450])[None, :, None, None]
        # Buffers (not parameters): move with .to()/state_dict but are fixed.
        self.register_buffer("shift", shift)
        self.register_buffer("scale", scale)

    def forward(self, inp):
        """Normalize an NCHW image batch channel-wise."""
        return (inp - self.shift) / self.scale
class NetLinLayer(nn.Module):
    """A single linear layer which does a 1x1 conv (optional dropout first)."""

    def __init__(self, chn_in, chn_out=1, use_dropout=False):
        super().__init__()
        layers = []
        if use_dropout:
            layers.append(nn.Dropout())
        # 1x1 convolution acts as a learned per-pixel linear projection.
        layers.append(nn.Conv2d(chn_in, chn_out, 1, stride=1, padding=0, bias=False))
        self.model = nn.Sequential(*layers)
class vgg16(torch.nn.Module):
    """VGG-16 feature extractor returning the five relu activation stages."""

    def __init__(self, requires_grad=False, pretrained=True):
        super(vgg16, self).__init__()
        features = models.vgg16(pretrained=pretrained).features
        # Layer-index boundaries of the five relu stages in torchvision's
        # vgg16.features.
        stage_bounds = [(0, 4), (4, 9), (9, 16), (16, 23), (23, 30)]
        stages = []
        for start, stop in stage_bounds:
            stage = torch.nn.Sequential()
            for idx in range(start, stop):
                stage.add_module(str(idx), features[idx])
            stages.append(stage)
        self.slice1, self.slice2, self.slice3, self.slice4, self.slice5 = stages
        self.N_slices = 5
        if not requires_grad:
            # Freeze the backbone for metric use.
            for param in self.parameters():
                param.requires_grad = False

    def forward(self, X):
        """Run X through the stages, collecting each stage's output."""
        h_relu1_2 = self.slice1(X)
        h_relu2_2 = self.slice2(h_relu1_2)
        h_relu3_3 = self.slice3(h_relu2_2)
        h_relu4_3 = self.slice4(h_relu3_3)
        h_relu5_3 = self.slice5(h_relu4_3)
        vgg_outputs = namedtuple(
            "VggOutputs", ["relu1_2", "relu2_2", "relu3_3", "relu4_3", "relu5_3"]
        )
        return vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3, h_relu5_3)
def normalize_tensor(x, eps=1e-8):
    """L2-normalize `x` along the channel dimension (dim=1)."""
    denom = x.pow(2).sum(dim=1, keepdim=True).sqrt()
    return x / (denom + eps)
def spatial_average(x, keepdim=True):
    """Mean of an NCHW tensor over its spatial dims (H, W)."""
    return x.mean(dim=[2, 3], keepdim=keepdim)
================================================
FILE: storm/utils/lpips_loss.py
================================================
import torch.nn as nn
import torch.nn.functional as F
from einops import rearrange
from .lpips import LPIPS
class RGBLpipsLoss(nn.Module):
    """
    Loss module that combines RGB reconstruction loss (MSE) and optional perceptual loss (LPIPS).

    Args:
        perceptual_weight (float): Weight for the perceptual loss.
        use_perceptual_loss (bool): Flag to determine whether perceptual loss is used.
        enable_perceptual_loss (bool): Initial state of perceptual loss usage.
    """

    def __init__(
        self,
        perceptual_weight=0.5,
        use_perceptual_loss=True,
        enable_perceptual_loss=True,
    ):
        super().__init__()
        # Initialize the perceptual loss (LPIPS) if enabled
        # NOTE(review): self.perceptual_loss only exists when
        # enable_perceptual_loss=True at construction; re-enabling later via
        # set_perceptual_loss on an instance built with False would hit an
        # AttributeError in forward — confirm callers never do that.
        if enable_perceptual_loss:
            self.perceptual_loss = LPIPS().eval()
            # LPIPS is a frozen metric; exclude it from optimization.
            for param in self.perceptual_loss.parameters():
                param.requires_grad = False
        self.perceptual_weight = perceptual_weight
        self.use_perceptual_loss = use_perceptual_loss
        self.enable_perceptual_loss = enable_perceptual_loss

    def set_perceptual_loss(self, enable=True):
        """
        Enable or disable the perceptual loss.

        Args:
            enable (bool): Whether to enable perceptual loss.
        """
        # use_perceptual_loss acts as a hard master switch.
        self.enable_perceptual_loss = enable and self.use_perceptual_loss

    def forward(self, rgb, targets):
        """
        Compute the RGB reconstruction loss and (optionally) perceptual loss.

        Args:
            rgb (Tensor): Predicted RGB values with shape (..., H, W, C).
            targets (Tensor): Ground truth RGB values with shape (..., H, W, C).

        Returns:
            dict: Dictionary containing 'rgb_loss' and optionally 'perceptual_loss'.
        """
        # Rearrange input tensors to the format (batch, channels, height, width)
        rgb = rearrange(rgb, "... h w c -> (...) c h w")
        targets = rearrange(targets, "... h w c -> (...) c h w")
        rgb_loss = F.mse_loss(rgb, targets)
        loss_dict = {"rgb_loss": rgb_loss}
        if self.enable_perceptual_loss:
            perceptual_loss = self.perceptual_weight * self.perceptual_loss(rgb, targets)
            loss_dict["perceptual_loss"] = perceptual_loss.mean()
        return loss_dict
================================================
FILE: storm/utils/misc.py
================================================
import collections.abc
import datetime
import logging
import math
import os
import random
from collections import OrderedDict
from glob import glob
from itertools import repeat
import numpy as np
import torch
from torch import inf
logger = logging.getLogger("STORM")
def fix_random_seeds(seed=31):
    """Seed python `random`, numpy, and torch (CPU + all CUDA devices)."""
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
def _ntuple(n):
"""
Creates a parser that converts an input to a tuple of length n.
Args:
n (int): Length of the tuple.
Returns:
Callable: A function that parses the input into a tuple of length n.
"""
def parse(x):
if isinstance(x, collections.abc.Iterable) and not isinstance(x, str):
return tuple(x)
return tuple(repeat(x, n))
return parse
to_2tuple = _ntuple(2)
def cleanup_checkpoints(ckpt_dir, keep_num=1):
    """Delete stale checkpoints, keeping the newest `keep_num` numbered ones.

    "latest"/"best" checkpoints are never removed; a `latest.pth` symlink is
    refreshed to point at the highest-numbered surviving checkpoint.

    Args:
        ckpt_dir (str): Directory containing the checkpoints.
        keep_num (int): Number of recent checkpoints to keep.
    """
    candidates = [
        path
        for path in glob(f"{ckpt_dir}/*.pth")
        if "latest" not in path and "best" not in path
    ]
    # Checkpoints are named ..._<step>.pth; sort numerically by step.
    candidates.sort(key=lambda path: int(path.split("_")[-1].split(".")[0]))
    for stale in candidates[:-keep_num]:
        os.remove(stale)
        logger.info(f"Removed checkpoint: {stale}")
    # Create or update latest symlink
    if candidates:
        latest_symlink = f"{ckpt_dir}/latest.pth"
        try:
            os.remove(latest_symlink)
        except FileNotFoundError:
            pass
        os.symlink(os.path.abspath(candidates[-1]), latest_symlink)
        logger.info(f"Created symlink: {latest_symlink} -> {candidates[-1]}")
def load_model(args, model_without_ddp, optimizer=None, loss_scaler=None):
    """
    Load model, optimizer, and loss scaler states from a checkpoint.

    Resolution order: `args.resume_from` (or, with `args.auto_resume`, the
    most recently modified file in `args.ckpt_dir`) restores full training
    state; otherwise `args.load_from` initializes model weights only.
    Mutates `args` in place (start_iteration, prev_num_iterations,
    total_elapsed_time, resume_from).

    Args:
        args: Arguments containing checkpoint paths and loading configurations.
        model_without_ddp (torch.nn.Module): Model to load the state into.
        optimizer (torch.optim.Optimizer, optional): Optimizer for loading states.
        loss_scaler (torch.cuda.amp.GradScaler, optional): Loss scaler for AMP.

    Returns:
        int: Visualization slice ID if available.
    """
    vis_slice_id, checkpoint_loaded = 0, False
    if args.resume_from or args.auto_resume:
        if not args.resume_from:
            # Checkpoint not provided, auto-resume from the latest checkpoint
            checkpoints = [ckpt for ckpt in glob(f"{args.ckpt_dir}/*.pth") if "latest" not in ckpt]
            checkpoints = sorted(checkpoints, key=os.path.getmtime)
            if len(checkpoints) > 0:
                # Resume from the latest checkpoint
                args.resume_from = checkpoints[-1]
        if args.resume_from and os.path.exists(args.resume_from):
            logger.info(f"[Model-resume] Resuming from: {args.resume_from}")
            checkpoint = torch.load(args.resume_from, map_location="cpu", weights_only=False)
            # Resume requires an exact architectural match (strict=True).
            msg = model_without_ddp.load_state_dict(checkpoint["model"], strict=True)
            logger.info(f"[Model-resume] Loaded model: {msg}")
            checkpoint_loaded = True
            if "optimizer" in checkpoint and "latest_step" in checkpoint and optimizer is not None:
                msg = optimizer.load_state_dict(checkpoint["optimizer"])
                logger.info(f"[Model-resume] Loaded optimizer: {msg}")
                args.start_iteration = checkpoint["latest_step"] + 1
            if "loss_scaler" in checkpoint and loss_scaler is not None:
                msg = loss_scaler.load_state_dict(checkpoint["loss_scaler"])
                logger.info(f"[Model-resume] Loaded loss_scaler: {msg}")
            if "vis_slice_id" in checkpoint:
                # Continue visualization from the next slice.
                vis_slice_id = checkpoint["vis_slice_id"] + 1
            if "latest_step" in checkpoint:
                args.prev_num_iterations = checkpoint["latest_step"]
                args.start_iteration = checkpoint["latest_step"] + 1
            if "total_elapsed_time" in checkpoint:
                args.total_elapsed_time = float(checkpoint["total_elapsed_time"])
                elapsed_time_str = str(datetime.timedelta(seconds=int(args.total_elapsed_time)))
                logger.info(f"Loaded elapsed_time: {elapsed_time_str}")
            del checkpoint
    if not checkpoint_loaded and args.load_from and os.path.exists(args.load_from):
        # args.resume_from has the highest priority. If it's not found, try args.load_from
        # this is useful for loading a model without optimizer and scheduler states
        # or for loading a pre-trained model for initialization, fine-tuning, or evaluation.
        logger.info(f"Loading checkpoint from: {args.load_from}")
        # NOTE(review): no map_location here (unlike the resume path above) —
        # tensors load onto the devices recorded in the checkpoint; confirm
        # this is intended.
        checkpoint = torch.load(args.load_from)
        if "model" in checkpoint:
            checkpoint = checkpoint["model"]
        try:
            msg = model_without_ddp.load_state_dict(checkpoint, strict=False)
            checkpoint_loaded = True
            logger.info(f"[Model-init] Loaded model: {msg}")
        except Exception as e:
            logger.error(e)
            logger.info(f"[Model-init] Loading model from {args.load_from} failed. Error: {e}")
            # Fall back to a partial load: keep only tensors whose shapes match.
            model_state_dict = model_without_ddp.state_dict()
            # Create a new OrderedDict that will only contain matching parameter shapes
            filtered_dict = OrderedDict()
            for k, v in checkpoint.items():
                if k in model_state_dict:
                    if v.shape == model_state_dict[k].shape:
                        filtered_dict[k] = v
                    else:
                        logger.info(
                            f"Skipping parameter due to shape mismatch: {k} "
                            f"({v.shape} vs {model_state_dict[k].shape})"
                        )
                else:
                    logger.info(f"Skipping unexpected key: {k}")
            # Load the filtered state dict into the model (strict=False to allow missing keys)
            msg = model_without_ddp.load_state_dict(filtered_dict, strict=False)
            logger.info(f"Load status: {msg}")
        del checkpoint
    if not checkpoint_loaded:
        logger.info(f"Training from scratch. No checkpoint found.")
    return vis_slice_id
def adjust_learning_rate(optimizer, iteration, args):
    """Set the learning rate for this iteration (warmup + schedule).

    During warmup the lr ramps linearly from 0 to args.lr; afterwards it
    follows either a constant or a cosine schedule down to args.min_lr.
    Each param group's lr is scaled by its optional "lr_scale".

    Args:
        optimizer (torch.optim.Optimizer): Optimizer to update learning rate.
        iteration (int): Current training iteration.
        args: Arguments defining the learning rate schedule
            (warmup_iters, lr, min_lr, lr_sched, num_iterations).

    Returns:
        float: The base (unscaled) learning rate that was applied.
    """
    if iteration < args.warmup_iters:
        # Linear warmup.
        lr = args.lr * iteration / args.warmup_iters
    elif args.lr_sched == "constant":
        lr = args.lr
    elif args.lr_sched == "cosine":
        progress = (iteration - args.warmup_iters) / (args.num_iterations - args.warmup_iters)
        lr = args.min_lr + (args.lr - args.min_lr) * 0.5 * (1.0 + math.cos(math.pi * progress))
    else:
        raise ValueError(f"Unknown lr_sched: {args.lr_sched}")
    for group in optimizer.param_groups:
        group["lr"] = lr * group.get("lr_scale", 1.0)
    return lr
def unwrap_model(model):
    """Return the wrapped module of a DDP/DataParallel model, else the model itself."""
    return getattr(model, "module", model)
def get_grad_norm_(parameters, norm_type=2.0):
    """Total gradient norm over parameters that currently hold a gradient.

    Args:
        parameters (Iterable): Parameters whose gradients are measured.
        norm_type (float): Norm order; ``inf`` gives the max-abs norm.

    Returns:
        torch.Tensor: Scalar gradient norm (0.0 when no grads exist).
    """
    norm_type = float(norm_type)
    grads = [p.grad.detach() for p in parameters if p.grad is not None]
    if not grads:
        return torch.tensor(0.0)
    device = grads[0].device
    if norm_type == inf:
        # Infinity norm: max absolute entry across all gradients.
        return max(g.abs().max().to(device) for g in grads)
    per_param = torch.stack([torch.norm(g, norm_type).to(device) for g in grads])
    return torch.norm(per_param, norm_type)
class NativeScalerWithGradNormCount:
    """
    A wrapper for torch.cuda.amp.GradScaler that also reports gradient norms.

    Args:
        enabled (bool): Whether to enable automatic mixed precision.
    """

    state_dict_key = "amp_scaler"

    def __init__(self, enabled=True):
        self._scaler = torch.cuda.amp.GradScaler(enabled=enabled)

    def __call__(
        self,
        loss,
        optimizer,
        parameters,
        clip_grad=None,
        create_graph=False,
        update_grad=True,
    ):
        """Backward the scaled loss and (optionally) step the optimizer.

        Returns the gradient norm when update_grad is True, else None.
        """
        self._scaler.scale(loss).backward(create_graph=create_graph)
        if not update_grad:
            # Gradient accumulation step: no unscale/step/update yet.
            return None
        self._scaler.unscale_(optimizer)
        if clip_grad is not None and clip_grad > 0.0:
            norm = torch.nn.utils.clip_grad_norm_(parameters, clip_grad)
        else:
            norm = get_grad_norm_(parameters)
        self._scaler.step(optimizer)
        self._scaler.update()
        return norm

    def state_dict(self):
        """Save state dictionary for the scaler."""
        return self._scaler.state_dict()

    def load_state_dict(self, state_dict):
        """Load state dictionary for the scaler."""
        self._scaler.load_state_dict(state_dict)
================================================
FILE: storm/visualization/annotation.py
================================================
"""
From https://github.com/dcharatan/pixelsplat/blob/main/src/visualization/annotation.py
"""
from pathlib import Path
from string import ascii_letters, digits, punctuation
import numpy as np
import torch
from einops import rearrange
from jaxtyping import Float
from PIL import Image, ImageDraw, ImageFont
from torch import Tensor
from .layout import vcat
EXPECTED_CHARACTERS = digits + punctuation + ascii_letters
def draw_label(
    text: str,
    font: Path,
    font_size: int,
    device: torch.device = torch.device("cpu"),
) -> Float[Tensor, "3 height width"]:
    """Draw a black label on a white background with no border.

    Falls back to PIL's default font if the given font file cannot be loaded.
    The canvas height covers the full ascender/descender range of
    EXPECTED_CHARACTERS so labels of different text align consistently.
    """
    try:
        loaded_font = ImageFont.truetype(str(font), font_size)
    except OSError:
        loaded_font = ImageFont.load_default()
    left, _, right, _ = loaded_font.getbbox(text)
    _, top, _, bottom = loaded_font.getbbox(EXPECTED_CHARACTERS)
    canvas = Image.new("RGB", (right - left, bottom - top), color="white")
    ImageDraw.Draw(canvas).text((0, 0), text, font=loaded_font, fill="black")
    label = torch.tensor(np.array(canvas) / 255, dtype=torch.float32, device=device)
    return rearrange(label, "h w c -> c h w")
def add_label(
    image: Float[Tensor, "3 width height"],
    label: str,
    font: Path = Path("assets/Inter-Regular.otf"),
    font_size: int = 24,
    align: str = "center",
) -> Float[Tensor, "3 width_with_label height_with_label"]:
    """Stack a rendered text label above *image* with a small gap."""
    rendered = draw_label(label, font, font_size, image.device)
    return vcat(rendered, image, align=align, gap=4)
================================================
FILE: storm/visualization/layout.py
================================================
"""
From https://github.com/dcharatan/pixelsplat/blob/main/src/visualization/layout.py
This file contains useful layout utilities for images. They are:
- add_border: Add a border to an image.
- cat/hcat/vcat: Join images by arranging them in a line. If the images have different
sizes, they are aligned as specified (start, end, center). Allows you to specify a gap
between images.
Images are assumed to be float32 tensors with shape (channel, height, width).
"""
from typing import Any, Generator, Iterable, Literal, Optional, Union
import numpy as np
import torch
import torch.nn.functional as F
from einops import rearrange, repeat
from jaxtyping import Float, UInt8
from torch import Tensor
Alignment = Literal["start", "center", "end"]
Axis = Literal["horizontal", "vertical"]
Color = Union[
int,
float,
Iterable[int],
Iterable[float],
Float[Tensor, "#channel"],
Float[Tensor, ""],
]
def _sanitize_color(color: Color) -> Float[Tensor, "#channel"]:
    """Coerce any accepted color spec into a 1-D float32 tensor."""
    # Tensors are first converted to plain Python values.
    if isinstance(color, torch.Tensor):
        color = color.tolist()
    # Wrap scalars; materialize iterables.
    values = list(color) if isinstance(color, Iterable) else [color]
    return torch.tensor(values, dtype=torch.float32)
def _intersperse(iterable: Iterable, delimiter: Any) -> Generator[Any, None, None]:
it = iter(iterable)
yield next(it)
for item in it:
yield delimiter
yield item
def _get_main_dim(main_axis: Axis) -> int:
    """Tensor dim (of a CHW image) indexed by the concatenation axis."""
    if main_axis == "horizontal":
        return 2
    if main_axis == "vertical":
        return 1
    raise KeyError(main_axis)
def _get_cross_dim(main_axis: Axis) -> int:
    """Tensor dim (of a CHW image) orthogonal to the concatenation axis."""
    if main_axis == "horizontal":
        return 1
    if main_axis == "vertical":
        return 2
    raise KeyError(main_axis)
def _compute_offset(base: int, overlay: int, align: Alignment) -> slice:
    """Slice placing a span of length `overlay` within `base` per `align`."""
    assert base >= overlay
    if align == "start":
        start = 0
    elif align == "center":
        start = (base - overlay) // 2
    elif align == "end":
        start = base - overlay
    else:
        raise KeyError(align)
    return slice(start, start + overlay)
def overlay(
    base: Float[Tensor, "channel base_height base_width"],
    overlay: Float[Tensor, "channel overlay_height overlay_width"],
    main_axis: Axis,
    main_axis_alignment: Alignment,
    cross_axis_alignment: Alignment,
) -> Float[Tensor, "channel base_height base_width"]:
    """Paste *overlay* onto a copy of *base* at the requested alignment.

    The overlay must fit inside the base in both spatial dimensions.
    """
    # The overlay must be smaller than the base.
    _, base_height, base_width = base.shape
    _, overlay_height, overlay_width = overlay.shape
    assert base_height >= overlay_height and base_width >= overlay_width
    # Compute spacing on the main dimension.
    main_dim = _get_main_dim(main_axis)
    main_slice = _compute_offset(base.shape[main_dim], overlay.shape[main_dim], main_axis_alignment)
    # Compute spacing on the cross dimension.
    cross_dim = _get_cross_dim(main_axis)
    cross_slice = _compute_offset(
        base.shape[cross_dim], overlay.shape[cross_dim], cross_axis_alignment
    )
    # Combine the slices and paste the overlay onto the base accordingly.
    selector = [..., None, None]
    selector[main_dim] = main_slice
    selector[cross_dim] = cross_slice
    result = base.clone()
    # Fix: index with a tuple — indexing a tensor with a *list* containing
    # slices is legacy behavior (deprecated in PyTorch) and is interpreted
    # as advanced indexing in recent releases.
    result[tuple(selector)] = overlay
    return result
def cat(
    main_axis: Axis,
    *images: Iterable[Float[Tensor, "channel _ _"]],
    align: Alignment = "center",
    gap: int = 8,
    gap_color: Color = 1,
) -> Float[Tensor, "channel height width"]:
    """Arrange images in a line. The interface resembles a CSS div with flexbox.

    Each image is padded along the cross axis to the largest cross size
    (placement controlled by `align`), optionally separated by `gap` pixels
    of `gap_color`, then concatenated along `main_axis`.
    """
    device = images[0].device
    gap_color = _sanitize_color(gap_color).to(device)
    # Find the maximum image side length in the cross axis dimension.
    cross_dim = _get_cross_dim(main_axis)
    cross_axis_length = max(image.shape[cross_dim] for image in images)
    # Pad the images.
    padded_images = []
    for image in images:
        # Create an empty image with the correct size.
        padded_shape = list(image.shape)
        padded_shape[cross_dim] = cross_axis_length
        base = torch.ones(padded_shape, dtype=torch.float32, device=device)
        base = base * gap_color[:, None, None]
        # Paste the image at the start of the main axis, aligned on the cross axis.
        padded_images.append(overlay(base, image, main_axis, "start", align))
    # Intersperse separators if necessary.
    if gap > 0:
        # Generate a separator.
        c, _, _ = images[0].shape
        separator_size = [gap, gap]
        # cross_dim - 1 maps the tensor dim (1 or 2) to the (h, w) pair index.
        separator_size[cross_dim - 1] = cross_axis_length
        separator = torch.ones((c, *separator_size), dtype=torch.float32, device=device)
        separator = separator * gap_color[:, None, None]
        # Intersperse the separator between the images.
        padded_images = list(_intersperse(padded_images, separator))
    return torch.cat(padded_images, dim=_get_main_dim(main_axis))
def hcat(
    *images: Iterable[Float[Tensor, "channel _ _"]],
    align: Literal["start", "center", "end", "top", "bottom"] = "start",
    gap: int = 8,
    gap_color: Color = 1,
):
    """Shorthand for a horizontal linear concatenation."""
    # Translate the CSS-like aliases into cross-axis alignments.
    cross_align = {
        "start": "start",
        "center": "center",
        "end": "end",
        "top": "start",
        "bottom": "end",
    }[align]
    return cat("horizontal", *images, align=cross_align, gap=gap, gap_color=gap_color)
def vcat(
    *images: Iterable[Float[Tensor, "channel _ _"]],
    align: Literal["start", "center", "end", "left", "right"] = "start",
    gap: int = 8,
    gap_color: Color = 1,
):
    """Shorthand for a vertical linear concatenation."""
    return cat(
        "vertical",
        *images,
        align={
            "start": "start",
            "center": "center",
            "end": "end",
            "left": "start",
            "right": "end",
        }[align],
        gap=gap,
        gap_color=gap_color,
    )
def add_border(
    image: Float[Tensor, "channel height width"],
    border: int = 8,
    color: Color = 1,
) -> Float[Tensor, "channel new_height new_width"]:
    """Surround `image` with a `border`-pixel frame of the given color."""
    color = _sanitize_color(color).to(image)
    c, h, w = image.shape
    # Fill a larger canvas with the border color, then paste the image centered.
    canvas = torch.empty(
        (c, h + 2 * border, w + 2 * border), dtype=torch.float32, device=image.device
    )
    canvas[:] = color[:, None, None]
    canvas[:, border : border + h, border : border + w] = image
    return canvas
def resize(
    image: Float[Tensor, "channel height width"],
    shape: Optional[tuple[int, int]] = None,
    width: Optional[int] = None,
    height: Optional[int] = None,
) -> Float[Tensor, "channel new_height new_width"]:
    """Resize an image with antialiased bilinear interpolation.

    Exactly one of `shape`, `width`, or `height` must be provided; when only
    one side is given, the other is derived so the aspect ratio is preserved.
    """
    assert (shape is not None) + (width is not None) + (height is not None) == 1
    _, h, w = image.shape
    if width is not None:
        shape = (int(h * width / w), width)
    elif height is not None:
        shape = (height, int(w * height / h))
    return F.interpolate(
        image[None],
        shape,
        mode="bilinear",
        align_corners=False,
        # `antialias` is a boolean flag; the previous value "bilinear" only
        # enabled it by being a truthy string.
        antialias=True,
    )[0]
def prep_image(image: torch.Tensor) -> UInt8[np.ndarray, "height width channel"]:
    """Convert a float image tensor in [0, 1] to an HWC uint8 numpy array."""
    # A batch is laid out side by side along the width: b c h w -> c h (b w).
    if image.ndim == 4:
        b, c, h, w = image.shape
        image = image.permute(1, 2, 0, 3).reshape(c, h, b * w)
    # Promote a bare (h, w) map to a one-channel image.
    if image.ndim == 2:
        image = image.unsqueeze(0)
    # Grayscale becomes RGB; anything else must already be RGB or RGBA.
    if image.shape[0] == 1:
        image = image.expand(3, -1, -1)
    assert image.shape[0] in (3, 4)
    quantized = (image.detach().clip(min=0, max=1) * 255).type(torch.uint8)
    return quantized.permute(1, 2, 0).cpu().numpy()
================================================
FILE: storm/visualization/video_maker.py
================================================
import logging
import time
import imageio
import numpy as np
import torch
import torch.nn.functional as F
from einops import rearrange
from matplotlib import cm
from storm.dataset.constants import MEAN, STD
from storm.dataset.data_utils import (
prepare_inputs_and_targets,
prepare_inputs_and_targets_novel_view,
to_batch_tensor,
)
from .annotation import add_label
from .layout import add_border, hcat, prep_image, vcat
from .visualization_tools import depth_visualizer, scene_flow_to_rgb
logger = logging.getLogger("STORM")
def get_pca_map(x):
    """Project channel-last features to an RGB map via rank-3 PCA.

    Args:
        x: feature tensor of shape (..., c), channel-last.

    Returns:
        Tensor of shape (..., 3), each channel min-max normalized to [0, 1].
    """
    x_shape = x.shape
    # `reshape` (unlike `view`) also accepts non-contiguous inputs.
    x = x.reshape(-1, x_shape[-1])
    # Project onto the top-3 principal directions (V from torch.pca_lowrank).
    x = x @ torch.pca_lowrank(x, q=3, niter=20)[2]
    # Min-max normalize each projected channel independently.
    x = (x - x.min(dim=0)[0]) / (x.max(dim=0)[0] - x.min(dim=0)[0])
    return x.reshape(*x_shape[:-1], 3)
@torch.no_grad()
def make_video(
    dataset,
    model,
    device,
    output_filename,
    scene_id=None,
    skip_plot_gt_depth_and_flow: bool = False,
    data_dict=None,
    input_dict=None,
    target_dict=None,
    pred_dict=None,
):
    """Render a qualitative comparison video for one scene.

    Stacks the context views on top and, for every target timestep, rows of
    predicted/GT RGB, depth, flow, opacity, sky-mask, and motion-segmentation
    panels, then writes a forward-then-backward looped video.

    Args:
        dataset: dataset exposing ``__getitem__(scene_id, start, return_all=True)``.
        model: model exposing ``get_gs_params`` and ``from_gs_params_to_output``.
        device: torch device used for inference.
        output_filename: path of the video file to write.
        scene_id: scene index; a random scene is drawn when None.
        skip_plot_gt_depth_and_flow: if True, skip the gray GT-depth placeholder
            panel when no GT depth is available.
        data_dict / input_dict / target_dict / pred_dict: optional precomputed
            inputs; whatever is None is computed here.
    """
    if data_dict is None:
        if scene_id is None:
            scene_id = np.random.randint(0, len(dataset))
        # Random start frame so successive calls show different clips.
        data_dict = dataset.__getitem__(scene_id, np.random.randint(10, 100), return_all=True)
        data_dict = to_batch_tensor(data_dict)
        input_dict, target_dict = prepare_inputs_and_targets(data_dict, device)
    model = model.eval()
    if pred_dict is None:
        with torch.autocast(device_type=device.type, dtype=torch.bfloat16):
            start_time = time.perf_counter()
            gs_params = model.get_gs_params(input_dict)
            end_time = time.perf_counter()
            logger.info(f"Time taken to get gs_params: {end_time - start_time} seconds")
            start_time = time.perf_counter()
            pred_dict = model.from_gs_params_to_output(gs_params, input_dict)
            end_time = time.perf_counter()
            logger.info(f"Time taken to get rendered results: {end_time - start_time} seconds")
    B, context_t, context_v, _, H, W = input_dict["context_image"].shape
    _, target_t, target_v, _, H_tgt, W_tgt = target_dict["target_image"].shape
    device = input_dict["context_image"].device
    mean = torch.tensor([[MEAN]], device=device)
    std = torch.tensor([[STD]], device=device)
    def denormalize(x, already_channel_last=False):
        # Undo dataset normalization channel-last, then return channel-first.
        if not already_channel_last:
            x = rearrange(x, "t v c h w -> t v h w c")
        x = (x * std + mean).clamp(0.0, 1.0)
        return rearrange(x, "t v h w c -> t v c h w")
    # t, v, c, h, w
    context_images = input_dict["context_image"][0]
    context_images = denormalize(context_images)
    # Few-view setups fit two panels per row; wide rigs get one per row.
    if context_v <= 3:
        n_ctx_per_row = 2
    else:
        n_ctx_per_row = 1
    # Concatenate context images horizontally, one labeled row per timestep.
    context_frames = []
    for t in range(context_t):
        current_frame_idx = int(input_dict["context_frame_idx"][0][t].item())
        row = add_label(
            hcat(*[context_images[t][v_id] for v_id in range(context_v)]),
            f"Context RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        context_frames.append(row)
    num_rows = max(1, len(context_frames) // n_ctx_per_row)
    context_frames = vcat(
        *[
            hcat(
                *context_frames[row * n_ctx_per_row : (row + 1) * n_ctx_per_row],
                gap=24,
            )
            for row in range(num_rows)
        ]
    )
    target_images = target_dict["target_image"][0]
    target_images = denormalize(target_images)
    render_results = pred_dict["render_results"]
    pred_images = render_results[render_results["rgb_key"]][0]
    pred_images = denormalize(pred_images, already_channel_last=True)
    if "rendered_motion_seg" in render_results:
        # Get the max index (clusters) from the rendered results
        max_idx = render_results["rendered_motion_seg"][0]
        # Identify unique clusters
        unique_clusters = torch.unique(max_idx)
        try:
            velocities = pred_dict["gs_params"]["motion_bases"][0][unique_clusters]
        except Exception:
            # Some models store per-step motion bases; average over steps first.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            velocities = pred_dict["gs_params"]["motion_bases"][0].mean(dim=0)[unique_clusters]
        velocity_norm = torch.norm(velocities, dim=-1)
        # Sort the unique clusters according to velocity norm (lowest first)
        sorted_indices = torch.argsort(velocity_norm)
        sorted_clusters = unique_clusters[sorted_indices]
        # Number of unique clusters
        num_unique_clusters = len(unique_clusters)
        # Create a new colormap based on the unique clusters
        # NOTE(review): cm.get_cmap is removed in Matplotlib >= 3.9; migrate to
        # matplotlib.colormaps when bumping dependencies.
        cmap = cm.get_cmap("rainbow", num_unique_clusters)
        # Map sorted unique clusters to new colors
        cluster_to_color_map = torch.tensor([cmap(i) for i in range(num_unique_clusters)])[
            :, :3
        ].to(max_idx.device)
        # Create a mapping from original clusters to the reassigned clusters
        cluster_mapping = torch.zeros_like(max_idx)
        # Map each pixel in max_idx to the new cluster index based on sorted clusters
        for new_cluster_idx, original_cluster in enumerate(sorted_clusters):
            cluster_mapping[max_idx == original_cluster] = new_cluster_idx
        # Assign the new colors to the cluster image
        cluster_image = cluster_to_color_map[cluster_mapping]
        if cluster_image.shape[-3] != H_tgt or cluster_image.shape[-2] != W_tgt:
            # Nearest-neighbor keeps the segmentation labels crisp.
            cluster_image = F.interpolate(
                rearrange(cluster_image, "t v h w c -> (t v) c h w"),
                size=(H_tgt, W_tgt),
                mode="nearest",
            )
            cluster_image = rearrange(
                cluster_image, "(t v) c h w -> t v h w c", t=target_t, v=target_v
            )
    else:
        cluster_image = None
    video_frames = []
    for t in range(target_t):
        frame_list = []
        current_frame_idx = int(target_dict["target_frame_idx"][0][t].item())
        pred_rgb = add_label(
            hcat(*[pred_images[t][v_id] for v_id in range(target_v)]),
            f"Predicted RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        frame_list.append(pred_rgb)
        gt_rgb = add_label(
            hcat(*[target_images[t][v_id] for v_id in range(target_v)]),
            f"Target GT RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        frame_list.append(gt_rgb)
        if render_results["decoder_depth_key"] is not None:
            # this is a decoder depth map
            depth_image = render_results[render_results["decoder_depth_key"]][0][t]
            alpha_image = None
            depth_image = depth_image.detach().cpu().numpy()
            depth_image = depth_visualizer(depth_image, alpha_image)
            depth_image = torch.from_numpy(depth_image)
            depth_image = rearrange(depth_image, "v h w c -> v c h w")
            pred_depth = add_label(
                hcat(*depth_image),
                f"Predicted Decoder Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(pred_depth)
        if render_results["depth_key"] is not None:
            # actual gs depth map
            depth_image = render_results[render_results["depth_key"]][0][t]
            alpha_image = render_results[render_results["alpha_key"]][0][t]
            if depth_image.shape[-2] != H_tgt or depth_image.shape[-1] != W_tgt:
                depth_image = F.interpolate(
                    depth_image.unsqueeze(-3),
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                ).squeeze(-3)
                alpha_image = F.interpolate(
                    alpha_image.unsqueeze(-3),
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                ).squeeze(-3)
            depth_image = depth_image.detach().cpu().numpy()
            alpha_image = alpha_image.detach().cpu().numpy()
            depth_image = depth_visualizer(depth_image, alpha_image)
            depth_image = torch.from_numpy(depth_image)
            depth_image = rearrange(depth_image, "v h w c -> v c h w")
            pred_depth = add_label(
                hcat(*depth_image),
                f"Predicted Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(pred_depth)
        if "target_depth" in target_dict.keys():
            gt_depth = target_dict["target_depth"][0][t]
            gt_depth = gt_depth.detach().cpu().numpy()
            # Zero depth marks invalid pixels; use it as the opacity mask.
            gt_depth = depth_visualizer(gt_depth, gt_depth > 0)
            gt_depth = torch.from_numpy(gt_depth)
            gt_depth = rearrange(gt_depth, "v h w c -> v c h w")
            gt_depth = add_label(
                hcat(*gt_depth),
                f"Target GT Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(gt_depth)
        else:
            if not skip_plot_gt_depth_and_flow:
                # Gray placeholder keeps the grid layout stable without GT depth.
                gt_depth = torch.full((target_v, 3, H_tgt, W_tgt), 0.5)
                gt_depth = add_label(
                    hcat(*gt_depth),
                    f"Target GT Depth (t={current_frame_idx})",
                    font_size=24,
                    align="center",
                )
                frame_list.append(gt_depth)
        if render_results["flow_key"] is not None:
            flow_image = render_results[render_results["flow_key"]][0][t]
            flow_image = scene_flow_to_rgb(flow_image, flow_max_radius=15)
            flow_image = rearrange(flow_image, "v h w c -> v c h w")
            if flow_image.shape[-2] != H_tgt or flow_image.shape[-1] != W_tgt:
                flow_image = F.interpolate(
                    flow_image,
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                )
            flow_image = add_label(
                hcat(*flow_image),
                f"Predicted Flow (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(flow_image)
        if "target_flow" in target_dict.keys():
            gt_flow = target_dict["target_flow"][0][t]
            gt_flow = scene_flow_to_rgb(gt_flow, flow_max_radius=15)
            gt_flow = rearrange(gt_flow, "v h w c -> v c h w")
            gt_flow = add_label(
                hcat(*gt_flow),
                f"Target GT Flow (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(gt_flow)
        if render_results["depth_key"] is not None:
            # `alpha_image` still holds the numpy opacity from the depth branch.
            alpha_image = torch.from_numpy(alpha_image).unsqueeze(1)
            alpha_image = alpha_image.repeat(1, 3, 1, 1)
            alpha_image = add_label(
                hcat(*alpha_image),
                f"Predicted Opacity (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(alpha_image)
        if "target_sky_masks" in target_dict.keys():
            sky_mask = target_dict["target_sky_masks"][0][t].unsqueeze(1)
            sky_mask = sky_mask.repeat(1, 3, 1, 1)
            sky_mask = add_label(
                hcat(*sky_mask),
                f"GT Sky Mask (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(sky_mask)
        if cluster_image is not None:
            cluster_image_t = cluster_image[t]
            cluster_image_t = rearrange(cluster_image_t, "v h w c -> v c h w")
            cluster_image_t = add_label(
                hcat(*cluster_image_t),
                f"Motion Segmentation (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(cluster_image_t)
        # Lay the panels out in a grid below the context rows.
        num_rows = len(frame_list) // n_ctx_per_row
        frame = vcat(
            context_frames,
            vcat(
                *[
                    hcat(
                        *frame_list[row * n_ctx_per_row : (row + 1) * n_ctx_per_row],
                        gap=24,
                    )
                    for row in range(num_rows)
                ]
            ),
        )
        # if there's a residual, we add it to the end
        if len(frame_list) % n_ctx_per_row != 0:
            frame = vcat(
                frame,
                hcat(
                    *frame_list[num_rows * n_ctx_per_row :],
                    gap=24,
                ),
            )
        frame = add_border(
            add_label(
                frame,
                f"Scene{input_dict['scene_id']:03d}-{input_dict['scene_name'][:15]}",
                font_size=24,
                align="center",
            )
        )
        video_frames.append(prep_image(frame))
    # Append the reversed frames (minus endpoints) for a seamless ping-pong loop.
    video_frame_reversed = video_frames[::-1][1:-1]
    video_frames.extend(video_frame_reversed)
    imageio.mimsave(output_filename, video_frames, fps=data_dict["fps"])
@torch.no_grad()
def make_video_vis(
    dataset,
    model,
    device,
    output_filename,
    scene_id=None,
    data_dict=None,
    args=None,
    input_dict=None,
    target_dict=None,
    pred_dict=None,
    time_step=10,
):
    """Render a visualization video for one scene starting at ``time_step``.

    Similar to ``make_video`` but always recomputes the data and predictions,
    orders the panels differently (opacity right after depth), and labels GT
    flow as "not used as supervision".

    NOTE(review): the ``data_dict``, ``args``, ``input_dict``, ``target_dict``
    and ``pred_dict`` parameters are accepted but immediately overwritten or
    never read below — confirm whether honoring them is intended.
    """
    data_dict = dataset.__getitem__(scene_id, time_step, return_all=True)
    data_dict = to_batch_tensor(data_dict)
    input_dict, target_dict = prepare_inputs_and_targets(
        # input_dict, target_dict = prepare_inputs_and_targets_novel_view(
        data_dict,
        device,
    )
    model = model.eval()
    pred_dict = model(input_dict)
    B, context_t, context_v, _, H, W = input_dict["context_image"].shape
    _, target_t, target_v, _, H_tgt, W_tgt = target_dict["target_image"].shape
    device = input_dict["context_image"].device
    mean = torch.tensor([[MEAN]], device=device)
    std = torch.tensor([[STD]], device=device)
    def denormalize(x, already_channel_last=False):
        # Undo dataset normalization channel-last, then return channel-first.
        if not already_channel_last:
            x = rearrange(x, "t v c h w -> t v h w c")
        x = (x * std + mean).clamp(0.0, 1.0)
        return rearrange(x, "t v h w c -> t v c h w")
    # t, v, c, h, w
    context_images = input_dict["context_image"][0]
    context_images = denormalize(context_images)
    # Few-view setups fit two panels per row; wide rigs get one per row.
    if context_v <= 3:
        n_ctx_per_row = 2
    else:
        n_ctx_per_row = 1
    # concate context images horizontally
    context_frames = []
    for t in range(context_t):
        current_frame_idx = int(input_dict["context_frame_idx"][0][t].item())
        row = add_label(
            hcat(*[context_images[t][v_id] for v_id in range(context_v)]),
            f"Context RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        context_frames.append(row)
    num_rows = max(1, len(context_frames) // n_ctx_per_row)
    context_frames = vcat(
        *[
            hcat(
                *context_frames[row * n_ctx_per_row : (row + 1) * n_ctx_per_row],
                gap=24,
            )
            for row in range(num_rows)
        ]
    )
    target_images = target_dict["target_image"][0]
    target_images = denormalize(target_images)
    render_results = pred_dict["render_results"]
    pred_images = render_results[render_results["rgb_key"]][0]
    pred_images = denormalize(pred_images, already_channel_last=True)
    if "rendered_motion_seg" in render_results:
        # Get the max index (clusters) from the rendered results
        max_idx = render_results["rendered_motion_seg"][0]
        # Identify unique clusters
        unique_clusters = torch.unique(max_idx)
        velocities = pred_dict["gs_params"]["motion_bases"][0][unique_clusters]
        velocity_norm = torch.norm(velocities, dim=-1)
        # Sort the unique clusters according to velocity norm (lowest first)
        sorted_indices = torch.argsort(velocity_norm)
        sorted_clusters = unique_clusters[sorted_indices]
        # Number of unique clusters
        num_unique_clusters = len(sorted_clusters)
        # Create a new colormap based on the sorted unique clusters
        cmap = cm.get_cmap("rainbow", num_unique_clusters)
        # Map sorted unique clusters to new colors
        cluster_to_color_map = torch.tensor([cmap(i) for i in range(num_unique_clusters)])[
            :, :3
        ].to(max_idx.device)
        # Create a mapping from original clusters to the reassigned clusters
        cluster_mapping = torch.zeros_like(max_idx)
        # Map each pixel in max_idx to the new cluster index based on sorted clusters
        for new_cluster_idx, original_cluster in enumerate(sorted_clusters):
            cluster_mapping[max_idx == original_cluster] = new_cluster_idx
        # Assign the new colors to the cluster image
        cluster_image = cluster_to_color_map[cluster_mapping]
        if cluster_image.shape[-3] != H_tgt or cluster_image.shape[-2] != W_tgt:
            # Nearest-neighbor keeps the segmentation labels crisp.
            cluster_image = F.interpolate(
                rearrange(cluster_image, "t v h w c -> (t v) c h w"),
                size=(H_tgt, W_tgt),
                mode="nearest",
            )
            cluster_image = rearrange(
                cluster_image, "(t v) c h w -> t v h w c", t=target_t, v=target_v
            )
    else:
        cluster_image = None
    video_frames = []
    for t in range(target_t):
        frame_list = []
        current_frame_idx = int(target_dict["target_frame_idx"][0][t].item())
        pred_rgb = add_label(
            hcat(*[pred_images[t][v_id] for v_id in range(target_v)]),
            f"Predicted RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        frame_list.append(pred_rgb)
        gt_rgb = add_label(
            hcat(*[target_images[t][v_id] for v_id in range(target_v)]),
            f"Target GT RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        frame_list.append(gt_rgb)
        if render_results["decoder_depth_key"] is not None:
            # this is a decoder depth map
            depth_image = render_results[render_results["decoder_depth_key"]][0][t]
            alpha_image = None
            depth_image = depth_image.detach().cpu().numpy()
            depth_image = depth_visualizer(depth_image, alpha_image)
            depth_image = torch.from_numpy(depth_image)
            depth_image = rearrange(depth_image, "v h w c -> v c h w")
            pred_depth = add_label(
                hcat(*depth_image),
                f"Predicted Decoder Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(pred_depth)
        if render_results["depth_key"] is not None:
            # actual gs depth map
            depth_image = render_results[render_results["depth_key"]][0][t]
            alpha_image = render_results[render_results["alpha_key"]][0][t]
            if depth_image.shape[-2] != H_tgt or depth_image.shape[-1] != W_tgt:
                depth_image = F.interpolate(
                    depth_image.unsqueeze(-3),
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                ).squeeze(-3)
                alpha_image = F.interpolate(
                    alpha_image.unsqueeze(-3),
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                ).squeeze(-3)
            depth_image = depth_image.detach().cpu().numpy()
            alpha_image = alpha_image.detach().cpu().numpy()
            depth_image = depth_visualizer(depth_image, alpha_image)
            depth_image = torch.from_numpy(depth_image)
            depth_image = rearrange(depth_image, "v h w c -> v c h w")
            pred_depth = add_label(
                hcat(*depth_image),
                f"Predicted Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(pred_depth)
        if render_results["depth_key"] is not None:
            # `alpha_image` still holds the numpy opacity from the depth branch.
            alpha_image = torch.from_numpy(alpha_image).unsqueeze(1)
            alpha_image = alpha_image.repeat(1, 3, 1, 1)
            alpha_image = add_label(
                hcat(*alpha_image),
                f"Predicted Opacity (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(alpha_image)
        if render_results["flow_key"] is not None:
            flow_image = render_results[render_results["flow_key"]][0][t]
            flow_image = scene_flow_to_rgb(flow_image, flow_max_radius=15)
            flow_image = rearrange(flow_image, "v h w c -> v c h w")
            if flow_image.shape[-2] != H_tgt or flow_image.shape[-1] != W_tgt:
                flow_image = F.interpolate(
                    flow_image,
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                )
            flow_image = add_label(
                hcat(*flow_image),
                f"Predicted Flow (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(flow_image)
        if "target_flow" in target_dict.keys():
            gt_flow = target_dict["target_flow"][0][t]
            gt_flow = scene_flow_to_rgb(gt_flow, flow_max_radius=15)
            gt_flow = rearrange(gt_flow, "v h w c -> v c h w")
            gt_flow = add_label(
                hcat(*gt_flow),
                f"GT Flow (t={current_frame_idx}) (Not used as supervision)",
                font_size=24,
                align="center",
            )
            frame_list.append(gt_flow)
        if cluster_image is not None:
            cluster_image_t = cluster_image[t]
            cluster_image_t = rearrange(cluster_image_t, "v h w c -> v c h w")
            cluster_image_t = add_label(
                hcat(*cluster_image_t),
                f"Motion Segmentation (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(cluster_image_t)
        # Lay the panels out in a grid below the context rows.
        num_rows = len(frame_list) // n_ctx_per_row
        frame = vcat(
            context_frames,
            vcat(
                *[
                    hcat(
                        *frame_list[row * n_ctx_per_row : (row + 1) * n_ctx_per_row],
                        gap=24,
                    )
                    for row in range(num_rows)
                ]
            ),
        )
        # if there's a residual, we add it to the end
        if len(frame_list) % n_ctx_per_row != 0:
            frame = vcat(
                frame,
                hcat(
                    *frame_list[num_rows * n_ctx_per_row :],
                    gap=24,
                ),
            )
        frame = add_border(
            add_label(
                frame,
                f"Scene{input_dict['scene_id']:03d}-{input_dict['scene_name'][:15]}",
                font_size=24,
                align="center",
            )
        )
        video_frames.append(prep_image(frame))
    # Append the reversed frames (minus endpoints) for a seamless ping-pong loop.
    video_frame_reversed = video_frames[::-1][1:-1]
    video_frames.extend(video_frame_reversed)
    imageio.mimsave(output_filename, video_frames, fps=data_dict["fps"])
@torch.no_grad()
def make_video_av2(
    dataset,
    model,
    device,
    output_filename,
    scene_id=None,
    skip_plot_gt_depth_and_flow: bool = False,
):
    """Render a qualitative comparison video for an AV2-style multi-camera rig.

    Same panel layout as ``make_video``, except the middle camera of each row
    is resized with swapped height/width before concatenation.

    Args:
        dataset: dataset exposing ``__getitem__(scene_id, start, return_all=True)``.
        model: callable model producing ``render_results``.
        device: torch device used for inference.
        output_filename: path of the video file to write; also returned.
        scene_id: scene index; a random scene is drawn when None.
        skip_plot_gt_depth_and_flow: skip the gray GT-depth placeholder panel.

    Returns:
        ``output_filename``.
    """
    if scene_id is None:
        scene_id = np.random.randint(0, len(dataset))
    data_dict = dataset.__getitem__(scene_id, 10, return_all=True)
    data_dict = to_batch_tensor(data_dict)
    input_dict, target_dict = prepare_inputs_and_targets(
        data_dict,
        device,
    )
    # The @torch.no_grad() decorator already disables autograd here; the inner
    # `with torch.no_grad():` wrapper was redundant and has been removed.
    pred_dict = model(input_dict)
    B, context_t, context_v, _, H, W = input_dict["context_image"].shape
    _, target_t, target_v, _, H_tgt, W_tgt = target_dict["target_image"].shape
    device = input_dict["context_image"].device
    mean = torch.tensor([[MEAN]], device=device)
    std = torch.tensor([[STD]], device=device)
    def denormalize(x, already_channel_last=False):
        # Undo dataset normalization channel-last, then return channel-first.
        if not already_channel_last:
            x = rearrange(x, "t v c h w -> t v h w c")
        x = (x * std + mean).clamp(0.0, 1.0)
        return rearrange(x, "t v h w c -> t v c h w")
    # t, v, c, h, w
    context_images = input_dict["context_image"][0]
    context_images = denormalize(context_images)
    def resize(input, size, mode="bilinear"):
        # Bring 2D/3D inputs up to 4D for F.interpolate.
        if len(input.shape) == 3:
            input = input.unsqueeze(0)
        elif len(input.shape) == 2:
            input = input.unsqueeze(0).unsqueeze(0)
        output = F.interpolate(input, size=size, mode=mode, align_corners=False)
        # NOTE(review): `.squeeze()` drops ALL singleton dims — fine for the
        # 3-channel images used here, but it would also drop a 1-channel axis.
        return output.squeeze()
    # Few-view setups fit two panels per row; wide rigs get one per row.
    if context_v <= 3:
        n_ctx_per_row = 2
    else:
        n_ctx_per_row = 1
    # concate context images horizontally
    # NOTE(review): `size=(W, H)` below swaps the usual (height, width) order —
    # presumably intentional for the differently-oriented middle camera; confirm.
    context_frames = []
    for t in range(context_t):
        current_frame_idx = int(input_dict["context_frame_idx"][0][t].item())
        row = add_label(
            hcat(
                *[
                    (
                        context_images[t][v_id]
                        if v_id != context_v // 2
                        else resize(context_images[t][v_id], (W, H))
                    )
                    for v_id in range(context_v)
                ],
                align="bottom",
            ),
            f"Context RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        context_frames.append(row)
    num_rows = max(1, len(context_frames) // n_ctx_per_row)
    context_frames = vcat(
        *[
            hcat(
                *context_frames[row * n_ctx_per_row : (row + 1) * n_ctx_per_row],
                gap=24,
            )
            for row in range(num_rows)
        ]
    )
    target_images = target_dict["target_image"][0]
    target_images = denormalize(target_images)
    render_results = pred_dict["render_results"]
    pred_images = render_results[render_results["rgb_key"]][0]
    pred_images = denormalize(pred_images, already_channel_last=True)
    video_frames = []
    for t in range(target_t):
        frame_list = []
        current_frame_idx = int(target_dict["target_frame_idx"][0][t].item())
        pred_rgb = add_label(
            hcat(
                *[
                    (
                        pred_images[t][v_id]
                        if v_id != target_v // 2
                        else resize(pred_images[t][v_id], (W, H))
                    )
                    for v_id in range(target_v)
                ],
                align="bottom",
            ),
            f"Predicted RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        frame_list.append(pred_rgb)
        gt_rgb = add_label(
            hcat(
                *[
                    (
                        target_images[t][v_id]
                        if v_id != target_v // 2
                        else resize(target_images[t][v_id], (W, H))
                    )
                    for v_id in range(target_v)
                ],
                align="bottom",
            ),
            f"Target GT RGB (t={current_frame_idx})",
            font_size=24,
            align="center",
        )
        frame_list.append(gt_rgb)
        if render_results["decoder_depth_key"] is not None:
            # this is a decoder depth map
            depth_image = render_results[render_results["decoder_depth_key"]][0][t]
            alpha_image = None
            depth_image = depth_image.detach().cpu().numpy()
            depth_image = depth_visualizer(depth_image, alpha_image)
            depth_image = torch.from_numpy(depth_image)
            depth_image = rearrange(depth_image, "v h w c -> v c h w")
            pred_depth = add_label(
                hcat(
                    *[
                        (
                            depth_image[v_id]
                            if v_id != target_v // 2
                            else resize(depth_image[v_id], (W, H))
                        )
                        for v_id in range(target_v)
                    ],
                    align="bottom",
                ),
                f"Predicted Decoder Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(pred_depth)
        if render_results["depth_key"] is not None:
            # actual gs depth map
            depth_image = render_results[render_results["depth_key"]][0][t]
            alpha_image = render_results[render_results["alpha_key"]][0][t]
            if depth_image.shape[-2] != H_tgt or depth_image.shape[-1] != W_tgt:
                depth_image = F.interpolate(
                    depth_image.unsqueeze(-3),
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                ).squeeze(-3)
                alpha_image = F.interpolate(
                    alpha_image.unsqueeze(-3),
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                ).squeeze(-3)
            depth_image = depth_image.detach().cpu().numpy()
            alpha_image = alpha_image.detach().cpu().numpy()
            depth_image = depth_visualizer(depth_image, alpha_image)
            depth_image = torch.from_numpy(depth_image)
            depth_image = rearrange(depth_image, "v h w c -> v c h w")
            pred_depth = add_label(
                hcat(
                    *[
                        (
                            depth_image[v_id]
                            if v_id != target_v // 2
                            else resize(depth_image[v_id], (W, H))
                        )
                        for v_id in range(target_v)
                    ],
                    align="bottom",
                ),
                f"Predicted Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(pred_depth)
        if "target_depth" in target_dict.keys():
            gt_depth = target_dict["target_depth"][0][t]
            gt_depth = gt_depth.detach().cpu().numpy()
            # Zero depth marks invalid pixels; use it as the opacity mask.
            gt_depth = depth_visualizer(gt_depth, gt_depth > 0)
            gt_depth = torch.from_numpy(gt_depth)
            gt_depth = rearrange(gt_depth, "v h w c -> v c h w")
            gt_depth = add_label(
                hcat(
                    *[
                        (
                            gt_depth[v_id]
                            if v_id != target_v // 2
                            else resize(gt_depth[v_id], (W, H))
                        )
                        for v_id in range(target_v)
                    ],
                    align="bottom",
                ),
                f"Target GT Depth (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(gt_depth)
        else:
            if not skip_plot_gt_depth_and_flow:
                # Gray placeholder keeps the grid layout stable without GT depth.
                gt_depth = torch.full((target_v, 3, H_tgt, W_tgt), 0.5)
                gt_depth = add_label(
                    hcat(
                        *[
                            (
                                gt_depth[v_id]
                                if v_id != target_v // 2
                                else resize(gt_depth[v_id], (W, H))
                            )
                            for v_id in range(target_v)
                        ],
                        align="bottom",
                    ),
                    f"Target GT Depth (t={current_frame_idx})",
                    font_size=24,
                    align="center",
                )
                frame_list.append(gt_depth)
        if render_results["flow_key"] is not None:
            flow_image = render_results[render_results["flow_key"]][0][t]
            flow_image = scene_flow_to_rgb(flow_image, flow_max_radius=15)
            flow_image = rearrange(flow_image, "v h w c -> v c h w")
            if flow_image.shape[-2] != H_tgt or flow_image.shape[-1] != W_tgt:
                flow_image = F.interpolate(
                    flow_image,
                    size=(H_tgt, W_tgt),
                    mode="bilinear",
                    align_corners=False,
                )
            flow_image = add_label(
                hcat(
                    *[
                        (
                            flow_image[v_id]
                            if v_id != target_v // 2
                            else resize(flow_image[v_id], (W, H))
                        )
                        for v_id in range(target_v)
                    ],
                    align="bottom",
                ),
                f"Predicted Flow (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(flow_image)
        if "target_flow" in target_dict.keys():
            gt_flow = target_dict["target_flow"][0][t]
            gt_flow = scene_flow_to_rgb(gt_flow, flow_max_radius=15)
            gt_flow = rearrange(gt_flow, "v h w c -> v c h w")
            gt_flow = add_label(
                hcat(
                    *[
                        (
                            gt_flow[v_id]
                            if v_id != target_v // 2
                            else resize(gt_flow[v_id], (W, H))
                        )
                        for v_id in range(target_v)
                    ],
                    align="bottom",
                ),
                f"Target GT Flow (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(gt_flow)
        if render_results["depth_key"] is not None:
            # `alpha_image` still holds the numpy opacity from the depth branch.
            alpha_image = torch.from_numpy(alpha_image).unsqueeze(1)
            alpha_image = alpha_image.repeat(1, 3, 1, 1)
            alpha_image = add_label(
                hcat(
                    *[
                        (
                            alpha_image[v_id]
                            if v_id != target_v // 2
                            else resize(alpha_image[v_id], (W, H))
                        )
                        for v_id in range(target_v)
                    ],
                    align="bottom",
                ),
                f"Predicted Opacity (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(alpha_image)
        if "target_sky_masks" in target_dict.keys():
            sky_mask = target_dict["target_sky_masks"][0][t].unsqueeze(1)
            sky_mask = sky_mask.repeat(1, 3, 1, 1)
            sky_mask = add_label(
                hcat(
                    *[
                        (
                            sky_mask[v_id]
                            if v_id != target_v // 2
                            else resize(sky_mask[v_id], (W, H))
                        )
                        for v_id in range(target_v)
                    ],
                    align="bottom",
                ),
                f"GT Sky&/Road Mask (t={current_frame_idx})",
                font_size=24,
                align="center",
            )
            frame_list.append(sky_mask)
        # Lay the panels out in a grid below the context rows.
        num_rows = len(frame_list) // n_ctx_per_row
        frame = vcat(
            context_frames,
            vcat(
                *[
                    hcat(
                        *frame_list[row * n_ctx_per_row : (row + 1) * n_ctx_per_row],
                        gap=24,
                    )
                    for row in range(num_rows)
                ]
            ),
        )
        # if there's a residual, we add it to the end
        if len(frame_list) % n_ctx_per_row != 0:
            frame = vcat(
                frame,
                hcat(
                    *frame_list[num_rows * n_ctx_per_row :],
                    gap=24,
                ),
            )
        frame = add_border(
            add_label(
                frame,
                f"Scene{input_dict['scene_id']:03d}-{input_dict['scene_name'][:15]}",
                font_size=24,
                align="center",
            )
        )
        video_frames.append(prep_image(frame))
    # Append the reversed frames (minus endpoints) for a seamless ping-pong loop.
    video_frame_reversed = video_frames[::-1][1:-1]
    video_frames.extend(video_frame_reversed)
    imageio.mimsave(output_filename, video_frames, fps=data_dict["fps"])
    return output_filename
================================================
FILE: storm/visualization/visualization_tools.py
================================================
import logging
import os
from collections import namedtuple
from itertools import accumulate
from typing import Optional, Union
import matplotlib.cm as cm
import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn.functional as F
from torch import Tensor
# Lengths of the six hue transitions used to build the flow colorwheel.
DEFAULT_TRANSITIONS = (15, 6, 4, 11, 13, 6)
logger = logging.getLogger("STORM")
# Shared "turbo" colormap instance.
# NOTE(review): cm.get_cmap is removed in Matplotlib >= 3.9; migrate to
# matplotlib.colormaps when bumping dependencies.
turbo_cmap = cm.get_cmap("turbo")


def depth_visualizer(frame, opacity):
    """Colorize a depth map with fixed bounds (lo=4, hi=120; presumably meters
    — confirm) on an inverted log scale.

    Converted from a lambda assignment (PEP 8 E731) to a proper def.
    """
    return visualize_depth(
        frame,
        opacity,
        lo=4.0,
        hi=120,
        depth_curve_fn=lambda x: -np.log(x + 1e-6),
    )


def flow_visualizer(frame):
    """Colorize a scene-flow map and return it as a numpy array."""
    return (
        scene_flow_to_rgb(
            frame,
            background="bright",
            flow_max_radius=1.0,
        )
        .cpu()
        .numpy()
    )
def to8b(x):
    """Quantize a float image in [0, 1] to uint8 in [0, 255]."""
    if isinstance(x, torch.Tensor):
        x = x.detach().cpu().numpy()
    clipped = np.clip(x, 0, 1)
    return (clipped * 255).astype(np.uint8)
def sinebow(h):
    """A cyclic and uniform colormap, see http://basecase.org/env/on-rainbows."""
    # Each RGB channel is a phase-shifted squared sine of the hue.
    phases = (3 / 6 - h, 5 / 6 - h, 7 / 6 - h)
    return np.stack([np.sin(np.pi * p) ** 2 for p in phases], -1)
def matte(vis, acc, dark=0.8, light=1.0, width=8):
    """Set non-accumulated pixels to a Photoshop-esque checker pattern."""
    # Alternate `width`-pixel stripes along each axis, XOR'd into a checkerboard.
    row_stripe = np.arange(acc.shape[0]) % (2 * width) // width
    col_stripe = np.arange(acc.shape[1]) % (2 * width) // width
    checker = np.logical_xor(row_stripe[:, None], col_stripe[None, :])
    background = np.where(checker, light, dark)
    # Alpha-composite the visualization over the checkerboard.
    return vis * acc[:, :, None] + (background * (1 - acc))[:, :, None]
def weighted_percentile(x, w, ps, assume_sorted=False):
    """Compute the weighted percentile(s) of a single vector."""
    x = x.reshape([-1])
    w = w.reshape([-1])
    if not assume_sorted:
        order = np.argsort(x)
        x, w = x[order], w[order]
    # Interpolate percentiles against the cumulative weight mass.
    cum_w = np.cumsum(w)
    targets = np.array(ps) * (cum_w[-1] / 100)
    return np.interp(targets, cum_w, x)
def visualize_cmap(
    value,
    weight,
    colormap,
    lo=None,
    hi=None,
    percentile=99.0,
    curve_fn=lambda x: x,
    modulus=None,
    matte_background=True,
):
    """Visualize a 1D image and a 1D weighting according to some colormap.
    from mipnerf
    Args:
        value: A 1D image.
        weight: A weight map, in [0, 1], or None for uniform weighting.
        colormap: A colormap function; a falsy value passes `value` through as
            pre-colorized 3-channel RGB.
        lo: The lower bound to use when rendering, if None then use a percentile.
        hi: The upper bound to use when rendering, if None then use a percentile.
        percentile: What percentile of the value map to crop to when automatically
            generating `lo` and `hi`. Depends on `weight` as well as `value'.
        curve_fn: A curve function that gets applied to `value`, `lo`, and `hi`
            before the rest of visualization. Good choices: x, 1/(x+eps), log(x+eps).
        modulus: If not None, mod the normalized value by `modulus`. Use (0, 1]. If
            `modulus` is not None, `lo`, `hi` and `percentile` will have no effect.
        matte_background: If True, matte the image over a checkerboard.
    Returns:
        A colormap rendering.
    """
    # Identify the values that bound the middle of `value' according to `weight`.
    if lo is None or hi is None:
        lo_auto, hi_auto = weighted_percentile(
            value, weight, [50 - percentile / 2, 50 + percentile / 2]
        )
        eps = np.finfo(np.float32).eps
        # Fill in only the missing bound(s). The previous `lo = lo or ...` form
        # silently discarded an explicitly supplied bound of 0 (falsy).
        if lo is None:
            lo = lo_auto - eps
        if hi is None:
            hi = hi_auto + eps
    # Curve all values.
    value, lo, hi = [curve_fn(x) for x in [value, lo, hi]]
    # Wrap the values around if requested.
    if modulus:
        value = np.mod(value, modulus) / modulus
    else:
        # Otherwise, just scale to [0, 1].
        value = np.nan_to_num(np.clip((value - np.minimum(lo, hi)) / np.abs(hi - lo), 0, 1))
    if weight is not None:
        value *= weight
    else:
        weight = np.ones_like(value)
    if colormap:
        colorized = colormap(value)[..., :3]
    else:
        assert len(value.shape) == 3 and value.shape[-1] == 3
        colorized = value
    return matte(colorized, weight) if matte_background else colorized
def visualize_depth(x, acc=None, lo=None, hi=None, depth_curve_fn=lambda x: -np.log(x + 1e-6)):
    """Visualizes depth maps.

    Args:
        x: depth map to render.
        acc: optional accumulation/alpha map in [0, 1], used as a weight.
        lo, hi: optional explicit depth bounds; percentile-derived if None.
        depth_curve_fn: curve applied before colorizing (negative log by
            default, so nearby depth variations get more color resolution).

    Returns:
        An RGB rendering of the depth map.
    """
    return visualize_cmap(
        x,
        acc,
        # `matplotlib.cm.get_cmap` was deprecated and removed in matplotlib
        # >= 3.9; use the pyplot accessor, consistent with the other
        # `plt.get_cmap` call in this module.
        plt.get_cmap("turbo"),
        curve_fn=depth_curve_fn,
        lo=lo,
        hi=hi,
        matte_background=False,
    )
def _make_colorwheel(
    transitions: tuple = DEFAULT_TRANSITIONS, backend="torch"
) -> Union[np.ndarray, torch.Tensor]:
    """Build an RGB colorwheel (borrowed/modified from flowpy).

    The wheel walks through the six primary hues — Red, Yellow, Green, Cyan,
    Blue, Magenta — and back to Red, linearly interpolating between
    consecutive hues over the given transition lengths.

    Args:
        transitions: Lengths of the six hue transitions (perceptually tuned).
        backend: "torch" returns a FloatTensor, anything else a uint8 ndarray.

    Returns:
        (sum(transitions), 3) array/tensor of RGB values.

    Notes:
        For more information, see:
        https://web.archive.org/web/20051107102013/http://members.shaw.ca/quadibloc/other/colint.htm
        http://vision.middlebury.edu/flow/flowEval-iccv07.pdf
    """
    total_length = sum(transitions)
    # Red appears at both ends so the interpolation wraps around cleanly.
    hues = [
        np.array(rgb)
        for rgb in (
            [255, 0, 0],
            [255, 255, 0],
            [0, 255, 0],
            [0, 255, 255],
            [0, 0, 255],
            [255, 0, 255],
            [255, 0, 0],
        )
    ]
    wheel = np.zeros((total_length, 3), dtype="uint8")
    start = 0
    for hue_from, hue_to, end in zip(hues, hues[1:], accumulate(transitions)):
        wheel[start:end] = np.linspace(hue_from, hue_to, end - start, endpoint=False)
        start = end
    return torch.FloatTensor(wheel) if backend == "torch" else wheel
# Module-level default colorwheel; the first row is appended again so hue
# interpolation can wrap past the last entry. NOTE(review): scene_flow_to_rgb
# rebuilds its own wheel per call, so these globals may serve other callers —
# verify before removing.
WHEEL = _make_colorwheel()
N_COLS = len(WHEEL)
WHEEL = torch.vstack((WHEEL, WHEEL[0]))  # Make the wheel cyclic for interpolation
def scene_flow_to_rgb(
    flow: torch.Tensor,
    flow_max_radius: Optional[float] = None,
    background: Optional[str] = "bright",
) -> Union[torch.Tensor, np.ndarray]:
    """Creates a RGB representation of an optical flow (borrowed/modified from flowpy).
    Adapted from https://github.com/Lilac-Lee/Neural_Scene_Flow_Prior/blob/main/visualize.py
    Args:
        flow: scene flow.
            flow[..., 0] should be the x-displacement
            flow[..., 1] should be the y-displacement
            flow[..., 2] should be the z-displacement
        flow_max_radius: Set the radius that gives the maximum color intensity, useful for comparing different flows.
            Default: The normalization is based on the input flow maximum radius.
        background: States if zero-valued flow should look 'bright' or 'dark'.
    Returns: An array of RGB colors.
    """
    valid_backgrounds = ("bright", "dark")
    if background not in valid_backgrounds:
        raise ValueError(
            f"background should be one the following: {valid_backgrounds}, not {background}."
        )
    # Dispatch to numpy or torch ops based on the input type.
    if isinstance(flow, np.ndarray):
        backend = "np"
        op = np
    else:
        backend = "torch"
        op = torch
    # For scene flow, it's reasonable to assume displacements in x and y directions only for visualization pursposes.
    complex_flow = flow[..., 0] + 1j * flow[..., 1]
    radius, angle = op.abs(complex_flow), op.angle(complex_flow)
    if flow_max_radius is None:
        # flow_max_radius = torch.max(radius)
        # 99th percentile instead of max so outliers don't wash out the rest.
        flow_max_radius = op.quantile(radius, 0.99)
    if flow_max_radius > 0:
        radius /= flow_max_radius
    # Map the angles from (-pi, pi] to [0, 2pi) to [0, ncols - 1)
    wheel = _make_colorwheel(backend=backend)
    n_cols = len(wheel)
    wheel = op.vstack((wheel, wheel[0]))
    angle[angle < 0] += 2 * np.pi
    angle = angle * ((n_cols - 1) / (2 * np.pi))
    # Interpolate the hues
    angle_fractional, angle_floor, angle_ceil = (
        op.fmod(angle, 1),
        op.trunc(angle),
        op.ceil(angle),
    )
    angle_fractional = angle_fractional[..., None]
    if backend == "torch":
        _wheel = wheel.to(angle_floor.device)
        float_hue = (
            _wheel[angle_floor.long()] * (1 - angle_fractional)
            + _wheel[angle_ceil.long()] * angle_fractional
        )
    else:
        float_hue = (
            wheel[angle_floor.astype(op.int64)] * (1 - angle_fractional)
            + wheel[angle_ceil.astype(op.int64)] * angle_fractional
        )
    ColorizationArgs = namedtuple(
        "ColorizationArgs",
        ["move_hue_valid_radius", "move_hue_oversized_radius", "invalid_color"],
    )

    def move_hue_on_V_axis(hues, factors):
        return hues * factors[..., None]

    def move_hue_on_S_axis(hues, factors):
        return 255.0 - factors[..., None] * (255.0 - hues)

    if background == "dark":
        # Bug fix: `op.array(...)` crashed for torch inputs because
        # `torch.array` does not exist; build the constant per backend.
        if backend == "torch":
            white = op.tensor([255.0, 255.0, 255.0])
        else:
            white = op.array([255.0, 255.0, 255.0])
        parameters = ColorizationArgs(move_hue_on_V_axis, move_hue_on_S_axis, white)
    else:
        parameters = ColorizationArgs(move_hue_on_S_axis, move_hue_on_V_axis, op.zeros(3))
    colors = parameters.move_hue_valid_radius(float_hue, radius)
    # Flows beyond the max radius are desaturated instead of clipped.
    oversized_radius_mask = radius > 1
    colors[oversized_radius_mask] = parameters.move_hue_oversized_radius(
        float_hue[oversized_radius_mask], 1 / radius[oversized_radius_mask]
    )
    colors = colors / 255.0
    return colors
def get_robust_pca(features: Tensor, m: float = 2, remove_first_component=False):
    """Fit a 3-component PCA projection with outlier-robust color bounds.

    Args:
        features: (N, C) feature matrix.
        m: how many (median-based) deviations a projected value may lie from
            the median before it is treated as an outlier.
        remove_first_component: refit the PCA on rows whose first normalized
            component is < 0.2 (a foreground heuristic).

    Returns:
        (reduction_mat, rgb_min, rgb_max): the (C, 3) projection and the
        per-channel robust lower/upper bounds for normalization.
    """
    # features: (N, C)
    # m: a hyperparam controlling how many std dev outside for outliers
    assert len(features.shape) == 2, "features should be (N, C)"
    reduction_mat = torch.pca_lowrank(features, q=3, niter=20)[2]
    colors = features @ reduction_mat
    if remove_first_component:
        colors_min = colors.min(dim=0).values
        colors_max = colors.max(dim=0).values
        tmp_colors = (colors - colors_min) / (colors_max - colors_min)
        fg_mask = tmp_colors[..., 0] < 0.2
        reduction_mat = torch.pca_lowrank(features[fg_mask], q=3, niter=20)[2]
        colors = features @ reduction_mat
    else:
        fg_mask = torch.ones_like(colors[:, 0]).bool()
    # Median absolute deviation per channel, used to reject outliers.
    d = torch.abs(colors[fg_mask] - torch.median(colors[fg_mask], dim=0).values)
    mdev = torch.median(d, dim=0).values
    s = d / mdev
    try:
        rins = colors[fg_mask][s[:, 0] < m, 0]
        gins = colors[fg_mask][s[:, 1] < m, 1]
        bins = colors[fg_mask][s[:, 2] < m, 2]
        rgb_min = torch.tensor([rins.min(), gins.min(), bins.min()])
        rgb_max = torch.tensor([rins.max(), gins.max(), bins.max()])
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit). Fallback: if any channel has no
        # inliers (e.g. min() on an empty selection raised), use the global
        # bounds over all projected values.
        rins = colors
        gins = colors
        bins = colors
        rgb_min = torch.tensor([rins.min(), gins.min(), bins.min()])
        rgb_max = torch.tensor([rins.max(), gins.max(), bins.max()])
    return reduction_mat, rgb_min.to(reduction_mat), rgb_max.to(reduction_mat)
def get_pca_map(
    feature_map: torch.Tensor,
    img_size,
    interpolation="nearest",
    return_pca_stats=False,
    pca_stats=None,
):
    """
    Render a (B, h, w, C) feature map as PCA colors resized to `img_size`.

    feature_map: (1, h, w, C) is the feature map of a single image; a 3-D
    (h, w, C) map is promoted to a singleton batch. `pca_stats` may carry
    precomputed (reduction_mat, min, max) tuples, one per batch element.
    """
    if len(feature_map.shape) != 4:
        # Promote (h, w, C) to (1, h, w, C).
        feature_map = feature_map[None]
    if pca_stats is None:
        # Fit one robust-PCA projection per batch element.
        pca_stats = [
            get_robust_pca(fm.reshape(-1, feature_map.shape[-1])) for fm in feature_map
        ]
    rendered = []
    for fm, (reduct_mat, color_min, color_max) in zip(feature_map, pca_stats):
        rgb = (fm @ reduct_mat - color_min) / (color_max - color_min)
        rgb = rgb.clamp(0, 1)
        # Resize channels-first, then restore channels-last layout.
        rgb = F.interpolate(
            rgb.permute(2, 0, 1)[None],
            size=img_size,
            mode=interpolation,
        ).permute(0, 2, 3, 1)
        rendered.append(rgb)
    pca_color = torch.cat(rendered, dim=0)
    if return_pca_stats:
        return pca_color.cpu(), pca_stats
    return pca_color.cpu()
def get_scale_map(
    scalar_map: torch.Tensor,
    img_size,
    interpolation="nearest",
):
    """
    Render a scalar map as a 3-channel grayscale image resized to `img_size`.

    scalar_map: (1, h, w, 1) scalar map of a single image; a 3-D (h, w, 1)
    map is promoted to a singleton batch.

    Returns:
        (1, img_size[0], img_size[1], 3) CPU tensor with values in [0, 1].
    """
    # Promote to 4-D if needed. The previous check (`!= 1`) also unsqueezed
    # already-4-D inputs to 5-D, which then crashed in permute(0, 3, 1, 2).
    if len(scalar_map.shape) != 4:
        scalar_map = scalar_map[None]
    # Normalize to [0, 1] with the global min/max; scalar bounds broadcast
    # safely over any shape (the old per-last-dim min/max could not).
    s_min = scalar_map.min()
    s_max = scalar_map.max()
    scalar_map = (scalar_map - s_min) / (s_max - s_min + 1e-6)
    scalar_map = F.interpolate(
        scalar_map.permute(0, 3, 1, 2),
        size=img_size,
        mode=interpolation,
    ).permute(0, 2, 3, 1)
    # cmap = plt.get_cmap("viridis")
    # scalar_map = cmap(scalar_map)[..., :3]
    # make it 3 channels
    scalar_map = torch.cat([scalar_map] * 3, dim=-1)
    return scalar_map.cpu()
def get_similarity_map(features: Tensor, img_size=(224, 224)):
    """
    Compute the cosine-similarity map of the central patch to the rest of the
    image, rendered with the "turbo" colormap.

    Args:
        features: (1, H, W, C) channels-last patch features. (The old assert
            message claimed (1, C, H, W), contradicting the indexing below.)
        img_size: output (height, width).

    Returns:
        (img_size[0], img_size[1], 3) float numpy array of RGB colors; the
        center patch (and its bilinear neighborhood) is painted red.
    """
    assert len(features.shape) == 4, "features should be (1, H, W, C)"
    H, W, C = features.shape[1:]
    center_patch_feature = features[0, H // 2, W // 2, :]
    center_patch_feature_normalized = center_patch_feature / center_patch_feature.norm()
    center_patch_feature_normalized = center_patch_feature_normalized.unsqueeze(1)
    # Reshape and normalize the entire feature tensor
    features_flat = features.view(-1, C)
    features_normalized = features_flat / features_flat.norm(dim=1, keepdim=True)

    similarity_map_flat = features_normalized @ center_patch_feature_normalized
    # Reshape the flat similarity map back to the spatial dimensions (H, W)
    similarity_map = similarity_map_flat.view(H, W)

    # Normalize the similarity map to be in the range [0, 1] for visualization
    similarity_map = (similarity_map - similarity_map.min()) / (
        similarity_map.max() - similarity_map.min()
    )
    # we don't want the center patch to be the most similar
    similarity_map[H // 2, W // 2] = -1.0
    similarity_map = (
        F.interpolate(
            similarity_map.unsqueeze(0).unsqueeze(0),
            size=img_size,
            mode="bilinear",
        )
        .squeeze(0)
        .squeeze(0)
    )

    similarity_map_np = similarity_map.cpu().numpy()
    # Negative values can only come from the sentinel at the center patch.
    negative_mask = similarity_map_np < 0

    colormap = plt.get_cmap("turbo")

    # Apply the colormap directly to the normalized similarity map and multiply by 255 to get RGB values
    similarity_map_rgb = colormap(similarity_map_np)[..., :3]
    similarity_map_rgb[negative_mask] = [1.0, 0.0, 0.0]
    # Bug fix: `colormap(...)` returns a numpy array, so the old
    # `similarity_map_rgb.cpu()` always raised AttributeError.
    return similarity_map_rgb
================================================
FILE: third_party/depth_anything_v2/dinov2.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the Apache License, Version 2.0
# found in the LICENSE file in the root directory of this source tree.
# References:
# https://github.com/facebookresearch/dino/blob/main/vision_transformer.py
# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py
import logging
import math
from functools import partial
from typing import Callable, Sequence, Tuple, Union
import torch
import torch.nn as nn
import torch.utils.checkpoint
from torch.nn.init import trunc_normal_
from .dinov2_layers import MemEffAttention, Mlp
from .dinov2_layers import NestedTensorBlock as Block
from .dinov2_layers import PatchEmbed, SwiGLUFFNFused
logger = logging.getLogger("dinov2")
def named_apply(
    fn: Callable, module: nn.Module, name="", depth_first=True, include_root=False
) -> nn.Module:
    """Recursively apply ``fn(module=..., name=...)`` over a module tree.

    Children are always visited; ``include_root`` controls whether ``fn`` also
    runs on ``module`` itself, and ``depth_first`` whether that happens after
    (True) or before (False) the children. Returns ``module`` unchanged.
    """
    if not depth_first and include_root:
        fn(module=module, name=name)
    for child_name, child in module.named_children():
        qualified = f"{name}.{child_name}" if name else child_name
        named_apply(
            fn=fn, module=child, name=qualified, depth_first=depth_first, include_root=True
        )
    if depth_first and include_root:
        fn(module=module, name=name)
    return module
class BlockChunk(nn.ModuleList):
    """A ModuleList that runs its members sequentially when called."""

    def forward(self, x):
        # Feed the activation through every stored block in order.
        for block in self:
            x = block(x)
        return x
class DinoVisionTransformer(nn.Module):
    """DINOv2 Vision Transformer backbone (Meta AI reference implementation,
    vendored for Depth-Anything-V2)."""

    def __init__(
        self,
        img_size=224,
        patch_size=16,
        in_chans=3,
        embed_dim=768,
        depth=12,
        num_heads=12,
        mlp_ratio=4.0,
        qkv_bias=True,
        ffn_bias=True,
        proj_bias=True,
        drop_path_rate=0.0,
        drop_path_uniform=False,
        init_values=None,  # for layerscale: None or 0 => no layerscale
        embed_layer=PatchEmbed,
        act_layer=nn.GELU,
        block_fn=Block,
        ffn_layer="mlp",
        block_chunks=1,
        num_register_tokens=0,
        interpolate_antialias=False,
        interpolate_offset=0.1,
    ):
        """
        Args:
            img_size (int, tuple): input image size
            patch_size (int, tuple): patch size
            in_chans (int): number of input channels
            embed_dim (int): embedding dimension
            depth (int): depth of transformer
            num_heads (int): number of attention heads
            mlp_ratio (int): ratio of mlp hidden dim to embedding dim
            qkv_bias (bool): enable bias for qkv if True
            proj_bias (bool): enable bias for proj in attn if True
            ffn_bias (bool): enable bias for ffn if True
            drop_path_rate (float): stochastic depth rate
            drop_path_uniform (bool): apply uniform drop rate across blocks
            weight_init (str): weight init scheme
            init_values (float): layer-scale init values
            embed_layer (nn.Module): patch embedding layer
            act_layer (nn.Module): MLP activation layer
            block_fn (nn.Module): transformer block class
            ffn_layer (str): "mlp", "swiglu", "swiglufused" or "identity"
            block_chunks: (int) split block sequence into block_chunks units for FSDP wrap
            num_register_tokens: (int) number of extra cls tokens (so-called "registers")
            interpolate_antialias: (str) flag to apply anti-aliasing when interpolating positional embeddings
            interpolate_offset: (float) work-around offset to apply when interpolating positional embeddings
        """
        super().__init__()
        norm_layer = partial(nn.LayerNorm, eps=1e-6)

        self.num_features = self.embed_dim = (
            embed_dim  # num_features for consistency with other models
        )
        # num_tokens counts only the CLS token; registers are tracked separately.
        self.num_tokens = 1
        self.n_blocks = depth
        self.num_heads = num_heads
        self.patch_size = patch_size
        self.num_register_tokens = num_register_tokens
        self.interpolate_antialias = interpolate_antialias
        self.interpolate_offset = interpolate_offset

        self.patch_embed = embed_layer(
            img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim
        )
        num_patches = self.patch_embed.num_patches

        self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
        # Positional embedding covers CLS + all patch tokens (not registers).
        self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + self.num_tokens, embed_dim))
        assert num_register_tokens >= 0
        self.register_tokens = (
            nn.Parameter(torch.zeros(1, num_register_tokens, embed_dim))
            if num_register_tokens
            else None
        )

        if drop_path_uniform is True:
            dpr = [drop_path_rate] * depth
        else:
            dpr = [
                x.item() for x in torch.linspace(0, drop_path_rate, depth)
            ]  # stochastic depth decay rule

        # Map the ffn_layer string onto the actual module factory.
        if ffn_layer == "mlp":
            logger.info("using MLP layer as FFN")
            ffn_layer = Mlp
        elif ffn_layer == "swiglufused" or ffn_layer == "swiglu":
            logger.info("using SwiGLU layer as FFN")
            ffn_layer = SwiGLUFFNFused
        elif ffn_layer == "identity":
            logger.info("using Identity layer as FFN")

            def f(*args, **kwargs):
                return nn.Identity()

            ffn_layer = f
        else:
            raise NotImplementedError

        blocks_list = [
            block_fn(
                dim=embed_dim,
                num_heads=num_heads,
                mlp_ratio=mlp_ratio,
                qkv_bias=qkv_bias,
                proj_bias=proj_bias,
                ffn_bias=ffn_bias,
                drop_path=dpr[i],
                norm_layer=norm_layer,
                act_layer=act_layer,
                ffn_layer=ffn_layer,
                init_values=init_values,
            )
            for i in range(depth)
        ]
        if block_chunks > 0:
            # Group blocks into chunks (each padded with Identity placeholders
            # so absolute block indices are preserved) for FSDP wrapping.
            self.chunked_blocks = True
            chunked_blocks = []
            chunksize = depth // block_chunks
            for i in range(0, depth, chunksize):
                # this is to keep the block index consistent if we chunk the block list
                chunked_blocks.append([nn.Identity()] * i + blocks_list[i : i + chunksize])
            self.blocks = nn.ModuleList([BlockChunk(p) for p in chunked_blocks])
        else:
            self.chunked_blocks = False
            self.blocks = nn.ModuleList(blocks_list)

        self.norm = norm_layer(embed_dim)
        self.head = nn.Identity()

        # Learned token substituted for masked patches (iBOT-style masking).
        self.mask_token = nn.Parameter(torch.zeros(1, embed_dim))

        self.init_weights()

    def init_weights(self):
        """Initialize positional/CLS/register embeddings and all Linear layers."""
        trunc_normal_(self.pos_embed, std=0.02)
        nn.init.normal_(self.cls_token, std=1e-6)
        if self.register_tokens is not None:
            nn.init.normal_(self.register_tokens, std=1e-6)
        named_apply(init_weights_vit_timm, self)

    def interpolate_pos_encoding(self, x, w, h):
        """Bicubically resize the patch positional embeddings to the current
        input resolution; returns pos_embed cast back to x's dtype.

        x: token sequence (CLS + patches); w, h: input size in pixels.
        Assumes the trained positional grid is square (sqrt(N) x sqrt(N)).
        """
        previous_dtype = x.dtype
        npatch = x.shape[1] - 1
        N = self.pos_embed.shape[1] - 1
        # Fast path: token count matches the trained grid and the input is square.
        if npatch == N and w == h:
            return self.pos_embed
        pos_embed = self.pos_embed.float()
        class_pos_embed = pos_embed[:, 0]
        patch_pos_embed = pos_embed[:, 1:]
        dim = x.shape[-1]
        w0 = w // self.patch_size
        h0 = h // self.patch_size
        # we add a small number to avoid floating point error in the interpolation
        # see discussion at https://github.com/facebookresearch/dino/issues/8
        # DINOv2 with register modify the interpolate_offset from 0.1 to 0.0
        w0, h0 = w0 + self.interpolate_offset, h0 + self.interpolate_offset
        # w0, h0 = w0 + 0.1, h0 + 0.1

        sqrt_N = math.sqrt(N)
        sx, sy = float(w0) / sqrt_N, float(h0) / sqrt_N
        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed.reshape(1, int(sqrt_N), int(sqrt_N), dim).permute(0, 3, 1, 2),
            scale_factor=(sx, sy),
            # (int(w0), int(h0)), # to solve the upsampling shape issue
            mode="bicubic",
            antialias=self.interpolate_antialias,
        )

        # Guard against the scale_factor path producing an off-by-one grid.
        assert int(w0) == patch_pos_embed.shape[-2]
        assert int(h0) == patch_pos_embed.shape[-1]
        patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim)
        return torch.cat((class_pos_embed.unsqueeze(0), patch_pos_embed), dim=1).to(previous_dtype)

    def prepare_tokens_with_masks(self, x, masks=None):
        """Patchify `x`, substitute mask tokens where `masks` is True, prepend
        the CLS token, add positional embeddings, and insert register tokens.

        NOTE(review): the unpacking names suggest (B, C, W, H) while images are
        conventionally (B, C, H, W); the two only coincide for square inputs —
        confirm against callers.
        """
        B, nc, w, h = x.shape
        x = self.patch_embed(x)
        if masks is not None:
            # Replace masked patch embeddings with the learned mask token.
            x = torch.where(masks.unsqueeze(-1), self.mask_token.to(x.dtype).unsqueeze(0), x)

        x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1)
        x = x + self.interpolate_pos_encoding(x, w, h)

        if self.register_tokens is not None:
            # Registers sit between CLS and patch tokens and receive no
            # positional embedding.
            x = torch.cat(
                (
                    x[:, :1],
                    self.register_tokens.expand(x.shape[0], -1, -1),
                    x[:, 1:],
                ),
                dim=1,
            )

        return x

    def forward_features_list(self, x_list, masks_list):
        """Run a list of image batches jointly through the blocks (nested-tensor
        path) and return one output dict per input batch."""
        x = [self.prepare_tokens_with_masks(x, masks) for x, masks in zip(x_list, masks_list)]
        for blk in self.blocks:
            x = blk(x)

        all_x = x
        output = []
        for x, masks in zip(all_x, masks_list):
            x_norm = self.norm(x)
            output.append(
                {
                    "x_norm_clstoken": x_norm[:, 0],
                    "x_norm_regtokens": x_norm[:, 1 : self.num_register_tokens + 1],
                    "x_norm_patchtokens": x_norm[:, self.num_register_tokens + 1 :],
                    "x_prenorm": x,
                    "masks": masks,
                }
            )
        return output

    def forward_features(self, x, masks=None):
        """Run the backbone and return normalized CLS/register/patch tokens
        plus the pre-norm activations."""
        if isinstance(x, list):
            return self.forward_features_list(x, masks)

        x = self.prepare_tokens_with_masks(x, masks)

        for blk in self.blocks:
            x = blk(x)

        x_norm = self.norm(x)
        return {
            "x_norm_clstoken": x_norm[:, 0],
            "x_norm_regtokens": x_norm[:, 1 : self.num_register_tokens + 1],
            "x_norm_patchtokens": x_norm[:, self.num_register_tokens + 1 :],
            "x_prenorm": x,
            "masks": masks,
        }

    def _get_intermediate_layers_not_chunked(self, x, n=1):
        """Collect activations after selected blocks (flat block list)."""
        x = self.prepare_tokens_with_masks(x)
        # If n is an int, take the n last blocks. If it's a list, take them
        output, total_block_len = [], len(self.blocks)
        blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n
        for i, blk in enumerate(self.blocks):
            x = blk(x)
            if i in blocks_to_take:
                output.append(x)
        assert len(output) == len(
            blocks_to_take
        ), f"only {len(output)} / {len(blocks_to_take)} blocks found"
        return output

    def _get_intermediate_layers_chunked(self, x, n=1):
        """Collect activations after selected blocks (chunked block list)."""
        x = self.prepare_tokens_with_masks(x)
        # The last chunk's length equals the total depth because chunks are
        # front-padded with Identity modules.
        output, i, total_block_len = [], 0, len(self.blocks[-1])
        # If n is an int, take the n last blocks. If it's a list, take them
        blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n
        for block_chunk in self.blocks:
            for blk in block_chunk[i:]:  # Passing the nn.Identity()
                x = blk(x)
                if i in blocks_to_take:
                    output.append(x)
                i += 1
        assert len(output) == len(
            blocks_to_take
        ), f"only {len(output)} / {len(blocks_to_take)} blocks found"
        return output

    def get_intermediate_layers(
        self,
        x: torch.Tensor,
        n: Union[int, Sequence] = 1,  # Layers or n last layers to take
        reshape: bool = False,
        return_class_token: bool = False,
        norm=True,
    ) -> Tuple[Union[torch.Tensor, Tuple[torch.Tensor]]]:
        """Return patch-token activations from `n` blocks, optionally
        layer-normed, reshaped to (B, C, H', W'), and paired with CLS tokens."""
        if self.chunked_blocks:
            outputs = self._get_intermediate_layers_chunked(x, n)
        else:
            outputs = self._get_intermediate_layers_not_chunked(x, n)
        if norm:
            outputs = [self.norm(out) for out in outputs]
        class_tokens = [out[:, 0] for out in outputs]
        # Strip CLS and register tokens, keeping only patch tokens.
        outputs = [out[:, 1 + self.num_register_tokens :] for out in outputs]
        if reshape:
            B, _, w, h = x.shape
            outputs = [
                out.reshape(B, w // self.patch_size, h // self.patch_size, -1)
                .permute(0, 3, 1, 2)
                .contiguous()
                for out in outputs
            ]
        if return_class_token:
            return tuple(zip(outputs, class_tokens))
        return tuple(outputs)

    def forward(self, *args, is_training=False, **kwargs):
        """Return the full feature dict when training, else the CLS-token head
        output (the head is Identity here)."""
        ret = self.forward_features(*args, **kwargs)
        if is_training:
            return ret
        else:
            return self.head(ret["x_norm_clstoken"])
def init_weights_vit_timm(module: nn.Module, name: str = ""):
    """ViT weight initialization, original timm impl (for reproducibility)"""
    # Only Linear layers are (re)initialized; everything else is left alone.
    if not isinstance(module, nn.Linear):
        return
    trunc_normal_(module.weight, std=0.02)
    if module.bias is not None:
        nn.init.zeros_(module.bias)
def vit_small(patch_size=16, num_register_tokens=0, **kwargs):
    """Build a ViT-Small DINOv2 backbone (embed_dim=384, 12 blocks, 6 heads)."""
    return DinoVisionTransformer(
        patch_size=patch_size,
        embed_dim=384,
        depth=12,
        num_heads=6,
        mlp_ratio=4,
        block_fn=partial(Block, attn_class=MemEffAttention),
        num_register_tokens=num_register_tokens,
        **kwargs,
    )
def vit_base(patch_size=16, num_register_tokens=0, **kwargs):
    """Build a ViT-Base DINOv2 backbone (embed_dim=768, 12 blocks, 12 heads)."""
    return DinoVisionTransformer(
        patch_size=patch_size,
        embed_dim=768,
        depth=12,
        num_heads=12,
        mlp_ratio=4,
        block_fn=partial(Block, attn_class=MemEffAttention),
        num_register_tokens=num_register_tokens,
        **kwargs,
    )
def vit_large(patch_size=16, num_register_tokens=0, **kwargs):
    """Build a ViT-Large DINOv2 backbone (embed_dim=1024, 24 blocks, 16 heads)."""
    return DinoVisionTransformer(
        patch_size=patch_size,
        embed_dim=1024,
        depth=24,
        num_heads=16,
        mlp_ratio=4,
        block_fn=partial(Block, attn_class=MemEffAttention),
        num_register_tokens=num_register_tokens,
        **kwargs,
    )
def vit_giant2(patch_size=16, num_register_tokens=0, **kwargs):
    """
    Close to ViT-giant, with embed-dim 1536 and 24 heads => embed-dim per head 64
    """
    return DinoVisionTransformer(
        patch_size=patch_size,
        embed_dim=1536,
        depth=40,
        num_heads=24,
        mlp_ratio=4,
        block_fn=partial(Block, attn_class=MemEffAttention),
        num_register_tokens=num_register_tokens,
        **kwargs,
    )
def DINOv2(model_name):
    """Instantiate a DINOv2 backbone by short name.

    model_name: one of "vits", "vitb", "vitl", "vitg"; the giant variant uses
    a fused-SwiGLU FFN, all others a plain MLP.
    """
    model_zoo = {"vits": vit_small, "vitb": vit_base, "vitl": vit_large, "vitg": vit_giant2}
    builder = model_zoo[model_name]
    ffn = "swiglufused" if model_name == "vitg" else "mlp"
    return builder(
        img_size=518,
        patch_size=14,
        init_values=1.0,
        ffn_layer=ffn,
        block_chunks=0,
        num_register_tokens=0,
        interpolate_antialias=False,
        interpolate_offset=0.1,
    )
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/__init__.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
from .attention import MemEffAttention
from .block import NestedTensorBlock
from .mlp import Mlp
from .patch_embed import PatchEmbed
from .swiglu_ffn import SwiGLUFFN, SwiGLUFFNFused
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/attention.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# References:
# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py
# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py
import logging
from torch import Tensor, nn
logger = logging.getLogger("dinov2")
try:
from xformers.ops import fmha, memory_efficient_attention, unbind
XFORMERS_AVAILABLE = True
except ImportError:
logger.warning("xFormers not available")
XFORMERS_AVAILABLE = False
class Attention(nn.Module):
    """Multi-head self-attention backed by PyTorch's fused SDPA kernel."""

    def __init__(
        self,
        dim: int,
        num_heads: int = 8,
        qkv_bias: bool = False,
        proj_bias: bool = True,
        attn_drop: float = 0.0,
        proj_drop: float = 0.0,
    ) -> None:
        super().__init__()
        self.num_heads = num_heads
        head_dim = dim // num_heads
        # Kept for API compatibility; scaled_dot_product_attention applies the
        # same 1/sqrt(head_dim) scaling internally by default.
        self.scale = head_dim**-0.5

        self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias)
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim, bias=proj_bias)
        self.proj_drop = nn.Dropout(proj_drop)

    def forward(self, x: Tensor) -> Tensor:
        batch, seq_len, channels = x.shape
        per_head = channels // self.num_heads
        # Project once, then carve the result into per-head q/k/v tensors of
        # shape (batch, heads, seq, head_dim).
        q, k, v = (
            t.reshape(batch, seq_len, self.num_heads, per_head).transpose(1, 2)
            for t in self.qkv(x).chunk(3, dim=-1)
        )
        out = nn.functional.scaled_dot_product_attention(
            q,
            k,
            v,
            dropout_p=self.attn_drop.p if self.training else 0.0,
        )
        out = out.transpose(1, 2).reshape(batch, seq_len, channels)
        return self.proj_drop(self.proj(out))
class MemEffAttention(Attention):
    """Attention variant that dispatches to xFormers' memory-efficient kernel
    and accepts an optional (block-diagonal) attention bias."""

    def forward(self, x: Tensor, attn_bias=None) -> Tensor:
        # Without xFormers, fall back to the parent SDPA path, which cannot
        # consume an attention bias.
        if not XFORMERS_AVAILABLE:
            assert attn_bias is None, "xFormers is required for nested tensors usage"
            return super().forward(x)

        batch, seq_len, channels = x.shape
        per_head = channels // self.num_heads
        # xFormers expects (batch, seq, heads, head_dim) inputs.
        qkv = self.qkv(x).reshape(batch, seq_len, 3, self.num_heads, per_head)
        q, k, v = unbind(qkv, 2)

        out = memory_efficient_attention(q, k, v, attn_bias=attn_bias)
        out = out.reshape([batch, seq_len, channels])
        return self.proj_drop(self.proj(out))
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/block.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# References:
# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py
# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py
import logging
from typing import Any, Callable, Dict, List, Tuple
import torch
from torch import Tensor, nn
from .attention import Attention, MemEffAttention
from .drop_path import DropPath
from .layer_scale import LayerScale
from .mlp import Mlp
logger = logging.getLogger("dinov2")
try:
from xformers.ops import fmha, index_select_cat, scaled_index_add
XFORMERS_AVAILABLE = True
except ImportError:
logger.warning("xFormers not available")
XFORMERS_AVAILABLE = False
class Block(nn.Module):
    """Standard pre-norm transformer block (attention + FFN) with optional
    LayerScale and stochastic depth on both residual branches."""

    def __init__(
        self,
        dim: int,
        num_heads: int,
        mlp_ratio: float = 4.0,
        qkv_bias: bool = False,
        proj_bias: bool = True,
        ffn_bias: bool = True,
        drop: float = 0.0,
        attn_drop: float = 0.0,
        init_values=None,
        drop_path: float = 0.0,
        act_layer: Callable[..., nn.Module] = nn.GELU,
        norm_layer: Callable[..., nn.Module] = nn.LayerNorm,
        attn_class: Callable[..., nn.Module] = Attention,
        ffn_layer: Callable[..., nn.Module] = Mlp,
    ) -> None:
        super().__init__()
        # print(f"biases: qkv: {qkv_bias}, proj: {proj_bias}, ffn: {ffn_bias}")
        self.norm1 = norm_layer(dim)
        self.attn = attn_class(
            dim,
            num_heads=num_heads,
            qkv_bias=qkv_bias,
            proj_bias=proj_bias,
            attn_drop=attn_drop,
            proj_drop=drop,
        )
        # LayerScale only when init_values is truthy (None or 0 disables it).
        self.ls1 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity()
        self.drop_path1 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()

        self.norm2 = norm_layer(dim)
        mlp_hidden_dim = int(dim * mlp_ratio)
        self.mlp = ffn_layer(
            in_features=dim,
            hidden_features=mlp_hidden_dim,
            act_layer=act_layer,
            drop=drop,
            bias=ffn_bias,
        )
        self.ls2 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity()
        self.drop_path2 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity()

        # Kept for the batch-level stochastic-depth fast path in forward().
        self.sample_drop_ratio = drop_path

    def forward(self, x: Tensor) -> Tensor:
        def attn_residual_func(x: Tensor) -> Tensor:
            return self.ls1(self.attn(self.norm1(x)))

        def ffn_residual_func(x: Tensor) -> Tensor:
            return self.ls2(self.mlp(self.norm2(x)))

        if self.training and self.sample_drop_ratio > 0.1:
            # the overhead is compensated only for a drop path rate larger than 0.1
            # Batch-level variant: dropped samples skip the branch entirely.
            x = drop_add_residual_stochastic_depth(
                x,
                residual_func=attn_residual_func,
                sample_drop_ratio=self.sample_drop_ratio,
            )
            x = drop_add_residual_stochastic_depth(
                x,
                residual_func=ffn_residual_func,
                sample_drop_ratio=self.sample_drop_ratio,
            )
        elif self.training and self.sample_drop_ratio > 0.0:
            x = x + self.drop_path1(attn_residual_func(x))
            x = x + self.drop_path1(ffn_residual_func(x))  # FIXME: drop_path2
        else:
            x = x + attn_residual_func(x)
            x = x + ffn_residual_func(x)
        return x
def drop_add_residual_stochastic_depth(
    x: Tensor,
    residual_func: Callable[[Tensor], Tensor],
    sample_drop_ratio: float = 0.0,
) -> Tensor:
    """Batch-level stochastic depth: run `residual_func` on a random subset of
    the batch and scatter the rescaled residual back; dropped samples never
    execute the residual branch at all (cheaper than per-sample DropPath)."""
    # 1) extract subset using permutation
    b, n, d = x.shape
    sample_subset_size = max(int(b * (1 - sample_drop_ratio)), 1)
    brange = (torch.randperm(b, device=x.device))[:sample_subset_size]
    x_subset = x[brange]

    # 2) apply residual_func to get residual
    residual = residual_func(x_subset)

    x_flat = x.flatten(1)
    residual = residual.flatten(1)

    # Up-scale kept residuals by b / subset so the expected update is unbiased.
    residual_scale_factor = b / sample_subset_size

    # 3) add the residual
    x_plus_residual = torch.index_add(
        x_flat, 0, brange, residual.to(dtype=x.dtype), alpha=residual_scale_factor
    )
    return x_plus_residual.view_as(x)
def get_branges_scales(x, sample_drop_ratio=0.0):
    """Pick the random batch-row subset to keep and its residual rescale factor."""
    b, n, d = x.shape
    keep = max(int(b * (1 - sample_drop_ratio)), 1)
    # Kept rows' residuals are scaled up by b / keep so the expected update
    # matches the full batch.
    brange = torch.randperm(b, device=x.device)[:keep]
    return brange, b / keep
def add_residual(x, brange, residual, residual_scale_factor, scaling_vector=None):
    """Scatter-add `residual` back into rows `brange` of `x`.

    Without a scaling_vector the result is the flattened (batch, -1) tensor
    (callers reshape with .view_as); with one, xFormers' fused
    scaled_index_add is used and the original shape is kept.
    """
    if scaling_vector is None:
        flat_x = x.flatten(1)
        flat_res = residual.flatten(1)
        return torch.index_add(
            flat_x, 0, brange, flat_res.to(dtype=x.dtype), alpha=residual_scale_factor
        )
    return scaled_index_add(
        x,
        brange,
        residual.to(dtype=x.dtype),
        scaling=scaling_vector,
        alpha=residual_scale_factor,
    )
attn_bias_cache: Dict[Tuple, Any] = {}
def get_attn_bias_and_cat(x_list, branges=None):
    """
    this will perform the index select, cat the tensors, and provide the attn_bias from cache
    """
    # Effective batch size per tensor: the kept-subset size if branges is
    # given, otherwise the full batch.
    batch_sizes = (
        [b.shape[0] for b in branges] if branges is not None else [x.shape[0] for x in x_list]
    )
    all_shapes = tuple((b, x.shape[1]) for b, x in zip(batch_sizes, x_list))
    if all_shapes not in attn_bias_cache.keys():
        # Build (and memoize) a block-diagonal mask with one block per sample
        # so samples from different tensors never attend to each other.
        seqlens = []
        for b, x in zip(batch_sizes, x_list):
            for _ in range(b):
                seqlens.append(x.shape[1])
        attn_bias = fmha.BlockDiagonalMask.from_seqlens(seqlens)
        attn_bias._batch_sizes = batch_sizes
        attn_bias_cache[all_shapes] = attn_bias

    if branges is not None:
        # Fused select+concat from xFormers; yields a single (1, total, d) tensor.
        cat_tensors = index_select_cat([x.flatten(1) for x in x_list], branges).view(
            1, -1, x_list[0].shape[-1]
        )
    else:
        tensors_bs1 = tuple(x.reshape([1, -1, *x.shape[2:]]) for x in x_list)
        cat_tensors = torch.cat(tensors_bs1, dim=1)

    return attn_bias_cache[all_shapes], cat_tensors
def drop_add_residual_stochastic_depth_list(
    x_list: List[Tensor],
    residual_func: Callable[[Tensor, Any], Tensor],
    sample_drop_ratio: float = 0.0,
    scaling_vector=None,
) -> Tensor:
    """List-of-tensors variant of batch-level stochastic depth: subsets each
    tensor's batch, runs `residual_func` once over the nested concatenation,
    then scatters the rescaled residuals back per tensor."""
    # 1) generate random set of indices for dropping samples in the batch
    branges_scales = [get_branges_scales(x, sample_drop_ratio=sample_drop_ratio) for x in x_list]
    branges = [s[0] for s in branges_scales]
    residual_scale_factors = [s[1] for s in branges_scales]

    # 2) get attention bias and index+concat the tensors
    attn_bias, x_cat = get_attn_bias_and_cat(x_list, branges)

    # 3) apply residual_func to get residual, and split the result
    residual_list = attn_bias.split(residual_func(x_cat, attn_bias=attn_bias))  # type: ignore

    outputs = []
    for x, brange, residual, residual_scale_factor in zip(
        x_list, branges, residual_list, residual_scale_factors
    ):
        outputs.append(
            add_residual(x, brange, residual, residual_scale_factor, scaling_vector).view_as(x)
        )
    return outputs
class NestedTensorBlock(Block):
    """Block variant that can process a list of tensors as one nested batch
    using xFormers' block-diagonal attention masking."""

    def forward_nested(self, x_list: List[Tensor]) -> List[Tensor]:
        """
        x_list contains a list of tensors to nest together and run
        """
        assert isinstance(self.attn, MemEffAttention)

        if self.training and self.sample_drop_ratio > 0.0:

            def attn_residual_func(x: Tensor, attn_bias=None) -> Tensor:
                # LayerScale is folded into `scaling_vector` below, so it is
                # deliberately not applied here.
                return self.attn(self.norm1(x), attn_bias=attn_bias)

            def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor:
                return self.mlp(self.norm2(x))

            x_list = drop_add_residual_stochastic_depth_list(
                x_list,
                residual_func=attn_residual_func,
                sample_drop_ratio=self.sample_drop_ratio,
                scaling_vector=self.ls1.gamma if isinstance(self.ls1, LayerScale) else None,
            )
            x_list = drop_add_residual_stochastic_depth_list(
                x_list,
                residual_func=ffn_residual_func,
                sample_drop_ratio=self.sample_drop_ratio,
                # Bug fix: this previously tested `self.ls1` while using
                # ls2's gamma. Harmless in practice (ls1/ls2 are built from
                # the same init_values) but the check must inspect ls2.
                scaling_vector=self.ls2.gamma if isinstance(self.ls2, LayerScale) else None,
            )
            return x_list
        else:

            def attn_residual_func(x: Tensor, attn_bias=None) -> Tensor:
                return self.ls1(self.attn(self.norm1(x), attn_bias=attn_bias))

            def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor:
                return self.ls2(self.mlp(self.norm2(x)))

            # Concatenate all tensors into one nested batch, run both residual
            # branches once, then split back per input tensor.
            attn_bias, x = get_attn_bias_and_cat(x_list)
            x = x + attn_residual_func(x, attn_bias=attn_bias)
            x = x + ffn_residual_func(x)
            return attn_bias.split(x)

    def forward(self, x_or_x_list):
        """Accept a plain Tensor (standard Block path) or a list of Tensors
        (nested path, requires xFormers)."""
        if isinstance(x_or_x_list, Tensor):
            return super().forward(x_or_x_list)
        elif isinstance(x_or_x_list, list):
            assert XFORMERS_AVAILABLE, "Please install xFormers for nested tensors usage"
            return self.forward_nested(x_or_x_list)
        else:
            raise AssertionError
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/drop_path.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# References:
# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py
# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/drop.py
from torch import nn
def drop_path(x, drop_prob: float = 0.0, training: bool = False):
    """Stochastic depth: randomly zero entire samples, rescaling survivors.

    A per-sample Bernoulli mask of shape (B, 1, ..., 1) is drawn and the
    surviving samples are divided by the keep probability so the expected
    output equals the input. A no-op at inference or when drop_prob is 0.
    """
    if not training or drop_prob == 0.0:
        return x
    keep_prob = 1.0 - drop_prob
    # One mask entry per sample; broadcasts over all remaining dimensions.
    mask_shape = (x.shape[0],) + (1,) * (x.ndim - 1)
    mask = x.new_empty(mask_shape).bernoulli_(keep_prob)
    if keep_prob > 0.0:
        mask.div_(keep_prob)
    return x * mask
class DropPath(nn.Module):
    """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""

    def __init__(self, drop_prob=None):
        """
        Args:
            drop_prob: probability of dropping a sample's path.
                ``None`` (the default) is treated as 0.0, i.e. identity.
        """
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, x):
        # Bugfix: coerce drop_prob=None to 0.0 — the free function drop_path
        # would otherwise raise on `1 - None` once self.training is True.
        return drop_path(x, self.drop_prob or 0.0, self.training)
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/layer_scale.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# Modified from: https://github.com/huggingface/pytorch-image-models/blob/main/timm/models/vision_transformer.py#L103-L110
from typing import Union
import torch
from torch import Tensor, nn
class LayerScale(nn.Module):
    """Learnable per-channel scaling of a residual branch (CaiT-style)."""

    def __init__(
        self,
        dim: int,
        init_values: Union[float, Tensor] = 1e-5,
        inplace: bool = False,
    ) -> None:
        super().__init__()
        self.inplace = inplace
        # One learnable scale per channel, initialized to a small constant so
        # the branch starts close to identity.
        self.gamma = nn.Parameter(init_values * torch.ones(dim))

    def forward(self, x: Tensor) -> Tensor:
        if self.inplace:
            return x.mul_(self.gamma)
        return x * self.gamma
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/mlp.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# References:
# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py
# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/mlp.py
from typing import Callable, Optional
from torch import Tensor, nn
class Mlp(nn.Module):
    """Transformer feed-forward network: Linear -> activation -> Linear,
    with the same Dropout module applied after each linear layer."""

    def __init__(
        self,
        in_features: int,
        hidden_features: Optional[int] = None,
        out_features: Optional[int] = None,
        act_layer: Callable[..., nn.Module] = nn.GELU,
        drop: float = 0.0,
        bias: bool = True,
    ) -> None:
        super().__init__()
        # Hidden and output widths default to the input width.
        hidden = hidden_features or in_features
        out = out_features or in_features
        self.fc1 = nn.Linear(in_features, hidden, bias=bias)
        self.act = act_layer()
        self.fc2 = nn.Linear(hidden, out, bias=bias)
        self.drop = nn.Dropout(drop)

    def forward(self, x: Tensor) -> Tensor:
        # Dropout is a no-op when drop == 0.0 or in eval mode.
        return self.drop(self.fc2(self.drop(self.act(self.fc1(x)))))
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/patch_embed.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# References:
# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py
# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py
from typing import Callable, Optional, Tuple, Union
import torch.nn as nn
from torch import Tensor
def make_2tuple(x):
    """Normalize ``x`` to a pair: an int is duplicated, a 2-tuple passes through."""
    if isinstance(x, int):
        return (x, x)
    assert isinstance(x, tuple) and len(x) == 2
    return x
class PatchEmbed(nn.Module):
    """
    2D image to patch embedding: (B,C,H,W) -> (B,N,D)

    Args:
        img_size: Image size.
        patch_size: Patch token size.
        in_chans: Number of input image channels.
        embed_dim: Number of linear projection output channels.
        norm_layer: Normalization layer.
        flatten_embedding: If True, output tokens as (B, N, D);
            otherwise reshape to (B, H', W', D).
    """

    def __init__(
        self,
        img_size: Union[int, Tuple[int, int]] = 224,
        patch_size: Union[int, Tuple[int, int]] = 16,
        in_chans: int = 3,
        embed_dim: int = 768,
        norm_layer: Optional[Callable] = None,
        flatten_embedding: bool = True,
    ) -> None:
        super().__init__()

        image_HW = make_2tuple(img_size)
        patch_HW = make_2tuple(patch_size)
        patch_grid_size = (
            image_HW[0] // patch_HW[0],
            image_HW[1] // patch_HW[1],
        )

        self.img_size = image_HW
        self.patch_size = patch_HW
        self.patches_resolution = patch_grid_size
        self.num_patches = patch_grid_size[0] * patch_grid_size[1]

        self.in_chans = in_chans
        self.embed_dim = embed_dim
        self.flatten_embedding = flatten_embedding

        # Patchify and project in a single strided convolution.
        self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_HW, stride=patch_HW)
        self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity()

    def forward(self, x: Tensor) -> Tensor:
        """Embed a batch of images; H and W must be multiples of the patch size."""
        _, _, H, W = x.shape
        patch_H, patch_W = self.patch_size

        assert (
            H % patch_H == 0
        ), f"Input image height {H} is not a multiple of patch height {patch_H}"
        assert (
            W % patch_W == 0
        ), f"Input image width {W} is not a multiple of patch width: {patch_W}"

        x = self.proj(x)  # B C H W
        H, W = x.size(2), x.size(3)
        x = x.flatten(2).transpose(1, 2)  # B HW C
        x = self.norm(x)
        if not self.flatten_embedding:
            x = x.reshape(-1, H, W, self.embed_dim)  # B H W C
        return x

    def flops(self) -> float:
        """Approximate multiply-accumulate count for one forward pass."""
        Ho, Wo = self.patches_resolution
        flops = Ho * Wo * self.embed_dim * self.in_chans * (self.patch_size[0] * self.patch_size[1])
        # Bugfix: self.norm is never None — it is nn.Identity when no norm
        # layer was requested — so the previous `is not None` check always
        # added norm FLOPs. Only count them for a real norm layer.
        if not isinstance(self.norm, nn.Identity):
            flops += Ho * Wo * self.embed_dim
        return flops
================================================
FILE: third_party/depth_anything_v2/dinov2_layers/swiglu_ffn.py
================================================
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
from typing import Callable, Optional
import torch.nn.functional as F
from torch import Tensor, nn
class SwiGLUFFN(nn.Module):
    """SwiGLU feed-forward network: one fused projection produces a gate and
    a value half; the output is w3(silu(gate) * value)."""

    def __init__(
        self,
        in_features: int,
        hidden_features: Optional[int] = None,
        out_features: Optional[int] = None,
        act_layer: Callable[..., nn.Module] = None,
        drop: float = 0.0,
        bias: bool = True,
    ) -> None:
        super().__init__()
        # Widths default to the input width.
        hidden_features = hidden_features or in_features
        out_features = out_features or in_features
        # w12 produces both halves in a single matmul.
        self.w12 = nn.Linear(in_features, 2 * hidden_features, bias=bias)
        self.w3 = nn.Linear(hidden_features, out_features, bias=bias)

    def forward(self, x: Tensor) -> Tensor:
        gate, value = self.w12(x).chunk(2, dim=-1)
        return self.w3(F.silu(gate) * value)
# Prefer xFormers' fused SwiGLU implementation when it is installed;
# otherwise alias it to the pure-PyTorch SwiGLUFFN above so that
# SwiGLUFFNFused (below) works either way.
try:
    from xformers.ops import SwiGLU
    XFORMERS_AVAILABLE = True
except ImportError:
    SwiGLU = SwiGLUFFN
    XFORMERS_AVAILABLE = False
class SwiGLUFFNFused(SwiGLU):
    """SwiGLU FFN whose hidden width is scaled by 2/3 (to offset the extra
    gate projection) and rounded up to a multiple of 8."""

    def __init__(
        self,
        in_features: int,
        hidden_features: Optional[int] = None,
        out_features: Optional[int] = None,
        act_layer: Callable[..., nn.Module] = None,
        drop: float = 0.0,
        bias: bool = True,
    ) -> None:
        # Widths default to the input width before the 2/3 rescale.
        hidden_features = hidden_features or in_features
        out_features = out_features or in_features
        # Scale by 2/3 and round *up* to the next multiple of 8.
        hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8
        super().__init__(
            in_features=in_features,
            hidden_features=hidden_features,
            out_features=out_features,
            bias=bias,
        )
================================================
FILE: third_party/depth_anything_v2/dpt.py
================================================
import cv2
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision.transforms import Compose
from .dinov2 import DINOv2
from .util.blocks import FeatureFusionBlock, _make_scratch
from .util.transform import NormalizeImage, PrepareForNet, Resize
def _make_fusion_block(features, use_bn, size=None):
    """Build a FeatureFusionBlock configured the way the DPT head expects
    (non-inplace ReLU, no deconv, no channel expansion, align_corners)."""
    activation = nn.ReLU(False)
    return FeatureFusionBlock(
        features,
        activation,
        deconv=False,
        bn=use_bn,
        expand=False,
        align_corners=True,
        size=size,
    )
class ConvBlock(nn.Module):
    """3x3 conv -> BatchNorm -> ReLU, preserving spatial resolution."""

    def __init__(self, in_feature, out_feature):
        super().__init__()
        layers = [
            nn.Conv2d(in_feature, out_feature, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(out_feature),
            nn.ReLU(True),
        ]
        self.conv_block = nn.Sequential(*layers)

    def forward(self, x):
        return self.conv_block(x)
class DPTHead(nn.Module):
    """DPT decoder head.

    Projects four intermediate ViT feature maps to per-level widths,
    resizes them into a 4-level pyramid, fuses the pyramid with
    RefineNet-style blocks, and regresses a 1-channel non-negative map
    (the final ReLU clamps it at zero).

    Args:
        in_channels: channel width of the incoming ViT features.
        features: channel width of the fusion trunk.
        use_bn: whether the fusion blocks use BatchNorm.
        out_channels: per-level widths after the initial 1x1 projections.
        use_clstoken: if True, each input is a (patch_tokens, cls_token)
            pair and the cls token is merged into every patch token via a
            small readout MLP.
    """

    def __init__(
        self,
        in_channels,
        features=256,
        use_bn=False,
        out_channels=[256, 512, 1024, 1024],  # NOTE(review): mutable default — safe only because it is never mutated
        use_clstoken=False,
    ):
        super(DPTHead, self).__init__()
        self.use_clstoken = use_clstoken
        # 1x1 convs mapping the shared ViT width to each level's width.
        self.projects = nn.ModuleList(
            [
                nn.Conv2d(
                    in_channels=in_channels,
                    out_channels=out_channel,
                    kernel_size=1,
                    stride=1,
                    padding=0,
                )
                for out_channel in out_channels
            ]
        )
        # Per-level spatial resampling relative to the token grid:
        # level 1: 4x upsample, level 2: 2x upsample, level 3: identity,
        # level 4: 2x downsample.
        self.resize_layers = nn.ModuleList(
            [
                nn.ConvTranspose2d(
                    in_channels=out_channels[0],
                    out_channels=out_channels[0],
                    kernel_size=4,
                    stride=4,
                    padding=0,
                ),
                nn.ConvTranspose2d(
                    in_channels=out_channels[1],
                    out_channels=out_channels[1],
                    kernel_size=2,
                    stride=2,
                    padding=0,
                ),
                nn.Identity(),
                nn.Conv2d(
                    in_channels=out_channels[3],
                    out_channels=out_channels[3],
                    kernel_size=3,
                    stride=2,
                    padding=1,
                ),
            ]
        )
        if use_clstoken:
            # One readout MLP per level: concatenated [token, cls] -> token width.
            self.readout_projects = nn.ModuleList()
            for _ in range(len(self.projects)):
                self.readout_projects.append(
                    nn.Sequential(nn.Linear(2 * in_channels, in_channels), nn.GELU())
                )
        # 3x3 convs bringing every pyramid level to the trunk width.
        self.scratch = _make_scratch(
            out_channels,
            features,
            groups=1,
            expand=False,
        )
        self.scratch.stem_transpose = None
        # Top-down fusion blocks (refinenet4 consumes the deepest level).
        self.scratch.refinenet1 = _make_fusion_block(features, use_bn)
        self.scratch.refinenet2 = _make_fusion_block(features, use_bn)
        self.scratch.refinenet3 = _make_fusion_block(features, use_bn)
        self.scratch.refinenet4 = _make_fusion_block(features, use_bn)
        head_features_1 = features
        head_features_2 = 32
        # Output head: halve channels, upsample (in forward), then regress 1 channel.
        self.scratch.output_conv1 = nn.Conv2d(
            head_features_1, head_features_1 // 2, kernel_size=3, stride=1, padding=1
        )
        self.scratch.output_conv2 = nn.Sequential(
            nn.Conv2d(head_features_1 // 2, head_features_2, kernel_size=3, stride=1, padding=1),
            nn.ReLU(True),
            nn.Conv2d(head_features_2, 1, kernel_size=1, stride=1, padding=0),
            nn.ReLU(True),  # clamps the prediction to be non-negative
            nn.Identity(),  # no-op placeholder, kept for parity with upstream
        )

    def forward(self, out_features, patch_h, patch_w):
        """Decode depth from four levels of ViT features.

        Args:
            out_features: iterable of four feature entries; each is
                (patch_tokens,) or (patch_tokens, cls_token), with
                patch_tokens of shape (B, patch_h * patch_w, C).
            patch_h: token-grid height.
            patch_w: token-grid width.

        Returns:
            (B, 1, 14 * patch_h, 14 * patch_w) tensor — upsampled by 14x,
            the caller's token size (see DepthAnythingV2.forward).
        """
        out = []
        for i, x in enumerate(out_features):
            if self.use_clstoken:
                # Broadcast the cls token over all patch tokens and fuse.
                x, cls_token = x[0], x[1]
                readout = cls_token.unsqueeze(1).expand_as(x)
                x = self.readout_projects[i](torch.cat((x, readout), -1))
            else:
                x = x[0]
            # (B, N, C) -> (B, C, patch_h, patch_w)
            x = x.permute(0, 2, 1).reshape((x.shape[0], x.shape[-1], patch_h, patch_w))
            x = self.projects[i](x)
            x = self.resize_layers[i](x)
            out.append(x)
        layer_1, layer_2, layer_3, layer_4 = out
        # Bring each level to the trunk width.
        layer_1_rn = self.scratch.layer1_rn(layer_1)
        layer_2_rn = self.scratch.layer2_rn(layer_2)
        layer_3_rn = self.scratch.layer3_rn(layer_3)
        layer_4_rn = self.scratch.layer4_rn(layer_4)
        # Top-down fusion, each step upsampling to the next level's size.
        path_4 = self.scratch.refinenet4(layer_4_rn, size=layer_3_rn.shape[2:])
        path_3 = self.scratch.refinenet3(path_4, layer_3_rn, size=layer_2_rn.shape[2:])
        path_2 = self.scratch.refinenet2(path_3, layer_2_rn, size=layer_1_rn.shape[2:])
        path_1 = self.scratch.refinenet1(path_2, layer_1_rn)
        out = self.scratch.output_conv1(path_1)
        # Restore full input resolution (each token covers 14x14 pixels).
        out = F.interpolate(
            out, (int(patch_h * 14), int(patch_w * 14)), mode="bilinear", align_corners=True
        )
        out = self.scratch.output_conv2(out)
        return out
class DepthAnythingV2(nn.Module):
    """Monocular depth estimator: a DINOv2 backbone feeding a DPT head.

    Used inference-only here — ``forward`` runs under ``torch.no_grad``.
    """

    def __init__(
        self,
        encoder="vitl",
        features=256,
        out_channels=[256, 512, 1024, 1024],
        use_bn=False,
        use_clstoken=False,
    ):
        super().__init__()
        # Which transformer blocks feed the DPT head, per backbone size.
        layer_idx_by_encoder = {
            "vits": [2, 5, 8, 11],
            "vitb": [2, 5, 8, 11],
            "vitl": [4, 11, 17, 23],
            "vitg": [9, 19, 29, 39],
        }
        self.intermediate_layer_idx = layer_idx_by_encoder[encoder]
        self.encoder = encoder
        self.pretrained = DINOv2(model_name=encoder)
        self.depth_head = DPTHead(
            self.pretrained.embed_dim,
            features,
            use_bn,
            out_channels=out_channels,
            use_clstoken=use_clstoken,
        )

    @torch.no_grad()
    def forward(self, x):
        """Predict a (B, H, W) depth map for images of shape (B, 3, H, W);
        the token grid is computed with a floor division by 14 (the
        backbone patch size)."""
        token_h, token_w = x.shape[-2] // 14, x.shape[-1] // 14
        features = self.pretrained.get_intermediate_layers(
            x, self.intermediate_layer_idx, return_class_token=True
        )
        depth = F.relu(self.depth_head(features, token_h, token_w))
        return depth.squeeze(1)
================================================
FILE: third_party/depth_anything_v2/util/blocks.py
================================================
import torch.nn as nn
def _make_scratch(in_shape, out_shape, groups=1, expand=False):
scratch = nn.Module()
out_shape1 = out_shape
out_shape2 = out_shape
out_shape3 = out_shape
if len(in_shape) >= 4:
out_shape4 = out_shape
if expand:
out_shape1 = out_shape
out_shape2 = out_shape * 2
out_shape3 = out_shape * 4
if len(in_shape) >= 4:
out_shape4 = out_shape * 8
scratch.layer1_rn = nn.Conv2d(
in_shape[0], out_shape1, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
)
scratch.layer2_rn = nn.Conv2d(
in_shape[1], out_shape2, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
)
scratch.layer3_rn = nn.Conv2d(
in_shape[2], out_shape3, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
)
if len(in_shape) >= 4:
scratch.layer4_rn = nn.Conv2d(
in_shape[3], out_shape4, kernel_size=3, stride=1, padding=1, bias=False, groups=groups
)
return scratch
class ResidualConvUnit(nn.Module):
    """Residual convolution module: two (act -> conv [-> bn]) stages plus a
    skip connection."""

    def __init__(self, features, activation, bn):
        """
        Args:
            features (int): number of features
            activation: activation module applied before each conv
            bn (bool): insert BatchNorm after each conv
        """
        super().__init__()
        self.bn = bn
        self.groups = 1
        self.conv1 = nn.Conv2d(
            features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups
        )
        self.conv2 = nn.Conv2d(
            features, features, kernel_size=3, stride=1, padding=1, bias=True, groups=self.groups
        )
        if self.bn == True:
            self.bn1 = nn.BatchNorm2d(features)
            self.bn2 = nn.BatchNorm2d(features)
        self.activation = activation
        self.skip_add = nn.quantized.FloatFunctional()

    def forward(self, x):
        """Pre-activation residual pass: returns f(x) + x."""
        out = self.conv1(self.activation(x))
        if self.bn == True:
            out = self.bn1(out)
        out = self.conv2(self.activation(out))
        if self.bn == True:
            out = self.bn2(out)
        if self.groups > 1:
            # Unreachable with groups fixed to 1; kept for parity with upstream.
            out = self.conv_merge(out)
        return self.skip_add.add(out, x)
class FeatureFusionBlock(nn.Module):
    """Feature fusion block: merge an optional lateral input into the
    top-down path, refine, upsample, and project."""

    def __init__(
        self,
        features,
        activation,
        deconv=False,
        bn=False,
        expand=False,
        align_corners=True,
        size=None,
    ):
        """
        Args:
            features (int): number of features
            activation: activation for the residual units
            deconv (bool): stored for API parity; not used in forward
            bn (bool): use BatchNorm inside the residual units
            expand (bool): halve the channel count in the output conv
            align_corners (bool): passed to bilinear interpolation
            size: fixed output size used when forward() gets no ``size``
        """
        super().__init__()

        self.deconv = deconv
        self.align_corners = align_corners
        self.groups = 1
        self.expand = expand

        out_features = features // 2 if self.expand == True else features

        self.out_conv = nn.Conv2d(
            features, out_features, kernel_size=1, stride=1, padding=0, bias=True, groups=1
        )
        self.resConfUnit1 = ResidualConvUnit(features, activation, bn)
        self.resConfUnit2 = ResidualConvUnit(features, activation, bn)
        self.skip_add = nn.quantized.FloatFunctional()
        self.size = size

    def forward(self, *xs, size=None):
        """Fuse one or two inputs and return the refined, upsampled output."""
        output = xs[0]

        # Optional lateral (skip) input.
        if len(xs) == 2:
            output = self.skip_add.add(output, self.resConfUnit1(xs[1]))

        output = self.resConfUnit2(output)

        # Target resolution: explicit `size` arg > fixed self.size > 2x upscale.
        if size is not None:
            modifier = {"size": size}
        elif self.size is not None:
            modifier = {"size": self.size}
        else:
            modifier = {"scale_factor": 2}

        output = nn.functional.interpolate(
            output, **modifier, mode="bilinear", align_corners=self.align_corners
        )
        return self.out_conv(output)
================================================
FILE: third_party/depth_anything_v2/util/transform.py
================================================
import cv2
import numpy as np
class Resize(object):
    """Resize sample to given size (width, height)."""

    def __init__(
        self,
        width,
        height,
        resize_target=True,
        keep_aspect_ratio=False,
        ensure_multiple_of=1,
        resize_method="lower_bound",
        image_interpolation_method=cv2.INTER_AREA,
    ):
        """Init.

        Args:
            width (int): desired output width
            height (int): desired output height
            resize_target (bool, optional):
                True: Resize the full sample (image, mask, target).
                False: Resize image only.
                Defaults to True.
            keep_aspect_ratio (bool, optional):
                True: Keep the aspect ratio of the input sample.
                Output sample might not have the given width and height, and
                resize behaviour depends on the parameter 'resize_method'.
                Defaults to False.
            ensure_multiple_of (int, optional):
                Output width and height is constrained to be multiple of this parameter.
                Defaults to 1.
            resize_method (str, optional):
                "lower_bound": Output will be at least as large as the given size.
                "upper_bound": Output will be at max as large as the given size. (Output size might be smaller than given size.)
                "minimal": Scale as least as possible. (Output size might be smaller than given size.)
                Defaults to "lower_bound".
            image_interpolation_method: OpenCV interpolation flag used for the
                image (depth/mask targets always use nearest-neighbor).
        """
        self.__width = width
        self.__height = height
        self.__resize_target = resize_target
        self.__keep_aspect_ratio = keep_aspect_ratio
        self.__multiple_of = ensure_multiple_of
        self.__resize_method = resize_method
        self.__image_interpolation_method = image_interpolation_method

    def constrain_to_multiple_of(self, x, min_val=0, max_val=None):
        """Round x to the nearest multiple of self.__multiple_of, then adjust
        so the result respects max_val (floor) and min_val (ceil).

        The min_val correction is applied last, so when the two constraints
        conflict min_val wins.
        """
        y = (np.round(x / self.__multiple_of) * self.__multiple_of).astype(int)
        if max_val is not None and y > max_val:
            y = (np.floor(x / self.__multiple_of) * self.__multiple_of).astype(int)
        if y < min_val:
            y = (np.ceil(x / self.__multiple_of) * self.__multiple_of).astype(int)
        return y

    def get_size(self, width, height):
        """Compute the output size for an input of (width, height).

        Returns:
            (new_width, new_height) as numpy integers.
        """
        # determine new height and width
        scale_height = self.__height / height
        scale_width = self.__width / width
        if self.__keep_aspect_ratio:
            # Keep aspect ratio: use a single scale for both axes, chosen
            # according to the resize method.
            if self.__resize_method == "lower_bound":
                # scale such that output size is lower bound
                if scale_width > scale_height:
                    # fit width
                    scale_height = scale_width
                else:
                    # fit height
                    scale_width = scale_height
            elif self.__resize_method == "upper_bound":
                # scale such that output size is upper bound
                if scale_width < scale_height:
                    # fit width
                    scale_height = scale_width
                else:
                    # fit height
                    scale_width = scale_height
            elif self.__resize_method == "minimal":
                # scale as least as possible
                if abs(1 - scale_width) < abs(1 - scale_height):
                    # fit width
                    scale_height = scale_width
                else:
                    # fit height
                    scale_width = scale_height
            else:
                raise ValueError(f"resize_method {self.__resize_method} not implemented")
        # Snap the scaled size to the configured multiple, bounded below
        # (lower_bound) or above (upper_bound) by the requested size.
        if self.__resize_method == "lower_bound":
            new_height = self.constrain_to_multiple_of(scale_height * height, min_val=self.__height)
            new_width = self.constrain_to_multiple_of(scale_width * width, min_val=self.__width)
        elif self.__resize_method == "upper_bound":
            new_height = self.constrain_to_multiple_of(scale_height * height, max_val=self.__height)
            new_width = self.constrain_to_multiple_of(scale_width * width, max_val=self.__width)
        elif self.__resize_method == "minimal":
            new_height = self.constrain_to_multiple_of(scale_height * height)
            new_width = self.constrain_to_multiple_of(scale_width * width)
        else:
            raise ValueError(f"resize_method {self.__resize_method} not implemented")
        return (new_width, new_height)

    def __call__(self, sample):
        """Resize sample["image"] (H, W, C) in place; also resizes "depth"
        and "mask" (with nearest-neighbor) when resize_target is set."""
        width, height = self.get_size(sample["image"].shape[1], sample["image"].shape[0])
        # resize sample
        sample["image"] = cv2.resize(
            sample["image"], (width, height), interpolation=self.__image_interpolation_method
        )
        if self.__resize_target:
            if "depth" in sample:
                sample["depth"] = cv2.resize(
                    sample["depth"], (width, height), interpolation=cv2.INTER_NEAREST
                )
            if "mask" in sample:
                # Masks go through float32 because cv2.resize rejects bool arrays.
                sample["mask"] = cv2.resize(
                    sample["mask"].astype(np.float32),
                    (width, height),
                    interpolation=cv2.INTER_NEAREST,
                )
        return sample
class NormalizeImage(object):
    """Normalize a sample's image with the given mean and std."""

    def __init__(self, mean, std):
        self._mean = mean
        self._std = std

    def __call__(self, sample):
        # Standard (x - mean) / std normalization, stored back on the sample.
        sample["image"] = (sample["image"] - self._mean) / self._std
        return sample
class PrepareForNet(object):
    """Convert a sample to network-input layout: CHW, contiguous float32."""

    def __init__(self):
        pass

    def __call__(self, sample):
        # HWC -> CHW, then make contiguous and cast to float32.
        chw = np.transpose(sample["image"], (2, 0, 1))
        sample["image"] = np.ascontiguousarray(chw).astype(np.float32)
        if "depth" in sample:
            sample["depth"] = np.ascontiguousarray(sample["depth"].astype(np.float32))
        if "mask" in sample:
            sample["mask"] = np.ascontiguousarray(sample["mask"].astype(np.float32))
        return sample