main 9c8513c109eb cached
51 files
165.8 KB
50.1k tokens
276 symbols
1 requests
Download .txt
Repository: KosukeFukazawa/CharacterAnimationTools
Branch: main
Commit: 9c8513c109eb
Files: 51
Total size: 165.8 KB

Directory structure:
gitextract_5a6wl_uk/

├── .gitignore
├── .vscode/
│   └── settings.json
├── LICENSE
├── README.md
├── anim/
│   ├── aistpp.py
│   ├── amass.py
│   ├── animation.py
│   ├── blend.py
│   ├── bvh.py
│   ├── inverse_kinematics/
│   │   ├── ccd_ik.py
│   │   ├── fabrik.py
│   │   ├── jacobi_ik.py
│   │   └── two_bone_ik.py
│   ├── keyframe.py
│   ├── motion_matching/
│   │   ├── database.py
│   │   └── mm.py
│   ├── pose.py
│   ├── skel.py
│   └── smpl.py
├── anitaichi/
│   ├── animation/
│   │   ├── anim.py
│   │   ├── anim_loader/
│   │   │   └── bvh.py
│   │   └── skel.py
│   └── transform/
│       ├── quat.py
│       ├── ti_quat.py
│       ├── ti_vec.py
│       └── vec.py
├── blender/
│   └── fbx2bvh.py
├── configs/
│   ├── DeepLearning/
│   │   └── LMM.toml
│   └── skel_smpl_neutral.npz
├── data/
│   └── data.md
├── model/
│   └── LMM/
│       ├── LMM.md
│       ├── decompressor.py
│       ├── preprocessing.py
│       ├── projector.py
│       ├── setting.py
│       ├── stepper.py
│       ├── test.py
│       └── train.py
├── test/
│   ├── bvh_viewer.py
│   ├── calc_matching_time.py
│   ├── character_controller.py
│   ├── inverse_kinematics.py
│   ├── path_following.py
│   ├── plotting.py
│   ├── plotting_cspace.py
│   └── plotting_global.py
├── todo.md
├── update.md
└── util/
    ├── dualquat.py
    ├── load.py
    └── quat.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .gitignore
================================================
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# others
.DS_Store
data/*
!data/data.md

# under construction
anim/SE3/*
anim/SO3/*
makefile
*.ipynb

================================================
FILE: .vscode/settings.json
================================================
{
    // "python.linting.mypyEnabled": true,
    
    "files.associations": {
        "__bit_reference": "cpp",
        "__bits": "cpp",
        "__config": "cpp",
        "__debug": "cpp",
        "__errc": "cpp",
        "__locale": "cpp",
        "__mutex_base": "cpp",
        "__nullptr": "cpp",
        "__split_buffer": "cpp",
        "__string": "cpp",
        "__threading_support": "cpp",
        "__tuple": "cpp",
        "atomic": "cpp",
        "bitset": "cpp",
        "cctype": "cpp",
        "chrono": "cpp",
        "clocale": "cpp",
        "compare": "cpp",
        "concepts": "cpp",
        "cstdarg": "cpp",
        "cstddef": "cpp",
        "cstdint": "cpp",
        "cstdio": "cpp",
        "cstdlib": "cpp",
        "cstring": "cpp",
        "ctime": "cpp",
        "cwchar": "cpp",
        "cwctype": "cpp",
        "exception": "cpp",
        "initializer_list": "cpp",
        "ios": "cpp",
        "iosfwd": "cpp",
        "iostream": "cpp",
        "istream": "cpp",
        "limits": "cpp",
        "locale": "cpp",
        "memory": "cpp",
        "mutex": "cpp",
        "new": "cpp",
        "ostream": "cpp",
        "ratio": "cpp",
        "stdexcept": "cpp",
        "streambuf": "cpp",
        "string": "cpp",
        "string_view": "cpp",
        "system_error": "cpp",
        "tuple": "cpp",
        "type_traits": "cpp",
        "typeinfo": "cpp",
        "variant": "cpp",
        "vector": "cpp",
        "array": "cpp",
        "list": "cpp"
    }
}

================================================
FILE: LICENSE
================================================
MIT License

Copyright (c) 2023 Kosuke Fukazawa

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


================================================
FILE: README.md
================================================
<p align="center">
    <picture>
    <img alt="cat" src="figs/icon.png" width=512>
    </picture>
</p>

<h2 align="center">
CAT: Character Animation Tools for Python
</h2>

This repository includes scripts for Character Animation.
All the code is written entirely in python.

It is useful for pre-processing and post-processing motions in **Deep Learning**.  
It will also be useful for creating character animations.

## :star: Requirements
I tested on python3.10 (for match-case syntax).

### Package
* NumPy
* SciPy
* matplotlib
* chumpy (if you use vanilla SMPL for AIST++).
* easydict
<!-- 
TBD
* PyTorch
* JAX
* taichi 
* faiss-gpu
-->

### Motion Data
Some of the scripts in this repository need motion data below.  
Please download them and place them in [`data/`](data) or link them as symbolic links at [`data/`](data). For more information please see [`data/data.md`](data/data.md).
* [Ubisoft La Forge Animation Dataset (LAFAN1)](https://github.com/ubisoft/ubisoft-laforge-animation-dataset)
* [SMPL](https://smpl.is.tue.mpg.de/)
* [SMPL+H](https://mano.is.tue.mpg.de/)
* [SMPL-X](https://smpl-x.is.tue.mpg.de/)
* [AMASS](https://amass.is.tue.mpg.de/)
* [AIST++](https://google.github.io/aistplusplus_dataset/factsfigures.html)


## :question: How to use?

### 1. Load and Save Animation
<details>
<summary>open</summary>  

#### 1.1 Load Animation from [bvh](https://research.cs.wisc.edu/graphics/Courses/cs-838-1999/Jeff/BVH.html) file.
```Python
from anim import bvh
from anim.animation import Animation
anim_bvh: Animation = bvh.load(filepath="data/**.bvh")
```

#### 1.2 Load Animation from [AIST++](https://google.github.io/aistplusplus_dataset/factsfigures.html).
You need to install chumpy to use vanilla SMPL model.
```Python
from anim import aistpp
anim: Animation = aistpp.load(
    aistpp_motion_path="data/aistpp/**.pkl",
    smpl_path="data/smpl/neutral/model.pkl"
)
``` 

#### 1.3 Load Animation from [AMASS](https://amass.is.tue.mpg.de/).
I recommend you to download extended SMPL+H model (16 beta components).
```Python
from anim import amass
anim: Animation = amass.load(
    amass_motion_path="data/amass/**.npz",
    smplh_path="data/smplh/neutral/model.npz"
)
```

#### 1.4 Save as bvh.
You can convert SMPL based motion files (AIST++, AMASS) to BVH files.
```Python
from anim import bvh
from anim.animation import Animation

...

anim: Animation
bvh.save(
    filepath="data/***.bvh",
    anim=anim
)

```

<!-- 
#### 1.5 Other animation format
TBD  
-->

</details>


### 2. Get motion features
<details>
<summary>open</summary>

#### 2.1 Get positions (global, root-centric, character space).
```Python
import numpy as np
from anim.animation import Animation

...

anim: Animation
global_positions: np.ndarray = anim.gpos
rcentric_positions: np.ndarray = anim.rtpos
cspace_positions: np.ndarray = anim.cpos
```

#### 2.2 Get velocities(positions, rotations).
```Python
anim: Animation
pos_velocities: np.ndarray = anim.gposvel
rot_velocities: np.ndarray = anim.lrotvel
``` 

#### 2.3 Get mirrored Animation.  
(**caution**: Skel offsets must be symmetric.)
```Python
anim: Animation
anim_M: Animation = anim.mirror()
```
</details>


### 3. Inverse Kinematics
<details>
<summary>open</summary>

#### 3.1 Two bone IK
Analytical foot-IK example (defines heel positions and knee forward vectors).
```bash
python anim/inverse_kinematics/two_bone_ik.py
```
![two_bone_ik](figs/two_bone_ik.png)

#### 3.2 CCD-IK
Simple demo.
```bash
python anim/inverse_kinematics/ccd_ik.py
```
![ccd_ik](figs/ccd_ik.png)

#### 3.3 FABRIK
Simple demo.
```bash
python anim/inverse_kinematics/fabrik.py
```
![fabrik](figs/fabrik.png)

<!-- 
#### 3.4 Jacobian IK
TBD 
-->

</details>


### 4. Motion Blending
<details>
<summary>open</summary>

#### 4.1 Linear blending for pose.
TBD

</details>


### 5. Motion Matching
<details>
<summary>open</summary>

#### 5.1 Character Control by predefined trajectories

```python
python test/path_following.py
```
![sim_motion](figs/sim_motion.gif)
![sim_sidestep](figs/sim_motion_sidestep.gif)

<!-- 
#### 5.2 Character Control by user input
```python
python test/character_controller.py
``` 
-->

</details>

<!--
### 6. Graph animation
<details>
<summary>open</summary>

#### Motion Graphs
TBD

#### State Machine
TBD

</details>


### 7. KeyFrame Animation
<details>
<summary>open</summary>

#### Extract keys from Animation
TBD

#### KeyFrame reduction
TBD

#### Create Animation from keyframes
TBD

</details>


### 8. Crowd Simulation
<details>
<summary>open</summary>

#### Collision avoidance
TBD

#### Following path
TBD

</details>


### 9. Skinning
<details>
<summary>open</summary>

#### Linear Blend Skinning
TBD

#### Dual-Quaternions Blend Skinning
TBD

</details>


### 10. Animation Retarget
<details>
<summary>open</summary>

#### Rotation copy
TBD

#### IK-based retarget
TBD

</details>
-->


<!-- 
## :zap: Deep Learning 
-->


## :eyes: Notification
* [`util/quat.py`](util/quat.py) inspired by [Motion-Matching](https://github.com/orangeduck/Motion-Matching).
* This repository is MIT licensed, but some datasets requires a separate license. Please check them.

## :speech_balloon: Contact
This repository is under construction.  
Feel free to contact me on [issue](https://github.com/KosukeFukazawa/CharacterAnimationTools/issues).

## :books: License
This code is distributed under an [MIT LICENSE](LICENSE).

================================================
FILE: anim/aistpp.py
================================================
"""
aistpp.py
AIST++ : 
    format: ***.pkl (pickle file)
    Parameters :
        "smpl_loss" (float): I don't know how to use it.
        "smpl_poses" (np.ndarray): SMPL pose parameters (rotations). shape: (num_frame, num_J * 3 = 72).
        "smpl_scaling" (float): Scaling parameters of the skeleton.
        "smpl_trans" (np.ndarray): Root translations. shape: (num_frame, 3).
"""
# loading AIST++ data.

from __future__ import annotations

from pathlib import Path
import numpy as np
from anim.skel import Skel
from anim.animation import Animation
from anim.smpl import load_model, calc_skel_offsets, SMPL_JOINT_NAMES
from util import quat
from util.load import pickle_load

def load(
    aistpp_motion_path: Path,
    skel: Skel=None,
    smpl_path: Path=Path("data/smpl/neutral/model.pkl"),
    scale: float=100.0,
    fps: int=30,
) -> Animation:
    """Load an AIST++ motion pickle as an Animation.

    Args:
        aistpp_motion_path (Path): Path to the AIST++ motion file (***.pkl).
        skel (Skel, optional): Pre-built skeleton. When None, a skeleton is
            constructed from the SMPL model at `smpl_path`. Defaults to None.
        smpl_path (Path): Path to the SMPL model pickle. Only used when
            `skel` is None.
        scale (float): Scaling parameter of the skeleton. Defaults to 100.0.
        fps (int): Frame rate of the output animation. Defaults to 30.
    Returns:
        Animation
    """
    NUM_JOINTS = 24
    NUM_BETAS = 10

    # Load a motion.
    if isinstance(aistpp_motion_path, str):
        aistpp_motion_path = Path(aistpp_motion_path)
    name = aistpp_motion_path.stem
    aistpp_dict = pickle_load(aistpp_motion_path)
    poses = aistpp_dict["smpl_poses"]
    num_frames = len(poses)
    poses = poses.reshape(num_frames, -1, 3)
    quats = quat.from_axis_angle(poses)
    # AIST++ translations are stored in the scaled space; undo the scaling.
    trans = aistpp_dict["smpl_trans"] / aistpp_dict["smpl_scaling"]

    if skel is None:
        # Build the skeleton from the SMPL template (zero betas).
        smpl_dict = load_model(smpl_path)
        parents = list(map(int, smpl_dict["kintree_table"][0][:NUM_JOINTS]))
        parents[0] = -1
        J_regressor = smpl_dict["J_regressor"]
        shapedirs = smpl_dict["shapedirs"]
        v_template = smpl_dict["v_template"]
        betas = np.zeros([NUM_BETAS])
        J_positions = calc_skel_offsets(betas, J_regressor, shapedirs, v_template)[:NUM_JOINTS] * scale
        root_offset = J_positions[0]
        offsets = J_positions - J_positions[parents]
        offsets[0] = root_offset
        names = SMPL_JOINT_NAMES[:NUM_JOINTS]
        skel = Skel.from_names_parents_offsets(names, parents, offsets)
    else:
        # BUGFIX: `root_offset` was only defined in the `skel is None` branch,
        # so a caller-supplied skeleton raised NameError below. Use the
        # skeleton's own root offset instead (same convention as amass.load).
        root_offset = skel.offsets[0]

    # We need to apply the SMPL root offset to the translations.
    trans += root_offset

    return Animation(skel=skel, quats=quats, trans=trans, fps=fps, anim_name=name)

================================================
FILE: anim/amass.py
================================================
"""
amass.py
AMASS :
    Format: ***.npz (numpy binary file)
    Parameters:
        "trans" (np.ndarray): Root translations. shape: (num_frames, 3).
        "gender" (np.ndarray): Gender name. "male", "female", "neutral".
        "mocap_framerate" (np.ndarray): Fps of mocap data.
        "betas" (np.ndarray): PCA based body shape parameters. shape: (num_betas=16).
        "dmpls" (np.ndarray): DMPL dynamic body parameters; unused here, since they do not affect the skeleton. shape: (num_frames, num_dmpls=8).
        "poses" (np.ndarray): SMPLH pose parameters (rotations). shape: (num_frames, num_J * 3 = 156).
"""
# loading amass data.

from __future__ import annotations

from pathlib import Path
import numpy as np
from anim.skel import Skel
from anim.animation import Animation
from anim.smpl import load_model, calc_skel_offsets, SMPL_JOINT_NAMES, SMPLH_JOINT_NAMES
from util import quat

def load(
    amass_motion_path: Path,
    skel: Skel=None,
    smplh_path: Path=Path("data/smplh/neutral/model.npz"),
    remove_betas: bool=False,
    gender: str=None,
    num_betas: int=16,
    scale: float=100.0,
    load_hand=True,
) -> Animation:
    """Load an AMASS motion file (***.npz) as an Animation.

    args:
        amass_motion_path (Path): Path to the AMASS motion file.
        skel (Skel): Optional. Pre-built skeleton; when None, one is built
            from the SMPL+H model at `smplh_path`.
        smplh_path (Path): Optional. Path to the SMPLH model. Only used when
            `skel` is None.
        remove_betas (bool): Ignore the motion's beta (shape) parameters and
            use the template body shape instead. Defaults to False.
        gender (str): Gender of SMPLH model. When None, read from the motion
            file. Defaults to None.
        num_betas (int): Number of betas to use. Defaults to 16.
        scale (float): Scaling parameter of the skeleton. Defaults to 100.0.
        load_hand (bool): Whether to use hand joints (52 joints instead of
            24). Defaults to True.
    Return:
        anim (Animation)
    """
    if load_hand:
        NUM_JOINTS = 52
        names = SMPLH_JOINT_NAMES[:NUM_JOINTS]
    else:
        NUM_JOINTS = 24
        names = SMPL_JOINT_NAMES[:NUM_JOINTS]

    if not isinstance(amass_motion_path, Path):
        amass_motion_path = Path(amass_motion_path)
    if not isinstance(smplh_path, Path):
        smplh_path = Path(smplh_path)

    # Coordinate-conversion rotation applied to the root and translations
    # (presumably AMASS z-up -> this library's convention — confirm).
    trans_quat = quat.mul(quat.from_angle_axis(-np.pi / 2, [0, 1, 0]), quat.from_angle_axis(-np.pi / 2, [1, 0, 0]))

    # Load AMASS info. NOTE: allow_pickle on external data can execute
    # arbitrary code when unpickling — only load trusted files.
    amass_dict = np.load(amass_motion_path, allow_pickle=True)
    if gender is None:
        gender = str(amass_dict["gender"])
    fps = int(amass_dict["mocap_framerate"])
    if remove_betas:
        betas = np.zeros([num_betas])
    else:
        betas = amass_dict["betas"][:num_betas]
    axangles = amass_dict["poses"]
    num_frames = len(axangles)
    axangles = axangles.reshape([num_frames, -1, 3])[:, :NUM_JOINTS]
    quats = quat.from_axis_angle(axangles)
    # Re-orient the root rotations into the converted coordinate frame.
    quats[:, 0] = quat.mul(trans_quat[None], quats[:, 0])

    if skel is None:
        # Build the skeleton from the SMPL+H shape parameters.
        smplh_dict = load_model(smplh_path, gender)
        parents = smplh_dict["kintree_table"][0][:NUM_JOINTS]
        parents[0] = -1
        J_regressor = smplh_dict["J_regressor"]
        shapedirs = smplh_dict["shapedirs"]
        v_template = smplh_dict["v_template"]

        J_positions = calc_skel_offsets(betas, J_regressor, shapedirs, v_template)[:NUM_JOINTS] * scale
        root_offset = J_positions[0]
        offsets = J_positions - J_positions[parents]
        offsets[0] = root_offset
        skel = Skel.from_names_parents_offsets(names, parents, offsets, skel_name="SMPLH")

    # Apply scale, root offset and the coordinate conversion to translations.
    root_pos = skel.offsets[0][None].repeat(len(quats), axis=0)
    trans = amass_dict["trans"] * scale + root_pos
    trans = trans @ quat.to_xform(trans_quat).T
    anim = Animation(skel=skel, quats=quats, trans=trans, fps=fps, anim_name=amass_motion_path.stem)

    return anim

================================================
FILE: anim/animation.py
================================================
"""
animation.py
"""
# Basic animation class. Can use for 
# deep-learning (pre/post)processing 
# and visualization etc.

from __future__ import annotations

import numpy as np
import scipy.ndimage as ndimage
from util import quat
from util import dualquat as dq
from anim.skel import Skel


class Animation:
    def __init__(
        self,
        skel: Skel,
        quats: np.ndarray,
        trans: np.ndarray,
        positions: np.ndarray = None,
        fps: int=30,
        anim_name: str="animation",
    ) -> None:
        """ Class for Motions representations.

        Args:
            skel (Skel): Skelton definition.
            quats (np.ndarray): Joints rotations. shape: (T, J, 4)
            trans (np.ndarray): Root transitions. shape: (T, 3)
            positions (np.ndarray): Joints positions. shape: (T, J, 3). Defaults to None.
            fps (int): Frame per seconds for animation. Defaults to 30.
            anim_name (str, optional): Name of animation. Defaults to "animation".
        """
        self.skel = skel
        self.quats = quats
        self.trans = trans
        self.positions = positions
        self.fps = fps
        self.name = anim_name
    
    def __len__(self) -> int: return len(self.trans)
    
    def __add__(self, other: Animation) -> Animation:
        quats: np.ndarray = np.concatenate([self.quats, other.quats], axis=0)
        trans: np.ndarray = np.concatenate([self.trans, other.trans], axis=0)
        return Animation(
            skel=self.skel,
            quats=quats,
            trans=trans,
            fps=self.fps,
            anim_name=self.name,
        )
    
    def __getitem__(self, index: int | slice) -> Animation:
        if isinstance(index, int):
            if index == -1:
                index = len(self) - 1
            return Animation(
                skel=self.skel,
                quats=self.quats[index:index+1],
                trans=self.trans[index:index+1],
                fps=self.fps,
                anim_name=self.name,
            )
        elif isinstance(index, slice):
            return Animation(
                skel=self.skel,
                quats=self.quats[index],
                trans=self.trans[index],
                fps=self.fps,
                anim_name=self.name,
            )
        else:
            raise TypeError
    
    def cat(self, other: Animation) -> None:
        assert isinstance(other, Animation)

        self.quats = np.concatenate(
            [self.quats, other.quats], axis=0)
        self.trans = np.concatenate(
            [self.trans, other.trans], axis=0
        )
    
    @property
    def parents(self) -> list[int]:
        return self.skel.parents
    
    @property
    def joint_names(self) -> list[str]:
        return self.skel.names
    
    @property
    def offsets(self) -> np.ndarray:
        return self.skel.offsets
    
    # =====================
    #  Rotation conversion
    # =====================
    @property
    def grot(self) -> np.ndarray:
        """Global joint rotations (quaternions) from FK over the hierarchy. shape: (T, J, 4)"""
        return quat.fk_rot(self.quats, self.parents)
    
    @property
    def crot(self) -> np.ndarray:
        """Global rotations relative to the projected root (simulation bone)."""
        sim_rot = self.proj_root_rot    # heading of the simulation bone.
        sim_pos = self.proj_root_pos()  # its position projected on the ground.
        lrot = self.quats.copy()
        lpos = self.lpos.copy()
        # Re-express the root transform relative to the simulation bone.
        lrot[:, 0] = quat.inv_mul(sim_rot, lrot[:, 0])
        lpos[:, 0] = quat.inv_mul_vec(sim_rot, lpos[:, 0] - sim_pos)
        return quat.fk_rot(lrot, self.parents)
    
    @property
    def axangs(self) -> np.ndarray:
        """Local rotations in axis-angle form. shape: (T, J, 3)"""
        axis_angles = quat.to_axis_angle(self.quats)
        return axis_angles
    
    @property
    def xforms(self) -> np.ndarray:
        """Local rotations as 3x3 rotation matrices. shape: (T, J, 3, 3)"""
        matrices = quat.to_xform(self.quats)
        return matrices
    
    @property
    def ortho6ds(self) -> np.ndarray:
        """Continuous 6D rotation representation, via quat.to_xform_xy."""
        sixd = quat.to_xform_xy(self.quats)
        return sixd
    
    @property
    def sw_tws(self) -> tuple[np.ndarray, np.ndarray]:
        """Swing-twist decomposition of the non-root local rotations (HybrIK-style).

        The root joint is excluded; quat.mul(swings, twists) reproduces the
        original rotations for joints 1..J-1.

        return:
            swings: (T, J-1, 4)
            twists: (T, J-1, 4)
        """
        
        rots: np.ndarray = self.quats.copy()
        pos:  np.ndarray = self.lpos.copy()
        
        # children[j] = index of the (last visited) child of joint j, -1 if none.
        children: np.ndarray = \
            -np.ones(len(self.parents), dtype=int)
        for i in range(1, len(self.parents)):
            children[self.parents[i]] = i
        
        swings: list[np.ndarray]  = []
        twists: list[np.ndarray]  = []
        
        for i in range(1, len(self.parents)):
            # Joints with no recorded child get a twist-only decomposition
            # (identity swing). NOTE(review): the original comment called these
            # the "nearest joint to root", but children[i] < 0 actually selects
            # joints without children (leaves) — confirm intent.
            if children[i] < 0:
                swings.append(quat.eye([len(self), 1]))
                twists.append(rots[:, i:i+1])
                continue
            
            # Unit direction to the child bone, before (u) and after (v)
            # applying this joint's rotation; the swing maps u onto v.
            u: np.ndarray = pos[:, children[i]:children[i]+1]
            rot: np.ndarray = rots[:, i:i+1]
            u = u / np.linalg.norm(u, axis=-1, keepdims=True)
            v: np.ndarray = quat.mul_vec(rot, u)
            
            swing: np.ndarray = quat.normalize(quat.between(u, v))
            swings.append(swing)
            # The twist is the residual rotation left after removing the swing.
            twist: np.ndarray = quat.inv_mul(swing, rot)
            twists.append(twist)
        
        swings_np: np.ndarray = np.concatenate(swings, axis=1)
        twists_np: np.ndarray = np.concatenate(twists, axis=1)

        return swings_np, twists_np

    # ==================
    #  Position from FK
    # ==================
    def set_positions_from_fk(self) -> None:
        """Cache world-space joint positions (computed by FK via `gpos`) into `self.positions`."""
        self.positions = self.gpos
    
    @property
    def lpos(self) -> np.ndarray:
        lpos: np.ndarray = self.offsets[None].repeat(len(self), axis=0)
        lpos[:,0] = self.trans
        return lpos
    
    @property
    def gpos(self) -> np.ndarray:
        """World-space joint positions via forward kinematics. shape: (T, J, 3)"""
        # FK also returns global rotations; only the positions are needed here.
        _, positions = quat.fk(self.quats, self.lpos, self.parents)
        return positions
    
    @property
    def rtpos(self) -> np.ndarray:
        """Joint positions with the root fixed at the origin (root-centric). shape: (T, J, 3)"""
        rots = self.quats.copy()
        poss = self.lpos.copy()
        # Zero the root transform so all joints are expressed relative to it.
        rots[:, 0] = quat.eye([len(self)])
        poss[:, 0] = np.zeros([len(self), 3])
        _, rtpos = quat.fk(rots, poss, self.parents)
        return rtpos
    
    @property
    def cpos(self) -> np.ndarray:
        """Projected-root (simulation bone) centric joint positions. shape: (T, J, 3)"""
        lrot = self.quats.copy()
        lpos = self.lpos  # `lpos` builds a fresh array; safe to modify in place.
        c_root_rot, c_root_pos = self.croot()
        lrot[:, 0] = c_root_rot
        lpos[:, 0] = c_root_pos
        # Only positions are needed; discard the rotations (was an unused local).
        _, cpos = quat.fk(lrot, lpos, self.parents)
        return cpos
    
    def croot(self, idx: int=None) -> tuple[np.ndarray, np.ndarray]:
        """Character-space root transform.

        Args:
            idx (int, optional): frame index; when given, only that frame is returned.
        return:
            crot: character space root rotations. [T, 4]
            cpos: character space root positions. [T, 3]
        """
        sim_rot = self.proj_root_rot    # root heading relative to the rest forward.
        sim_pos = self.proj_root_pos()  # root projected onto the ground (y=0).
        # Express the root relative to the simulation bone.
        crot = quat.inv_mul(sim_rot, self.quats[:, 0])
        cpos = quat.inv_mul_vec(sim_rot, self.trans - sim_pos)
        if idx is None:
            return crot, cpos
        return crot[idx], cpos[idx]
    
    # ============
    #   Velocity
    # ============
    @property
    def gposvel(self) -> np.ndarray:
        """Global joint position velocities via finite differences. shape: (T, J, 3)

        Frame 0 is linearly extrapolated from frames 1-3 when at least four
        frames exist; shorter clips copy frame 1 (a single frame stays zero).
        """
        gpos : np.ndarray = self.gpos
        gpvel: np.ndarray = np.zeros_like(gpos)
        gpvel[1:] = (gpos[1:] - gpos[:-1]) * self.fps # displacement per second.
        if len(gpvel) >= 4:
            # Extrapolate frame 0 from the following velocity trend.
            gpvel[0] = gpvel[1] - (gpvel[3] - gpvel[2])
        elif len(gpvel) > 1:
            # BUGFIX: indexing gpvel[3] crashed for clips shorter than 4 frames.
            gpvel[0] = gpvel[1]
        return gpvel
    
    @property
    def cposvel(self) -> np.ndarray:
        """Character-space joint position velocities via finite differences. shape: (T, J, 3)

        Frame 0 is linearly extrapolated from frames 1-3 when at least four
        frames exist; shorter clips copy frame 1 (a single frame stays zero).
        """
        cpos : np.ndarray = self.cpos
        cpvel: np.ndarray = np.zeros_like(cpos)
        cpvel[1:] = (cpos[1:] - cpos[:-1]) * self.fps # displacement per second.
        if len(cpvel) >= 4:
            # Extrapolate frame 0 from the following velocity trend.
            cpvel[0] = cpvel[1] - (cpvel[3] - cpvel[2])
        elif len(cpvel) > 1:
            # BUGFIX: indexing cpvel[3] crashed for clips shorter than 4 frames.
            cpvel[0] = cpvel[1]
        return cpvel

    @property
    def lrotvel(self) -> np.ndarray:
        """Local rotation velocities as scaled rotation vectors. shape: (T, J, 3)

        Frame 0 is linearly extrapolated from frames 1-3 when at least four
        frames exist; shorter clips copy frame 1 (a single frame stays zero).
        """
        lrot : np.ndarray = self.quats.copy()
        lrvel: np.ndarray = np.zeros_like(self.lpos)
        # Relative rotation from the previous frame, as an axis-angle vector per second.
        lrvel[1:] = quat.to_axis_angle(quat.abs(quat.mul(lrot[1:], quat.inv(lrot[:-1])))) * self.fps
        if len(lrvel) >= 4:
            # Extrapolate frame 0 from the following velocity trend.
            lrvel[0] = lrvel[1] - (lrvel[3] - lrvel[2])
        elif len(lrvel) > 1:
            # BUGFIX: indexing lrvel[3] crashed for clips shorter than 4 frames.
            lrvel[0] = lrvel[1]
        return lrvel
    
    # ==============
    #   4x4 matrix
    # ==============
    @property
    def local_transform(self) -> np.ndarray:
        """Per-joint local 4x4 homogeneous transforms. shape: (T, J, 4, 4)

        NOTE(review): the translation column is filled from the rest-pose
        `offsets`, not `self.lpos`, so the root translation (`self.trans`)
        is NOT included — confirm this is intended before deriving
        world-space matrices from it.
        """
        xforms: np.ndarray = self.xforms
        offsets: np.ndarray = self.offsets
        transform: np.ndarray = np.zeros(xforms.shape[:-2] + (4, 4,))
        transform[..., :3, :3] = xforms
        transform[..., :3,  3] = offsets
        transform[..., 3, 3] = 1
        return transform
    
    @property
    def global_transform(self) -> np.ndarray:
        """Accumulated 4x4 transforms along the kinematic chain. shape: (T, J, 4, 4)"""
        local_mats: np.ndarray = self.local_transform.copy()
        parents: list[int] = self.parents
        # Root first; each joint composes onto its already-computed parent.
        chain = [local_mats[..., :1, :, :]]
        for j in range(1, len(parents)):
            chain.append(np.matmul(chain[parents[j]], local_mats[..., j:j+1, :, :]))
        return np.concatenate(chain, axis=-3)
    
    # ==================
    #  dual quaternions
    # ==================
    @property
    def local_dualquat(self) -> np.ndarray:
        """Local joint transforms encoded as dual quaternions."""
        rotations, translations = self.quats, self.lpos
        return dq.from_rot_and_trans(rotations, translations)
    
    @property
    def global_dualquat(self) -> np.ndarray:
        """World-space dual quaternions via dual-quaternion forward kinematics."""
        local_dq = self.local_dualquat
        return dq.fk(local_dq, self.parents)
    
    # =============
    #  trajectory 
    # =============
    def proj_root_pos(self, remove_vertical: bool=False) -> np.ndarray:
        """Root position projected on the ground (world space).
        return:
            Projected bone positons as ndarray of shape [len(self), 3] or [len(self), 2](remove_vertical).
        """
        vertical = self.skel.vertical
        if remove_vertical:
            settle_ax = []
            for i, ax in enumerate(vertical):
                if ax == 0:
                    settle_ax.append(i)
            return self.trans[..., settle_ax]
        else:
            return self.trans * np.array([abs(abs(ax) - 1) for ax in vertical])
    
    @property
    def proj_root_rot(self) -> np.ndarray:
        """Root rotations relative to the rest forward direction on the ground. shape: [len(self), 4]"""
        rest_forward = np.array(self.skel.forward)
        return quat.normalize(quat.between(rest_forward, self.root_direction()))
    
    def root_direction(self, remove_vertical: bool=False) -> np.ndarray:
        """Forward orientation vectors on the ground (world space).

        Args:
            remove_vertical (bool): drop the vertical axis from the output.
        return:
            Normalized forward vectors as ndarray of shape [..., 3] or [..., 2](remove_vertical).
        """
        # Calculate forward vectors except vertical axis.
        vertical = self.skel.vertical
        rt_rots = self.quats[..., 0, :]
        # Broadcast the skeleton's rest-pose forward vector to every frame.
        forwards = np.zeros(shape=rt_rots.shape[:-1] + (3,))
        forwards[...,] = self.skel.rest_forward
        # Rotate by the root, then zero the vertical component
        # (mask entry is 1 where |ax| != 1, else 0).
        rt_fwd = quat.mul_vec(rt_rots, forwards) * np.array([abs(abs(ax) - 1) for ax in vertical]) # [T, 3]
        # Normalize vectors.
        norm_rt_fwd = rt_fwd / np.linalg.norm(rt_fwd, axis=-1, keepdims=True)
        if remove_vertical:
            # Keep only the ground-plane axes (those flagged 0 in `vertical`).
            settle_ax = []
            for i, ax in enumerate(vertical):
                if ax == 0:
                    settle_ax.append(i)
            norm_rt_fwd = norm_rt_fwd[..., settle_ax]
        return norm_rt_fwd
    
    # ===================
    #  Future trajectory
    # ===================
    def future_traj_poss(self, frame: int, remove_vertical: bool=True, cspace=True) -> np.ndarray:
        """Calculate future trajectory positions on simulation bone.
        Args:
            frame (int): how many ahead frame to see.
            remove_vertical (bool, optional): remove vertical axis positions. Defaults to True.
            cspace (bool, optional): use local character space. Defaults to True.
        Returns:
            np.ndarray: future trajectories positions. shape=(len(self), 3) or (len(self), 2)
        """
        idxs = self.clamp_future_idxs(frame)
        proj_root_pos = self.proj_root_pos()
        if cspace:
            traj_pos = quat.inv_mul_vec(self.proj_root_rot, proj_root_pos[idxs] - proj_root_pos)
        else:
            traj_pos = proj_root_pos[idxs]

        if remove_vertical:
            vertical = self.skel.vertical
            settle_ax = []
            for i, ax in enumerate(vertical):
                if ax == 0:
                    settle_ax.append(i)
            return traj_pos[..., settle_ax]
        else: 
            return traj_pos

    def future_traj_dirs(self, frame: int, remove_vertical: bool=True, cspace=True) -> np.ndarray:
        """Future trajectory directions relative to the simulation bone.

        Args:
            frame (int): how many frames ahead to look.
            remove_vertical (bool, optional): drop the vertical axis. Defaults to True.
            cspace (bool, optional): express in local character space. Defaults to True.
        Returns:
            np.ndarray: future trajectory directions. shape=(len(self), 3) or (len(self), 2)
        """
        ahead = self.clamp_future_idxs(frame)
        headings = self.root_direction()
        if cspace:
            # Rotate the future headings into character space.
            traj_dir = quat.inv_mul_vec(self.proj_root_rot, headings[ahead])
        else:
            traj_dir = headings[ahead]

        if not remove_vertical:
            return traj_dir
        keep = [i for i, ax in enumerate(self.skel.vertical) if ax == 0]
        return traj_dir[..., keep]
    
    def clamp_future_idxs(self, offset: int) -> np.ndarray:
        """Return, for every frame, the index of the frame `offset` steps
        ahead, clamped so no index runs past the final frame.
        """
        idxs = np.arange(len(self)) + offset
        # The last `offset + 1` entries would point past the end; pin them
        # all to the last valid frame index.
        clamp_from = -(offset + 1)
        idxs[clamp_from:] = idxs[clamp_from]
        return idxs
    
    # =====================
    #    Other functions
    # =====================
    def calc_foot_contact(
        self, 
        method: str="velocity",
        threshold: float=0.15,
        left_foot_name: str="LeftToe",
        right_foot_name: str="RightToe",
    ) -> np.ndarray:
        """Estimate per-frame foot contact flags for both toe joints.
        Args:
            method: "velocity" (contact when global speed < threshold) or
                "position" (contact when height along the vertical axis
                is < threshold).
            threshold: cutoff for the chosen method.
            left_foot_name, right_foot_name: joint names to test.
        Returns:
            np.ndarray: boolean array, shape (T, 2), median-filtered over time.
        Raises:
            ValueError: if `method` is not "velocity" or "position".
        """
        foot_idxs = [
            self.joint_names.index(left_foot_name),
            self.joint_names.index(right_foot_name),
        ]
        if method == "velocity":
            # In contact when the global velocity magnitude is small.
            speeds = np.linalg.norm(self.gposvel[:, foot_idxs], axis=-1)
            contacts = speeds < threshold
        elif method == "position":
            # Locate the vertical axis and whether "up" is the negative direction.
            settle_idx = 0
            inverse = False
            for i, ax in enumerate(self.skel.vertical):
                if ax == 1:
                    settle_idx = i
                elif ax == -1:
                    settle_idx = i
                    inverse = True
            heights = self.gpos[:, foot_idxs, settle_idx]
            if inverse:
                heights *= -1
            contacts = heights < threshold
        else:
            raise ValueError("unknown value selected on `method`.")
        # Median-filter each foot's track to remove single-frame flicker.
        for ci in range(contacts.shape[1]):
            contacts[:, ci] = ndimage.median_filter(
                contacts[:, ci],
                size=6,
                mode="nearest",
            )
        return contacts
    
    def mirror(self, dataset: str=None) -> Animation:
        """Return a left/right mirrored copy of this animation.
        Args:
            dataset: "lafan1" uses the LAFAN1-specific routine
                (`animation_mirror`); anything else uses the generic
                axis-based `mirror_rot_trans`.
        Returns:
            Animation: a new animation named `<name>_M`.
        """
        # Axes aligned with vertical or forward keep their sign; the
        # remaining lateral axis is flipped.
        mirror_axis = [
            1 if abs(v) == 1 or abs(f) == 1 else -1
            for v, f in zip(self.skel.vertical, self.skel.forward)
        ]
        if dataset == "lafan1":
            quatM, lposM = animation_mirror(
                lrot=self.quats,
                lpos=self.lpos,
                names=self.joint_names,
                parents=self.parents,
            )
            transM = lposM[:, 0]
        else:
            quatM, transM = mirror_rot_trans(
                lrot=self.quats,
                trans=self.trans,
                names=self.joint_names,
                parents=self.parents,
                mirror_axis=mirror_axis,
            )
        return Animation(
            skel=self.skel,
            quats=quatM,
            trans=transM,
            fps=self.fps,
            anim_name=self.name + "_M",
        )
    
    @staticmethod
    def no_animation(
        skel: Skel, 
        fps: int=30,
        num_frame: int=1,
        anim_name: str="animation",
    ) -> Animation:
        """Create a unit animation (identity rotations, zero translation) from Skel."""
        identity_quats = quat.eye([num_frame, len(skel)])
        zero_trans = np.zeros([num_frame, 3])
        return Animation(skel, identity_quats, zero_trans, None, fps, anim_name)


def mirror_rot_trans(
    lrot: np.ndarray,
    trans: np.ndarray,
    names: list[str],
    parents: np.ndarray | list[int],
    mirror_axis: list[int]
) -> tuple[np.ndarray, np.ndarray]:
    """Mirror local rotations and root translation by swapping left/right joints.

    Args:
        lrot: local joint rotations (quaternions).
        trans: root translations.
        names: joint names; left/right pairs are matched case-insensitively
            by their "left"/"right" prefixes.
        parents: parent index per joint.
        mirror_axis: per-axis sign (+1 keep, -1 flip) for positions.
    Returns:
        (mirrored local rotations, mirrored translations).
    """
    lowered = [n.lower() for n in names]

    def _partner(n: str) -> int:
        # Map each joint to its opposite-side counterpart; joints without
        # a side prefix map to themselves.
        if n.startswith("right"):
            return lowered.index("left" + n[5:])
        if n.startswith("left"):
            return lowered.index("right" + n[4:])
        return lowered.index(n)

    joints_mirror: np.ndarray = np.array([_partner(n) for n in lowered])

    mirror_pos: np.ndarray = np.array(mirror_axis)
    # The first quaternion component keeps its sign; the vector part flips
    # opposite to the positional mirror. NOTE(review): assumes scalar-first
    # quaternion layout -- confirm against util.quat.
    mirror_rot: np.ndarray = np.array([1] + [-ax for ax in mirror_axis])

    grot: np.ndarray = quat.fk_rot(lrot, parents)
    trans_mirror: np.ndarray = mirror_pos * trans
    grot_mirror: np.ndarray = mirror_rot * grot[:, joints_mirror]

    return quat.ik_rot(grot_mirror, parents), trans_mirror

# for LAFAN1 dataset. This function from 
# https://github.com/orangeduck/Motion-Matching/blob/5e57a1352812494cca83396785cfc4985df9170c/resources/generate_database.py#L15
def animation_mirror(lrot, lpos, names, parents):
    """Mirror an animation for the LAFAN1 dataset conventions.

    Args:
        lrot: local joint rotations (quaternions) -- presumably (T, J, 4);
            TODO confirm against util.quat.
        lpos: local joint positions -- presumably (T, J, 3).
        names: joint names; left/right pairs matched by 'Left'/'Right' prefixes.
        parents: parent index per joint.
    Returns:
        Mirrored (lrot, lpos) recovered via inverse kinematics.
    """

    # Map each joint index to its opposite-side counterpart.
    joints_mirror = np.array([(
        names.index('Left'+n[5:]) if n.startswith('Right') else (
        names.index('Right'+n[4:]) if n.startswith('Left') else 
        names.index(n))) for n in names])

    # Hard-coded mirror across the X axis (unlike mirror_rot_trans above,
    # which parameterizes the axis).
    mirror_pos = np.array([-1, 1, 1])
    mirror_rot = np.array([[-1, -1, 1], [1, 1, -1], [1, 1, -1]])

    grot, gpos = quat.fk(lrot, lpos, parents)

    # Mirror in global space, then solve back to local space.
    gpos_mirror = mirror_pos * gpos[:,joints_mirror]
    grot_mirror = quat.from_xform(mirror_rot * quat.to_xform(grot[:,joints_mirror]))
    
    return quat.ik(grot_mirror, gpos_mirror, parents)

================================================
FILE: anim/blend.py
================================================
from __future__ import annotations

import numpy as np

from anim.animation import Animation
from util import quat

# =====================
#  Basic interpolation
# =====================
# Linear Interpolation (LERP) of objects.
def lerp(x, y, t):
    """Blend linearly from `x` (t=0) to `y` (t=1)."""
    weight_x = 1 - t
    return weight_x * x + t * y

# LERP for quaternions.
def quat_lerp(x: np.ndarray, y: np.ndarray, t: float) -> np.ndarray:
    """Linearly interpolate two quaternion arrays and renormalize to unit length."""
    assert x.shape == y.shape, "Two quats must be the same shape."
    blended = lerp(x, y, t)
    return quat.normalize(blended)

# Spherical linear interpolation (SLERP) for quaternions.
def slerp(x: np.ndarray, y: np.ndarray, t: float) -> np.ndarray:
    """Spherically interpolate between quaternions `x` and `y` at parameter `t`.

    Args:
        x, y: quaternions of identical shape.
        t: interpolation parameter in [0, 1].
    Returns:
        np.ndarray: the interpolated quaternion.
    """
    assert x.shape == y.shape, "Two quats must be the same shape."
    if t == 0:
        return x
    elif t == 1:
        return y
    # Take the short arc: flip y when the pair lies in opposite hemispheres.
    # NOTE(review): this `if` assumes a scalar dot product, i.e. a single
    # quaternion pair -- confirm behavior for batched inputs.
    if quat.dot(x, y) < 0:
        y = - y
    ca = quat.dot(x, y)
    # BUG FIX: for nearly-parallel quaternions the orthogonal component
    # `y - x * ca` is ~zero and cannot be normalized reliably (NaN risk);
    # fall back to normalized LERP, which is accurate in that regime.
    if ca > 1.0 - 1e-8:
        return quat.normalize(lerp(x, y, t))
    theta = np.arccos(np.clip(ca, 0, 1))
    r = quat.normalize(y - x * ca)
    return x * np.cos(theta * t) + r * np.sin(theta * t)


# =========
#  Damping
# =========

# predict `t`

================================================
FILE: anim/bvh.py
================================================
"""
bvh.py
"""
# loading and writing a biovision hierarchy data (BVH).

from __future__ import annotations

from io import TextIOWrapper
from pathlib import Path
import logging
import numpy as np
from anim.skel import Joint, Skel
from anim.animation import Animation
from util import quat

def load(
    filepath: Path | str, 
    start: int=None, 
    end: int=None, 
    order: str=None,
    rest_forward: list[int] = [0, 0, 1],
    rest_vertical: list[int] = [0, 1, 0],
    forward_axis: str = "z",
    vertical_axis: str = "y",
    load_skel: bool=True,
    load_pose: bool=True,
    skel: Skel=None,
    skel_name: str=None,
) -> Skel | Animation:
    """Load a BVH file as a `Skel` (hierarchy only) or an `Animation`.

    Args:
        filepath: path to the .bvh file.
        start, end: optional frame range for the motion data.
        order: Euler rotation order; auto-detected when `load_skel` is True.
        rest_forward, rest_vertical: rest-pose direction vectors for `Skel`.
        forward_axis, vertical_axis: axis labels for `Skel`.
        load_skel: parse the HIERARCHY section into a new `Skel`.
        load_pose: parse the MOTION section into an `Animation`.
        skel: existing skeleton to use when `load_skel` is False.
        skel_name: optional name for the new skeleton.
    Returns:
        `Animation` when `load_pose` is True, otherwise `Skel`.
    Raises:
        ValueError: if both `load_skel` and `load_pose` are False.
    """
    if not load_skel and not load_pose:
        logging.info("Either `load_skel` or `load_pose` must be specified.")
        raise ValueError
    
    if isinstance(filepath, str):
        filepath = Path(filepath)
    
    # Read stripped lines and drop blank ones. (BUG FIX: the original
    # compared the raw line -- which still carries its newline -- to "",
    # so the blank-line filter never matched and empty lines leaked into
    # the motion parser.)
    with open(filepath, "r") as f:
        lines: list[str] = [line.strip() for line in f]
    lines = [line for line in lines if line != ""]
    motion_idx: int = lines.index("MOTION")
    # Tokenize every line for the section parsers.
    lines: list[list[str]] = [line.split() for line in lines]
    
    # Load HIERARCHY term.
    if load_skel:
        skel, order = load_hierarchy(
            lines = lines[:motion_idx], 
            skel_name=skel_name,
            rest_forward=rest_forward,
            rest_vertical=rest_vertical,
            forward_axis=forward_axis,
            vertical_axis=vertical_axis
        )

    # Load MOTION term.
    if load_pose:
        assert not skel is None, "You need to load `Skel` or define `Skel`." 
        name = filepath.name.split(".")[0]
        
        fps, trans, quats, poss = load_motion(
            lines = lines[motion_idx:],
            start = start,
            end = end,
            order = order,
            skel = skel
        )
        anim = Animation(skel, quats, trans, fps=fps, anim_name=name)
        positions = anim.gpos # [T, J, 3]
        # Override FK positions with channels read directly from the file
        # (joints that carried 6-DOF position channels).
        for joint_idx, pos in poss.items():
            positions[:, joint_idx] = pos
        anim.positions = positions
        return anim
    
    return skel

def load_hierarchy(
    lines: list[str | list[str]],
    skel_name: str,
    rest_forward: list[int],
    rest_vertical: list[int],
    forward_axis: str,
    vertical_axis: str,
) -> tuple[Skel, str]:
    """Parse the HIERARCHY section of a BVH file.

    Args:
        lines: tokenized lines of the HIERARCHY section.
        skel_name: name given to the resulting skeleton.
        rest_forward, rest_vertical: rest-pose direction vectors for `Skel`.
        forward_axis, vertical_axis: axis labels for `Skel`.
    Returns:
        (skel, order): the skeleton, and the Euler rotation order
        (e.g. "zyx") from the last CHANNELS line encountered.
    """
    
    # Maps BVH channel names to single-letter Euler axes.
    channelmap: dict[str, str] = {
        "Xrotation" : "x",
        "Yrotation" : "y",
        "Zrotation" : "z",   
    }
    
    stacks: list[int] = [-1]   # parent-index stack while descending the tree
    parents: list[int] = []    # parent index per joint, in file order
    name_list: list[str] = []  # joint names, in file order
    joints: list[Joint] = []
    depth:int = 0              # current bracket nesting depth
    end_site: bool = False     # True while inside an "End Site" block
    
    for line in lines:
        if "ROOT" in line or "JOINT" in line:
            parents.append(stacks[-1])
            stacks.append(len(parents) - 1)
            name_list.append(line[1])
            
        elif "OFFSET" in line:
            # End-site offsets are discarded; only real joints are kept.
            if not end_site:
                offset = np.array(list(map(float, line[1:])))
                joints.append(Joint(
                    name = name_list[-1],
                    parent = parents[-1],
                    offset = offset,
                    root = (parents[-1]==-1),
                    ))
        
        elif "CHANNELS" in line:
            dof = int(line[1])
            if dof == 3:
                # Rotation-only joint: tokens 2..4 are the rotation channels.
                order = "".join([channelmap[p] for p in line[2:2+3]])
            elif dof == 6:
                # Position + rotation joint: the final three channels are rotations.
                order = "".join([channelmap[p] for p in line[5:5+3]])
            joints[-1].dof = dof
        
        elif "End" in line:
            end_site = True
            stacks.append(len(parents) - 1)
        
        elif "{" in line:
            depth += 1
        
        elif "}" in line:
            depth -= 1
            end_site = False
            stacks.pop()
    
    assert depth == 0, "Brackets are not closed."
    
    skel = Skel(joints, skel_name, rest_forward, rest_vertical, forward_axis, vertical_axis)
    return skel, order

def load_motion(
    lines: list[list[str]],
    order: str,
    skel: Skel,
    start: int=None,
    end: int=None,
) -> tuple[int, np.ndarray, np.ndarray, dict[int: np.ndarray]]:
    """Parse the MOTION section of a BVH file.

    Args:
        lines: tokenized lines beginning at the "MOTION" line.
        order: Euler rotation order (e.g. "zyx") for the rotation channels.
        skel: skeleton providing per-joint DOF counts (`skel.dofs`).
        start, end: optional frame slice applied to the motion rows.
    Returns:
        (fps, trans, quats, poss): frame rate, root translations (T, 3),
        per-joint local rotations as quaternions, and a dict mapping each
        6-DOF joint index to its (T, 3) position channel.
    """
    
    # lines[2] is "Frame Time: <seconds>"; fps is its rounded reciprocal.
    fps: int = round(1 / float(lines[2][2]))
    lines: list[list[str]] = lines[3:]
    
    np_lines = np.array(
        list(map(lambda line: list(map(float, line)), lines))
    ) # T × dim_J matrix
    np_lines = np_lines[start:end]
    
    eangles = []
    poss = {}
    ckpt = 0       # running column offset into the channel matrix
    joint_idx = 0
    for dof in skel.dofs:
        if dof == 3:
            # Rotation-only joint: three Euler-angle columns.
            eangle = np_lines[:, ckpt:ckpt+3]
            eangles.append(eangle[:, None])
            ckpt += 3
            joint_idx += 1
        elif dof == 6:
            # Position + rotation joint: positions first, then angles.
            pos = np_lines[:, ckpt:ckpt+3] # [T, 3]
            eangle = np_lines[:, ckpt+3:ckpt+6]
            poss[joint_idx] = pos
            eangles.append(eangle[:, None])
            ckpt += 6
            joint_idx += 1
    
    assert sum(skel.dofs) == np_lines.shape[1], \
        "Skel and Motion are not compatible."
    
    eangles = np.concatenate(eangles, axis=1)
    
    # NOTE(review): assumes the root joint has 6 DOF; otherwise `poss` has
    # no key 0 and this raises KeyError.
    trans = poss[0]
    # Convert Euler angles to quaternions and remove sign discontinuities.
    quats = quat.unroll(quat.from_euler(eangles, order))
    
    return fps, trans, quats, poss

def save(
    filepath: Path | str,
    anim: Animation, 
    order: str="zyx",
    save_pos: bool=False,
    ) -> None:
    """Write an Animation to a BVH file.

    Args:
        filepath: output path.
        anim: animation (skeleton + motion) to serialize.
        order: Euler rotation order for the CHANNELS lines.
        save_pos: if True, write 6 channels (position + rotation) for every
            joint; otherwise only the root carries position channels.
    """
    # (FIX: the return annotation was `-> bool` but nothing was ever
    # returned; the redundant f.close() inside the `with` block is gone.)
    skel = anim.skel
    trans = anim.trans
    quats = anim.quats
    positions = anim.positions
    fps = anim.fps
    
    with open(filepath, "w") as f:
        # write hierarchy data.
        f.write("HIERARCHY\n")
        index_order = save_hierarchy(f, skel, 0, order, 0, save_pos)
        
        # write motion data.
        f.write("MOTION\n")
        f.write("Frames: %d\n" % len(trans))
        f.write("Frame Time: %f\n" % (1.0 / fps))
        save_motion(f, trans, quats, positions, save_pos, order, index_order)

def save_hierarchy(
    f: TextIOWrapper,
    skel: Skel,
    index: int,
    order: str,
    depth: int,
    save_pos: bool,
) -> list[int]:
    """Recursively write the HIERARCHY block for joint `index`.

    Args:
        f: open output stream.
        skel: skeleton to serialize.
        index: joint index written at this level.
        order: Euler rotation order for CHANNELS lines.
        depth: current indentation depth (tabs).
        save_pos: write 6 channels for every joint, not just the root.
    Returns:
        Joint indices in the order their channels were declared; the
        motion writer must emit columns in this same order.
    """

    def order2xyzrotation(order: str) -> str:
        # Expand e.g. "zyx" into "Zrotation Yrotation Xrotation".
        cmap: dict[str, str] = {
            "x" : "Xrotation",
            "y" : "Yrotation",
            "z" : "Zrotation",   
        }
        return "%s %s %s" % (cmap[order[0]], cmap[order[1]], cmap[order[2]])
    
    joint = skel[index]
    index_order = [index]
    if joint.root:
        f.write("\t" * depth + "ROOT %s\n" % joint.name)
    else:
        f.write("\t" * depth + "JOINT %s\n" % joint.name)
    f.write("\t" * depth + "{\n")
    depth += 1
    offset = joint.offset
    f.write("\t" * depth + \
        "OFFSET %f %f %f\n" % (offset[0], offset[1], offset[2]))
    
    if joint.root or save_pos:
        f.write("\t" * depth + \
            "CHANNELS 6 Xposition Yposition Zposition %s\n"\
            % order2xyzrotation(order))
    else:
        f.write("\t" * depth + "CHANNELS 3 %s\n"\
            % order2xyzrotation(order))
    
    # Recurse into children, accumulating the channel write order.
    children_idxs = skel.get_children(index, return_idx=True)
    for child_idx in children_idxs:
        ch_index_order = save_hierarchy(f, skel, child_idx, order, depth, save_pos)
        index_order.extend(ch_index_order)
    # Leaf joints get a zero-offset End Site so the BVH stays well-formed.
    if children_idxs == []:
        f.write("\t" * depth + "End Site\n")
        f.write("\t" * depth + "{\n")
        f.write("\t" * (depth + 1) + "OFFSET %f %f %f\n" \
            % (0, 0, 0))
        f.write("\t" * depth + "}\n")
    
    depth -= 1
    f.write("\t" * depth + "}\n")
    return index_order

def save_motion(
    f: TextIOWrapper,
    trans: np.ndarray,
    quats: np.ndarray,
    positions: np.ndarray,
    save_pos: bool,
    order: str,
    index_order: list[int],
) -> None:
    """Write the per-frame channel rows of the MOTION section.

    Args:
        f: open output stream.
        trans: root translations (T, 3).
        quats: per-joint local rotations (quaternions).
        positions: per-joint global positions, used when `save_pos` is True.
        save_pos: emit position + rotation for every joint, not just the root.
        order: Euler rotation order matching the CHANNELS declarations.
        index_order: joint order produced by `save_hierarchy`; columns must
            be written in exactly this order.
    """
    
    def write_position_rotation(
        pos: np.ndarray, 
        rot: np.ndarray,
        ) -> str:
        # Six channels: Xposition Yposition Zposition + three rotations.
        pos, rot = pos.tolist(), rot.tolist()
        return "%f %f %f %f %f %f " \
            % (pos[0], pos[1], pos[2], rot[0], rot[1], rot[2])

    def write_rotation(rot: np.ndarray) -> str:
        # Three rotation channels only.
        rot = rot.tolist()
        return "%f %f %f " %(rot[0], rot[1], rot[2])
    
    # Convert rotations to degrees in the declared Euler order.
    eangles = np.rad2deg(quat.to_euler(quats, order)) # (T, J, 3)
    for i in range(len(trans)):
        for j in index_order:
            if j == 0:
                # The root always carries the 6-channel layout.
                f.write(
                    "%s" % write_position_rotation(trans[i], eangles[i, j])
                    )
            elif save_pos:
                f.write(
                    "%s" % write_position_rotation(positions[i, j], eangles[i, j])
                    ) 
            else:
                f.write(
                    "%s" % write_rotation(eangles[i, j])
                )
        f.write("\n")

================================================
FILE: anim/inverse_kinematics/ccd_ik.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
ROOT = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(ROOT))

import copy
import numpy as np
import matplotlib.pyplot as plt
from util import quat
from anim.skel import Joint, Skel
from anim.animation import Animation

def normalize_vector(vector: np.ndarray):
    """Normalize along the last axis; zero-length vectors are divided by
    1e-10 instead of 0 to avoid NaNs."""
    length = np.linalg.norm(vector, axis=-1, keepdims=True)
    safe_length = np.where(length == 0, 1e-10, length)
    return vector / safe_length

# CCD IK for a single end effector (assumes a single-chain skeleton).
def simple_ccd_ik(
    anim: Animation,
    tgt_pos: np.ndarray,
    iter: int=10,
) -> Animation:
    """Cyclic Coordinate Descent IK driving the last joint to `tgt_pos`.

    Args:
        anim: source animation (not modified; a deep copy is returned).
        tgt_pos: (T, 3) target positions for the end effector.
        iter: number of CCD sweeps over the chain.
    Returns:
        Animation: a new animation rotated so the end effector approaches
        the target.
    """
    anim_ = copy.deepcopy(anim)
    
    for _ in range(iter):
        # Sweep the chain from the tip toward the root, skipping the root's
        # sentinel parent (-1); the end effector itself carries no rotation.
        for parent in reversed(anim_.parents):
            if parent == -1:
                continue
            cur_quat = anim_.quats[:,parent] # (T, 4)
            
            # Effector position relative to the current joint (global frame).
            rel_effpos = anim_.gpos[:,-1] - anim_.gpos[:,parent]
            eff_vec = normalize_vector(rel_effpos)
            
            # Target position relative to the current joint (global frame).
            # BUG FIX: this previously read `anim.gpos` (the untouched
            # original) instead of the pose being updated, so every sweep
            # aimed at stale joint positions.
            rel_tgtpos = tgt_pos - anim_.gpos[:,parent]
            tgt_vec = normalize_vector(rel_tgtpos)
            
            # Rotate the joint so the effector direction matches the target direction.
            rel_quat = quat.normalize(quat.between(eff_vec, tgt_vec))
            anim_.quats[:,parent] = quat.abs(quat.normalize(quat.mul(rel_quat, cur_quat)))
    
    return anim_

if __name__=="__main__":
    """使用例"""
    
    # 簡単なJointの定義
    joints = []
    joints.append(Joint(name="ROOT", parent=-1, offset=np.zeros(3), root=True, dof=6))
    joints.append(Joint(name="J1", parent= 0, offset=np.array([1, 0, 0])))
    joints.append(Joint(name="J2", parent= 1, offset=np.array([1, 0, 0])))
    joints.append(Joint(name="J3", parent= 2, offset=np.array([1, 0, 0])))
    #    Root       J1         J2         J3
    #     *----------*----------*----------*
    # (0, 0, 0)  (1, 0, 0)  (2, 0, 0)  (3, 0, 0)


    skel = Skel(joints=joints, skel_name="sample_skel")
    anim = Animation.no_animation(skel, num_frame=2, anim_name="sample_anim")
    
    # first and second frame
    target_position = np.array([[1, 1, 1], [1, 2, 1]])
    
    new_anim = simple_ccd_ik(anim, target_position)
    
    fig = plt.figure(figsize=(11, 7))
    
    # first frame
    ax1 = fig.add_subplot(121, projection="3d")
    ax1.set_xlim3d(-2, 2)
    ax1.set_ylim3d(-2, 2)
    ax1.set_zlim3d(-2, 2)
    ax1.set_box_aspect((1,1,1))
    
    orig = anim.gpos[0]
    poss = new_anim.gpos[0]
    tgt  = target_position[0]
    
    ax1.plot(orig[:,0], orig[:,1], orig[:,2], alpha=0.4)
    ax1.scatter(orig[:,0], orig[:,1], orig[:,2], alpha=0.4, label="orig")
    ax1.plot(poss[:,0], poss[:,1], poss[:,2])
    ax1.scatter(poss[:,0], poss[:,1], poss[:,2], label="calc")
    ax1.scatter(tgt[0], tgt[1], tgt[2], s=50, marker="*", label="target")
    ax1.legend()

    # second frame
    ax2 = fig.add_subplot(122, projection="3d")
    ax2.set_xlim3d(-2, 2)
    ax2.set_ylim3d(-2, 2)
    ax2.set_zlim3d(-2, 2)
    ax2.set_box_aspect((1,1,1))
    
    orig = anim.gpos[1]
    poss = new_anim.gpos[1]
    tgt  = target_position[1]
    
    ax2.plot(orig[:,0], orig[:,1], orig[:,2], alpha=0.4)
    ax2.scatter(orig[:,0], orig[:,1], orig[:,2], alpha=0.4, label="orig")
    ax2.plot(poss[:,0], poss[:,1], poss[:,2])
    ax2.scatter(poss[:,0], poss[:,1], poss[:,2], label="calc")
    ax2.scatter(tgt[0], tgt[1], tgt[2], s=50, marker="*", label="target")
    ax2.legend()
    
    plt.show()

================================================
FILE: anim/inverse_kinematics/fabrik.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
ROOT = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(ROOT))

import copy
import numpy as np
import matplotlib.pyplot as plt
from util import quat
from anim.skel import Joint, Skel
from anim.animation import Animation

def normalize_vector(vector: np.ndarray):
    """Unit-normalize along the last axis, substituting 1e-10 for zero
    norms so zero vectors do not produce NaNs."""
    norms = np.linalg.norm(vector, axis=-1, keepdims=True)
    return vector / np.where(norms == 0, 1e-10, norms)

def backward_reaching(
    anim: Animation,
    tgt_pos: np.ndarray,
) -> Animation:
    """FABRIK backward pass: walk from the end effector toward the root,
    rotating each parent so its child lands on the working target, then
    sliding the target one bone-length down the chain.

    Args:
        anim: animation to update in place (single chain assumed).
        tgt_pos: (T, 3) end-effector target positions.
    Returns:
        The same `anim` object, mutated.
    """
    
    joint_idxs = anim.skel.indices
    target = tgt_pos.copy() # (T, 3)
    cur_gposs = anim.gpos.copy()
    bone_lengths = anim.skel.bone_lengths
    for j, parent in zip(reversed(joint_idxs), reversed(anim.parents)):
        if parent == -1:
            continue
        cur_quat = anim.quats[:,parent] # (T, 4)
        
        # Child position relative to the current joint, before the update
        # (global frame).
        rel_chpos = cur_gposs[:,j] - cur_gposs[:,parent]
        ch_vec = normalize_vector(rel_chpos)
        
        # Desired child position relative to the current joint (global frame).
        rel_tgtpos = target - cur_gposs[:, parent]
        tgt_vec = normalize_vector(rel_tgtpos)
        
        # Rotate the parent so the child direction matches the target direction.
        rel_quat = quat.normalize(quat.between(ch_vec, tgt_vec))
        anim.quats[:,parent] = quat.abs(quat.normalize(quat.mul(rel_quat, cur_quat)))
        
        # When the parent is the ROOT, move the root translation instead.
        if parent == 0:
            anim.trans = target - tgt_vec * bone_lengths[j]
        else:
            # Slide the working target one bone-length toward the root.
            target = target - tgt_vec * bone_lengths[j]
        
    return anim

def forward_reaching(
    anim: Animation,
    base: np.ndarray,
) -> Animation:
    """FABRIK forward pass: pin the root back to `base`, then walk from the
    root toward the end effector, rotating each parent so the chain
    re-connects with the positions produced by the backward pass.

    Args:
        anim: animation to update in place (single chain assumed).
        base: (T, 3) original root positions to restore.
    Returns:
        The same `anim` object, mutated.
    """
    
    joint_idxs = np.arange(len(anim.skel))
    target = base.copy() # (T, 3)
    past_gposs = anim.gpos.copy()
    
    for j, parent in zip(joint_idxs, anim.parents):
        if parent == -1:
            continue
        elif parent == 0:
            # Restore the root to its original location.
            anim.trans = target
            continue
        cur_quat = anim.quats[:,parent] # (T, 4)
        
        # Direction from the old parent position to the old child position.
        past_chpos = past_gposs[:,j] - past_gposs[:,parent]
        pa_vec = normalize_vector(past_chpos)
        
        # Direction from the updated parent position to the old child position.
        cur_chpos = past_gposs[:,j] - anim.gpos[:,parent]
        cur_vec = normalize_vector(cur_chpos)
        
        rel_quat = quat.normalize(quat.between(pa_vec, cur_vec))
        anim.quats[:,parent] = quat.abs(quat.normalize(quat.mul(rel_quat, cur_quat)))
        
    return anim

# FABRIK for a single end effector (assumes a single-chain skeleton).
def simple_fabrik(
    anim: Animation,
    tgt_pos: np.ndarray,
    iter: int=10,
) -> Animation:
    """Run FABRIK by alternating backward and forward reaching passes.

    Args:
        anim: source animation (deep-copied; the input is untouched).
        tgt_pos: (T, 3) end-effector target positions.
        iter: number of backward/forward iterations.
    Returns:
        Animation: a new animation adjusted toward the targets.
    """
    solved = copy.deepcopy(anim)
    # The root must return to this location after every iteration.
    base_pos = anim.trans.copy() # (T, 3)

    for _ in range(iter):
        solved = backward_reaching(solved, tgt_pos)
        solved = forward_reaching(solved, base_pos)

    return solved

if __name__=="__main__":
    """使用例"""
    
    # 簡単なJointの定義
    joints = []
    joints.append(Joint(name="ROOT", parent=-1, offset=np.zeros(3), root=True, dof=6))
    joints.append(Joint(name="J1", parent= 0, offset=np.array([1, 0, 0])))
    joints.append(Joint(name="J2", parent= 1, offset=np.array([1, 0, 0])))
    joints.append(Joint(name="J3", parent= 2, offset=np.array([1, 0, 0])))
    #    Root       J1         J2         J3
    #     *----------*----------*----------*
    # (0, 0, 0)  (1, 0, 0)  (2, 0, 0)  (3, 0, 0)


    skel = Skel(joints=joints, skel_name="sample_skel")
    anim = Animation.no_animation(skel, num_frame=2, anim_name="sample_anim")
    
    # target positions for first frame and second frame.
    tgts = np.array([
        [1, 1, 1], 
        [1, 2, 1]]
    )
    
    new_anim = simple_fabrik(anim, tgts)
    
    fig = plt.figure(figsize=(11, 7))
    
    # first frame
    ax1 = fig.add_subplot(121, projection="3d")
    ax1.set_xlim3d(-2, 2)
    ax1.set_ylim3d(-2, 2)
    ax1.set_zlim3d(-2, 2)
    ax1.set_box_aspect((1,1,1))
    
    orig = anim.gpos[0]
    poss = new_anim.gpos[0]
    tgt  = tgts[0]
    
    ax1.plot(orig[:,0], orig[:,1], orig[:,2], alpha=0.4)
    ax1.scatter(orig[:,0], orig[:,1], orig[:,2], alpha=0.4, label="orig")
    ax1.plot(poss[:,0], poss[:,1], poss[:,2])
    ax1.scatter(poss[:,0], poss[:,1], poss[:,2], label="calc")
    ax1.scatter(tgt[0], tgt[1], tgt[2], s=50, marker="*", label="target")
    ax1.legend()

    # second frame
    ax2 = fig.add_subplot(122, projection="3d")
    ax2.set_xlim3d(-2, 2)
    ax2.set_ylim3d(-2, 2)
    ax2.set_zlim3d(-2, 2)
    ax2.set_box_aspect((1,1,1))
    
    orig = anim.gpos[1]
    poss = new_anim.gpos[1]
    tgt  = tgts[1]
    
    ax2.plot(orig[:,0], orig[:,1], orig[:,2], alpha=0.4)
    ax2.scatter(orig[:,0], orig[:,1], orig[:,2], alpha=0.4, label="orig")
    ax2.plot(poss[:,0], poss[:,1], poss[:,2])
    ax2.scatter(poss[:,0], poss[:,1], poss[:,2], label="calc")
    ax2.scatter(tgt[0], tgt[1], tgt[2], s=50, marker="*", label="target")
    ax2.legend()
    
    plt.savefig("demo.png")

================================================
FILE: anim/inverse_kinematics/jacobi_ik.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
ROOT = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(ROOT))

import copy
import numpy as np
import matplotlib.pyplot as plt
from util import quat
from anim.skel import Joint, Skel
from anim.animation import Animation

def simple_jacobi_ik(
    anim: Animation,
    tgt_pos: np.ndarray,
    iter: int=10,
) -> Animation:
    """Jacobian-based IK for a single end effector (not implemented yet).

    Args:
        anim: source animation.
        tgt_pos: (T, 3) end-effector target positions.
        iter: number of solver iterations.
    Raises:
        NotImplementedError: always. The stub previously returned None,
        which made callers fail later with a confusing AttributeError
        (e.g. `new_anim.gpos`); failing fast here makes the missing
        implementation explicit.
    """
    raise NotImplementedError("simple_jacobi_ik is not implemented yet.")

if __name__=="__main__":
    """使用例"""
    
    # 簡単なJointの定義
    joints = []
    joints.append(Joint(name="ROOT", index=0, parent=-1, offset=np.zeros(3), root=True, dof=6))
    joints.append(Joint(name="J1", index=1, parent= 0, offset=np.array([1, 0, 0])))
    joints.append(Joint(name="J2", index=2, parent= 1, offset=np.array([1, 0, 0])))
    joints.append(Joint(name="J3", index=3, parent= 2, offset=np.array([1, 0, 0])))
    #    Root       J1         J2         J3
    #     *----------*----------*----------*
    # (0, 0, 0)  (1, 0, 0)  (2, 0, 0)  (3, 0, 0)


    skel = Skel(joints=joints, skel_name="sample_skel")
    anim = Animation.no_animation(skel, num_frame=2, anim_name="sample_anim")
    
    # target positions for first frame and second frame.
    tgts = np.array([
        [1, 1, 1], 
        [1, 2, 1]]
    )
    
    new_anim = simple_jacobi_ik(anim, tgts)
    
    fig = plt.figure(figsize=(11, 7))
    
    # first frame
    ax1 = fig.add_subplot(121, projection="3d")
    ax1.set_xlim3d(-2, 2)
    ax1.set_ylim3d(-2, 2)
    ax1.set_zlim3d(-2, 2)
    ax1.set_box_aspect((1,1,1))
    
    orig = anim.gpos[0]
    poss = new_anim.gpos[0]
    tgt  = tgts[0]
    
    ax1.plot(poss[:,0], poss[:,1], poss[:,2])
    ax1.scatter(poss[:,0], poss[:,1], poss[:,2], label="calc")
    ax1.plot(orig[:,0], orig[:,1], orig[:,2])
    ax1.scatter(orig[:,0], orig[:,1], orig[:,2], label="orig")
    ax1.scatter(tgt[0], tgt[1], tgt[2], label="target")
    ax1.legend()

    # second frame
    ax2 = fig.add_subplot(122, projection="3d")
    ax2.set_xlim3d(-2, 2)
    ax2.set_ylim3d(-2, 2)
    ax2.set_zlim3d(-2, 2)
    ax2.set_box_aspect((1,1,1))
    
    orig = anim.gpos[1]
    poss = new_anim.gpos[1]
    tgt  = tgts[1]
    
    ax2.plot(poss[:,0], poss[:,1], poss[:,2])
    ax2.scatter(poss[:,0], poss[:,1], poss[:,2], label="calc")
    ax2.plot(orig[:,0], orig[:,1], orig[:,2])
    ax2.scatter(orig[:,0], orig[:,1], orig[:,2], label="orig")
    ax2.scatter(tgt[0], tgt[1], tgt[2], label="target")
    ax2.legend()
    
    plt.show()

================================================
FILE: anim/inverse_kinematics/two_bone_ik.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
ROOT = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(ROOT))

import copy
import numpy as np
import matplotlib.pyplot as plt
from util import quat
from anim.skel import Joint, Skel
from anim.animation import Animation

def normalize(tensor: np.ndarray, axis:int=-1) -> np.ndarray:
    """Return `tensor` normalized along `axis`.

    Zero norms are replaced by 1e-10 before dividing, so all-zero slices
    come back as zeros instead of NaNs.
    """
    norms = np.linalg.norm(tensor, axis=axis, keepdims=True)
    safe_norms = np.where(norms == 0, 1e-10, norms)
    return tensor / safe_norms
    

def two_bone_ik(
    anim: Animation,
    base_joint: int,
    middle_joint: int,
    eff_joint: int,
    tgt_pos: np.ndarray,
    fwd: np.ndarray,
    max_length_buffer: float=1e-3,
) -> Animation:
    """Simple two bone IK method. This algorithm can be solved analytically.
    args:
        anim: Animation
        base_joint: index of base (fixed) joint.
        middle_joint: index of middle joint.
        eff_joint: index of effector joint.
        tgt_pos: (T, 3), target positions for effector.
        fwd: (T, 3), forward vectors for middle joint.
        max_length_buffer: small slack subtracted from the chain's maximum
            reach to avoid numerical problems at full extension.
    """
    anim = copy.deepcopy(anim)
    tgt = tgt_pos.copy()
    
    # The three joints must form a parent chain: base -> middle -> effector.
    assert anim.parents[eff_joint] == middle_joint and \
           anim.parents[middle_joint] == base_joint
    
    # Maximum reach of the two-bone chain (minus a small buffer).
    lbm: float = np.linalg.norm(anim.offsets[middle_joint], axis=-1)
    lme: float = np.linalg.norm(anim.offsets[eff_joint], axis=-1)
    max_length: float = lbm + lme - max_length_buffer
    
    bt_poss = tgt - anim.gpos[:, base_joint] # (T, 3)
    bt_lengths = np.linalg.norm(bt_poss, axis=-1) # (T)
    # Frames where the target lies beyond the chain's reach.
    error_frame = np.where(bt_lengths > max_length)
    # Unit vectors from the base toward the target.
    bt_vec = normalize(bt_poss) # (T, 3)
    # Clamp the target onto the reachable sphere for those frames only.
    tgt[error_frame] = anim.gpos[error_frame, base_joint] + \
                           bt_vec[error_frame] * max_length
    
    # get normal vector for axis of rotations.
    axis_rot = np.cross(bt_vec, fwd, axis=-1)
    axis_rot = normalize(axis_rot) # (T, 3)
    
    # get the relative positions.
    be_poss = anim.gpos[:, eff_joint] - anim.gpos[:, base_joint]
    be_vec = normalize(be_poss) # (T, 3)
    bm_poss = anim.gpos[:, middle_joint] - anim.gpos[:, base_joint]
    bm_vec = normalize(bm_poss) # (T, 3)
    me_poss = anim.gpos[:, eff_joint] - anim.gpos[:, middle_joint]
    me_vec = normalize(me_poss) # (T, 3)
    
    # get the current interior angles of the base and middle joints.
    # einsum calculate dot product.
    be_bm_0 = np.arccos(np.clip(np.einsum("ij,ij->i", be_vec, bm_vec), -1, 1)) # (T,)
    mb_me_0 = np.arccos(np.clip(np.einsum("ij,ij->i", -bm_vec, me_vec), -1, 1)) # (T,)
    
    # get the new interior angles of the base and middle joints.
    # Law of cosines on the triangle (base, middle, target).
    lbt = np.linalg.norm(tgt - anim.gpos[:, base_joint], axis=-1)
    be_bm_1 = np.arccos(np.clip((lbm * lbm + lbt * lbt - lme * lme)/(2 * lbm * lbt) , -1, 1)) # (T,)
    mb_me_1 = np.arccos(np.clip((lbm * lbm + lme * lme - lbt * lbt)/(2 * lbm * lme) , -1, 1)) # (T,)
    
    # Rotations that change the interior angles from their current values
    # to the solved ones, about the shared rotation axis.
    r0 = quat.from_angle_axis(be_bm_1 - be_bm_0, axis_rot)
    r1 = quat.from_angle_axis(mb_me_1 - mb_me_0, axis_rot)
    
    # Rotation that swings the effector direction onto the target direction.
    r2 = quat.from_angle_axis(
        np.arccos(np.clip(np.einsum("ij,ij->i", be_vec, bt_vec), -1, 1)),
        normalize(np.cross(be_vec, bt_vec))
    )
    
    anim.quats[:, base_joint] =  quat.mul(r2, quat.mul(r0, anim.quats[:, base_joint]))
    anim.quats[:, middle_joint] = quat.mul(r1, anim.quats[:, middle_joint])
    
    return anim

if __name__=="__main__":
    """使用例"""
    
    # 簡単なJointの定義
    joints = []
    joints.append(Joint(name="ROOT", parent=-1, offset=np.array([0, 2, 0]), root=True, dof=6))
    joints.append(Joint(name="J1", parent= 0, offset=np.array([0, -1, 0.1])))
    joints.append(Joint(name="J2", parent= 1, offset=np.array([0, -1, -0.1])))
    joints.append(Joint(name="J3", parent= 2, offset=np.array([0, 0, 0.2])))

    skel = Skel(joints=joints, skel_name="sample_skel")
    anim = Animation.no_animation(skel, num_frame=2, anim_name="sample_anim")
    anim.trans = np.array([
        [0, 2, 0],
        [0, 2, 0]
    ])
    
    # target positions for first frame and second frame.
    tgts = np.array([
        [0, 1, 0], 
        [0.2, 0.8, 0.2]
    ])
    
    fwd = np.array([
        [0.2, 0, 1],
        [0.4, 0.2, 0.5]
    ])
    
    fwd_ = normalize(fwd) * 0.2
    
    new_anim = two_bone_ik(
        anim=anim, 
        base_joint=0, 
        middle_joint=1, 
        eff_joint=2,
        tgt_pos=tgts,
        fwd=fwd)
    
    fig = plt.figure(figsize=(11, 7))
    
    # first frame
    ax1 = fig.add_subplot(121, projection="3d")
    ax1.set_xlim3d(-1, 1)
    ax1.set_ylim3d(-1, 1)
    ax1.set_zlim3d( 0, 2)
    ax1.set_box_aspect((1,1,1))
    
    orig = anim.gpos[0]
    poss = new_anim.gpos[0]
    tgt  = tgts[0]
    
    ax1.plot(orig[:,0], -orig[:,2], orig[:,1], alpha=0.4)
    ax1.scatter(orig[:,0], -orig[:,2], orig[:,1], label="orig", alpha=0.4)
    ax1.plot(poss[:,0], -poss[:,2], poss[:,1])
    ax1.scatter(poss[:,0], -poss[:,2], poss[:,1], label="calc")
    ax1.plot(
        [poss[1,0], poss[1,0]+fwd_[0, 0]], 
        [-poss[1,2], -poss[1,2]-fwd_[0, 2]], 
        [poss[1,1], poss[1,1]+fwd_[0, 1]], label="fwd")
    ax1.scatter(tgt[0], -tgt[2], tgt[1], s=80, color="red", marker="*", label="target")
    ax1.legend()

    # second frame
    ax2 = fig.add_subplot(122, projection="3d")
    ax2.set_xlim3d(-1, 1)
    ax2.set_ylim3d(-1, 1)
    ax2.set_zlim3d(0, 2)
    ax2.set_box_aspect((1,1,1))
    
    orig = anim.gpos[1]
    poss = new_anim.gpos[1]
    tgt  = tgts[1]
    
    ax2.plot(orig[:,0], -orig[:,2], orig[:,1], alpha=0.4)
    ax2.scatter(orig[:,0], -orig[:,2], orig[:,1], label="orig", alpha=0.4)
    ax2.plot(poss[:,0], -poss[:,2], poss[:,1])
    ax2.scatter(poss[:,0], -poss[:,2], poss[:,1], label="calc")
    ax2.plot(
        [poss[1,0], poss[1,0]+fwd_[1, 0]], 
        [-poss[1,2], -poss[1,2]-fwd_[1, 2]], 
        [poss[1,1], poss[1,1]+fwd_[1, 1]], label="fwd")
    ax2.scatter(tgt[0], -tgt[2], tgt[1], s=80, color="red", marker="*", label="target")
    ax2.legend()
    
    plt.show()

================================================
FILE: anim/keyframe.py
================================================
from __future__ import annotations

from dataclasses import dataclass
import numpy as np

from anim.skel import Skel

@dataclass
class KeyFrame:
    """A single key on one joint's animation track."""
    frame: int                          # frame index the key sits on
    joint: str | int                    # joint name or joint index
    rotation: np.ndarray | None = None  # keyed local rotation, if any
    position: np.ndarray | None = None  # keyed position, if any
    interp: str = "linear" # interpolation method.


class KeyFrameAnimation:
    """Animation defined by sparse `KeyFrame`s instead of dense per-frame data.

    NOTE(review): implementation is a stub ("TBD") — only `skel` is stored;
    `keys`, `fps`, `anim_name` and `positions` are currently ignored.
    """
    def __init__(
        self,
        skel: Skel,
        keys: list[KeyFrame],
        fps: int,
        anim_name: str | None = None,
        positions: np.ndarray | None = None,
    ) -> None:
        self.skel = skel
        # TBD

================================================
FILE: anim/motion_matching/database.py
================================================
from __future__ import annotations

from dataclasses import dataclass
import numpy as np
from scipy.spatial import KDTree
from util import quat
from anim.skel import Skel
from anim.animation import Animation

class Database(Animation):
    """A motion database: several `Animation` clips concatenated along the
    time axis, with bookkeeping (`starts`/`ends`/`names`) for clip boundaries.
    """

    def __init__(
        self, 
        anims: list[Animation]=None, 
        skel: Skel=None,
        quats: np.ndarray=None,
        trans: np.ndarray=None,
        fps: int=None,
        starts: list[int]=None,
        names: list[str]=None,
        db_name: str=None,
        ends: list[int]=None,
    ) -> None:
        """Build a database either from a list of `Animation`s or from raw arrays.

        Args:
            anims: animations to concatenate. When given, the raw-data
                arguments below are ignored.
            skel, quats, trans, fps, starts, names: raw components, used when
                `anims` is None. All clips must share `skel` and `fps`.
            db_name: name of this database.
            ends: per-clip last-frame indices (raw path only). When None they
                are derived from `starts` and the total frame count.
        """
        self.name = db_name
        
        if anims is not None:
            self.skel = anims[0].skel # all `Skel` must be the same.
            self.quats = np.concatenate([anim.quats for anim in anims], axis=0)
            self.trans = np.concatenate([anim.trans for anim in anims], axis=0)
            self.fps = anims[0].fps # all fps must be the same.
            self.starts = []
            self.ends = []
            self.names = []
            cur_frame = 0
            for anim in anims:
                self.starts.append(cur_frame)
                cur_frame += len(anim)
                self.ends.append(cur_frame-1)
                self.names.append(anim.name)
        else:
            self.skel = skel
            self.quats = quats
            self.trans = trans
            self.fps = fps
            self.starts = starts
            self.names = names
            # BUGFIX: this path previously never set `self.ends`, which broke
            # `__add__`, `cat` and `clamp_future_idxs` for databases built
            # from raw arrays. Accept `ends` or derive it: each clip ends one
            # frame before the next clip starts, the last at the final frame.
            if ends is not None:
                self.ends = ends
            else:
                self.ends = [s - 1 for s in starts[1:]] + [len(trans) - 1]
        
        self.num_anim = len(self.starts)
        
    def __len__(self) -> int: return len(self.trans)
    
    def __add__(self, other: Database) -> Database:
        """Return a new Database with `other`'s clips appended after ours."""
        assert isinstance(other, Database)
        starts: list[int] = self.starts.copy()
        ends: list[int] = self.ends.copy()
        names:  list[str] = self.names.copy()
        # Shift the other database's clip boundaries by our frame count.
        starts.extend(list(map(lambda x: x+len(self), other.starts)))
        ends.extend(list(map(lambda x: x+len(self), other.ends)))
        names.extend(other.names)
        quats: np.ndarray = np.concatenate([self.quats, other.quats], axis=0)
        trans: np.ndarray = np.concatenate([self.trans, other.trans], axis=0)
        
        return Database(
            skel = self.skel,
            quats=quats,
            trans=trans,
            fps=self.fps,
            starts=starts,
            names=names,
            db_name=self.name,
            ends=ends, # BUGFIX: previously dropped, losing clip-end bookkeeping.
        )
    
    def cat(self, other: Database) -> None:
        """In-place version of `__add__`: append `other`'s clips to this database."""
        assert isinstance(other, Database)
        self.starts.extend(list(map(lambda x: x+len(self), other.starts)))
        self.ends.extend(list(map(lambda x: x+len(self), other.ends)))
        self.names.extend(other.names)
        self.quats = np.concatenate([self.quats, other.quats], axis=0)
        self.trans = np.concatenate([self.trans, other.trans], axis=0)

    # ============
    #   Velocity
    # ============
    @property
    def gposvel(self) -> np.ndarray:
        """Global joint position velocities via finite differences (units/s).

        Each clip's first frame is extrapolated from the following frames so
        no velocity crosses a clip boundary.
        NOTE(review): the extrapolation reads start+1..start+3, so every clip
        is assumed to have at least 4 frames — confirm upstream.
        """
        gpos : np.ndarray = self.gpos
        gpvel: np.ndarray = np.zeros_like(gpos)
        gpvel[1:] = (gpos[1:] - gpos[:-1]) * self.fps # relative position from previous frame.
        for start in self.starts:
            gpvel[start] = gpvel[start+1] - (gpvel[start+3] - gpvel[start+2])
        return gpvel
    
    @property
    def cposvel(self) -> np.ndarray:
        """Character-space joint position velocities; see `gposvel` for the
        boundary handling (same 4-frame-per-clip assumption)."""
        cpos : np.ndarray = self.cpos
        cpvel: np.ndarray = np.zeros_like(cpos)
        cpvel[1:] = (cpos[1:] - cpos[:-1]) * self.fps # relative position from previous frame.
        for start in self.starts:
            cpvel[start] = cpvel[start+1] - (cpvel[start+3] - cpvel[start+2])
        return cpvel

    @property
    def lrotvel(self) -> np.ndarray:
        """Calculate rotation velocities with rotation vector style."""    
        
        lrot : np.ndarray = self.quats.copy()
        lrvel: np.ndarray = np.zeros_like(self.lpos)
        # Frame-to-frame rotation delta as an axis-angle vector, scaled to rad/s.
        lrvel[1:] = quat.to_axis_angle(
            quat.abs(
                quat.mul(lrot[1:], quat.inv(lrot[:-1])))
            ) * self.fps
        for start in self.starts:
            lrvel[start] = lrvel[start+1] - (lrvel[start+3] - lrvel[start+2])
        return lrvel
    
    # ===================
    #  Future trajectory
    # ===================
    def clamp_future_idxs(self, offset: int) -> np.ndarray:
        """Function to calculate the frame array for `offset` frame ahead.
        If `offset` frame ahead does not exist, 
        return the last frame in each animation.
        """
        idxs = np.arange(len(self)) + offset
        for end in self.ends:
            idxs = np.where((idxs > end) & (idxs <= end + offset), end, idxs)
        return idxs

@dataclass
class MatchingDatabase:
    """Pre-computed, normalized feature set used by motion-matching search."""
    features: np.ndarray # [T, num_features]
    means: np.ndarray    # [num_features]
    stds: np.ndarray     # [num_features]
    indices: list[int]   # maps each feature row back to a frame index of the source Database

    # The elements below are used only by the AABB search method.
    dense_bound_size: int | None = None
    dense_bound_mins: np.ndarray | None = None # [num_bounds, num_features]
    dense_bound_maxs: np.ndarray | None = None # [num_bounds, num_features]
    sparse_bound_size: int | None = None
    sparse_bound_mins: np.ndarray | None = None # [num_bounds, num_features]
    sparse_bound_maxs: np.ndarray | None = None # [num_bounds, num_features]
    
    # kd-tree (used only by the kd-tree search method)
    kdtree: KDTree | None = None
    
    def __len__(self):
        # Number of feature rows (frames) in the database.
        return len(self.features)

================================================
FILE: anim/motion_matching/mm.py
================================================
from __future__ import annotations

import sys
import numpy as np
from scipy.spatial import KDTree
# import taichi as ti

from anim.motion_matching.database import Database, MatchingDatabase

def normalize_features(features: np.ndarray, weights: np.ndarray, axis=0) -> tuple:
    """Normalize matching features column-wise and bake in feature weights.

    Args:
        features: feature matrix, [T, num_features].
        weights: per-feature weights; a larger weight shrinks the scale and
            therefore increases that feature's influence on distances.
        axis: axis along which the statistics are computed.

    Returns:
        (normalized features, per-feature means, per-feature scales).
    """
    means = features.mean(axis=axis)  # [num_features]
    stds = features.std(axis=axis)    # [num_features]
    # Constant columns get a tiny scale instead of a division by zero.
    scales = np.where(stds == 0, 1e-10, stds / weights)
    normalized = (features - means[None]) / scales[None]
    return normalized, means, scales

def normalize_query(query: np.ndarray, means: np.ndarray, stds: np.ndarray) -> np.ndarray:
    """Normalize a query vector with the statistics calculated by
    `normalize_features()` so it lives in the same space as the database features.
    """
    centered = query - means
    return centered / stds

def calc_box_distance(
    best_cost: float,
    query: np.ndarray,    # [n_features]
    box_mins: np.ndarray, # [n_features]
    box_maxs: np.ndarray, # [n_features]
) -> tuple[float, bool]:
    """Squared distance from `query` to an axis-aligned bounding box.

    Accumulates per-feature squared distances and aborts early as soon as the
    running cost reaches `best_cost`.

    Returns:
        (cost, smaller): the (possibly partial) cost and whether it stayed
        strictly below `best_cost`.
    """
    cost = 0.0
    for idx in range(len(query)):
        value = query[idx]
        clamped = np.clip(value, box_mins[idx], box_maxs[idx])
        cost += np.square(value - clamped)
        if cost >= best_cost:
            # No point finishing: this box can't beat the current best.
            return cost, False
    return cost, True
    
def create_position_features(db: Database) -> np.ndarray:
    """Character-space positions of both feet, flattened to [T, 2*3]."""
    foot_idxs = [db.skel.names.index(name) for name in ("LeftFoot", "RightFoot")]
    return db.cpos[:, foot_idxs].reshape(len(db), -1)

def create_velocity_features(db: Database) -> np.ndarray:
    """Character-space velocities of both feet and the hips, flattened to [T, 3*3]."""
    joint_idxs = [db.skel.names.index(name) for name in ("LeftFoot", "RightFoot", "Hips")]
    return db.cposvel[:, joint_idxs].reshape(len(db), -1)

def create_traj_features(db: Database) -> np.ndarray:
    """Future-trajectory features: ground-plane root positions and facing
    directions at 20/40/60 frames ahead, concatenated to [T, 2*3 + 2*3]."""
    offsets = (20, 40, 60)
    pos_features = np.concatenate(
        [db.future_traj_poss(off, remove_vertical=True) for off in offsets],
        axis=-1)  # [T, 2*3]
    dir_features = np.concatenate(
        [db.future_traj_dirs(off, remove_vertical=True) for off in offsets],
        axis=-1)  # [T, 2*3]
    return np.concatenate([pos_features, dir_features], axis=-1)

def create_matching_features(
    db: Database,
    w_pos_foot: float,
    w_vel_foot: float,
    w_vel_hips: float,
    w_traj_pos: float,
    w_traj_dir: float,
    ignore_end: int
) -> tuple[np.ndarray, np.ndarray, np.ndarray, list[int]]:
    """Build the normalized matching-feature matrix for motion matching.

    Concatenates foot-position, velocity and future-trajectory features,
    removes the last `ignore_end` frames of every clip (their clamped future
    trajectory is unreliable), and normalizes with the given weights.

    Returns:
        (normalized features, means, scales, indices) where `indices` maps
        each remaining feature row back to a frame index of `db`.
    """
    pos_feats = create_position_features(db)
    vel_feats = create_velocity_features(db)
    traj_feats = create_traj_features(db)
    features = np.concatenate([pos_feats, vel_feats, traj_feats], axis=-1)
    
    # Remove ignored frames from the features and create a list that ties
    # the remaining rows back to db frame indices.
    indices = []
    rmv_starts = []
    for i, (start, end) in enumerate(zip(db.starts, db.ends)):
        # Start of clip i's removed chunk, pre-shifted by the i chunks
        # (ignore_end frames each) that will have been deleted before it.
        rmv_starts.append(end - ignore_end * (i + 1) + 1)
        
        for j in range(start, end - ignore_end + 1):
            indices.append(j)
    for st in rmv_starts:
        # BUGFIX: np.delete returns a new array; the result was previously
        # discarded, so the ignored frames were never actually removed and
        # `features` fell out of sync with `indices`.
        features = np.delete(features, slice(st, st+ignore_end), axis=0)
    
    # normalize
    weights = np.array(
        [w_pos_foot] * 6 +
        [w_vel_foot] * 6 +
        [w_vel_hips] * 3 +
        [w_traj_pos] * 6 +
        [w_traj_dir] * 6
    )
    fnorms, means, stds = normalize_features(features, weights)
    
    return fnorms, means, stds, indices

def create_matching_aabb(features: np.ndarray, size: int | None) -> tuple[np.ndarray, np.ndarray]:
    """Create Axis-Aligned Bounding Boxes (AABB) on frame dim.
       This will speed up the Nearest Neighbour search.
       based on https://dl.acm.org/doi/abs/10.1145/3386569.3392440.
    """
    num_frames = len(features)
    bound_mins = []
    bound_maxs = []
    
    num_bounds = (num_frames - 1) // size + 1 # advance
    frame_bound_ends = [(i + 1) * size for i in range(num_bounds)]
    frame_bound_ends[-1] = num_frames
    cur_idx = 0
    for end in frame_bound_ends:
        bound_mins.append(np.min(features[cur_idx:end], axis=0, keepdims=True)) # [1, num_features]
        bound_maxs.append(np.max(features[cur_idx:end], axis=0, keepdims=True))
    
    np_bound_mins = np.concatenate(bound_mins, axis=0) # [num_bounds, num_features]
    np_bound_maxs = np.concatenate(bound_maxs, axis=0)
    return np_bound_mins, np_bound_maxs

def create_matching_database(
    db: Database,
    method: str,
    w_pos_foot: float,
    w_vel_foot: float,
    w_vel_hips: float,
    w_traj_pos: float,
    w_traj_dir: float,
    ignore_end: int,
    dense_bound_size: int = None, 
    sparse_bound_size: int = None,
) -> MatchingDatabase:
    """Build a `MatchingDatabase` prepared for the given search `method`."""
    fnorms, means, stds, indices = create_matching_features(
        db, w_pos_foot, w_vel_foot, w_vel_hips, w_traj_pos, w_traj_dir, ignore_end
    )

    if method == "aabb":
        # Two levels of bounding boxes: coarse (sparse) and fine (dense).
        dense_mins, dense_maxs = create_matching_aabb(fnorms, dense_bound_size)
        sparse_mins, sparse_maxs = create_matching_aabb(fnorms, sparse_bound_size)
        return MatchingDatabase(
            fnorms, means, stds, indices,
            dense_bound_size, dense_mins, dense_maxs,
            sparse_bound_size, sparse_mins, sparse_maxs,
        )

    if method == "kd-tree":
        return MatchingDatabase(fnorms, means, stds, indices, kdtree=KDTree(fnorms))

    if method in ["brute-force", "faiss"]:
        # These methods need no acceleration structure up front.
        return MatchingDatabase(fnorms, means, stds, indices)

    raise ValueError("{} is not in methods.".format(method))

# def create_cost_matrix(
#     features: np.ndarray, # [num_frames, num_features]
#     method: str="taichi",
# ):
#     num_frames, num_features = features.shape
#     if method == "taichi":
#         ti_features = ti.Vector.field(n=num_features, dtype=float, shape=(num_frames))
#         ti_features.from_numpy(features)
#         cost_mat = ti.field(float, shape=(num_frames, num_frames))
#         cost_mat.fill(0)
#         _create_cost_matrix(ti_features, cost_mat, num_features)
#         return cost_mat.to_numpy()

# @ti.kernel
# def _create_cost_matrix(
#     features: ti.template(),
#     cost_mat: ti.template(),
#     num_features: int
# ):
#     for i, j in cost_mat:
#         for k in ti.static(range(num_features)):
#             cost_mat[i, j] += (features[i][k] - features[j][k]) ** 2

def motion_matching_search(
    cur_idx: int,
    method: str, # ["brute-force", "aabb", "kd-tree", "faiss"]
    mdb: MatchingDatabase,
    query: np.ndarray, # [num_features]
    norm_query: bool=True # If we need normalize query.
):
    """Search the best index in `MatchingDatabase`.

    Called when the motion can be updated (every N frames) or must be
    updated (the clip ended, signalled by `cur_idx == -1`).
    """
    qnorm = normalize_query(query, mdb.means, mdb.stds) if norm_query else query

    if cur_idx == -1:
        # Clip ended: any candidate is acceptable.
        best_cost: float = sys.float_info.max
    else:
        # Still playing: a candidate must beat the current frame's cost.
        cur_idx = mdb.indices.index(cur_idx) # db index -> mdb index
        # mdb.features[cur_idx] will be replaced to controlled character's features.
        best_cost: float = np.sum(np.square(mdb.features[cur_idx] - qnorm))

    # Dispatch to the selected search backend (unknown methods return None).
    search_fn = {
        "brute-force": brute_force_search,
        "aabb": aabb_search,
        "kd-tree": kd_search,
        "faiss": faiss_search,
    }.get(method)
    if search_fn is not None:
        return search_fn(mdb, qnorm, cur_idx, best_cost)

def brute_force_search(
    mdb: MatchingDatabase,
    qnorm: np.ndarray, # [num_features]
    cur_idx: int,
    best_cost: float,
) -> int:
    costs = np.sum(np.square(mdb.features - qnorm[None].repeat(len(mdb), axis=0)), axis=-1)
    cur_cost = np.min(costs)
    if cur_cost < best_cost:
        return mdb.indices[np.argmin(costs)] # mdb index -> db index
    else:
        return mdb.indices[cur_idx]

def aabb_search(
    mdb: MatchingDatabase,
    qnorm: np.ndarray, # [num_features]
    cur_idx: int,
    best_cost: float,
) -> int:
    i = 0
    best_idx = cur_idx
    # for all sparse boxes
    while i < len(mdb):
        sparse_box_idx = i // mdb.sparse_bound_size # box index
        i_sparse_next = (sparse_box_idx + 1) * mdb.sparse_bound_size # frame index on next sparce box
        # Calculate distance between query to sparse bounding box.
        cur_cost, smaller = calc_box_distance(
            best_cost, qnorm, 
            mdb.dense_bound_mins[sparse_box_idx], 
            mdb.sparse_bound_maxs[sparse_box_idx],
        )
        
        if not smaller:
            i = i_sparse_next
            continue
        
        # for all dense boxes
        while i < len(mdb) and i < i_sparse_next:
            dense_box_idx = i // mdb.dense_bound_size # box index
            i_dense_next = (dense_box_idx + 1) * mdb.dense_bound_size # frame index

            # Calculate distance to a dense bounding box.
            cur_cost, smaller = calc_box_distance(
                best_cost, qnorm, 
                mdb.sparse_bound_mins[sparse_box_idx], 
                mdb.sparse_bound_maxs[sparse_box_idx]
            )

            if not smaller:
                i = i_dense_next
                continue

            # Calculate distance to a feature inside box.
            while i < len(mdb) and i < i_dense_next:
                cur_cost = np.sum(np.square(qnorm - mdb.features[i]))
                if cur_cost < best_cost:
                    best_idx = i
                    best_cost = cur_cost
                
                i += 1
    return mdb.indices[best_idx] # mdb index -> db index

def kd_search(
    mdb: MatchingDatabase,
    qnorm: np.ndarray,
    cur_idx: int,
    best_cost: float,
) -> int:
    dist, index = mdb.kdtree.query(qnorm, k=1)
    if dist < best_cost:
        return mdb.indices[index] # mdb index -> db index
    else:
        return mdb.indices[cur_idx]

def faiss_search(
    mdb: MatchingDatabase,
    qnorm: np.ndarray,
    cur_idx: int,
    best_cost: float,
) -> int:
    """Placeholder for a faiss-backed nearest-neighbour search.

    NOTE(review): not implemented — always returns None, so callers that
    select method="faiss" currently get no match index.
    """
    return

================================================
FILE: anim/pose.py
================================================
from __future__ import annotations

import numpy as np
from util import quat
from anim.skel import Skel

class Pose:
    """A single static pose: local joint rotations plus a root position.

    Global quantities are computed via forward kinematics and cached on the
    instance by the `set_*` helpers.
    """

    def __init__(
        self,
        skel: Skel,
        quats: np.ndarray,
        root_pos: np.ndarray,
    ) -> None:
        self.skel = skel
        self.quats = quats       # local joint rotations, [J, 4]
        self.root_pos = root_pos # root world position, [3,]
        
    def forward_kinematics(self):
        """Run FK and return (global_rotations, global_positions)."""
        # BUGFIX: `parents` was previously assigned only inside the branch
        # where `self.lpos` did not exist, so calling FK after
        # set_local_positions() raised a NameError.
        parents = self.skel.parents
        if not hasattr(self, "lpos"):
            # skel.offsets builds a fresh array, so writing the root here
            # does not mutate the skeleton.
            offsets = self.skel.offsets
            offsets[0] = self.root_pos
        else:
            offsets = self.lpos
        return quat.fk(self.quats, offsets, parents)
    
    def set_local_positions(self):
        """Cache local joint positions: skeleton offsets with the root moved to root_pos."""
        offsets = self.skel.offsets
        offsets[0] = self.root_pos
        self.lpos = offsets
    
    def set_global_positions(self):
        """Cache global joint positions."""
        _, self.global_positions = self.forward_kinematics()
    
    def set_global_rotations(self):
        """Cache global joint rotations."""
        self.global_rotations, _ = self.forward_kinematics()
    
    def set_gpos_and_grot(self):
        """Cache both global rotations and positions with a single FK pass."""
        self.global_rotations, self.global_positions = \
            self.forward_kinematics()

================================================
FILE: anim/skel.py
================================================
from __future__ import annotations

from dataclasses import dataclass
import numpy as np

@dataclass
class Joint:
    """A single joint in a skeleton hierarchy."""
    name: str           # joint name
    parent: int         # parent joint index (-1 for the root)
    offset: np.ndarray  # offset from the parent joint, [3,]
    root: bool = False  # whether this joint is the root
    dof: int = 3        # degrees of freedom (root typically gets 6)

def axis_to_vector(axis: str):
    """Convert an axis label ("x", "y", "z", "-x", "-y", "-z") to a unit
    vector as a list of ints.

    Raises:
        ValueError: for any other label.
    """
    vectors = {
        "x": [1, 0, 0],
        "y": [0, 1, 0],
        "z": [0, 0, 1],
        "-x": [-1, 0, 0],
        "-y": [0, -1, 0],
        "-z": [0, 0, -1],
    }
    if axis not in vectors:
        raise ValueError
    return vectors[axis]

class Skel:
    """Skeleton definition: a joint hierarchy plus coordinate conventions."""

    def __init__(
        self,
        joints: list[Joint],
        skel_name: str="skeleton",
        rest_forward: list[int]=None,
        rest_vertical: list[int]=None,
        forward_axis: str="z",
        vertical_axis: str="y",
    ) -> None:
        """Class for skeleton offset definition.

        Args:
            joints (list[Joint]): list of Joints.
            skel_name (str, optional): name of skeleton. Defaults to "skeleton".
            rest_forward  (list[int], optional): forward direction of the rest pose. Defaults to [0, 0, 1].
            rest_vertical (list[int], optional): vertical direction of the rest pose. Defaults to [0, 1, 0].
            forward_axis (str, optional): forward axis of the coordinates. Defaults to "z".
            vertical_axis (str, optional): vertical axis of the coordinates. Defaults to "y".
        """
        # BUGFIX: avoid shared mutable default arguments (lists); use None
        # sentinels and create fresh lists per instance instead.
        if rest_forward is None:
            rest_forward = [0, 0, 1]
        if rest_vertical is None:
            rest_vertical = [0, 1, 0]
        self.skel_name = skel_name
        self.joints = joints
        self.rest_forward = rest_forward
        self.rest_vertical = rest_vertical
        self.forward_axis = forward_axis
        self.forward = axis_to_vector(forward_axis)
        self.vertical_axis = vertical_axis
        self.vertical = axis_to_vector(vertical_axis)
    
    def __len__(self) -> int:
        """Number of joints."""
        return len(self.joints)
    
    def __getitem__(self, index: int | slice | str) -> Joint | list[Joint]:
        return self.get_joint(index)
    
    @property
    def indices(self) -> list[int]:
        """List of all joint indices, [0 .. len(self)-1]."""
        return [idx for idx in range(len(self))]
    
    @property
    def parents(self) -> list[int]:
        """Get list of all joint's parent indices."""
        return [j.parent for j in self.joints]
    
    @property
    def children(self) -> dict[int, list[int]]:
        """Map each joint index to the list of its children's indices."""
        children_dict = {}
        for i in range(len(self)):
            children_dict[i] = []
        for i, parent in enumerate(self.parents):
            if parent == -1:
                continue
            children_dict[parent].append(i)
        return children_dict
    
    @property
    def names(self) -> list[str]:
        """Get all joints names."""
        return [j.name for j in self.joints]
    
    @property
    def offsets(self) -> np.ndarray:
        """Offset coordinates of all joints, stacked as a fresh [J, 3] array."""
        offsets: list[np.ndarray] = []
        for joint in self.joints:
            offsets.append(joint.offset)
        return np.array(offsets)
    
    @property
    def dofs(self) -> list[int]:
        """Degrees of freedom per joint."""
        return [j.dof for j in self.joints]
    
    @property
    def joint_depths(self) -> list[int]:
        """Get hierarchical distance between joints to ROOT."""
        def get_depth(joint_idx: int, cur_depth: int=0):
            # Walk up the parent chain, counting hops until the root.
            parent_idx = self.parents[joint_idx]
            if parent_idx != -1:
                depth = cur_depth + 1
                cur_depth = get_depth(parent_idx, depth)
            return cur_depth
        return [get_depth(idx) for idx in self.indices]
    
    @property
    def bone_lengths(self) -> np.ndarray:
        """Length of every bone (offset norm); the root gets length 0."""
        lengths = np.linalg.norm(self.offsets, axis=-1)
        lengths[0] = 0
        return lengths
    
    def get_joint(self, index: int | slice | str) -> Joint | list[Joint]:
        """Get Joint from index or slice or joint name."""
        if isinstance(index, str):
            index: int = self.get_index_from_jname(index)
        return self.joints[index]
    
    def get_index_from_jname(self, jname: str) -> int:
        """Get joint index from joint name."""
        jname_list = self.names
        return jname_list.index(jname)
    
    def get_children(
        self, 
        index: int | str,
        return_idx: bool = False
    ) -> list[Joint] | list[int]:
        """Get list of children joints or children indices.
        args:
            index: index of joint or name of joint.
            return_idx: if True, return joint indices.
        return:
            list of joints or list of indices (if return_idx).
        """
        if isinstance(index, str):
            index: int = self.get_index_from_jname(index)
        
        children_idx = self.children[index]
        if return_idx:
            return children_idx
        else:
            children = []
            for child_idx in children_idx:
                children.append(self.joints[child_idx])
            return children
    
    def get_parent(
        self, 
        index: int | str,
        return_idx: bool = False
    ) -> Joint | int | None:
        """Get parent joint or parent index.
        args:
            index (int | str): index of joint or name of joint.
            return_idx (bool): if True, return joint index.
        return:
            None : if parent doesn't exist.
            index (int): if return_idx = True.
            joint (Joint): if return_idx = False.
        raises:
            TypeError: if index is neither int nor str.
        """
        if isinstance(index, str):
            _index: int = self.get_index_from_jname(index)
        elif isinstance(index, int):
            _index: int = index
        else:
            # BUGFIX: previously fell through with `_index` unbound, raising
            # an opaque UnboundLocalError for unsupported index types.
            raise TypeError("index must be int or str, got {}".format(type(index).__name__))
        
        if return_idx: return self.parents[_index]
        elif self.parents[_index] == -1:
            return None
        else:
            return self.joints[self.parents[_index]]
    
    @staticmethod
    def from_names_parents_offsets(
        names: list[str],
        parents: list[int],
        offsets: np.ndarray,
        skel_name: str="skeleton",
        rest_forward: list[int]=None,
        rest_vertical: list[int]=None,
        forward_axis: str = "z",
        vertical_axis: str = "y"
    ) -> Skel:
        """Construct new Skel from names, parents, offsets.
        
        args:
            names(list[str]) : list of joints names.
            parents(list[int]) : list of joints parents indices.
            offsets(np.ndarray): joint offset relative coordinates from parent joints.
            skel_name (str, optional): name of skeleton. Defaults to "skeleton".
            rest_forward  (list[int], optional): forward direction of the rest pose. Defaults to [0, 0, 1].
            rest_vertical (list[int], optional): vertical direction of the rest pose. Defaults to [0, 1, 0].
            forward_axis (str, optional): forward axis of the coordinates. Defaults to "z".
            vertical_axis (str, optional): vertical axis of the coordinates. Defaults to "y".
        return:
            Skel
        """
        # BUGFIX: None sentinels instead of shared mutable list defaults.
        if rest_forward is None:
            rest_forward = [0, 0, 1]
        if rest_vertical is None:
            rest_vertical = [0, 1, 0]
        ln, lp, lo = len(names), len(parents), len(offsets)
        assert len(set([ln, lp, lo])) == 1
        joints = []
        for name, parent, offset in zip(names, parents, offsets):
            # The root (parent == -1) gets 6 DoF (rotation + translation).
            dof = 6 if parent == -1 else 3
            joints.append(Joint(name, parent, offset, (parent==-1), dof))
        return Skel(joints, skel_name, rest_forward, rest_vertical, forward_axis, vertical_axis)

================================================
FILE: anim/smpl.py
================================================
"""
smpl.py

JOINT_NAMES, SMPLH_JOINT_NAMES, SMPL_JOINT_NAMES are from \
https://github.com/vchoutas/smplx/blob/main/smplx/joint_names.py
"""

from __future__ import annotations

from pathlib import Path
import numpy as np
from util.load import pickle_load

# Full SMPL-X joint/landmark name list (body, hands, face and face contour),
# in model output order.
JOINT_NAMES = [
    "pelvis",
    "left_hip",
    "right_hip",
    "spine1",
    "left_knee",
    "right_knee",
    "spine2",
    "left_ankle",
    "right_ankle",
    "spine3",
    "left_foot",
    "right_foot",
    "neck",
    "left_collar",
    "right_collar",
    "head",
    "left_shoulder",
    "right_shoulder",
    "left_elbow",
    "right_elbow",
    "left_wrist",
    "right_wrist",
    "jaw",
    "left_eye_smplhf",
    "right_eye_smplhf",
    "left_index1",
    "left_index2",
    "left_index3",
    "left_middle1",
    "left_middle2",
    "left_middle3",
    "left_pinky1",
    "left_pinky2",
    "left_pinky3",
    "left_ring1",
    "left_ring2",
    "left_ring3",
    "left_thumb1",
    "left_thumb2",
    "left_thumb3",
    "right_index1",
    "right_index2",
    "right_index3",
    "right_middle1",
    "right_middle2",
    "right_middle3",
    "right_pinky1",
    "right_pinky2",
    "right_pinky3",
    "right_ring1",
    "right_ring2",
    "right_ring3",
    "right_thumb1",
    "right_thumb2",
    "right_thumb3",
    "nose",
    "right_eye",
    "left_eye",
    "right_ear",
    "left_ear",
    "left_big_toe",
    "left_small_toe",
    "left_heel",
    "right_big_toe",
    "right_small_toe",
    "right_heel",
    "left_thumb",
    "left_index",
    "left_middle",
    "left_ring",
    "left_pinky",
    "right_thumb",
    "right_index",
    "right_middle",
    "right_ring",
    "right_pinky",
    "right_eye_brow1",
    "right_eye_brow2",
    "right_eye_brow3",
    "right_eye_brow4",
    "right_eye_brow5",
    "left_eye_brow5",
    "left_eye_brow4",
    "left_eye_brow3",
    "left_eye_brow2",
    "left_eye_brow1",
    "nose1",
    "nose2",
    "nose3",
    "nose4",
    "right_nose_2",
    "right_nose_1",
    "nose_middle",
    "left_nose_1",
    "left_nose_2",
    "right_eye1",
    "right_eye2",
    "right_eye3",
    "right_eye4",
    "right_eye5",
    "right_eye6",
    "left_eye4",
    "left_eye3",
    "left_eye2",
    "left_eye1",
    "left_eye6",
    "left_eye5",
    "right_mouth_1",
    "right_mouth_2",
    "right_mouth_3",
    "mouth_top",
    "left_mouth_3",
    "left_mouth_2",
    "left_mouth_1",
    "left_mouth_5",  # 59 in OpenPose output
    "left_mouth_4",  # 58 in OpenPose output
    "mouth_bottom",
    "right_mouth_4",
    "right_mouth_5",
    "right_lip_1",
    "right_lip_2",
    "lip_top",
    "left_lip_2",
    "left_lip_1",
    "left_lip_3",
    "lip_bottom",
    "right_lip_3",
    # Face contour
    "right_contour_1",
    "right_contour_2",
    "right_contour_3",
    "right_contour_4",
    "right_contour_5",
    "right_contour_6",
    "right_contour_7",
    "right_contour_8",
    "contour_middle",
    "left_contour_8",
    "left_contour_7",
    "left_contour_6",
    "left_contour_5",
    "left_contour_4",
    "left_contour_3",
    "left_contour_2",
    "left_contour_1",
]


# SMPL-H joint name list (body + articulated hands, no face), in model order.
SMPLH_JOINT_NAMES = [
    "pelvis",
    "left_hip",
    "right_hip",
    "spine1",
    "left_knee",
    "right_knee",
    "spine2",
    "left_ankle",
    "right_ankle",
    "spine3",
    "left_foot",
    "right_foot",
    "neck",
    "left_collar",
    "right_collar",
    "head",
    "left_shoulder",
    "right_shoulder",
    "left_elbow",
    "right_elbow",
    "left_wrist",
    "right_wrist",
    "left_index1",
    "left_index2",
    "left_index3",
    "left_middle1",
    "left_middle2",
    "left_middle3",
    "left_pinky1",
    "left_pinky2",
    "left_pinky3",
    "left_ring1",
    "left_ring2",
    "left_ring3",
    "left_thumb1",
    "left_thumb2",
    "left_thumb3",
    "right_index1",
    "right_index2",
    "right_index3",
    "right_middle1",
    "right_middle2",
    "right_middle3",
    "right_pinky1",
    "right_pinky2",
    "right_pinky3",
    "right_ring1",
    "right_ring2",
    "right_ring3",
    "right_thumb1",
    "right_thumb2",
    "right_thumb3",
    "nose",
    "right_eye",
    "left_eye",
    "right_ear",
    "left_ear",
    "left_big_toe",
    "left_small_toe",
    "left_heel",
    "right_big_toe",
    "right_small_toe",
    "right_heel",
    "left_thumb",
    "left_index",
    "left_middle",
    "left_ring",
    "left_pinky",
    "right_thumb",
    "right_index",
    "right_middle",
    "right_ring",
    "right_pinky",
]

# Basic SMPL joint name list (24 body joints, single hand joint per side).
SMPL_JOINT_NAMES = [
    "pelvis",
    "left_hip",
    "right_hip",
    "spine1",
    "left_knee",
    "right_knee",
    "spine2",
    "left_ankle",
    "right_ankle",
    "spine3",
    "left_foot",
    "right_foot",
    "neck",
    "left_collar",
    "right_collar",
    "head",
    "left_shoulder",
    "right_shoulder",
    "left_elbow",
    "right_elbow",
    "left_wrist",
    "right_wrist",
    "left_hand",
    "right_hand",
]

def load_model(model_path: Path, gender: str=None):
    if isinstance(model_path, str):
        model_path = Path(model_path)
    if model_path.suffix == "":
        model_path = model_path / gender / "model.npz"
    match model_path.suffix:
        case ".npz":
            model_dict = np.load(model_path, allow_pickle=True)
        case ".pkl":
            try:
                model_dict = pickle_load(model_path)
            except:
                model_dict = pickle_load(model_path, encoding="latin1")
        case _ :  
            ValueError("This file is not supported.")
    return model_dict

def calc_skel_offsets(
        betas: np.ndarray,
        J_regressor: np.ndarray,
        shapedirs: np.ndarray,
        v_template: np.ndarray,
) -> np.ndarray:
    """Regress rest-pose joint locations from SMPL shape parameters.

    args:
        betas (np.ndarray): body shape parameters of SMPL. shape: (num_betas).
        J_regressor (np.ndarray): Joint regressor from SMPL vertices. shape: (num_J, num_vertices).
        shapedirs (np.ndarray): Matrix to convert from PCA (betas) to SMPL blendshapes. shape: (num_vertices, 3, num_betas).
        v_template (np.ndarray): Template body vertices of SMPL. shape: (num_vertices, 3).
    return:
        joints (np.ndarray): Offset joint positions in 3D space (absolute coordinates). shape: (num_J, 3).
    """
    # Truncate betas and shapedirs to a shared number of shape components.
    n_components = min(len(betas), shapedirs.shape[-1])
    shape_params = betas[:n_components]
    dirs = shapedirs[..., :n_components]
    # Deform the template mesh with the shape blendshapes, then regress joints.
    shaped_vertices = v_template + dirs @ shape_params
    return J_regressor @ shaped_vertices



================================================
FILE: anitaichi/animation/anim.py
================================================
import numpy as np
import taichi as ti
from anitaichi.animation.skel import Skel
from anitaichi.transform import ti_quat, quat

def forward_kinematics_rotations(
    parents: list[int],  # (J)
    rots: np.ndarray,    # (T, J, 4) or (J, 4)
):
    """Accumulate local joint rotations into global rotations.

    Assumes parents[i] < i for every non-root joint, so a parent's global
    rotation is computed before any of its children.
    """
    world = []
    for joint, parent in enumerate(parents):
        local = rots[..., joint:joint+1, :]
        if parent == -1:
            world.append(local)
        else:
            world.append(quat.mul(world[parent], local))
    return np.concatenate(world, axis=-2)

# Forward Kinematics using numpy.
# (Since it is difficult to parallelize each bone, use this for Pose FK.)
def forward_kinematics(
    offsets: np.ndarray, # (J, 3)
    parents: list[int],  # (J)
    grots: np.ndarray,   # (T, J, 4) or (J, 4)
    trans: np.ndarray    # (T, 3) or (3)
) -> np.ndarray:
    """Compute global joint positions from global rotations and bone offsets.

    The root joint is placed directly at `trans`; every other joint is its
    parent's position plus the bone offset rotated by the parent's global
    rotation. Assumes parents[i] < i for non-root joints.
    """
    # Broadcast the static per-joint offsets across the leading dimension.
    tiled_offsets = offsets[None].repeat(len(grots), axis=0)
    positions = []
    for joint, parent in enumerate(parents):
        if parent == -1:
            positions.append(trans[..., None, :])
        else:
            bone = tiled_offsets[..., joint:joint+1, :]
            rotated = quat.mul_vec(grots[..., parent:parent+1, :], bone)
            positions.append(rotated + positions[parent])
    return np.concatenate(positions, axis=-2)

# Forward kinematics using taichi.
# (Parallelized over the time dimension; joints are resolved sequentially
# within a frame because each child depends on its parent's result.)
@ti.kernel
def ti_forward_kinematics(
    parents: ti.template(), # (J) parent index per joint, -1 marks the root
    offsets: ti.template(), # (J) * 3 local bone offsets
    lrots: ti.template(),   # (T, J) * 4 local rotations (input)
    grots: ti.template(),   # (T, J) * 4 global rotations (output)
    gposs: ti.template(),   # (T, J) * 3 global positions (output)
):
    # parallelize the time dimension (outermost loop).
    for i in ti.ndrange(grots.shape[0]):
        # Sequential scan over joints: assumes parents[j] < j.
        for j in ti.ndrange(grots.shape[1]):
            if parents[j] == -1:
                # NOTE(review): the root is placed at offsets[j]; no root
                # translation is applied here — confirm this is intended
                # (Animation.ti_fk passes no trans).
                grots[i, j] = lrots[i, j]
                gposs[i, j] = offsets[j]
            else:
                grots[i, j] = ti_quat.mul(grots[i, parents[j]], lrots[i, j])
                gposs[i, j] = ti_quat.mul_vec3(grots[i, parents[j]], offsets[j]) + gposs[i, parents[j]]

# @ti.kernel
# def calc_local_positions(
#     parents: ti.template(), # (J)
#     offsets: ti.template(), # (J) * 3
#     grots: ti.template(),   # (T, J) * 4
#     lposs: ti.template()    # (T, J) * 3
# ):
#     for i, j in grots:
#         if parents[j] == -1:
#             lposs[i, j] = offsets[j]
#         else:
#             lposs[i, j] = ti_quat.mul_vec3(grots[i, j], offsets[j])

# Convert numpy array to taichi vector field.
def convert_to_vector_field(array: np.ndarray):
    """Wrap an (..., D) numpy array in a taichi field of D-vectors."""
    *outer, dim = array.shape
    field = ti.Vector.field(dim, dtype=float, shape=tuple(outer))
    field.from_numpy(array)
    return field


class Pose:
    """A single-frame pose: a skeleton plus per-joint local rotations and a
    root translation."""

    def __init__(
        self,
        skel: Skel,
        rots: np.ndarray,
        root_pos: np.ndarray,
    ) -> None:
        self.skel = skel
        self.rots = rots
        self.root_pos = root_pos

    @property
    def local_rotations(self):
        # Alias for the raw per-joint local rotations.
        return self.rots

    @property
    def global_rotations(self):
        # Accumulate local rotations down the hierarchy.
        return forward_kinematics_rotations(self.skel.parents, self.rots)

    @property
    def global_positions(self):
        # Full FK: world-space rotations first, then joint positions.
        return forward_kinematics(
            self.skel.offsets,
            self.skel.parents,
            self.global_rotations,
            self.root_pos,
        )


class Animation:
    """A motion clip: per-frame local rotations and root translations on a
    fixed skeleton."""

    def __init__(
        self,
        skel: Skel,
        rots: np.ndarray,
        trans: np.ndarray,
        fps: int,
        anim_name: str,
    ) -> None:
        self.skel = skel
        self.rots = rots
        self.trans = trans
        self.fps = fps
        self.anim_name = anim_name

    def __len__(self):
        # Number of frames.
        return len(self.rots)

    @property
    def local_rotations(self):
        # Alias for the raw per-joint local rotations.
        return self.rots

    @property
    def global_rotations(self):
        # Accumulate local rotations down the hierarchy for every frame.
        return forward_kinematics_rotations(self.skel.parents, self.rots)

    @property
    def global_positions(self):
        # Full FK over all frames.
        return forward_kinematics(
            self.skel.offsets,
            self.skel.parents,
            self.global_rotations,
            self.trans,
        )

    def ti_fk(self):
        """Run forward kinematics with taichi and cache the results as the
        fields `global_rotations_field` / `global_positions_field`."""
        lrots = convert_to_vector_field(self.local_rotations)       # (T, J) * 4
        grots = ti.Vector.field(4, dtype=float, shape=lrots.shape)  # (T, J) * 4
        gposs = ti.Vector.field(3, dtype=float, shape=lrots.shape)  # (T, J) * 3
        ti_forward_kinematics(
            self.skel.parents_field,
            self.skel.offsets_field,
            lrots,
            grots,
            gposs,
        )
        self.global_rotations_field = grots
        self.global_positions_field = gposs

    def to_pose(self, frame_num: int) -> Pose:
        """Extract the pose at a single frame."""
        return Pose(self.skel, self.rots[frame_num], self.trans[frame_num])

    def to_poses(self, frames: slice) -> list[Pose]:
        """Extract poses for a slice of frames (a single Pose for an int)."""
        if isinstance(frames, int):
            return self.to_pose(frames)
        return [
            Pose(self.skel, rots, root_pos)
            for rots, root_pos in zip(self.rots[frames], self.trans[frames])
        ]


================================================
FILE: anitaichi/animation/anim_loader/bvh.py
================================================
"""
bvh.py
"""
# loading and writing a biovision hierarchy data (BVH).

from __future__ import annotations

from io import TextIOWrapper
from pathlib import Path
import logging
import numpy as np

from anitaichi.animation.anim import Animation
from anitaichi.animation.skel import Skel, Joint
from anitaichi.transform import quat

def load(
    filepath: Path | str, 
    start: int=None, 
    end: int=None, 
    order: str=None,
    load_skel: bool=True,
    load_pose: bool=True,
    skel: Skel=None,
    skel_name: str=None,
) -> Skel | Animation:
    """Load a BVH file.

    args:
        filepath: Path to the .bvh file.
        start, end: Optional frame slice bounds for the motion data.
        order: Euler rotation order (e.g. "zyx"); overwritten by the order
            parsed from the CHANNELS lines when load_skel is True.
        load_skel: Parse the HIERARCHY section into a new Skel.
        load_pose: Parse the MOTION section; requires a skeleton.
        skel: Pre-built skeleton to use when load_skel is False.
        skel_name: Name given to a newly created skeleton.
    return:
        Animation when load_pose is True, otherwise the parsed Skel.
    raises:
        ValueError: If both load_skel and load_pose are False.
    """
    
    if not load_skel and not load_pose:
        logging.info("Either load_skel or load_pose must be specified.")
        raise ValueError
    
    if isinstance(filepath, str):
        filepath = Path(filepath)
    
    # List bvh file by each row (line) and each word (column).
    with open(filepath, "r") as f:
        # NOTE(review): `line != ""` is tested before stripping, so it never
        # filters anything (lines still carry "\n"); blank lines survive as
        # empty word lists. Confirm whether filtering was intended.
        lines: list[str] = [line.strip() for line in f if line != ""]
        motion_idx: int = lines.index("MOTION")
        lines: list[str | list[str]] = \
            list(map(lambda x: x.split(), lines))
        f.close()  # redundant inside `with`; kept as-is
    
    # Load HIERARCHY term.
    if load_skel:
        skel, order = load_hierarchy(
            lines = lines[:motion_idx], 
            skel_name=skel_name,
        )

    # Load MOTION term.
    if load_pose:
        assert not skel is None, "You need to load skeleton or define skeleton." 
        # Animation name = file name without extension.
        name = filepath.name.split(".")[0]
        
        fps, trans, quats = load_motion(
            lines = lines[motion_idx:],
            start = start,
            end = end,
            order = order,
            skel = skel
        )
        return Animation(
            skel=skel,
            rots=quats,
            trans=trans,
            fps=fps,
            anim_name=name,
        )
    
    return skel

def load_hierarchy(
    lines: list[str | list[str]],
    skel_name: str,
) -> Skel:
    """Parse the HIERARCHY section of a BVH file.

    args:
        lines: Tokenized lines of the HIERARCHY section.
        skel_name: Name given to the resulting skeleton.
    return:
        (skel, order): the Skel and the Euler channel order (e.g. "zyx").
        NOTE(review): `order` stays unbound if no CHANNELS line appears, and
        the annotated return type omits it — confirm.
    """
    
    channelmap: dict[str, str] = {
        "Xrotation" : "x",
        "Yrotation" : "y",
        "Zrotation" : "z",   
    }
    
    stacks: list[int] = [-1]   # parent-index stack while descending the tree
    parents: list[int] = []    # parent index per joint; -1 for the root
    name_list: list[str] = []
    idx = 0                    # next joint index to assign
    joints: list[Joint] = []
    depth:int = 0              # current brace nesting depth
    end_site: bool = False     # inside an "End Site" block (creates no Joint)
    
    for line in lines:
        if "ROOT" in line or "JOINT" in line:
            parents.append(stacks[-1])
            stacks.append(len(parents) - 1)
            name_list.append(line[1])
            
        elif "OFFSET" in line:
            # End Site offsets are skipped; a Joint is created per OFFSET
            # of a real ROOT/JOINT entry.
            if not end_site:
                offset = list(map(float, line[1:]))
                joints.append(Joint(
                    name = name_list[-1],
                    index = idx,
                    parent = parents[-1],
                    offset = offset,
                    ))
                idx += 1
        
        elif "CHANNELS" in line:
            dof = int(line[1])
            # 3 dof: rotations only; 6 dof: three position channels precede
            # the three rotation channels.
            if dof == 3:
                order = "".join([channelmap[p] for p in line[2:2+3]])
            elif dof == 6:
                order = "".join([channelmap[p] for p in line[5:5+3]])
            joints[-1].dof = dof
        
        elif "End" in line:
            end_site = True
            stacks.append(len(parents) - 1)
        
        elif "{" in line:
            depth += 1
        
        elif "}" in line:
            depth -= 1
            end_site = False
            stacks.pop()
    
    assert depth == 0, "Brackets are not closed."
    
    skel = Skel(joints=joints, skel_name=skel_name)
    return skel, order

def load_motion(
    lines: list[list[str]],
    order: str,
    skel: Skel,
    start: int=None,
    end: int=None,
) -> tuple[int, np.ndarray, np.ndarray]:
    """Parse the MOTION section of a BVH file.

    args:
        lines: Tokenized lines starting at the "MOTION" line.
        order: Euler channel order (e.g. "zyx"); values are in degrees.
        skel: Skeleton whose per-joint dof determines the column layout.
        start, end: Optional frame slice bounds.
    return:
        (fps, trans, quats): integer frame rate, root translations (T, 3),
        and per-joint rotations as sign-unrolled quaternions (T, J, 4).
    """
    
    # lines[2] is "Frame Time: <dt>"; fps is rounded to the nearest integer.
    fps: int = round(1 / float(lines[2][2]))
    lines: list[list[str]] = lines[3:]
    
    np_lines = np.array(
        list(map(lambda line: list(map(float, line)), lines))
    ) # T × dim_J(J × 3 + 3 or J × 6) matrix.
    np_lines = np_lines[start:end]
    
    dofs = []
    eangles = []
    poss = []
    ckpt = 0  # running column offset into np_lines
    for joint in skel.joints:
        dof = joint.dof
        dofs.append(dof)
        if dof == 3:
            eangle = np_lines[:, ckpt:ckpt+3]
            eangles.append(eangle[:, None])
            ckpt += 3
        elif dof == 6:
            # 6-dof joints store position channels before rotation channels.
            pos = np_lines[:, ckpt:ckpt+3] # [T, 3]
            eangle = np_lines[:, ckpt+3:ckpt+6]
            poss.append(pos[:, None]) # [T, 1, 3]
            eangles.append(eangle[:, None]) # [T, 1, 3]
            ckpt += 6
    
    assert sum(dofs) == np_lines.shape[1], \
        "Skel and Motion are not compatible."
    
    poss = np.concatenate(poss, axis=1)
    eangles = np.concatenate(eangles, axis=1)
    
    # Root translation comes from the first 6-dof joint; quaternions are
    # sign-unrolled to remove frame-to-frame sign flips.
    trans = poss[:, 0]
    quats = quat.unroll(quat.from_euler(eangles, order))
    
    return fps, trans, quats

def save(
    filepath: Path | str,
    anim: Animation, 
    order: str="zyx",
    ) -> bool:
    """Write an Animation to a BVH file.

    args:
        filepath: Output .bvh path.
        anim: Animation to serialize.
        order: Euler rotation order used for CHANNELS and MOTION values.
    NOTE(review): despite the `-> bool` annotation nothing is returned, and
    `anim.trans` / `anim.rots` are assumed to be taichi fields
    (`.to_numpy()`), not the numpy arrays Animation.__init__ receives —
    confirm which callers use this.
    """
    
    skel = anim.skel
    trans = anim.trans.to_numpy()
    quats = anim.rots.to_numpy()
    fps = anim.fps
    
    with open(filepath, "w") as f:
        # write hierarchy data.
        f.write("HIERARCHY\n")
        # index_order is the depth-first joint order used by the writer.
        index_order = save_hierarchy(f, skel, 0, order, 0)
        
        # write motion data.
        f.write("MOTION\n")
        f.write("Frames: %d\n" % len(trans))
        f.write("Frame Time: %f\n" % (1.0 / fps))
        save_motion(f, trans, quats, order, index_order)
        f.close()  # redundant inside `with`; kept as-is

def save_hierarchy(
    f: TextIOWrapper,
    skel: Skel,
    index: int,
    order: str,
    depth: int,
) -> list[int]:
    """Recursively write the HIERARCHY entry for joint `index`.

    args:
        f: Open output stream.
        skel: Skeleton being written (indexed via skel[i]).
        index: Joint index to emit.
        order: Euler rotation order for the CHANNELS line.
        depth: Current indentation depth in tabs.
    return:
        Depth-first list of joint indices in written order; save_motion uses
        it to order the per-frame columns.
    """

    def order2xyzrotation(order: str) -> str:
        # Map e.g. "zyx" -> "Zrotation Yrotation Xrotation".
        cmap: dict[str, str] = {
            "x" : "Xrotation",
            "y" : "Yrotation",
            "z" : "Zrotation",   
        }
        return "%s %s %s" % (cmap[order[0]], cmap[order[1]], cmap[order[2]])
    
    joint = skel[index]
    index_order = [index]
    if joint.root:
        f.write("\t" * depth + "ROOT %s\n" % joint.name)
    else:
        f.write("\t" * depth + "JOINT %s\n" % joint.name)
    f.write("\t" * depth + "{\n")
    depth += 1
    offset = joint.offset
    f.write("\t" * depth + \
        "OFFSET %f %f %f\n" % (offset[0], offset[1], offset[2]))
    
    # The root gets translation + rotation channels; all others rotation only.
    if joint.root:
        f.write("\t" * depth + \
            "CHANNELS 6 Xposition Yposition Zposition %s\n"\
            % order2xyzrotation(order))
    else:
        f.write("\t" * depth + "CHANNELS 3 %s\n"\
            % order2xyzrotation(order))
    
    children_idxs = skel.get_children(index, return_idx=True)
    for child_idx in children_idxs:
        ch_index_order = save_hierarchy(f, skel, child_idx, order, depth)
        index_order.extend(ch_index_order)
    # Leaf joints get a zero-offset End Site block.
    if children_idxs == []:
        f.write("\t" * depth + "End Site\n")
        f.write("\t" * depth + "{\n")
        f.write("\t" * (depth + 1) + "OFFSET %f %f %f\n" \
            % (0, 0, 0))
        f.write("\t" * depth + "}\n")
    
    depth -= 1
    f.write("\t" * depth + "}\n")
    return index_order

def save_motion(
    f: TextIOWrapper,
    trans: np.ndarray,
    quats: np.ndarray,
    order: str,
    index_order: list[int],
) -> None:
    """Write one MOTION line per frame, joints ordered by `index_order`.

    The root joint (index 0) gets translation + rotation (6 values); every
    other joint gets rotation only (3 values). Rotations are in degrees.
    """
    eangles = np.rad2deg(quat.to_euler(quats, order)) # (T, J, 3)
    for frame in range(len(trans)):
        for joint in index_order:
            rx, ry, rz = eangles[frame, joint].tolist()
            if joint == 0:
                px, py, pz = trans[frame].tolist()
                f.write("%f %f %f %f %f %f " % (px, py, pz, rx, ry, rz))
            else:
                f.write("%f %f %f " % (rx, ry, rz))
        f.write("\n")

================================================
FILE: anitaichi/animation/skel.py
================================================
from __future__ import annotations

import numpy as np
import taichi as ti

class Joint:
    """A single skeleton joint.

    Attributes:
        name: Joint name.
        index: Index of the joint within the skeleton.
        parent: Parent joint index, -1 for the root.
        offset: Bone offset from the parent, [x, y, z].
        dof: Degrees of freedom in the BVH channels (defaults to 6 for a
            root joint, 3 otherwise).
        root: Whether this joint is the hierarchy root (defaults to
            parent == -1).
    """

    def __init__(self, name: str, index: int, parent: int, offset: list[float], dof:int =None, root: bool=None):
        self.name = name
        self.index = index
        self.parent = parent
        self.offset = offset
        # Fall back to parent-derived defaults when not given explicitly.
        # (The original unconditionally overwrote `self.root` after the
        # `if root != None` branch, so an explicit `root=` was ignored.)
        self.dof = dof if dof is not None else (6 if parent == -1 else 3)
        self.root: bool = root if root is not None else (parent == -1)

class Skel:
    """A skeleton: an ordered list of Joint objects (parents before children)."""

    def __init__(self, joints: list[Joint], skel_name: str=None):
        self.joints = joints
        self.num_joints = len(joints)
        self.skel_name = skel_name

    def __len__(self):
        return self.num_joints

    def __getitem__(self, index: int) -> Joint:
        # Enables `skel[i]`, which the BVH writer (save_hierarchy) relies on.
        return self.joints[index]

    def get_children(self, index: int, return_idx: bool=False) -> list:
        """Return the children of joint `index`.

        args:
            index: Parent joint index.
            return_idx: When True return child indices, otherwise the Joint
                objects themselves. save_hierarchy calls this with
                return_idx=True.
        """
        return [
            (i if return_idx else joint)
            for i, joint in enumerate(self.joints)
            if joint.parent == index
        ]

    # ==================
    # Read only property
    # ==================
    @property
    def offsets(self) -> np.ndarray: # For numpy array
        return np.array([j.offset for j in self.joints])

    @property
    def offsets_field(self): # For Taichi field
        offsets = ti.Vector.field(3, dtype=float, shape=(self.num_joints,))
        for i in range(len(self.joints)):
            offsets[i] = self.joints[i].offset
        return offsets
    
    @property
    def parents(self) -> list[int]:
        return [j.parent for j in self.joints]

    @property
    def parents_field(self): 
        parents_idx_field = ti.field(int, shape=len(self.joints))
        for i, joint in enumerate(self.joints):
            parents_idx_field[i] = joint.parent
        return parents_idx_field

================================================
FILE: anitaichi/transform/quat.py
================================================
from __future__ import annotations

import numpy as np

from anitaichi.transform import vec

# Identity quaternions (no rotation), shaped (*shape, 4) in (w, x, y, z).
def eye(shape, dtype=np.float32):
    out = np.zeros(list(shape) + [4], dtype=dtype)
    out[..., 0] = 1
    return out

# Euclidean norm of quaternions along the last axis.
def length(x):
    return np.sqrt(np.sum(np.square(x), axis=-1))

# Scale quaternions to unit length (eps guards against division by zero).
def normalize(x, eps=1e-8):
    return x / (length(x)[..., None] + eps)

# Canonicalize sign so the scalar part is non-negative
# (q and -q encode the same rotation).
def abs(x):
    return np.where(x[..., 0:1] > 0.0, x, -x)

# Conjugate: equals the inverse for unit quaternions.
def inv(q):
    return q * np.array([1, -1, -1, -1], dtype=np.float32)

# Quaternion dot product; keeps a trailing axis for batched input.
def dot(x, y):
    prod = np.sum(x * y, axis=-1)
    return prod[..., None] if x.ndim > 1 else prod

# Hamilton product of two quaternions in (w, x, y, z) layout.
def mul(x, y):
    xw, xi, xj, xk = x[..., 0:1], x[..., 1:2], x[..., 2:3], x[..., 3:4]
    yw, yi, yj, yk = y[..., 0:1], y[..., 1:2], y[..., 2:3], y[..., 3:4]
    w = yw * xw - yi * xi - yj * xj - yk * xk
    i = yw * xi + yi * xw - yj * xk + yk * xj
    j = yw * xj + yi * xk + yj * xw - yk * xi
    k = yw * xk - yi * xj + yj * xi + yk * xw
    return np.concatenate([w, i, j, k], axis=-1)

def inv_mul(x, y):
    # inv(x) * y: the rotation from x to y.
    return mul(inv(x), y)

def mul_inv(x, y):
    # x * inv(y).
    return mul(x, inv(y))

# Multiply quaternions and vectors (return vectors).
def mul_vec(q, x):
    # Rotate vector x by quaternion q using the cross-product expansion
    # (cheaper than q * (0, x) * inv(q)).
    t = 2.0 * vec.cross(q[..., 1:], x)
    return x + q[..., 0][..., None] * t + vec.cross(q[..., 1:], t)

def inv_mul_vec(q, x):
    # Rotate x by the inverse of q.
    return mul_vec(inv(q), x)

# Flip quaternion signs frame-by-frame so consecutive quaternions sit on the
# same hemisphere (removes sign discontinuities before interpolation).
def unroll(x):
    out = x.copy()
    for t in range(1, len(x)):
        agreement = np.sum(out[t] * out[t - 1], axis=-1)
        flip = agreement < -agreement
        out[t][flip] = -out[t][flip]
    return out

# Calculate quaternions between two unit 3D vectors (x to y).
def between(x, y):
    # w = |x||y| + x.y, vector part = x cross y; the result is not normalized.
    return np.concatenate([
        np.sqrt(np.sum(x*x, axis=-1) * np.sum(y*y, axis=-1))[...,None] + 
        np.sum(x * y, axis=-1)[...,None], 
        vec.cross(x, y)], axis=-1)

def log(x, eps=1e-5):
    # Quaternion log map: returns (half-angle * axis) as a 3-vector.
    length = np.sqrt(np.sum(np.square(x[...,1:]), axis=-1))[...,None]
    # Near zero rotation atan2(l, w)/l -> 1/w ~ 1; fall back to 1 to avoid 0/0.
    halfangle = np.where(length < eps, np.ones_like(length), np.arctan2(length, x[...,0:1]) / length)
    return halfangle * x[...,1:]
    
def exp(x, eps=1e-5):
    # Quaternion exp map: inverse of log; x is (half-angle * axis).
    halfangle = np.sqrt(np.sum(np.square(x), axis=-1))[...,None]
    c = np.where(halfangle < eps, np.ones_like(halfangle), np.cos(halfangle))
    # sinc(h/pi) = sin(h)/h, numerically stable near h = 0.
    s = np.where(halfangle < eps, np.ones_like(halfangle), np.sinc(halfangle / np.pi))
    return np.concatenate([c, s * x], axis=-1)

# Calculate global space rotations and positions from local space.
def fk(lrot, lpos, parents):
    """Forward kinematics. lrot: (..., J, 4), lpos: (..., J, 3).
    Joint 0 is the root; assumes parents[i] < i."""
    gp, gr = [lpos[...,:1,:]], [lrot[...,:1,:]]
    for i in range(1, len(parents)):
        gp.append(mul_vec(gr[parents[i]], lpos[...,i:i+1,:]) + gp[parents[i]])
        gr.append(mul    (gr[parents[i]], lrot[...,i:i+1,:]))
        
    return np.concatenate(gr, axis=-2), np.concatenate(gp, axis=-2)

def fk_rot(lrot, parents):
    """Rotation-only forward kinematics."""
    gr = [lrot[...,:1,:]]
    for i in range(1, len(parents)):
        gr.append(mul(gr[parents[i]], lrot[...,i:i+1,:]))
        
    return np.concatenate(gr, axis=-2)

# Calculate root centric space rotations and positions.
def fk_centric(lrot, lpos, parents):
    """FK with the root pinned at the origin with identity rotation."""
    rrot, rpos = lrot.copy(), lpos.copy()
    rrot[:,0,:] = eye([rrot.shape[0]])
    rpos[:,0,:] = 0
    return fk(rrot, rpos, parents)

# Calculate local space rotations and positions from global space.
def ik(grot, gpos, parents):
    """Inverse of fk: recover local rotations/positions from global ones."""
    return (
        np.concatenate([
            grot[...,:1,:],
            mul(inv(grot[...,parents[1:],:]), grot[...,1:,:]),
        ], axis=-2),
        np.concatenate([
            gpos[...,:1,:],
            mul_vec(
                inv(grot[...,parents[1:],:]),
                gpos[...,1:,:] - gpos[...,parents[1:],:]),
        ], axis=-2))

def ik_rot(grot, parents):
    """Rotation-only inverse of fk_rot."""
    return np.concatenate([grot[...,:1,:], mul(inv(grot[...,parents[1:],:]), grot[...,1:,:]),
        ], axis=-2)
    
def fk_vel(lrot, lpos, lvel, lang, parents):
    """FK that also propagates linear (lvel) and angular (lang) velocities;
    a child's linear velocity adds the parent's angular term w x r."""
    gp, gr, gv, ga = [lpos[...,:1,:]], [lrot[...,:1,:]], [lvel[...,:1,:]], [lang[...,:1,:]]
    for i in range(1, len(parents)):
        gp.append(mul_vec(gr[parents[i]], lpos[...,i:i+1,:]) + gp[parents[i]])
        gr.append(mul    (gr[parents[i]], lrot[...,i:i+1,:]))
        gv.append(mul_vec(gr[parents[i]], lvel[...,i:i+1,:]) + 
            vec.cross(ga[parents[i]], mul_vec(gr[parents[i]], lpos[...,i:i+1,:])) +
            gv[parents[i]])
        ga.append(mul_vec(gr[parents[i]], lang[...,i:i+1,:]) + ga[parents[i]])
        
    return (
        np.concatenate(gr, axis=-2), 
        np.concatenate(gp, axis=-2),
        np.concatenate(gv, axis=-2),
        np.concatenate(ga, axis=-2))

# Linear Interpolation of two vectors
def lerp(x, y, t):
    return (1 - t) * x + t * y

# LERP of quaternions
def quat_lerp(x, y, t):
    # Normalized linear interpolation (nlerp): a cheap slerp approximation.
    return normalize(lerp(x, y, t))

# Spherical linear interpolation of quaternions
def slerp(x, y, t):
    # NOTE(review): written for a single quaternion pair with scalar t; the
    # `dot(x, y) < 0` test is ambiguous for batched input — confirm callers.
    if t == 0:
        return x
    elif t == 1:
        return y
    
    # Take the shorter arc on the 4-sphere.
    if dot(x, y) < 0:
        y = - y
    ca = dot(x, y)
    theta = np.arccos(np.clip(ca, 0, 1))
    
    # Unit direction orthogonal to x, pointing towards y.
    r = normalize(y - x * ca)
    
    return x * np.cos(theta * t) + r * np.sin(theta * t)


################################################################
# Conversion from quaternions to other rotation representations.
################################################################

# Calculate axis-angle from  quaternions.
# This function is based on ACTOR
# https://github.com/Mathux/ACTOR/blob/d3b0afe674e01fa2b65c89784816c3435df0a9a5/src/utils/rotation_conversions.py#L481
def to_axis_angle(x, eps=1e-5):
    """Convert quaternions (..., 4) to axis-angle vectors (..., 3) whose
    norm is the rotation angle."""
    norm = np.linalg.norm(x[...,1:],axis=-1,keepdims=True)
    half_angle = np.arctan2(norm, x[...,:1])
    angle = 2 * half_angle
    small_angle = np.abs(angle) < eps
    sin_half_angle_over_angle = np.empty_like(angle)
    sin_half_angle_over_angle[~small_angle] = (
        np.sin(half_angle[~small_angle]) / angle[~small_angle]
    )
    # for x small, sin(x/2) is about x/2 - (x/2)^3/6
    # so sin(x/2)/x is about 1/2 - (x*x)/48
    sin_half_angle_over_angle[small_angle] = (
        0.5 - (angle[small_angle] * angle[small_angle]) / 48
    )
    return x[..., 1:] / sin_half_angle_over_angle

# Calculate euler angles from quaternions.(!Under construction.)
def to_euler(x, order='zyx'):
    """Convert quaternions to Euler angles in radians; only the 'zyx' and
    'xzy' orders are implemented."""
    x0, x1, x2, x3 = x[..., 0:1], x[..., 1:2], x[..., 2:3], x[..., 3:4]

    if order == 'zyx':
        return np.concatenate([
            np.arctan2(2.0 * (x0 * x3 + x1 * x2), 1.0 - 2.0 * (x2 * x2 + x3 * x3)),
            np.arcsin(np.clip(2.0 * (x0 * x2 - x3 * x1), -1.0, 1.0)),
            np.arctan2(2.0 * (x0 * x1 + x2 * x3), 1.0 - 2.0 * (x1 * x1 + x2 * x2)),
        ], axis=-1)
    elif order == 'xzy':
        return np.concatenate([
            np.arctan2(2.0 * (x1 * x0 - x2 * x3), -x1 * x1 + x2 * x2 - x3 * x3 + x0 * x0),
            np.arctan2(2.0 * (x2 * x0 - x1 * x3), x1 * x1 - x2 * x2 - x3 * x3 + x0 * x0),
            np.arcsin(np.clip(2.0 * (x1 * x2 + x3 * x0), -1.0, 1.0))
        ], axis=-1)
    else:
        raise NotImplementedError('Cannot convert to ordering %s' % order)

# Convert quaternions (w, x, y, z) to 3x3 rotation matrices.
def to_xform(x):
    qw, qx, qy, qz = x[..., 0:1], x[..., 1:2], x[..., 2:3], x[..., 3:4]

    # Doubled products shared between matrix entries.
    x2, y2, z2 = 2.0 * qx, 2.0 * qy, 2.0 * qz
    xx, yy, wx = qx * x2, qy * y2, qw * x2
    xy, yz, wy = qx * y2, qy * z2, qw * y2
    xz, zz, wz = qx * z2, qz * z2, qw * z2

    row0 = np.concatenate([1.0 - (yy + zz), xy - wz, xz + wy], axis=-1)
    row1 = np.concatenate([xy + wz, 1.0 - (xx + zz), yz - wx], axis=-1)
    row2 = np.concatenate([xz - wy, yz + wx, 1.0 - (xx + yy)], axis=-1)
    return np.concatenate(
        [row0[..., None, :], row1[..., None, :], row2[..., None, :]], axis=-2)

# Calculate 6d orthogonal rotation representation (ortho6d) from quaternions.
# https://github.com/papagina/RotationContinuity
def to_xform_xy(x):
    """Return the first two columns of the rotation matrix, shape (..., 3, 2)."""
    qw, qx, qy, qz = x[...,0:1], x[...,1:2], x[...,2:3], x[...,3:4]
    
    x2, y2, z2 = qx + qx, qy + qy, qz + qz
    xx, yy, wx = qx * x2, qy * y2, qw * x2
    xy, yz, wy = qx * y2, qy * z2, qw * y2
    xz, zz, wz = qx * z2, qz * z2, qw * z2
    
    return np.concatenate([
        np.concatenate([1.0 - (yy + zz), xy - wz], axis=-1)[...,None,:],
        np.concatenate([xy + wz, 1.0 - (xx + zz)], axis=-1)[...,None,:],
        np.concatenate([xz - wy, yz + wx], axis=-1)[...,None,:],
    ], axis=-2)

# Calculate scaled angle axis from quaternions.
def to_scaled_angle_axis(x, eps=1e-5):
    # Full rotation angle times axis: twice the quaternion log map.
    return 2.0 * log(x, eps)


################################################################
# Conversion from other rotation representations to quaternions.
################################################################

# Calculate quaternions from rotation angle(s) and unit axis.
def from_angle_axis(angle, axis):
    """Build quaternions for `angle` radians about `axis` (assumed unit)."""
    c = np.cos(angle / 2.0)[..., None]
    s = np.sin(angle / 2.0)[..., None]
    q = np.concatenate([c, s * axis], axis=-1)
    return q

# Calculate quaternions from axis-angle vectors (axis scaled by the angle).
def from_axis_angle(rots, eps=1e-10):
    """Convert axis-angle vectors (..., 3) to quaternions (..., 4).

    args:
        rots: Axis-angle vectors; their norm is the rotation angle.
        eps: Lower bound on the norm used as a divisor. The original code
            divided by the raw norm and produced NaNs for zero rotations;
            near-zero inputs now map to the identity quaternion.
    """
    angle = np.linalg.norm(rots, axis=-1)
    # Guard against division by zero; the axis is arbitrary there because
    # sin(angle / 2) is ~0 anyway.
    axis = rots / np.maximum(angle, eps)[..., None]
    return from_angle_axis(angle, axis)

# Calculate quaternions from euler angles.
def from_euler(e, order='zyx', mode="degree"):
    """Compose per-axis rotations e[..., 0..2], applied in `order`, into
    quaternions; angles are degrees unless mode != "degree"."""
    if mode == "degree":
        e = np.deg2rad(e)
    basis = {
        'x': np.asarray([1, 0, 0], dtype=np.float32),
        'y': np.asarray([0, 1, 0], dtype=np.float32),
        'z': np.asarray([0, 0, 1], dtype=np.float32)}

    # Right-to-left composition: q(order[0]) * q(order[1]) * q(order[2]).
    result = from_angle_axis(e[..., 2], basis[order[2]])
    result = mul(from_angle_axis(e[..., 1], basis[order[1]]), result)
    return mul(from_angle_axis(e[..., 0], basis[order[0]]), result)

# Calculate quaternions from rotation matrix.
def from_xform(ts):
    """Convert 3x3 rotation matrices (..., 3, 3) to quaternions, branching
    on the dominant diagonal entry for numerical stability, then
    normalizing."""
    return normalize(
        np.where((ts[...,2,2] < 0.0)[...,None],
            np.where((ts[...,0,0] >  ts[...,1,1])[...,None],
                np.concatenate([
                    (ts[...,2,1]-ts[...,1,2])[...,None], 
                    (1.0 + ts[...,0,0] - ts[...,1,1] - ts[...,2,2])[...,None], 
                    (ts[...,1,0]+ts[...,0,1])[...,None], 
                    (ts[...,0,2]+ts[...,2,0])[...,None]], axis=-1),
                np.concatenate([
                    (ts[...,0,2]-ts[...,2,0])[...,None], 
                    (ts[...,1,0]+ts[...,0,1])[...,None], 
                    (1.0 - ts[...,0,0] + ts[...,1,1] - ts[...,2,2])[...,None], 
                    (ts[...,2,1]+ts[...,1,2])[...,None]], axis=-1)),
            np.where((ts[...,0,0] < -ts[...,1,1])[...,None],
                np.concatenate([
                    (ts[...,1,0]-ts[...,0,1])[...,None], 
                    (ts[...,0,2]+ts[...,2,0])[...,None], 
                    (ts[...,2,1]+ts[...,1,2])[...,None], 
                    (1.0 - ts[...,0,0] - ts[...,1,1] + ts[...,2,2])[...,None]], axis=-1),
                np.concatenate([
                    (1.0 + ts[...,0,0] + ts[...,1,1] + ts[...,2,2])[...,None], 
                    (ts[...,2,1]-ts[...,1,2])[...,None], 
                    (ts[...,0,2]-ts[...,2,0])[...,None], 
                    (ts[...,1,0]-ts[...,0,1])[...,None]], axis=-1))))

# Calculate quaternions from ortho6d.
def from_xform_xy(x):
    """Rebuild a full rotation matrix from its first two columns via cross
    products (Gram-Schmidt style), then convert with from_xform."""
    c2 = vec.cross(x[...,0], x[...,1])
    c2 = c2 / np.sqrt(np.sum(np.square(c2), axis=-1))[...,None]
    c1 = vec.cross(c2, x[...,0])
    c1 = c1 / np.sqrt(np.sum(np.square(c1), axis=-1))[...,None]
    c0 = x[...,0]
    
    return from_xform(np.concatenate([
        c0[...,None], 
        c1[...,None], 
        c2[...,None]], axis=-1))

# Calculate quaternions from scaled angle axis.
def from_scaled_angle_axis(x, eps=1e-5):
    # Inverse of to_scaled_angle_axis: exp of half the scaled axis.
    return exp(x / 2.0, eps)

================================================
FILE: anitaichi/transform/ti_quat.py
================================================
import taichi as ti
import taichi.math as tm

from anitaichi.transform import ti_vec

@ti.func
def eye():
    # Identity quaternion (w, x, y, z) = (1, 0, 0, 0): no rotation.
    return ti.Vector([1.0, 0.0, 0.0, 0.0])

@ti.func
def normalize(q):
    # Scale to unit length via taichi.math.
    return tm.normalize(q)

@ti.func
def inv(q):
    # Conjugate: negate the vector part; equals the inverse for unit
    # quaternions and matches the numpy version in transform/quat.py.
    # The previous code negated only w, producing -conj(q): the same
    # rotation, but a different quaternion than the numpy implementation,
    # which breaks interpolation/comparison consistency.
    return ti.Vector([q[0], -q[1], -q[2], -q[3]])

@ti.func
def abs(q):
    # Canonical sign: make the scalar part non-negative
    # (q and -q encode the same rotation).
    return -q if q[0] < 0.0 else q

@ti.func
def mul(x, y):
    # Hamilton product x * y in (w, x, y, z) layout.
    qout0 = y[0] * x[0] - y[1] * x[1] - y[2] * x[2] - y[3] * x[3]
    qout1 = y[0] * x[1] + y[1] * x[0] - y[2] * x[3] + y[3] * x[2]
    qout2 = y[0] * x[2] + y[1] * x[3] + y[2] * x[0] - y[3] * x[1]
    qout3 = y[0] * x[3] - y[1] * x[2] + y[2] * x[1] + y[3] * x[0]
    return ti.Vector([qout0, qout1, qout2, qout3])

@ti.func
def mul_vec3(q, v):
    # Rotate 3-vector v by quaternion q (cross-product expansion).
    v_quat = ti.Vector([q[1], q[2], q[3]])
    t = 2.0 * ti_vec.cross(v_quat, v)
    return v + q[0] * t + ti_vec.cross(v_quat, t)

@ti.func
def from_angle_and_axis(angle, axis):
    # Quaternion for `angle` radians about the unit vector `axis`.
    c = tm.cos(angle / 2.0)
    s = tm.sin(angle / 2.0)
    return ti.Vector([c, s * axis.x, s * axis.y, s * axis.z])

@ti.func
def from_euler(euler, order):
    # Compose per-axis rotations, applied in `order` (e.g. "zyx").
    # NOTE(review): a python dict keyed by runtime strings inside a ti.func
    # likely only works when `order` is a compile-time constant — confirm.
    axis = {
        "x": ti.Vector([1.0, 0.0, 0.0]),
        "y": ti.Vector([0.0, 1.0, 0.0]),
        "z": ti.Vector([0.0, 0.0, 1.0])
    }
    q0 = from_angle_and_axis(euler[0], axis[order[0]])
    q1 = from_angle_and_axis(euler[1], axis[order[1]])
    q2 = from_angle_and_axis(euler[2], axis[order[2]])

    return mul(q0, mul(q1, q2))


================================================
FILE: anitaichi/transform/ti_vec.py
================================================
import taichi as ti
import taichi.math as tm

@ti.func
def cross(v, w):
    # Cross product of two 3-vectors.
    out0 = v.y * w.z - v.z * w.y
    out1 = v.z * w.x - v.x * w.z
    out2 = v.x * w.y - v.y * w.x
    return ti.Vector([out0, out1, out2])

@ti.func
def dot(v, w):
    # Scalar dot product. The previous version returned the component-wise
    # product vector (v.x*w.x, v.y*w.y, v.z*w.z) instead of their sum,
    # which is not a dot product; no in-repo callers relied on that.
    return v.x * w.x + v.y * w.y + v.z * w.z

@ti.func
def length(v):
    # Euclidean norm. The previous version added (rather than squared) the
    # y and z components: sqrt(x*x + y + y + z + z).
    return tm.sqrt(v.x * v.x + v.y * v.y + v.z * v.z)

@ti.func
def normalize(v):
    # Unit-length vector via taichi.math.
    return tm.normalize(v)

================================================
FILE: anitaichi/transform/vec.py
================================================
from __future__ import annotations

import numpy as np

# Cross product of 3D vectors along the last axis (broadcasts like numpy).
def cross(a, b):
    ax, ay, az = a[..., 0:1], a[..., 1:2], a[..., 2:3]
    bx, by, bz = b[..., 0:1], b[..., 1:2], b[..., 2:3]
    return np.concatenate(
        [ay * bz - az * by, az * bx - ax * bz, ax * by - ay * bx], axis=-1)

def dot(a, b, keepdims=False):
    """Inner product along the last axis."""
    return np.sum(a * b, axis=-1, keepdims=keepdims)

def length(v, keepdims=False):
    """Euclidean norm along the last axis."""
    return np.linalg.norm(v, axis=-1, keepdims=keepdims)

def normalize(v):
    """Scale vectors to unit length; zero vectors get a tiny substitute
    denominator instead of dividing by zero."""
    norms = length(v, keepdims=True)
    safe = np.where(norms == 0, 1e-10, norms)
    return v / safe



================================================
FILE: blender/fbx2bvh.py
================================================


================================================
FILE: configs/DeepLearning/LMM.toml
================================================
# Common settings
exp_name = "original"

# device settings
device = "cuda"
gpus = [0]

# Which model to train? One of: ["all", "decompressor", "stepper", "projector"]
train_model = "all"

# Whether preprocessing is required (if false, load preprocessed data from processed_dir)
need_preprocess = true
# processed data dir
processed_dir = "model/LMM/preprocessed"
processed_file_name = "dataset.pkl"

# saved files settings
checkpoint_dir = "model/LMM/checkpoints"
save_config_dir = "model/LMM/configs"

# random seed
seed = 0

# Dataset settings
[dataset]
  # dataset dir
  dir = "data/lafan1"
  files = ["pushAndStumble1_subject5.bvh", "run1_subject5.bvh", "walk1_subject5.bvh"]
  starts = [194, 90, 80]
  ends = [351, 7086, 7791]
  ignore_end = 20
  # matching features weights. (for decompressor)
  # TBD: Specify feature that is used.
  # [w_pos_foot, w_vel_foot, w_vel_hips, w_traj_pos, w_traj_dir]
  feat_weights = [0.75, 1.0, 1.0, 1.0, 1.5]
  [dataset.train]
  [dataset.val]
  [dataset.test]

# Model settings
[decompressor]
  model = "mlp"
  [decompressor.train]
    max_iter = 300000
    log_freq = 100
    save_freq = 50000

    num_epochs = 10
    batch_size = 32
    [decompressor.train.loss]
      losses = ["mse"]
      weights = [1.0]
    [decompressor.train.optim]
      method = "radam"
      lr = 0.001
      momentum = 0.9
      betas = [0.5, 0.999]

[stepper]
  model = "mlp"
  [stepper.train]
    max_iter = 300000
    log_freq = 100
    save_freq = 50000
    [stepper.train.loss]
      losses = ["mse"]
      weights = [1.0]
    [stepper.train.optim]
      method = "radam"
      lr = 0.00003
      betas = [0.5, 0.999]

[projector]
  model = "mlp"
  [projector.train]
    max_iter = 300000
    log_freq = 100
    save_freq = 50000
    [projector.train.loss]
      losses = ["mse"]
      weights = [1.0]
    [projector.train.optim]
      method = "radam"
      lr = 0.00003
      betas = [0.5, 0.999]


================================================
FILE: data/data.md
================================================
# data
Please place your data here.  
In my case, I create symbolic links for datasets as shown below.

```
data/
|_ aistpp/
|_ amass/
|_ lafan1/
|_ smpl/
    |_ female/
        |_ model.pkl
    |_ male/
        |_ model.pkl
    |_ neutral/
        |_ model.pkl
|_smplh/
    |_ female/
        |_ model.npz
    |_ male/
        |_ model.npz
    |_ neutral/
        |_ model.npz
|_smplx/
    |_ female/
        |_ model.npz
    |_ male/
        |_ model.npz
    |_ neutral/
        |_ model.npz
```

================================================
FILE: model/LMM/LMM.md
================================================
<div align="center">

# Learned Motion Matching

[**original repo**](https://github.com/orangeduck/Motion-Matching)  
[**paper**](https://dl.acm.org/doi/abs/10.1145/3386569.3392440)  

</div>


Unofficial JAX implementation of LMM. To train, simply run
```bash
python model/LMM/train.py
```
If you want to change experimental settings, please change settings on [`configs/DeepLearning/LMM.toml`](../../configs/DeepLearning/LMM.toml).

================================================
FILE: model/LMM/decompressor.py
================================================
from __future__ import annotations

import jax.numpy as jnp
from flax import linen as nn

class Compressor(nn.Module):
    """MLP that maps per-frame features to a compressed code.

    Input of shape (batch, window, features) is flattened to
    (batch * window, features), passed through three ELU-activated
    hidden layers of width `hidden_size`, projected to `output_size`,
    and reshaped back to (batch, window, output_size).
    """
    output_size: int
    hidden_size: int = 512

    @nn.compact
    def __call__(self, x):
        batch, window = x.shape[0], x.shape[1]
        h = x.reshape([batch * window, -1])
        # Three hidden layers; layer parameters are created in call order.
        for _layer in range(3):
            h = nn.elu(nn.Dense(self.hidden_size)(h))
        out = nn.Dense(self.output_size)(h)
        return out.reshape([batch, window, -1])


class Decompressor(nn.Module):
    """Two-layer MLP that expands a compressed code back to pose features.

    Flattens (batch, window, features) to (batch * window, features),
    applies one ELU-activated hidden layer followed by a linear output
    layer, and restores the (batch, window, output_size) layout.
    """
    output_size: int
    hidden_size: int = 512

    @nn.compact
    def __call__(self, x):
        batch, window = x.shape[0], x.shape[1]
        flat = x.reshape([batch * window, -1])
        hidden = nn.elu(nn.Dense(self.hidden_size)(flat))
        out = nn.Dense(self.output_size)(hidden)
        return out.reshape([batch, window, -1])



================================================
FILE: model/LMM/preprocessing.py
================================================
from __future__ import annotations

from pathlib import Path
import pickle

from anim import bvh
from anim.animation import Animation
from anim.motion_matching.mm import create_matching_features

def preprocess_motion_data(BASEPATH: Path, cfg, save_dir: Path):
    """Load the BVH clips listed in `cfg` as `Animation` objects.

    NOTE(review): this function is unfinished — it builds `anims` but never
    saves anything to `save_dir` and never returns a value, while `Config`
    in model/LMM/setting.py assigns its result to `self.dataset` (which
    therefore ends up None). `create_matching_features` is imported at the
    top of this module but not used here yet.
    """
    dataset_dir = BASEPATH / cfg.dir
    anims = []
    # Each file comes with a (start, end) frame range to trim to.
    for file, start, end in zip(cfg.files, cfg.starts, cfg.ends):
        anims.append(bvh.load(dataset_dir / file, start, end))
    

================================================
FILE: model/LMM/projector.py
================================================
from __future__ import annotations

import jax.numpy as jnp
from flax import linen as nn

class Projector(nn.Module):
    """MLP projecting query features onto the learned feature manifold.

    Same shape contract as the other LMM networks: (batch, window, feat)
    in, (batch, window, output_size) out, with three ELU hidden layers.
    """
    output_size: int
    hidden_size: int = 512

    @nn.compact
    def __call__(self, x):
        nb, nw = x.shape[0], x.shape[1]
        feats = x.reshape([nb * nw, -1])
        for _ in range(3):
            feats = nn.elu(nn.Dense(self.hidden_size)(feats))
        projected = nn.Dense(self.output_size)(feats)
        return projected.reshape([nb, nw, -1])





================================================
FILE: model/LMM/setting.py
================================================
"""
setting.py
Device setting etc..
"""
from __future__ import annotations

import sys
from pathlib import Path
import shutil

from util.load import pickle_load, toml_load
from model.LMM.preprocessing import preprocess_motion_data

class Config:
    """Experiment configuration for LMM training.

    Loads a TOML settings file (falling back to the bundled default on any
    error), applies device/GPU settings, loads or preprocesses the dataset,
    and prepares the checkpoint and config-backup folders.
    """

    def __init__(self, BASEPATH: Path, setting_path: str) -> None:
        # Load a setting file: prefer the path relative to BASEPATH, then the
        # raw path; on any failure fall back to the default LMM config.
        try:
            if (BASEPATH / setting_path).exists():
                setting_path = BASEPATH / setting_path
                setting = toml_load(setting_path)
            else:
                setting = toml_load(setting_path)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed.
            setting_path = BASEPATH / "configs/DeepLearning/LMM.toml"
            setting = toml_load(setting_path)
        sys.stdout.write(f"Config file: {str(setting_path)}\n")

        self.setting = setting
        self.BASEPATH = BASEPATH
        self.seed = setting.seed

        # Device settings (gpu or cpu).
        self.device = setting.device
        # Bug fix: the original condition `self.device == "gpu" or "cuda"`
        # was always truthy ("cuda" is a non-empty string), so GPU setup ran
        # even for device == "cpu". Use membership instead.
        if self.device in ("gpu", "cuda"):
            if isinstance(setting.gpus, list):
                if len(setting.gpus) > 1:
                    # Multiple GPUs requested.
                    self.gpus = setting.gpus
                    self.multi_gpus_setting()
                else:
                    # Single GPU given as a one-element list.
                    self.gpus = setting.gpus[0]
                    self.single_gpu_setting()
            else:
                # Single GPU given as a scalar id.
                self.gpus = setting.gpus
                self.single_gpu_setting()

        # Dataset settings.
        # Bug fix: `save_dir` is needed on both branches below; the original
        # defined it only inside the preprocessing branch, so loading an
        # already-processed dataset raised NameError.
        save_dir = BASEPATH / setting.processed_dir
        if setting.need_preprocess:
            if not save_dir.exists():
                save_dir.mkdir(parents=True)
                sys.stdout.write(f"Create preprocessed folder at: {str(save_dir)}\n")
            self.dataset = preprocess_motion_data(BASEPATH, setting.dataset, save_dir / setting.processed_file_name)
        else:
            self.dataset = pickle_load(save_dir / setting.processed_file_name)

        # Save experiment settings.
        ckpt_dir = BASEPATH / setting.checkpoint_dir / setting.exp_name
        # TBD: if exists, load checkpoints and resume the experiment.
        if not ckpt_dir.exists():
            ckpt_dir.mkdir(parents=True)
            sys.stdout.write(f"Create checkpoint folder at: {str(ckpt_dir)}\n")
        self.ckpt_dir = ckpt_dir

        # Back up the active config next to the experiment outputs.
        save_cfg_dir = BASEPATH / setting.save_config_dir
        save_cfg_path = save_cfg_dir / f"{setting.exp_name}.toml"
        if not save_cfg_dir.exists():
            save_cfg_dir.mkdir(parents=True)
            sys.stdout.write(f"Create config folder at: {str(save_cfg_dir)}\n")
        shutil.copy(str(setting_path), str(save_cfg_path))

    def single_gpu_setting(self):
        # Placeholder: single-GPU environment setup not implemented yet.
        pass

    def multi_gpus_setting(self):
        # Placeholder: multi-GPU environment setup not implemented yet.
        pass

================================================
FILE: model/LMM/stepper.py
================================================
from __future__ import annotations

import jax.numpy as jnp
from flax import linen as nn

class Stepper(nn.Module):
    """MLP that advances the latent state by one step.

    Accepts (batch, window, features), flattens the first two axes, runs
    three ELU hidden layers plus a linear head, and restores the
    (batch, window, output_size) layout.
    """
    output_size: int
    hidden_size: int = 512

    @nn.compact
    def __call__(self, x):
        n_batch, n_window = x.shape[0], x.shape[1]
        state = x.reshape([n_batch * n_window, -1])
        for _i in range(3):
            state = nn.elu(nn.Dense(self.hidden_size)(state))
        stepped = nn.Dense(self.output_size)(state)
        return stepped.reshape([n_batch, n_window, -1])

================================================
FILE: model/LMM/test.py
================================================


================================================
FILE: model/LMM/train.py
================================================
"""
train.py
You can define your configulation file like below.
```
python train.py configs/DeepLearning/LMM.toml
```
"""
from __future__ import annotations

import sys
from pathlib import Path
import numpy as np
import jax.numpy as jnp
from jax import jit, partial, random
from flax import linen as nn
from flax.training import train_state
import optax

BASEPATH = Path(__file__).resolve().parent.parent.parent
sys.path.append(str(BASEPATH))

from model.LMM.setting import Config
from model.LMM.decompressor import Compressor, Decompressor
from model.LMM.stepper import Stepper
from model.LMM.projector import Projector

def main(cfg: Config):
    """Dispatch training runs according to `cfg.setting.train_model`.

    Accepts "all" (train every network) or one of "decompressor",
    "stepper", "projector"; anything else raises ValueError.
    """
    ckpt_dir = cfg.ckpt_dir
    target = cfg.setting.train_model

    if target == "all":
        sys.stdout.write("Train models: decompressor, stepper, projector\n")
        train_decompressor(cfg.setting.decompressor, ckpt_dir)
        train_stepper(cfg.setting.stepper, ckpt_dir)
        train_projector(cfg.setting.projector, ckpt_dir)
    elif target == "decompressor":
        sys.stdout.write("Train model: decompressor\n")
        train_decompressor(cfg.setting.decompressor, ckpt_dir)
    elif target == "stepper":
        sys.stdout.write("Train model: stepper\n")
        train_stepper(cfg.setting.stepper, ckpt_dir)
    elif target == "projector":
        sys.stdout.write("Train model: projector\n")
        train_projector(cfg.setting.projector, ckpt_dir)
    else:
        raise ValueError("Invalid value. Please check LMM.toml.")

def train_decompressor(cfg, ckpt_dir):
    # TODO: training loop for the compressor/decompressor pair (not yet implemented).
    pass

def train_stepper(cfg, ckpt_dir):
    # TODO: training loop for the stepper network (not yet implemented).
    pass

def train_projector(cfg, ckpt_dir):
    # TODO: training loop for the projector network (not yet implemented).
    pass

if __name__ == "__main__":
    # First CLI argument: path to the TOML settings file (see module docstring).
    setting_path = sys.argv[1]
    cfg = Config(BASEPATH, setting_path)
    
    main(cfg)

================================================
FILE: test/bvh_viewer.py
================================================
from __future__ import annotations

import sys
from pathlib import Path

import numpy as np
import taichi as ti

BASEPATH = Path(__file__).resolve().parent.parent
sys.path.append(str(BASEPATH))

from anitaichi.animation.anim_loader import bvh

@ti.kernel
def get_frame(
    gposs: ti.types.ndarray(),
    cur_pose: ti.template(),
    frame: int
):
    """Copy joint positions of `frame` from `gposs` into `cur_pose`.

    Positions are divided by 100 — presumably a cm -> m conversion for
    display (the same factor is used in update_camera); TODO confirm units.
    """
    for i in cur_pose:
        cur_pose[i] = gposs[frame, i] / 100

def update_camera(camera, anim, frame):
    """Place the camera 10 units behind the root joint and aim at it.

    NOTE(review): relies on the module-level `scene` created in the
    __main__ block below; callable only after that global exists.
    """
    root_pos = anim.trans[frame] / 100  # same 1/100 scaling as get_frame
    camera_pos = root_pos + np.array([0, 0, -10])

    camera.position(*camera_pos)
    camera.lookat(*root_pos)
    scene.set_camera(camera)

if __name__ == "__main__":
    # Simple BVH playback viewer: renders the skeleton joints as particles
    # and follows the root with the camera, looping the clip forever.
    # Starting taichi on CUDA.
    ti.init(arch=ti.cuda)
    
    window = ti.ui.Window("bvh viewer", (1024, 1024), vsync=True)
    gui = window.get_gui()
    canvas = window.get_canvas()
    canvas.set_background_color((1, 1, 1))
    scene = ti.ui.Scene()
    camera = ti.ui.Camera()

    bvh_file = BASEPATH / "data/aiming1_subject1.bvh"
    anim = bvh.load(bvh_file)
    # ti_fk() runs forward kinematics; global_positions_field is read below.
    anim.ti_fk()
    cur_pose = ti.Vector.field(3, dtype=float, shape=len(anim.skel))
    frame = 0

    # Render loop: one animation frame per window frame, wrapping at the end.
    while window.running:
        get_frame(anim.global_positions_field, cur_pose, frame)
        scene.particles(cur_pose, radius=0.05, color=(1.0, 0.0, 0.0))
        scene.ambient_light((0.5, 0.5, 0.5))
        update_camera(camera, anim, frame)

        frame += 1
        if frame == len(anim):
            frame = 0
        
        canvas.scene(scene)
        window.show()

================================================
FILE: test/calc_matching_time.py
================================================
# Calculate time for motion matching.
import sys
from pathlib import Path
import timeit

BASEPATH = Path(__file__).resolve().parent.parent
sys.path.append(str(BASEPATH))

from anim import bvh
from anim.motion_matching.database import Database
from anim.motion_matching.mm import create_matching_database, motion_matching_search

def create_database(bvh_dir: Path, files: list, starts: list, ends: list) -> Database:
    """Load each BVH clip trimmed to its [start, end] range and wrap them
    in a `Database`. `bvh_dir` may be a `str` or a `Path`."""
    if isinstance(bvh_dir, str):
        bvh_dir = Path(bvh_dir)
    clips = [
        bvh.load(bvh_dir / name, first, last)
        for name, first, last in zip(files, starts, ends)
    ]
    return Database(clips)

# Benchmark: average wall-clock time of one motion-matching search over
# three LaFAN1 clips, each trimmed to its [start, end] frame range.
bvh_dir = BASEPATH / "data/lafan1"

files = ["pushAndStumble1_subject5.bvh", "run1_subject5.bvh", "walk1_subject5.bvh"]
starts = [194, 90, 80]
ends = [351, 7086, 7791]

matching_method = "kd-tree"

db = create_database(bvh_dir, files, starts, ends)
mdb = create_matching_database(
    db=db, method=matching_method,
    w_pos_foot=0.75, w_vel_foot=1, w_vel_hips=1, w_traj_pos=1, w_traj_dir=1.5,
    ignore_end=20, dense_bound_size=16, sparse_bound_size=64,
)

cur_idx = 100
# Query: the feature row whose database index is 1320.
query = mdb.features[mdb.indices.index(1320)]

# NOTE(review): `iter` shadows the built-in; harmless here but worth renaming.
# Also, this call passes 5 positional args while test/path_following.py calls
# motion_matching_search with 4 — confirm the signature in anim/motion_matching/mm.py.
iter = 100
score = timeit.timeit(
    "motion_matching_search(cur_idx, matching_method, mdb, query, False)", 
    globals=globals(), 
    number=iter,
)
# Average seconds per search.
print(score / iter)

================================================
FILE: test/character_controller.py
================================================
# TBD

import sys
from pathlib import Path
import pickle

BASEPATH = Path(__file__).resolve().parent.parent
sys.path.append(str(BASEPATH))

from anim import bvh
from anim.motion_matching.database import Database
from anim.motion_matching.mm import create_matching_database, motion_matching_search

def create_database(bvh_dir: Path, files: list, starts: list, ends: list) -> Database:
    """Build a `Database` from BVH files, each trimmed to its frame range."""
    root = Path(bvh_dir) if isinstance(bvh_dir, str) else bvh_dir
    loaded = []
    for name, begin, finish in zip(files, starts, ends):
        loaded.append(bvh.load(root / name, begin, finish))
    return Database(loaded)

def main():
    """Prototype character controller built on motion matching (TBD).

    Currently builds the databases and sketches a single iteration of the
    eventual per-frame control loop; pose/simulation/camera updates are
    still placeholders.
    """
    # init configs (this will replace to a yaml file).
    matching_method = "aabb"
    ignore_end = 20

    bvh_dir = BASEPATH / "data/lafan1"
    files = ["pushAndStumble1_subject5.bvh", "run1_subject5.bvh", "walk1_subject5.bvh"]
    starts = [194, 90, 80]
    ends = [351, 7086, 7791]

    load_database = False
    save_database = True

    # Seconds between forced matching searches.
    search_time = 0.1
    search_timer = search_time

    desired_velocity_change_threshold = 50.0
    desired_rotation_change_threshold = 50.0
    desired_gait = 0.0
    desired_gait_velocity = 0.0

    simulation_velocity_halflife = 0.27
    simulation_rotation_halflife = 0.27

    simulation_run_fwrd_speed = 4.0
    simulation_run_side_speed = 3.0
    simulation_run_back_speed = 2.5
    simulation_walk_fwrd_speed = 1.75
    simulation_walk_side_speed = 1.5
    simulation_walk_back_speed = 1.25

    dt = 1 / 60

    # Create `Database` and `MatchingDatabase`.
    db = create_database(bvh_dir, files, starts, ends)
    mdb = create_matching_database(
        db=db, method=matching_method,
        w_pos_foot=0.75, w_vel_foot=1, w_vel_hips=1, w_traj_pos=1, w_traj_dir=1.5,
        dense_bound_size=16, sparse_bound_size=64,
    )
    if save_database:
        with open(BASEPATH / "data/motion_matching/db.pkl", "wb") as f:
            pickle.dump(db, f)
        # Bug fix: mdb was previously dumped to db.pkl as well, overwriting
        # the database saved just above.
        with open(BASEPATH / "data/motion_matching/mdb.pkl", "wb") as f:
            pickle.dump(mdb, f)

    # Initialize parameters
    frame_idx = 0

    # Init the pose

    # Init the simulation (future direction etc.)

    # Init the camera

    # Everything below is the body of the (future) per-frame loop.

    # Generate query for motion matching.
    query = None

    # Check if we reached the end of the current anim.
    if frame_idx in db.ends:
        end_of_anim = True
        cur_idx = -1
    else:
        end_of_anim = False
        cur_idx = frame_idx

    if search_timer <= 0 or end_of_anim:
        # Motion Matching Search!
        # NOTE(review): this call passes (db, mdb, query, ignore_end) while
        # test/path_following.py calls motion_matching_search(idx, method,
        # mdb, query) — confirm the signature in anim/motion_matching/mm.py.
        cur_idx = motion_matching_search(cur_idx, matching_method, db, mdb, query, ignore_end)

        if cur_idx != frame_idx:
            frame_idx = cur_idx

        # Bug fix: reset the countdown timer — the original assignment was
        # reversed (`search_time = search_timer`), so the timer never reset.
        search_timer = search_time

    search_timer -= dt
    frame_idx += 1

    # Update the next pose

    # Update the simulation

    # Update the camera

    # Drawing


if __name__ == "__main__":
    main()


================================================
FILE: test/inverse_kinematics.py
================================================


================================================
FILE: test/path_following.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
import pickle
import numpy as np

BASEPATH = Path(__file__).resolve().parent.parent
sys.path.append(str(BASEPATH))

from util.load import pickle_load
from util import quat
from anim import bvh
from anim.animation import Animation
from anim.motion_matching.database import Database
from anim.motion_matching.mm import create_matching_database, motion_matching_search

def create_database(bvh_dir: Path, files: list, starts: list, ends: list) -> Database:
    """Load the listed BVH clips (trimmed to their frame ranges) into a Database."""
    base = bvh_dir if not isinstance(bvh_dir, str) else Path(bvh_dir)
    animations = [
        bvh.load(base / filename, s, e)
        for filename, s, e in zip(files, starts, ends)
    ]
    return Database(animations)

def create_query(db: Database, idx: int, traj_feats: np.ndarray) -> np.ndarray:
    """Assemble the matching-feature query for frame `idx`.

    Concatenates character-space foot positions, foot/hips velocities,
    and the externally supplied trajectory features.
    """
    names = db.skel.names
    lf = names.index("LeftFoot")
    rf = names.index("RightFoot")
    hips = names.index("Hips")

    foot_positions = db.cpos[idx, [lf, rf]].ravel()
    joint_velocities = db.cposvel[idx, [lf, rf, hips]].ravel()
    return np.concatenate([foot_positions, joint_velocities, traj_feats])

def create_circle_traj():
    """Sample a circular path of radius 500 shifted to start at the origin.

    Returns a 12-element array: the (x, z) positions at samples 20, 40 and
    60 of a 100-sample circle, followed by the unit tangent directions
    (dir_x, dir_z) at the same samples.
    """
    angles = np.linspace(0, np.pi * 2, 100)
    pos_x = 500 * np.cos(angles)
    pos_z = 500 * np.sin(angles)
    # Tangent of the circle is (-z, x); normalise it to unit length.
    tan_x = -pos_z
    tan_z = pos_x
    length = np.sqrt(tan_x ** 2 + tan_z ** 2)
    tan_x = tan_x / length
    tan_z = tan_z / length
    # Shift so the path starts at the origin (x(0) = 500 -> 0).
    pos_x = pos_x - 500

    samples = (20, 40, 60)
    positions = []
    tangents = []
    for i in samples:
        positions += [pos_x[i], pos_z[i]]
        tangents += [tan_x[i], tan_z[i]]
    return np.array(positions + tangents)
    

def create_animation_from_idxs(db: Database, anim_frames: list[int]) -> Animation:
    """Build an `Animation` that replays `anim_frames` along the circle path.

    For each frame index the root rotation is replaced by the path-tangent
    yaw composed with the character-space root rotation, and the root
    position is snapped onto the 100-sample circle (radius 500, shifted to
    start at the origin), wrapping every 100 output frames.
    """
    quats = []
    trans = []
    t = np.linspace(0, np.pi * 2, 100)
    x = 500 * np.cos(t) - 500
    z = 500 * np.sin(t)

    for i, frame in enumerate(anim_frames):
        c_root_rot, root_pos = db.croot(frame)
        # Bug fix: copy before mutating — `db.quats[frame]` is a view into
        # the database arrays, so assigning rot[0] in place corrupted the
        # source data for later frames.
        rot = db.quats[frame].copy()
        # NOTE(review): root_pos comes from db.croot(); copied defensively in
        # case it is also a view — confirm against Database.croot.
        root_pos = np.array(root_pos, copy=True)
        i = i % 100  # wrap around the 100-sample circle
        rot[0] = quat.mul(quat.from_angle_axis(t[i], [0, 1, 0]), c_root_rot)
        root_pos[0] = x[i]
        root_pos[2] = z[i]

        quats.append(rot)
        trans.append(root_pos)

    quats = np.array(quats)
    trans = np.array(trans)
    return Animation(db.skel, quats, trans, db.fps)

def main():
    """Offline path-following demo: run motion matching against a fixed
    circular trajectory and save the resulting animation as a BVH file."""
    matching_method = "kd-tree"
    
    bvh_dir = BASEPATH / "data/lafan1"
    files = ["pushAndStumble1_subject5.bvh", "run1_subject5.bvh", "walk1_subject5.bvh"]
    starts = [194, 90, 80]
    ends = [351, 7086, 7791]
    
    load_database = False
    save_database = True
    
    # Force a new matching search every `search_time` frames.
    search_time = 5
    search_timer = 0
    
    # Create `Database` and `MatchingDatabase`.
    if load_database:
        db = pickle_load(BASEPATH / "data/db.pkl")
        mdb = pickle_load(BASEPATH / "data/mdb.pkl")
    else:
        db = create_database(bvh_dir, files, starts, ends)
        mdb = create_matching_database(
            db=db, method=matching_method,
            w_pos_foot=0.75, w_vel_foot=1, w_vel_hips=1, w_traj_pos=1, w_traj_dir=1.5,
            ignore_end=20, dense_bound_size=16, sparse_bound_size=64,
        )
    
    if save_database:
        with open(BASEPATH / "data/db.pkl", "wb") as f:
            pickle.dump(db, f)
        with open(BASEPATH / "data/mdb.pkl", "wb") as f:
            pickle.dump(mdb, f)

    # Initialize parameters
    frame_idx = 0
    frame_sum = 0
    animation_length = 120 # frame
    anim_frames = [] # animated frames will append here.
    
    # We define the path that moves forward 3m every 20 frames.
    #               20,        40,       60    frames ahead.
    # path_poss = [ 200, 0,    400, 0,   600, 0 ]
    # path_dirs = [  0, 1,      0, 1,     0, 1  ]
    # traj_feats = np.concatenate([path_poss, path_dirs])
    
    # Create path (circle)
    traj_feats = create_circle_traj()
    
    # initial query
    query = create_query(db, frame_idx, traj_feats)
    
    # Animation loop
    while frame_sum < animation_length:
        # Check if we reached the end of the current anim.
        if frame_idx in db.ends: 
            end_of_anim = True
            cur_idx = -1
        else: 
            end_of_anim = False
            cur_idx = frame_idx
        
        if search_timer <= 0 or end_of_anim:
            # Motion Matching Search!
            cur_idx = motion_matching_search(cur_idx, matching_method, mdb, query)
            
            # Jump playback to the matched frame.
            if cur_idx != frame_idx:
                frame_idx = cur_idx

            search_timer = search_time
        
        # update frame
        search_timer -= 1
        frame_idx += 1
        frame_sum += 1
        anim_frames.append(frame_idx)
        
        # Create new query
        query = create_query(db, frame_idx, traj_feats)
        
        sys.stdout.write("Processed frame: {}\n".format(frame_sum))
    
    # Save animation.
    print("Creating animation..")
    sim_anim = create_animation_from_idxs(db, anim_frames)
    bvh.save(BASEPATH / "data/sim_motion.bvh", sim_anim)
    print("saved at {}".format(str(BASEPATH / "data/sim_motion.bvh")))

if __name__ == "__main__":
    main()


================================================
FILE: test/plotting.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
import numpy as np
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import matplotlib.ticker as ticker
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
import tkinter as tk

BASEPATH = Path(__file__).resolve().parent.parent
sys.path.append(str(BASEPATH))

from anim import bvh
from anim.motion_matching.database import Database
from anim.motion_matching.mm import create_matching_database

if __name__ == "__main__":
    # Visualize a motion-matching database: global pose, character-space
    # pose, and the matching-feature vector, animated frame by frame.
    bvh_folder = BASEPATH / "data"
    bvh_file = "sim_motion.bvh"
    start = 90
    # end = 7086
    end = 2090
    save_fig = True
    
    anim = bvh.load(bvh_folder / bvh_file, start, end)
    db = Database([anim])
    mdb = create_matching_database(
        db=db, method="brute-force",
        w_pos_foot=0.75, w_vel_foot=1, w_vel_hips=1, w_traj_pos=1, w_traj_dir=1.5,
        ignore_end=20, dense_bound_size=16, sparse_bound_size=64,
    )
    # Symmetric x-axis limit for the feature bar chart.
    bar_lim = np.max(np.abs(mdb.features))
    # 1/100 scaling — presumably cm -> m for plotting; TODO confirm units.
    gposs = db.gpos / 100
    cposs = db.cpos / 100
    parents = db.parents
    
    proj_root_poss = db.proj_root_pos(remove_vertical=True)
    root_dirs = db.root_direction(remove_vertical=True)
    future_20_gposs = db.future_traj_poss(20, cspace=False)
    future_20_gdirs = db.future_traj_dirs(20, cspace=False)
    future_20_cposs = db.future_traj_poss(20, cspace=True)
    future_20_cdirs = db.future_traj_dirs(20, cspace=True)
    
    fig = plt.figure(figsize=(7, 7))
    axg = fig.add_subplot(221, projection="3d")
    axc = fig.add_subplot(223, projection="3d")
    axf = fig.add_subplot(122)
    
    def update(index: int):
        """Redraw all three panels for animation frame `index`."""
        gposes = gposs[index]
        cposes = cposs[index]
        features = mdb.features[index]
        
        proj_root_pos = proj_root_poss[index] / 100
        root_dir = root_dirs[index]
        
        future_20_gpos = future_20_gposs[index] / 100
        future_20_gdir = future_20_gdirs[index]
        future_20_cpos = future_20_cposs[index] / 100
        future_20_cdir = future_20_cdirs[index]
        
        # global space viewer settings
        axg.cla()
        axg.set_title("global")
        axg.grid(axis="y")
        axg.set_yticklabels([])
        axg.yaxis.pane.set_facecolor("gray")
        axg.yaxis.set_major_locator(ticker.NullLocator())
        axg.yaxis.set_minor_locator(ticker.NullLocator())
        axg.view_init(elev=120, azim=-90)
        axg.set_xlim3d(proj_root_pos[0]-3, proj_root_pos[0]+3)
        axg.set_ylim3d(0, 3)
        axg.set_zlim3d(proj_root_pos[1]-3, proj_root_pos[1]+3)
        axg.set_box_aspect((1, 0.5, 1))
        axg.scatter(gposes[:, 0], gposes[:, 1], gposes[:, 2], c="red", s=3, label="joints")
        # Draw bones as parent -> child segments.
        for i, parent in enumerate(parents):
            if parent != -1:
                axg.plot(
                    [gposes[parent,0], gposes[i,0]],
                    [gposes[parent,1], gposes[i,1]],
                    [gposes[parent,2], gposes[i,2]], 
                    c="red", alpha=0.8
                )
        axg.plot(
            [proj_root_pos[0], proj_root_pos[0]+root_dir[0]],
            [0, 0],
            [proj_root_pos[1], proj_root_pos[1]+root_dir[1]],
            label="root direction", c="blue"
        )
        axg.plot(
            [future_20_gpos[0], future_20_gpos[0]+future_20_gdir[0]],
            [0, 0],
            [future_20_gpos[1], future_20_gpos[1]+future_20_gdir[1]],
            label="future direction 20", c="green"
        )
        axg.legend(bbox_to_anchor=(0, 1), loc="upper left", borderaxespad=0)
        
        # character space viewer settings
        axc.cla()
        axc.set_title("character")
        axc.grid(axis="y")
        axc.set_yticklabels([])
        axc.yaxis.pane.set_facecolor("gray")
        axc.yaxis.set_major_locator(ticker.NullLocator())
        axc.yaxis.set_minor_locator(ticker.NullLocator())
        axc.view_init(elev=120, azim=-90)
        axc.set_xlim3d(-3, 3)
        axc.set_ylim3d(0, 3)
        axc.set_zlim3d(-3, 3)
        axc.set_box_aspect((1,0.5,1))
        axc.scatter(cposes[:, 0], cposes[:, 1], cposes[:, 2], c="red", s=3, label="joints")
        for i, parent in enumerate(parents):
            if parent != -1:
                axc.plot(
                    [cposes[parent,0], cposes[i,0]],
                    [cposes[parent,1], cposes[i,1]],
                    [cposes[parent,2], cposes[i,2]], 
                    c="red", alpha=0.8
                )
        # In character space the root direction is the fixed +z axis.
        axc.plot([0, 0], [0, 0], [0, 1], label="root direction", c="blue")
        axc.plot(
            [future_20_cpos[0], future_20_cpos[0]+future_20_cdir[0]],
            [0, 0],
            [future_20_cpos[1], future_20_cpos[1]+future_20_cdir[1]],
            label="future direction 20", c="green"
        )
        
        # features bar settings
        axf.cla()
        axf.set_title("matching features")
        axf.grid(axis="x")
        axf.set_xlim(-bar_lim, bar_lim)
        axf.barh(np.arange(len(features)), features)
        
        fig.suptitle("frame: {}".format(index+1))
        sys.stdout.write("processed frame: {}\n".format(index+1))
        
    ani = FuncAnimation(fig, update, frames=np.arange(len(db)), interval=1000/anim.fps, repeat=False)
    
    if save_fig:
        # NOTE(review): assumes BASEPATH/"figs" already exists — ani.save
        # does not create parent directories.
        fig_path = (BASEPATH / "figs" / bvh_file).with_suffix(".gif")
        ani.save(fig_path)
        sys.stdout.write("Saved figure as " + str(fig_path) + "\n")
    else:
        plt.show()

================================================
FILE: test/plotting_cspace.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
import numpy as np
import matplotlib.ticker as ticker
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

BASEPATH = Path(__file__).resolve().parent.parent
sys.path.append(str(BASEPATH))

from anim import bvh

if __name__ == "__main__":
    # Animate character-space joint positions plus the 20-frames-ahead
    # trajectory direction for one BVH clip.
    bvh_folder = BASEPATH / "data"
    bvh_file = "sim_motion.bvh"
    start, end = None, None
    elev, azim = 120, -90
    save_fig = False
    
    anim = bvh.load(bvh_folder / bvh_file, start, end)
    # 1/100 scaling — presumably cm -> m for plotting; TODO confirm units.
    poss = anim.cpos / 100
    parents = anim.parents
    
    future_20_poss = anim.future_traj_poss(20, cspace=True)
    future_20_dirs = anim.future_traj_dirs(20, cspace=True)
    
    fig = plt.figure(figsize=(7, 7))
    ax = fig.add_subplot(111, projection="3d")
    
    def update(index: int):
        """Redraw the character-space pose for animation frame `index`."""
        poses = poss[index]
        
        future_20_pos = future_20_poss[index] / 100
        future_20_dir = future_20_dirs[index]
        
        ax.cla()
        ax.set_title("character") 
        ax.grid(axis="y")
        ax.set_yticklabels([])
        ax.yaxis.pane.set_facecolor("gray")
        ax.yaxis.set_major_locator(ticker.NullLocator())
        ax.yaxis.set_minor_locator(ticker.NullLocator())
        ax.view_init(elev=elev, azim=azim)
        ax.set_xlim3d(-3, 3)
        ax.set_ylim3d(0, 3)
        ax.set_zlim3d(-3, 3)
        ax.set_box_aspect((2, 1, 2))
        
        ax.scatter(poses[:, 0], poses[:, 1], poses[:, 2], c="red", label="joints")
        # Draw bones as parent -> child segments.
        for i, parent in enumerate(parents):
            if parent != -1:
                ax.plot(
                    [poses[parent,0], poses[i,0]],
                    [poses[parent,1], poses[i,1]],
                    [poses[parent,2], poses[i,2]], 
                    c="red", alpha=0.8
                )
        ax.plot(
            [future_20_pos[0], future_20_pos[0]+future_20_dir[0]],
            [0, 0],
            [future_20_pos[1], future_20_pos[1]+future_20_dir[1]],
            label="20 future direction", c="green"
        )
        ax.legend()
        fig.suptitle("frame: {}".format(index+1))
        sys.stdout.write("processed frame: {}\n".format(index+1))
    ani = FuncAnimation(fig, update, frames=np.arange(len(anim)), interval=1000/anim.fps, repeat=True)
    
    if save_fig:
        # NOTE(review): assumes BASEPATH/"figs" already exists.
        fig_path = (BASEPATH / "figs" / bvh_file).with_suffix(".gif")
        ani.save(fig_path)
    else:
        plt.show()

================================================
FILE: test/plotting_global.py
================================================
from __future__ import annotations

import sys
from pathlib import Path
import numpy as np
import matplotlib.ticker as ticker
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

BASEPATH = Path(__file__).resolve().parent.parent
sys.path.append(str(BASEPATH))

from anim import bvh

if __name__ == "__main__":
    # Animate global-space joint positions plus root direction and the
    # 20-frames-ahead trajectory direction for one BVH clip.
    bvh_folder = BASEPATH / "data"
    bvh_file = "sim_motion.bvh"
    start, end = None, None
    elev, azim = 120, -90
    save_fig = False
    
    anim = bvh.load(bvh_folder / bvh_file, start, end)
    # 1/100 scaling — presumably cm -> m for plotting; TODO confirm units.
    poss = anim.gpos / 100
    parents = anim.parents
    
    proj_root_poss = anim.proj_root_pos(remove_vertical=True)
    root_dirs = anim.root_direction(remove_vertical=True)
    future_20_poss = anim.future_traj_poss(20, cspace=False)
    future_20_dirs = anim.future_traj_dirs(20, cspace=False)
    
    fig = plt.figure(figsize=(7, 7))
    ax = fig.add_subplot(111, projection="3d")
    
    def update(index: int):
        """Redraw the global-space pose for animation frame `index`."""
        poses = poss[index]
        
        # root direction
        proj_root_pos = proj_root_poss[index] / 100
        root_dir = root_dirs[index]
        
        # future root direction
        future_20_pos = future_20_poss[index] / 100
        future_20_dir = future_20_dirs[index]
        
        # plot settings
        ax.cla()
        ax.set_title("global") 
        ax.grid(axis="y")
        ax.set_yticklabels([])
        ax.yaxis.pane.set_facecolor("gray")
        ax.yaxis.set_major_locator(ticker.NullLocator())
        ax.yaxis.set_minor_locator(ticker.NullLocator())
        ax.view_init(elev=elev, azim=azim)
        ax.set_xlim3d(proj_root_pos[0]-3, proj_root_pos[0]+3)
        ax.set_ylim3d(0, 3)
        ax.set_zlim3d(proj_root_pos[1]-3, proj_root_pos[1]+3)
        ax.set_box_aspect((2, 1, 2))
        
        ax.scatter(poses[:, 0], poses[:, 1], poses[:, 2], c="red", label="joints")
        # Draw bones as parent -> child segments.
        for i, parent in enumerate(parents):
            if parent != -1:
                ax.plot(
                    [poses[parent,0], poses[i,0]],
                    [poses[parent,1], poses[i,1]],
                    [poses[parent,2], poses[i,2]], 
                    c="red", alpha=0.8
                )
        ax.plot(
            [proj_root_pos[0], proj_root_pos[0]+root_dir[0]],
            [0, 0],
            [proj_root_pos[1], proj_root_pos[1]+root_dir[1]],
            label="root direction", c="blue"
        )
        ax.plot(
            [future_20_pos[0], future_20_pos[0]+future_20_dir[0]],
            [0, 0],
            [future_20_pos[1], future_20_pos[1]+future_20_dir[1]],
            label="20 future direction", c="green"
        )
        ax.legend()
        fig.suptitle("frame: {}".format(index+1))
        sys.stdout.write("Processed frame: {}\n".format(index+1))
    ani = FuncAnimation(fig, update, frames=np.arange(len(anim)), interval=1000/anim.fps, repeat=True)
    
    if save_fig:
        # NOTE(review): assumes BASEPATH/"figs" already exists.
        fig_path = (BASEPATH / "figs" / bvh_file).with_suffix(".gif")
        ani.save(fig_path)
        sys.stdout.write("Saved figure as " + str(fig_path) + "\n")
    else:
        plt.show()

================================================
FILE: todo.md
================================================
# TODO Works
## Motion Matching
- [x] 実装したマッチングアルゴリズムが実時間上で動くことを確認(複数回) - 現状1回あたり0.33s 
- [ ] FAISSを用いたGPUマッチングの実装 - faiss-gpuがAnacondaからインストールできない?
- [ ] [`anim/pose.py`](anim/pose.py)に`Pose`クラスを実装。関節位置などを保存する入れ子とする。
- [ ] 予め与えられたカーブからのアニメーション作成(オフライン、事前にsimulation boneの位置と向きを配列(T, pos + dir)に保存し、それに沿うようなアニメーションを作成する)
- [ ] キーボード入力に対応
- [ ] シミュレーションの作成(キーボードに対応させてsimulation boneを移動、回転)
- [ ] 将来の軌道カーブの作成(キーボード入力から作成)
- [ ] グラフィックスインターフェイスの用意(PyOpenGL or Taichi?)
- [ ] InertializationやIKを無視した、単なるポーズ再生の切り替えで動くことを確認(リアルタイム)
- [ ] ダンピングアルゴリズムの実装([`anim/blend.py`](anim/blend.py))
- [ ] FootIKを[`Animation`](anim/animation.py)に取り入れる
- [ ] FootIKや Inertializationによる自然なアニメーションの作成(リアルタイム)
- [ ] AMASSデータも使えるようにする

## その他
- [ ] Motion Blendの完全な実装
- [ ] Motion Graphsの作成(MMの機能を一部コピー)
- [ ] FullbodyIKを[`Animation`](anim/animation.py)に取り入れる(Jacobian and FABRIK)。
- [ ] Learned Motion Matchingの追実装
- [ ] Crowd animationの方策を検討
- [ ] LBSの実装


================================================
FILE: update.md
================================================
## Update 1.
1. Add reading and writing of positions by bvh.
2. Specify the forward and vertical directions.
3. Add foot contact detection.

================================================
FILE: util/dualquat.py
================================================
from __future__ import annotations

import numpy as np
from util import quat

def eye(shape: list[int], dtype=np.float32) -> np.ndarray:
    """Return identity dual quaternions (no rotation, no translation).

    The result has shape ``shape + [8]``; each entry is (1,0,0,0 | 0,0,0,0).
    """
    identity = np.zeros(list(shape) + [8], dtype=dtype)
    identity[..., 0] = 1
    return identity

def normalize(dq: np.ndarray, eps=1e-8) -> np.ndarray:
    """Normalize dual quaternions by the magnitude of their real part.

    Args:
        dq: (..., 8) dual quaternions.
        eps: substituted for zero magnitudes to avoid division by zero.

    Returns:
        (..., 8) normalized dual quaternions.
    """
    mag = quat.length(dq[..., :4])
    mag = np.where(mag == 0, eps, mag)
    # `mag` has shape dq.shape[:-1]; append a trailing axis so the division
    # broadcasts over the 8 components. The original `mag[None]` prepended a
    # leading axis, which misaligns broadcasting for any batched input
    # (sibling `quat.normalize` correctly uses `[..., None]`).
    return dq / mag[..., None]

def abs(dq: np.ndarray) -> np.ndarray:
    """Negate each half of the dual quaternion whose scalar (w) component
    is not positive.

    NOTE(review): the real and dual parts are flipped independently, which
    is not a sign flip of the dual quaternion as a whole -- confirm this is
    the intended canonicalization.
    """
    real, dual = dq[..., :4], dq[..., 4:]
    real_fixed = np.where(dq[..., 0:1] > 0.0, real, -real)
    dual_fixed = np.where(dq[..., 4:5] > 0.0, dual, -dual)
    return np.concatenate([real_fixed, dual_fixed], axis=-1)

def inv(dq: np.ndarray) -> np.ndarray:
    """Invert unit dual quaternions: (r, d) -> (r^-1, -(r^-1 d) r^-1)."""
    r, d = dq[..., :4], dq[..., 4:]
    real_inv = quat.inv(r)
    dual_inv = -quat.mul_inv(quat.inv_mul(r, d), r)
    return np.concatenate([real_inv, dual_inv], axis=-1)

def mul(x: np.ndarray, y: np.ndarray) -> np.ndarray:
    """Dual quaternion product: real = xr*yr, dual = xr*yd + xd*yr."""
    xr, xd = x[..., :4], x[..., 4:]
    yr, yd = y[..., :4], y[..., 4:]
    real = quat.mul(xr, yr)
    dual = quat.mul(xr, yd) + quat.mul(xd, yr)
    return np.concatenate([real, dual], axis=-1)

def from_trans(trans: np.ndarray) -> np.ndarray:
    """Build pure-translation dual quaternions (identity rotation).

    Args:
        trans: (..., 3) translation vectors.

    Returns:
        (..., 8) dual quaternions.
    """
    dual = np.zeros(trans.shape[:-1] + (4,))
    dual[..., 1:] = trans * 0.5
    # The original indexed `trans.shape[:,-1]`, which raises TypeError on
    # every call (tuples only support 1-D slicing); the batch shape for the
    # identity real part is `shape[:-1]`.
    return np.concatenate([quat.eye(trans.shape[:-1]), dual], axis=-1)

def from_rot(rot: np.ndarray) -> np.ndarray:
    """Build pure-rotation dual quaternions (zero translation)."""
    dual = np.zeros(rot.shape[:-1] + (4,))
    return np.concatenate([rot, dual], axis=-1)

def from_rot_and_trans(rot: np.ndarray, trans: np.ndarray) -> np.ndarray:
    """Build dual quaternions from rotations and translations.

    Args:
        rot: (..., 4) rotation quaternions (normalized internally).
        trans: (..., 3) translation vectors.
    """
    rot = quat.normalize(rot)
    pure = np.zeros(trans.shape[:-1] + (4,))
    pure[..., 1:] = trans
    # Dual part is 0.5 * t * r, where t is the translation as a pure quaternion.
    dual = quat.mul(pure, rot) * 0.5
    return np.concatenate([rot, dual], axis=-1)

def to_trans(dq: np.ndarray) -> np.ndarray:
    """Extract translation: vector part of 2 * (dual * conj(real))."""
    prod = quat.mul_inv(dq[..., 4:], dq[..., :4])
    return 2 * prod[..., 1:]

def to_rot(dq: np.ndarray) -> np.ndarray:
    """Extract the rotation quaternion (real part) of dual quaternions."""
    real = dq[..., :4]
    return real

def fk(dq: np.ndarray, parents: list[int]) -> np.ndarray:
    """Forward kinematics on local dual quaternions.

    Args:
        dq: (..., J, 8) local-space dual quaternions per joint.
        parents: parent index per joint; joint 0 is the root.

    Returns:
        (..., J, 8) global-space dual quaternions.
    """
    global_dq = [dq[..., :1, :]]
    for joint in range(1, len(parents)):
        parent = global_dq[parents[joint]]
        global_dq.append(mul(parent, dq[..., joint:joint + 1, :]))
    return np.concatenate(global_dq, axis=-2)

================================================
FILE: util/load.py
================================================
"""
load.py
"""
from __future__ import annotations

from pathlib import Path
import toml, yaml, pickle, json
from easydict import EasyDict

def toml_load(path: Path):
    """Read a TOML config file into an attribute-accessible EasyDict."""
    with open(path, "r") as f:
        parsed = toml.load(f)
    return EasyDict(parsed)

def yaml_load(path: Path):
    """Read a YAML config file into an attribute-accessible EasyDict."""
    with open(path, "r") as f:
        parsed = yaml.load(f, Loader=yaml.Loader)
    return EasyDict(parsed)

def pickle_load(path: Path, encoding: str="ASCII"):
    """Unpickle and return the object stored at `path`.

    NOTE(review): pickle executes arbitrary code on load -- only use with
    trusted files.
    """
    with open(path, "rb") as f:
        data = pickle.load(f, encoding=encoding)
    return data

def json_load(path: Path):
    """Read a JSON config file into an attribute-accessible EasyDict."""
    with open(path, "r") as f:
        parsed = json.loads(f.read())
    return EasyDict(parsed)

================================================
FILE: util/quat.py
================================================
from __future__ import annotations

import numpy as np

# Calculate cross object of two 3D vectors.
def _fast_cross(a, b):
    return np.concatenate([
        a[...,1:2]*b[...,2:3] - a[...,2:3]*b[...,1:2],
        a[...,2:3]*b[...,0:1] - a[...,0:1]*b[...,2:3],
        a[...,0:1]*b[...,1:2] - a[...,1:2]*b[...,0:1]], axis=-1)

# Identity quaternions (w=1, xyz=0) with the given batch shape.
def eye(shape, dtype=np.float32):
    q = np.zeros(list(shape) + [4], dtype=dtype)
    q[..., 0] = 1
    return q

# Euclidean norm over the last axis.
def length(x):
    return np.sqrt(np.sum(np.square(x), axis=-1))

# Scale quaternions to unit length; eps guards against division by zero.
def normalize(x, eps=1e-8):
    norm = length(x)[..., None]
    return x / (norm + eps)

# Pick the representative with non-negative w from each antipodal pair.
def abs(x):
    keep = x[..., 0:1] > 0.0
    return np.where(keep, x, -x)

# Conjugate (equals the inverse for unit quaternions).
def inv(q):
    sign = np.asarray([1, -1, -1, -1], dtype=np.float32)
    return sign * q

# Dot product of two quaternions; keeps a trailing axis for batched input.
def dot(x, y):
    s = np.sum(x * y, axis=-1)
    return s[..., None] if x.ndim > 1 else s

# Hamilton product x*y; components ordered (w, x, y, z).
def mul(x, y):
    xw, xx, xy, xz = x[..., 0:1], x[..., 1:2], x[..., 2:3], x[..., 3:4]
    yw, yx, yy, yz = y[..., 0:1], y[..., 1:2], y[..., 2:3], y[..., 3:4]

    w = xw * yw - xx * yx - xy * yy - xz * yz
    i = xw * yx + xx * yw + xy * yz - xz * yy
    j = xw * yy - xx * yz + xy * yw + xz * yx
    k = xw * yz + xx * yy - xy * yx + xz * yw
    return np.concatenate([w, i, j, k], axis=-1)

# Relative rotation from x to y: x^-1 * y.
def inv_mul(x, y):
    x_conj = inv(x)
    return mul(x_conj, y)

# Product with the inverse of the second operand: x * y^-1.
def mul_inv(x, y):
    y_conj = inv(y)
    return mul(x, y_conj)

# Rotate 3D vectors x by quaternions q without building matrices.
def mul_vec(q, x):
    qvec = q[..., 1:]
    t = 2.0 * _fast_cross(qvec, x)
    return x + q[..., 0][..., None] * t + _fast_cross(qvec, t)

# Rotate vectors by the inverse of q.
def inv_mul_vec(q, x):
    q_conj = inv(q)
    return mul_vec(q_conj, x)

# Flip quaternion signs along the first axis so consecutive frames stay on
# the same hemisphere (removes double-cover sign discontinuities).
def unroll(x):
    out = x.copy()
    for frame in range(1, len(x)):
        agree = np.sum(out[frame] * out[frame - 1], axis=-1)
        flip = agree < -agree
        out[frame][flip] = -out[frame][flip]
    return out

# Quaternion rotating vector x onto vector y (result is not normalized).
def between(x, y):
    w = np.sqrt(np.sum(x * x, axis=-1) * np.sum(y * y, axis=-1)) + np.sum(x * y, axis=-1)
    return np.concatenate([w[..., None], _fast_cross(x, y)], axis=-1)

# Quaternion logarithm: map to a 3D rotation vector (half-angle * axis).
def log(x, eps=1e-5):
    vec_len = np.sqrt(np.sum(np.square(x[..., 1:]), axis=-1))[..., None]
    # Near zero rotation, fall back to scale 1 to avoid 0/0.
    scale = np.where(
        vec_len < eps,
        np.ones_like(vec_len),
        np.arctan2(vec_len, x[..., 0:1]) / vec_len)
    return scale * x[..., 1:]
    
# Quaternion exponential: inverse of `log`, maps rotation vectors back.
def exp(x, eps=1e-5):
    halfangle = np.sqrt(np.sum(np.square(x), axis=-1))[..., None]
    small = halfangle < eps
    c = np.where(small, np.ones_like(halfangle), np.cos(halfangle))
    s = np.where(small, np.ones_like(halfangle), np.sinc(halfangle / np.pi))
    return np.concatenate([c, s * x], axis=-1)

# Calculate global space rotations and positions from local space.
def fk(lrot, lpos, parents):
    # Root joint's local values are already global.
    gpos, grot = [lpos[..., :1, :]], [lrot[..., :1, :]]
    for j in range(1, len(parents)):
        p = parents[j]
        gpos.append(mul_vec(grot[p], lpos[..., j:j+1, :]) + gpos[p])
        grot.append(mul(grot[p], lrot[..., j:j+1, :]))
    return np.concatenate(grot, axis=-2), np.concatenate(gpos, axis=-2)

# Global joint rotations from local (parent-relative) rotations.
def fk_rot(lrot, parents):
    grot = [lrot[..., :1, :]]
    for j in range(1, len(parents)):
        grot.append(mul(grot[parents[j]], lrot[..., j:j+1, :]))
    return np.concatenate(grot, axis=-2)

# Forward kinematics propagating rotations, positions, linear velocities
# and angular velocities from local (parent-relative) to global space.
def fk_vel(lrot, lpos, lvel, lang, parents):
    
    # Root joint's local values are already global.
    gp, gr, gv, ga = [lpos[...,:1,:]], [lrot[...,:1,:]], [lvel[...,:1,:]], [lang[...,:1,:]]
    for i in range(1, len(parents)):
        gp.append(mul_vec(gr[parents[i]], lpos[...,i:i+1,:]) + gp[parents[i]])
        gr.append(mul    (gr[parents[i]], lrot[...,i:i+1,:]))
        # Linear velocity = rotated local velocity
        #                 + (parent angular velocity x rotated joint offset)
        #                 + parent linear velocity.
        gv.append(mul_vec(gr[parents[i]], lvel[...,i:i+1,:]) + 
            _fast_cross(ga[parents[i]], mul_vec(gr[parents[i]], lpos[...,i:i+1,:])) +
            gv[parents[i]])
        # Angular velocity accumulates down the chain.
        ga.append(mul_vec(gr[parents[i]], lang[...,i:i+1,:]) + ga[parents[i]])
        
    # Returns (grot, gpos, gvel, gang), each concatenated over the joint axis.
    return (
        np.concatenate(gr, axis=-2), 
        np.concatenate(gp, axis=-2),
        np.concatenate(gv, axis=-2),
        np.concatenate(ga, axis=-2))

# Calculate local space rotations and positions from global space.
def ik(grot, gpos, parents):
    parent_rot = grot[..., parents[1:], :]
    lrot = np.concatenate([
        grot[..., :1, :],
        mul(inv(parent_rot), grot[..., 1:, :]),
    ], axis=-2)
    lpos = np.concatenate([
        gpos[..., :1, :],
        mul_vec(inv(parent_rot), gpos[..., 1:, :] - gpos[..., parents[1:], :]),
    ], axis=-2)
    return lrot, lpos

# Convert global joint rotations back to parent-local rotations.
def ik_rot(grot, parents):
    parent_rot = grot[..., parents[1:], :]
    return np.concatenate(
        [grot[..., :1, :], mul(inv(parent_rot), grot[..., 1:, :])], axis=-2)

# ================================================================
#  Conversion from quaternions to other rotation representations.
# ================================================================
# Calculate axis-angle from quaternions.
# Based on ACTOR:
# https://github.com/Mathux/ACTOR/blob/d3b0afe674e01fa2b65c89784816c3435df0a9a5/src/utils/rotation_conversions.py#L481
def to_axis_angle(x, eps=1e-5):
    norm = np.linalg.norm(x[..., 1:], axis=-1, keepdims=True)
    half_angle = np.arctan2(norm, x[..., :1])
    angle = 2 * half_angle
    small = np.abs(angle) < eps
    ratio = np.empty_like(angle)
    ratio[~small] = np.sin(half_angle[~small]) / angle[~small]
    # Taylor expansion near zero: sin(x/2)/x ~= 1/2 - (x*x)/48.
    ratio[small] = 0.5 - (angle[small] * angle[small]) / 48
    return x[..., 1:] / ratio

# Calculate euler angles from quaternions.(!Under construction.)
def to_euler(x, order='zyx'):
    w, i, j, k = x[..., 0:1], x[..., 1:2], x[..., 2:3], x[..., 3:4]

    if order == 'zyx':
        a0 = np.arctan2(2.0 * (w * k + i * j), 1.0 - 2.0 * (j * j + k * k))
        a1 = np.arcsin(np.clip(2.0 * (w * j - k * i), -1.0, 1.0))
        a2 = np.arctan2(2.0 * (w * i + j * k), 1.0 - 2.0 * (i * i + j * j))
        return np.concatenate([a0, a1, a2], axis=-1)
    if order == 'xzy':
        a0 = np.arctan2(2.0 * (i * w - j * k), -i * i + j * j - k * k + w * w)
        a1 = np.arctan2(2.0 * (j * w - i * k), i * i - j * j - k * k + w * w)
        a2 = np.arcsin(np.clip(2.0 * (i * j + k * w), -1.0, 1.0))
        return np.concatenate([a0, a1, a2], axis=-1)
    raise NotImplementedError('Cannot convert to ordering %s' % order)

# Calculate rotation matrix from quaternions.
def to_xform(x):
    qw, qx, qy, qz = x[..., 0:1], x[..., 1:2], x[..., 2:3], x[..., 3:4]

    # Doubled products reused across matrix entries.
    x2, y2, z2 = qx * 2.0, qy * 2.0, qz * 2.0
    xx, yy, zz = qx * x2, qy * y2, qz * z2
    xy, xz, yz = qx * y2, qx * z2, qy * z2
    wx, wy, wz = qw * x2, qw * y2, qw * z2

    row0 = np.concatenate([1.0 - (yy + zz), xy - wz, xz + wy], axis=-1)
    row1 = np.concatenate([xy + wz, 1.0 - (xx + zz), yz - wx], axis=-1)
    row2 = np.concatenate([xz - wy, yz + wx, 1.0 - (xx + yy)], axis=-1)
    return np.concatenate(
        [row0[..., None, :], row1[..., None, :], row2[..., None, :]], axis=-2)

# Calculate 6d orthogonal rotation representation (ortho6d) from quaternions:
# the first two columns of the rotation matrix.
# https://github.com/papagina/RotationContinuity
def to_xform_xy(x):
    qw, qx, qy, qz = x[..., 0:1], x[..., 1:2], x[..., 2:3], x[..., 3:4]

    x2, y2, z2 = qx * 2.0, qy * 2.0, qz * 2.0
    xx, yy, zz = qx * x2, qy * y2, qz * z2
    xy, xz, yz = qx * y2, qx * z2, qy * z2
    wx, wy, wz = qw * x2, qw * y2, qw * z2

    row0 = np.concatenate([1.0 - (yy + zz), xy - wz], axis=-1)
    row1 = np.concatenate([xy + wz, 1.0 - (xx + zz)], axis=-1)
    row2 = np.concatenate([xz - wy, yz + wx], axis=-1)
    return np.concatenate(
        [row0[..., None, :], row1[..., None, :], row2[..., None, :]], axis=-2)

# Calculate scaled angle axis (rotation vector) from quaternions.
def to_scaled_angle_axis(x, eps=1e-5):
    return log(x, eps) * 2.0


# ================================================================
#  Conversion from other rotation representations to quaternions.
# ================================================================

# Calculate quaternions from a rotation angle about a (unit) axis.
def from_angle_axis(angle, axis):
    half = angle / 2.0
    w = np.cos(half)[..., None]
    xyz = np.sin(half)[..., None] * axis
    return np.concatenate([w, xyz], axis=-1)

# Calculate quaternions from axis-angle (rotation vector) representation.
def from_axis_angle(rots, eps=1e-5):
    angle = np.linalg.norm(rots, axis=-1)
    # Guard zero-length rotation vectors against division by zero.
    angle = np.where(angle == 0, eps, angle)
    axis = rots / angle[..., None]
    return from_angle_axis(angle, axis)

# Calculate quaternions from euler angles.
def from_euler(e, order='zyx', mode="degree"):
    if mode == "degree":
        e = np.deg2rad(e)
    basis = {
        'x': np.asarray([1, 0, 0], dtype=np.float32),
        'y': np.asarray([0, 1, 0], dtype=np.float32),
        'z': np.asarray([0, 0, 1], dtype=np.float32)}

    # Compose per-axis rotations in the specified order.
    q0 = from_angle_axis(e[..., 0], basis[order[0]])
    q1 = from_angle_axis(e[..., 1], basis[order[1]])
    q2 = from_angle_axis(e[..., 2], basis[order[2]])
    return mul(q0, mul(q1, q2))

# Calculate quaternions from rotation matrix.
# Branch-free case selection: the nested np.where picks, per element, the
# numerically largest of the four candidate quaternion components (based on
# the diagonal entries of the matrix) before normalizing -- presumably
# following Mike Day's "Converting a Rotation Matrix to a Quaternion"
# (NOTE(review): confirm the exact reference).
def from_xform(ts):
    
    return normalize(
        np.where((ts[...,2,2] < 0.0)[...,None],
            np.where((ts[...,0,0] >  ts[...,1,1])[...,None],
                np.concatenate([
                    (ts[...,2,1]-ts[...,1,2])[...,None], 
                    (1.0 + ts[...,0,0] - ts[...,1,1] - ts[...,2,2])[...,None], 
                    (ts[...,1,0]+ts[...,0,1])[...,None], 
                    (ts[...,0,2]+ts[...,2,0])[...,None]], axis=-1),
                np.concatenate([
                    (ts[...,0,2]-ts[...,2,0])[...,None], 
                    (ts[...,1,0]+ts[...,0,1])[...,None], 
                    (1.0 - ts[...,0,0] + ts[...,1,1] - ts[...,2,2])[...,None], 
                    (ts[...,2,1]+ts[...,1,2])[...,None]], axis=-1)),
            np.where((ts[...,0,0] < -ts[...,1,1])[...,None],
                np.concatenate([
                    (ts[...,1,0]-ts[...,0,1])[...,None], 
                    (ts[...,0,2]+ts[...,2,0])[...,None], 
                    (ts[...,2,1]+ts[...,1,2])[...,None], 
                    (1.0 - ts[...,0,0] - ts[...,1,1] + ts[...,2,2])[...,None]], axis=-1),
                np.concatenate([
                    (1.0 + ts[...,0,0] + ts[...,1,1] + ts[...,2,2])[...,None], 
                    (ts[...,2,1]-ts[...,1,2])[...,None], 
                    (ts[...,0,2]-ts[...,2,0])[...,None], 
                    (ts[...,1,0]-ts[...,0,1])[...,None]], axis=-1))))

# Calculate quaternions from ortho6d.
# https://github.com/papagina/RotationContinuity
def from_xform_xy(x):
    # Rebuild an orthonormal basis from the two given columns via
    # Gram-Schmidt-style cross products, then convert the matrix.
    col0 = x[..., 0]
    col2 = _fast_cross(col0, x[..., 1])
    col2 = col2 / np.sqrt(np.sum(np.square(col2), axis=-1))[..., None]
    col1 = _fast_cross(col2, col0)
    col1 = col1 / np.sqrt(np.sum(np.square(col1), axis=-1))[..., None]

    return from_xform(np.concatenate(
        [col0[..., None], col1[..., None], col2[..., None]], axis=-1))

# Calculate quaternions from scaled angle axis.
def from_scaled_angle_axis(x, eps=1e-5):
    half = x / 2.0
    return exp(half, eps)
Download .txt
gitextract_5a6wl_uk/

├── .gitignore
├── .vscode/
│   └── settings.json
├── LICENSE
├── README.md
├── anim/
│   ├── aistpp.py
│   ├── amass.py
│   ├── animation.py
│   ├── blend.py
│   ├── bvh.py
│   ├── inverse_kinematics/
│   │   ├── ccd_ik.py
│   │   ├── fabrik.py
│   │   ├── jacobi_ik.py
│   │   └── two_bone_ik.py
│   ├── keyframe.py
│   ├── motion_matching/
│   │   ├── database.py
│   │   └── mm.py
│   ├── pose.py
│   ├── skel.py
│   └── smpl.py
├── anitaichi/
│   ├── animation/
│   │   ├── anim.py
│   │   ├── anim_loader/
│   │   │   └── bvh.py
│   │   └── skel.py
│   └── transform/
│       ├── quat.py
│       ├── ti_quat.py
│       ├── ti_vec.py
│       └── vec.py
├── blender/
│   └── fbx2bvh.py
├── configs/
│   ├── DeepLearning/
│   │   └── LMM.toml
│   └── skel_smpl_neutral.npz
├── data/
│   └── data.md
├── model/
│   └── LMM/
│       ├── LMM.md
│       ├── decompressor.py
│       ├── preprocessing.py
│       ├── projector.py
│       ├── setting.py
│       ├── stepper.py
│       ├── test.py
│       └── train.py
├── test/
│   ├── bvh_viewer.py
│   ├── calc_matching_time.py
│   ├── character_controller.py
│   ├── inverse_kinematics.py
│   ├── path_following.py
│   ├── plotting.py
│   ├── plotting_cspace.py
│   └── plotting_global.py
├── todo.md
├── update.md
└── util/
    ├── dualquat.py
    ├── load.py
    └── quat.py
Download .txt
SYMBOL INDEX (276 symbols across 38 files)

FILE: anim/aistpp.py
  function load (line 23) | def load(

FILE: anim/amass.py
  function load (line 24) | def load(

FILE: anim/animation.py
  class Animation (line 17) | class Animation:
    method __init__ (line 18) | def __init__(
    method __len__ (line 44) | def __len__(self) -> int: return len(self.trans)
    method __add__ (line 46) | def __add__(self, other: Animation) -> Animation:
    method __getitem__ (line 57) | def __getitem__(self, index: int | slice) -> Animation:
    method cat (line 79) | def cat(self, other: Animation) -> None:
    method parents (line 89) | def parents(self) -> list[int]:
    method joint_names (line 93) | def joint_names(self) -> list[str]:
    method offsets (line 97) | def offsets(self) -> np.ndarray:
    method grot (line 104) | def grot(self) -> np.ndarray:
    method crot (line 109) | def crot(self) -> np.ndarray:
    method axangs (line 124) | def axangs(self) -> np.ndarray:
    method xforms (line 129) | def xforms(self) -> np.ndarray:
    method ortho6ds (line 134) | def ortho6ds(self) -> np.ndarray:
    method sw_tws (line 138) | def sw_tws(self) -> tuple[np.ndarray, np.ndarray]:
    method set_positions_from_fk (line 184) | def set_positions_from_fk(self) -> None:
    method lpos (line 188) | def lpos(self) -> np.ndarray:
    method gpos (line 194) | def gpos(self) -> np.ndarray:
    method rtpos (line 205) | def rtpos(self) -> np.ndarray:
    method cpos (line 222) | def cpos(self) -> np.ndarray:
    method croot (line 233) | def croot(self, idx: int=None) -> tuple[np.ndarray, np.ndarray]:
    method gposvel (line 254) | def gposvel(self) -> np.ndarray:
    method cposvel (line 262) | def cposvel(self) -> np.ndarray:
    method lrotvel (line 270) | def lrotvel(self) -> np.ndarray:
    method local_transform (line 284) | def local_transform(self) -> np.ndarray:
    method global_transform (line 294) | def global_transform(self) -> np.ndarray:
    method local_dualquat (line 308) | def local_dualquat(self) -> np.ndarray:
    method global_dualquat (line 312) | def global_dualquat(self) -> np.ndarray:
    method proj_root_pos (line 318) | def proj_root_pos(self, remove_vertical: bool=False) -> np.ndarray:
    method proj_root_rot (line 334) | def proj_root_rot(self) -> np.ndarray:
    method root_direction (line 341) | def root_direction(self, remove_vertical: bool=False) -> np.ndarray:
    method future_traj_poss (line 365) | def future_traj_poss(self, frame: int, remove_vertical: bool=True, csp...
    method future_traj_dirs (line 391) | def future_traj_dirs(self, frame: int, remove_vertical: bool=True, csp...
    method clamp_future_idxs (line 417) | def clamp_future_idxs(self, offset: int) -> np.ndarray:
    method calc_foot_contact (line 429) | def calc_foot_contact(
    method mirror (line 470) | def mirror(self, dataset: str=None) -> Animation:
    method no_animation (line 504) | def no_animation(
  function mirror_rot_trans (line 516) | def mirror_rot_trans(
  function animation_mirror (line 540) | def animation_mirror(lrot, lpos, names, parents):

FILE: anim/blend.py
  function lerp (line 12) | def lerp(x, y, t):
  function quat_lerp (line 16) | def quat_lerp(x: np.ndarray, y: np.ndarray, t: float) -> np.ndarray:
  function slerp (line 21) | def slerp(x: np.ndarray, y: np.ndarray, t: float) -> np.ndarray:

FILE: anim/bvh.py
  function load (line 16) | def load(
  function load_hierarchy (line 78) | def load_hierarchy(
  function load_motion (line 141) | def load_motion(
  function save (line 185) | def save(
  function save_hierarchy (line 210) | def save_hierarchy(
  function save_motion (line 262) | def save_motion(

FILE: anim/inverse_kinematics/ccd_ik.py
  function normalize_vector (line 15) | def normalize_vector(vector: np.ndarray):
  function simple_ccd_ik (line 21) | def simple_ccd_ik(

FILE: anim/inverse_kinematics/fabrik.py
  function normalize_vector (line 15) | def normalize_vector(vector: np.ndarray):
  function backward_reaching (line 20) | def backward_reaching(
  function forward_reaching (line 54) | def forward_reaching(
  function simple_fabrik (line 85) | def simple_fabrik(

FILE: anim/inverse_kinematics/jacobi_ik.py
  function simple_jacobi_ik (line 15) | def simple_jacobi_ik(

FILE: anim/inverse_kinematics/two_bone_ik.py
  function normalize (line 15) | def normalize(tensor: np.ndarray, axis:int=-1) -> np.ndarray:
  function two_bone_ik (line 26) | def two_bone_ik(

FILE: anim/keyframe.py
  class KeyFrame (line 9) | class KeyFrame:
  class KeyFrameAnimation (line 17) | class KeyFrameAnimation:
    method __init__ (line 18) | def __init__(

FILE: anim/motion_matching/database.py
  class Database (line 10) | class Database(Animation):
    method __init__ (line 11) | def __init__(
    method __len__ (line 48) | def __len__(self) -> int: return len(self.trans)
    method __add__ (line 50) | def __add__(self, other: Database) -> Database:
    method cat (line 71) | def cat(self, other: Database) -> None:
    method gposvel (line 83) | def gposvel(self) -> np.ndarray:
    method cposvel (line 92) | def cposvel(self) -> np.ndarray:
    method lrotvel (line 101) | def lrotvel(self) -> np.ndarray:
    method clamp_future_idxs (line 117) | def clamp_future_idxs(self, offset: int) -> np.ndarray:
  class MatchingDatabase (line 128) | class MatchingDatabase:
    method __len__ (line 145) | def __len__(self):

FILE: anim/motion_matching/mm.py
  function normalize_features (line 10) | def normalize_features(features: np.ndarray, weights: np.ndarray, axis=0...
  function normalize_query (line 18) | def normalize_query(query: np.ndarray, means:np.ndarray, stds:np.ndarray...
  function calc_box_distance (line 24) | def calc_box_distance(
  function create_position_features (line 40) | def create_position_features(db: Database) -> np.ndarray:
  function create_velocity_features (line 46) | def create_velocity_features(db: Database) -> np.ndarray:
  function create_traj_features (line 53) | def create_traj_features(db: Database) -> np.ndarray:
  function create_matching_features (line 67) | def create_matching_features(
  function create_matching_aabb (line 108) | def create_matching_aabb(features: np.ndarray, size: int | None) -> tupl...
  function create_matching_database (line 129) | def create_matching_database(
  function motion_matching_search (line 187) | def motion_matching_search(
  function brute_force_search (line 220) | def brute_force_search(
  function aabb_search (line 233) | def aabb_search(
  function kd_search (line 282) | def kd_search(
  function faiss_search (line 294) | def faiss_search(

FILE: anim/pose.py
  class Pose (line 7) | class Pose:
    method __init__ (line 8) | def __init__(
    method forward_kinematics (line 18) | def forward_kinematics(self):
    method set_local_positions (line 27) | def set_local_positions(self):
    method set_global_positions (line 32) | def set_global_positions(self):
    method set_global_rotations (line 35) | def set_global_rotations(self):
    method set_gpos_and_grot (line 38) | def set_gpos_and_grot(self):

FILE: anim/skel.py
  class Joint (line 7) | class Joint:
  function axis_to_vector (line 14) | def axis_to_vector(axis: str):
  class Skel (line 31) | class Skel:
    method __init__ (line 32) | def __init__(
    method __len__ (line 60) | def __len__(self) -> int:
    method __getitem__ (line 63) | def __getitem__(self, index: int | slice | str) -> Joint | list[Joint]:
    method indices (line 67) | def indices(self) -> list[int]:
    method parents (line 71) | def parents(self) -> list[int]:
    method children (line 76) | def children(self) -> dict[int: list[int]]:
    method names (line 87) | def names(self) -> list[str]:
    method offsets (line 92) | def offsets(self) -> np.ndarray:
    method dofs (line 100) | def dofs(self) -> list[int]:
    method joint_depths (line 104) | def joint_depths(self) -> list[int]:
    method bone_lengths (line 115) | def bone_lengths(self) -> np.ndarray:
    method get_joint (line 120) | def get_joint(self, index: int | slice | str) -> Joint | list[Joint]:
    method get_index_from_jname (line 126) | def get_index_from_jname(self, jname: str) -> int:
    method get_children (line 131) | def get_children(
    method get_parent (line 155) | def get_parent(
    method from_names_parents_offsets (line 181) | def from_names_parents_offsets(

FILE: anim/smpl.py
  function load_model (line 266) | def load_model(model_path: Path, gender: str=None):
  function calc_skel_offsets (line 283) | def calc_skel_offsets(

FILE: anitaichi/animation/anim.py
  function forward_kinematics_rotations (line 6) | def forward_kinematics_rotations(
  function forward_kinematics (line 20) | def forward_kinematics(
  function ti_forward_kinematics (line 38) | def ti_forward_kinematics(
  function convert_to_vector_field (line 69) | def convert_to_vector_field(array: np.ndarray):
  class Pose (line 76) | class Pose:
    method __init__ (line 77) | def __init__(
    method local_rotations (line 88) | def local_rotations(self):
    method global_rotations (line 92) | def global_rotations(self):
    method global_positions (line 99) | def global_positions(self):
  class Animation (line 108) | class Animation:
    method __init__ (line 109) | def __init__(
    method __len__ (line 124) | def __len__(self):
    method local_rotations (line 128) | def local_rotations(self):
    method global_rotations (line 132) | def global_rotations(self):
    method global_positions (line 148) | def global_positions(self):
    method ti_fk (line 156) | def ti_fk(self):
    method to_pose (line 166) | def to_pose(self, frame_num: int) -> Pose:
    method to_poses (line 172) | def to_poses(self, frames: slice) -> list[Pose]:

FILE: anitaichi/animation/anim_loader/bvh.py
  function load (line 17) | def load(
  function load_hierarchy (line 72) | def load_hierarchy(
  function load_motion (line 133) | def load_motion(
  function save (line 178) | def save(
  function save_hierarchy (line 201) | def save_hierarchy(
  function save_motion (line 252) | def save_motion(

FILE: anitaichi/animation/skel.py
  class Joint (line 6) | class Joint:
    method __init__ (line 7) | def __init__(self, name: str, index: int, parent: int, offset: list[fl...
  class Skel (line 19) | class Skel:
    method __init__ (line 20) | def __init__(self, joints: list[Joint], skel_name: str=None):
    method __len__ (line 25) | def __len__(self):
    method offsets (line 32) | def offsets(self) -> np.ndarray: # For numpy array
    method offsets_field (line 39) | def offsets_field(self): # For Taichi field
    method parents (line 46) | def parents(self) -> list[int]:
    method parents_field (line 50) | def parents_field(self):

FILE: anitaichi/transform/quat.py
  function eye (line 8) | def eye(shape, dtype=np.float32):
  function length (line 12) | def length(x):
  function normalize (line 16) | def normalize(x, eps=1e-8):
  function abs (line 19) | def abs(x):
  function inv (line 23) | def inv(q):
  function dot (line 27) | def dot(x, y):
  function mul (line 31) | def mul(x, y):
  function inv_mul (line 41) | def inv_mul(x, y):
  function mul_inv (line 44) | def mul_inv(x, y):
  function mul_vec (line 48) | def mul_vec(q, x):
  function inv_mul_vec (line 52) | def inv_mul_vec(q, x):
  function unroll (line 55) | def unroll(x):
  function between (line 64) | def between(x, y):
  function log (line 70) | def log(x, eps=1e-5):
  function exp (line 75) | def exp(x, eps=1e-5):
  function fk (line 82) | def fk(lrot, lpos, parents):
  function fk_rot (line 91) | def fk_rot(lrot, parents):
  function fk_centric (line 100) | def fk_centric(lrot, lpos, parents):
  function ik (line 107) | def ik(grot, gpos, parents):
  function ik_rot (line 121) | def ik_rot(grot, parents):
  function fk_vel (line 126) | def fk_vel(lrot, lpos, lvel, lang, parents):
  function lerp (line 144) | def lerp(x, y, t):
  function quat_lerp (line 148) | def quat_lerp(x, y, t):
  function slerp (line 152) | def slerp(x, y, t):
  function to_axis_angle (line 175) | def to_axis_angle(x, eps=1e-5):
  function to_euler (line 192) | def to_euler(x, order='zyx'):
  function to_xform (line 211) | def to_xform(x):
  function to_xform_xy (line 228) | def to_xform_xy(x):
  function to_scaled_angle_axis (line 244) | def to_scaled_angle_axis(x, eps=1e-5):
  function from_angle_axis (line 253) | def from_angle_axis(angle, axis):
  function from_axis_angle (line 260) | def from_axis_angle(rots):
  function from_euler (line 266) | def from_euler(e, order='zyx', mode="degree"):
  function from_xform (line 281) | def from_xform(ts):
  function from_xform_xy (line 309) | def from_xform_xy(x):
  function from_scaled_angle_axis (line 323) | def from_scaled_angle_axis(x, eps=1e-5):

FILE: anitaichi/transform/ti_quat.py
  function eye (line 7) | def eye():
  function normalize (line 11) | def normalize(q):
  function inv (line 15) | def inv(q):
  function abs (line 19) | def abs(q):
  function mul (line 23) | def mul(x, y):
  function mul_vec3 (line 31) | def mul_vec3(q, v):
  function from_angle_and_axis (line 37) | def from_angle_and_axis(angle, axis):
  function from_euler (line 43) | def from_euler(euler, order):

FILE: anitaichi/transform/ti_vec.py
  function cross (line 5) | def cross(v, w):
  function dot (line 12) | def dot(v, w):
  function length (line 16) | def length(v):
  function normalize (line 20) | def normalize(v):

FILE: anitaichi/transform/vec.py
  function cross (line 6) | def cross(a, b):
  function dot (line 12) | def dot(a, b, keepdims=False):
  function length (line 15) | def length(v, keepdims=False):
  function normalize (line 18) | def normalize(v):

FILE: model/LMM/decompressor.py
  class Compressor (line 6) | class Compressor(nn.Module):
    method __call__ (line 12) | def __call__(self, x):
  class Decompressor (line 23) | class Decompressor(nn.Module):
    method __call__ (line 28) | def __call__(self, x):

FILE: model/LMM/preprocessing.py
  function preprocess_motion_data (line 10) | def preprocess_motion_data(BASEPATH: Path, cfg, save_dir: Path):

FILE: model/LMM/projector.py
  class Projector (line 6) | class Projector(nn.Module):
    method __call__ (line 12) | def __call__(self, x):

FILE: model/LMM/setting.py
  class Config (line 14) | class Config:
    method __init__ (line 15) | def __init__(self, BASEPATH: Path, setting_path: str) -> None:
    method single_gpu_setting (line 75) | def single_gpu_setting(self):
    method multi_gpus_setting (line 78) | def multi_gpus_setting(self):

FILE: model/LMM/stepper.py
  class Stepper (line 6) | class Stepper(nn.Module):
    method __call__ (line 12) | def __call__(self, x):

FILE: model/LMM/train.py
  function main (line 27) | def main(cfg: Config):
  function train_decompressor (line 48) | def train_decompressor(cfg, ckpt_dir):
  function train_stepper (line 51) | def train_stepper(cfg, ckpt_dir):
  function train_projector (line 54) | def train_projector(cfg, ckpt_dir):

FILE: test/bvh_viewer.py
  function get_frame (line 15) | def get_frame(
  function update_camera (line 23) | def update_camera(camera, anim, frame):

FILE: test/calc_matching_time.py
  function create_database (line 13) | def create_database(bvh_dir: Path, files: list, starts: list, ends: list...

FILE: test/character_controller.py
  function create_database (line 14) | def create_database(bvh_dir: Path, files: list, starts: list, ends: list...
  function main (line 23) | def main():

FILE: test/path_following.py
  function create_database (line 18) | def create_database(bvh_dir: Path, files: list, starts: list, ends: list...
  function create_query (line 27) | def create_query(db: Database, idx: int, traj_feats: np.ndarray) -> np.n...
  function create_circle_traj (line 36) | def create_circle_traj():
  function create_animation_from_idxs (line 54) | def create_animation_from_idxs(db: Database, anim_frames: list[int]) -> ...
  function main (line 76) | def main():

FILE: test/plotting.py
  function update (line 51) | def update(index: int):

FILE: test/plotting_cspace.py
  function update (line 32) | def update(index: int):

FILE: test/plotting_global.py
  function update (line 34) | def update(index: int):

FILE: util/dualquat.py
  function eye (line 6) | def eye(shape: list[int], dtype=np.float32) -> np.ndarray:
  function normalize (line 10) | def normalize(dq: np.ndarray, eps=1e-8) -> np.ndarray:
  function abs (line 15) | def abs(dq: np.ndarray) -> np.ndarray:
  function inv (line 20) | def inv(dq: np.ndarray) -> np.ndarray:
  function mul (line 25) | def mul(x: np.ndarray, y: np.ndarray) -> np.ndarray:
  function from_trans (line 30) | def from_trans(trans: np.ndarray) -> np.ndarray:
  function from_rot (line 35) | def from_rot(rot: np.ndarray) -> np.ndarray:
  function from_rot_and_trans (line 38) | def from_rot_and_trans(rot: np.ndarray, trans: np.ndarray) -> np.ndarray:
  function to_trans (line 45) | def to_trans(dq: np.ndarray) -> np.ndarray:
  function to_rot (line 48) | def to_rot(dq: np.ndarray) -> np.ndarray:
  function fk (line 51) | def fk(dq: np.ndarray, parents: list[int]) -> np.ndarray:

FILE: util/load.py
  function toml_load (line 10) | def toml_load(path: Path):
  function yaml_load (line 16) | def yaml_load(path: Path):
  function pickle_load (line 22) | def pickle_load(path: Path, encoding: str="ASCII"):
  function json_load (line 27) | def json_load(path: Path):

FILE: util/quat.py
  function _fast_cross (line 6) | def _fast_cross(a, b):
  function eye (line 13) | def eye(shape, dtype=np.float32):
  function length (line 17) | def length(x):
  function normalize (line 21) | def normalize(x, eps=1e-8):
  function abs (line 24) | def abs(x):
  function inv (line 28) | def inv(q):
  function dot (line 32) | def dot(x, y):
  function mul (line 36) | def mul(x, y):
  function inv_mul (line 46) | def inv_mul(x, y):
  function mul_inv (line 49) | def mul_inv(x, y):
  function mul_vec (line 53) | def mul_vec(q, x):
  function inv_mul_vec (line 57) | def inv_mul_vec(q, x):
  function unroll (line 60) | def unroll(x):
  function between (line 69) | def between(x, y):
  function log (line 75) | def log(x, eps=1e-5):
  function exp (line 80) | def exp(x, eps=1e-5):
  function fk (line 87) | def fk(lrot, lpos, parents):
  function fk_rot (line 96) | def fk_rot(lrot, parents):
  function fk_vel (line 104) | def fk_vel(lrot, lpos, lvel, lang, parents):
  function ik (line 122) | def ik(grot, gpos, parents):
  function ik_rot (line 136) | def ik_rot(grot, parents):
  function to_axis_angle (line 147) | def to_axis_angle(x, eps=1e-5):
  function to_euler (line 164) | def to_euler(x, order='zyx'):
  function to_xform (line 183) | def to_xform(x):
  function to_xform_xy (line 200) | def to_xform_xy(x):
  function to_scaled_angle_axis (line 216) | def to_scaled_angle_axis(x, eps=1e-5):
  function from_angle_axis (line 225) | def from_angle_axis(angle, axis):
  function from_axis_angle (line 232) | def from_axis_angle(rots, eps=1e-5):
  function from_euler (line 239) | def from_euler(e, order='zyx', mode="degree"):
  function from_xform (line 254) | def from_xform(ts):
  function from_xform_xy (line 282) | def from_xform_xy(x):
  function from_scaled_angle_axis (line 296) | def from_scaled_angle_axis(x, eps=1e-5):
Condensed preview — 51 files, each showing path, character count, and a content snippet. Download the .json file, or copy the output, to get the full structured content (181K chars).
[
  {
    "path": ".gitignore",
    "chars": 1900,
    "preview": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n# C extensions\n*.so\n\n# Distribution / packagi"
  },
  {
    "path": ".vscode/settings.json",
    "chars": 1495,
    "preview": "{\n    // \"python.linting.mypyEnabled\": true,\n    \n    \"files.associations\": {\n        \"__bit_reference\": \"cpp\",\n        "
  },
  {
    "path": "LICENSE",
    "chars": 1072,
    "preview": "MIT License\n\nCopyright (c) 2023 Kosuke Fukazawa\n\nPermission is hereby granted, free of charge, to any person obtaining a"
  },
  {
    "path": "README.md",
    "chars": 5389,
    "preview": "<p align=\"center\">\n    <picture>\n    <img alt=\"cat\" src=\"figs/icon.png\" width=512>\n    </picture>\n</p>\n\n<h2 align=\"cente"
  },
  {
    "path": "anim/aistpp.py",
    "chars": 2205,
    "preview": "\"\"\"\naistpp.py\nAIST++ : \n    format: ***.pkl (pickle file)\n    Parameters :\n        \"smpl_loss\" (float): I don't know how"
  },
  {
    "path": "anim/amass.py",
    "chars": 3759,
    "preview": "\"\"\"\namass.py\nAMASS :\n    Format: ***.npz (numpy binary file)\n    Parameters:\n        \"trans\" (np.ndarray): Root translat"
  },
  {
    "path": "anim/animation.py",
    "chars": 19409,
    "preview": "\"\"\"\nanimation.py\n\"\"\"\n# Basic animation class. Can use for \n# deep-learning (pre/post)processing \n# and visualization etc"
  },
  {
    "path": "anim/blend.py",
    "chars": 989,
    "preview": "from __future__ import annotations\n\nimport numpy as np\n\nfrom anim.animation import Animation\nfrom util import quat\n\n# =="
  },
  {
    "path": "anim/bvh.py",
    "chars": 8581,
    "preview": "\"\"\"\nbvh.py\n\"\"\"\n# loading and writing a biovision hierarchy data (BVH).\n\nfrom __future__ import annotations\n\nfrom io impo"
  },
  {
    "path": "anim/inverse_kinematics/ccd_ik.py",
    "chars": 3630,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nROOT = Path(__file__).resolve().parent.parent.pa"
  },
  {
    "path": "anim/inverse_kinematics/fabrik.py",
    "chars": 4998,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nROOT = Path(__file__).resolve().parent.parent.pa"
  },
  {
    "path": "anim/inverse_kinematics/jacobi_ik.py",
    "chars": 2468,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nROOT = Path(__file__).resolve().parent.parent.pa"
  },
  {
    "path": "anim/inverse_kinematics/two_bone_ik.py",
    "chars": 6328,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nROOT = Path(__file__).resolve().parent.parent.pa"
  },
  {
    "path": "anim/keyframe.py",
    "chars": 541,
    "preview": "from __future__ import annotations\n\nfrom dataclasses import dataclass\nimport numpy as np\n\nfrom anim.skel import Skel\n\n@d"
  },
  {
    "path": "anim/motion_matching/database.py",
    "chars": 5213,
    "preview": "from __future__ import annotations\n\nfrom dataclasses import dataclass\nimport numpy as np\nfrom scipy.spatial import KDTre"
  },
  {
    "path": "anim/motion_matching/mm.py",
    "chars": 10901,
    "preview": "from __future__ import annotations\n\nimport sys\nimport numpy as np\nfrom scipy.spatial import KDTree\n# import taichi as ti"
  },
  {
    "path": "anim/pose.py",
    "chars": 1113,
    "preview": "from __future__ import annotations\n\nimport numpy as np\nfrom util import quat\nfrom anim.skel import Skel\n\nclass Pose:\n   "
  },
  {
    "path": "anim/skel.py",
    "chars": 7310,
    "preview": "from __future__ import annotations\n\nfrom dataclasses import dataclass\nimport numpy as np\n\n@dataclass\nclass Joint:\n    na"
  },
  {
    "path": "anim/smpl.py",
    "chars": 6467,
    "preview": "\"\"\"\nsmpl.py\n\nJOINT_NAMES, SMPLH_JOINT_NAMES, SMPL_JOINT_NAMES are from \\\nhttps://github.com/vchoutas/smplx/blob/main/smp"
  },
  {
    "path": "anitaichi/animation/anim.py",
    "chars": 5633,
    "preview": "import numpy as np\nimport taichi as ti\nfrom anitaichi.animation.skel import Skel\nfrom anitaichi.transform import ti_quat"
  },
  {
    "path": "anitaichi/animation/anim_loader/bvh.py",
    "chars": 7825,
    "preview": "\"\"\"\nbvh.py\n\"\"\"\n# loading and writing a biovision hierarchy data (BVH).\n\nfrom __future__ import annotations\n\nfrom io impo"
  },
  {
    "path": "anitaichi/animation/skel.py",
    "chars": 1666,
    "preview": "from __future__ import annotations\n\nimport numpy as np\nimport taichi as ti\n\nclass Joint:\n    def __init__(self, name: st"
  },
  {
    "path": "anitaichi/transform/quat.py",
    "chars": 11586,
    "preview": "from __future__ import annotations\n\nimport numpy as np\n\nfrom anitaichi.transform import vec\n\n# Make origin quaternions ("
  },
  {
    "path": "anitaichi/transform/ti_quat.py",
    "chars": 1377,
    "preview": "import taichi as ti\nimport taichi.math as tm\n\nfrom anitaichi.transform import ti_vec\n\n@ti.func\ndef eye():\n    return ti."
  },
  {
    "path": "anitaichi/transform/ti_vec.py",
    "chars": 426,
    "preview": "import taichi as ti\nimport taichi.math as tm\n\n@ti.func\ndef cross(v, w):\n    out0 = v.y * w.z - v.z * w.y\n    out1 = v.z "
  },
  {
    "path": "anitaichi/transform/vec.py",
    "chars": 642,
    "preview": "from __future__ import annotations\n\nimport numpy as np\n\n# Calculate cross object of two 3D vectors.\ndef cross(a, b):\n   "
  },
  {
    "path": "blender/fbx2bvh.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "configs/DeepLearning/LMM.toml",
    "chars": 1890,
    "preview": "# Common settings\nexp_name = \"original\"\n\n# device settings\ndevice = \"cuda\"\ngpus = [0]\n\n# which model do you train? [\"all"
  },
  {
    "path": "data/data.md",
    "chars": 497,
    "preview": "# data\nPlease place your data here.  \nIn my case, I create symbolic links for datasets as shown below.\n\n```\ndata/\n|_ ais"
  },
  {
    "path": "model/LMM/LMM.md",
    "chars": 448,
    "preview": "<div align=\"center\">\n\n# Learned Motion Matching\n\n[**original repo**](https://github.com/orangeduck/Motion-Matching)  \n[*"
  },
  {
    "path": "model/LMM/decompressor.py",
    "chars": 906,
    "preview": "from __future__ import annotations\n\nimport jax.numpy as jnp\nfrom flax import linen as nn\n\nclass Compressor(nn.Module):\n "
  },
  {
    "path": "model/LMM/preprocessing.py",
    "chars": 446,
    "preview": "from __future__ import annotations\n\nfrom pathlib import Path\nimport pickle\n\nfrom anim import bvh\nfrom anim.animation imp"
  },
  {
    "path": "model/LMM/projector.py",
    "chars": 531,
    "preview": "from __future__ import annotations\n\nimport jax.numpy as jnp\nfrom flax import linen as nn\n\nclass Projector(nn.Module):\n  "
  },
  {
    "path": "model/LMM/setting.py",
    "chars": 2886,
    "preview": "\"\"\"\nsetting.py\nDevice setting etc..\n\"\"\"\nfrom __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nimport s"
  },
  {
    "path": "model/LMM/stepper.py",
    "chars": 525,
    "preview": "from __future__ import annotations\n\nimport jax.numpy as jnp\nfrom flax import linen as nn\n\nclass Stepper(nn.Module):\n    "
  },
  {
    "path": "model/LMM/test.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "model/LMM/train.py",
    "chars": 1779,
    "preview": "\"\"\"\ntrain.py\nYou can define your configulation file like below.\n```\npython train.py configs/DeepLearning/LMM.toml\n```\n\"\""
  },
  {
    "path": "test/bvh_viewer.py",
    "chars": 1515,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\n\nimport numpy as np\nimport taichi as ti\n\nBASEPAT"
  },
  {
    "path": "test/calc_matching_time.py",
    "chars": 1350,
    "preview": "# Calculate time for motion matching.\nimport sys\nfrom pathlib import Path\nimport timeit\n\nBASEPATH = Path(__file__).resol"
  },
  {
    "path": "test/character_controller.py",
    "chars": 3045,
    "preview": "# TBD\n\nimport sys\nfrom pathlib import Path\nimport pickle\n\nBASEPATH = Path(__file__).resolve().parent.parent\nsys.path.app"
  },
  {
    "path": "test/inverse_kinematics.py",
    "chars": 0,
    "preview": ""
  },
  {
    "path": "test/path_following.py",
    "chars": 5120,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nimport pickle\nimport numpy as np\n\nBASEPATH = Pat"
  },
  {
    "path": "test/plotting.py",
    "chars": 5458,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nimport numpy as np\nfrom matplotlib.backends.back"
  },
  {
    "path": "test/plotting_cspace.py",
    "chars": 2437,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport matplotlib.ticker as t"
  },
  {
    "path": "test/plotting_global.py",
    "chars": 3084,
    "preview": "from __future__ import annotations\n\nimport sys\nfrom pathlib import Path\nimport numpy as np\nimport matplotlib.ticker as t"
  },
  {
    "path": "todo.md",
    "chars": 934,
    "preview": "# TODO Works\n## Motion Matching\n- [x] 実装したマッチングアルゴリズムが実時間上で動くことを確認(複数回) - 現状1回あたり0.33s \n- [ ] FAISSを用いたGPUマッチングの実装 - fai"
  },
  {
    "path": "update.md",
    "chars": 136,
    "preview": "## Update 1.\n1. Add reading and writing of positions by bvh.\n2. Specifies forward and vertical direction.\n3. Add foot co"
  },
  {
    "path": "util/dualquat.py",
    "chars": 2003,
    "preview": "from __future__ import annotations\n\nimport numpy as np\nfrom util import quat\n\ndef eye(shape: list[int], dtype=np.float32"
  },
  {
    "path": "util/load.py",
    "chars": 689,
    "preview": "\"\"\"\nload.py\n\"\"\"\nfrom __future__ import annotations\n\nfrom pathlib import Path\nimport toml, yaml, pickle, json\nfrom easydi"
  },
  {
    "path": "util/quat.py",
    "chars": 11152,
    "preview": "from __future__ import annotations\n\nimport numpy as np\n\n# Calculate cross object of two 3D vectors.\ndef _fast_cross(a, b"
  }
]

// ... and 1 more file (download for full content)

About this extraction

This page contains the full source code of the KosukeFukazawa/CharacterAnimationTools GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 51 files (165.8 KB), approximately 50.1k tokens, and a symbol index with 276 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!