master ae14392ea47c cached
189 files
52.4 MB
774.1k tokens
240 symbols
1 requests
Download .txt
Showing preview only (3,094K chars total). Download the full file or copy to clipboard to get everything.
Repository: second-state/WasmEdge-WASINN-examples
Branch: master
Commit: ae14392ea47c
Files: 189
Total size: 52.4 MB

Directory structure:
gitextract_91qb3w5n/

├── .github/
│   └── workflows/
│       ├── build_openvino_mobilenet.yml
│       ├── build_openvino_road_seg_adas.yml
│       ├── build_pytorch_yolo.yml
│       ├── chatTTS.yml
│       ├── llama.yml
│       ├── piper.yml
│       ├── pytorch.yml
│       └── tflite.yml
├── .gitignore
├── LICENSE
├── README.md
├── openvino-mobilenet-image/
│   ├── README.md
│   ├── download_mobilenet.sh
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       └── main.rs
│   └── wasmedge-wasinn-example-mobilenet-image.wasm
├── openvino-mobilenet-raw/
│   ├── README.md
│   ├── download_mobilenet.sh
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       └── main.rs
│   └── wasmedge-wasinn-example-mobilenet.wasm
├── openvino-road-segmentation-adas/
│   ├── README.md
│   ├── model/
│   │   └── road-segmentation-adas-0001.xml
│   ├── openvino-road-seg-adas/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       └── main.rs
│   ├── tensor/
│   │   ├── wasinn-openvino-inference-input-512x896x3xf32-bgr.tensor
│   │   └── wasinn-openvino-inference-output-1x4x512x896xf32.tensor
│   └── visualize_inference_result.ipynb
├── openvinogenai-raw/
│   ├── README.md
│   └── rust/
│       ├── Cargo.toml
│       └── src/
│           └── main.rs
├── pytorch-mobilenet-image/
│   ├── README.md
│   ├── gen_mobilenet_model.py
│   ├── gen_tensor.py
│   ├── image-1x3x224x224.rgb
│   ├── mobilenet.pt
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       ├── main.rs
│   │       └── named_model.rs
│   ├── wasmedge-wasinn-example-mobilenet-image-named-model.wasm
│   └── wasmedge-wasinn-example-mobilenet-image.wasm
├── pytorch-yolo-image/
│   ├── README.md
│   ├── get_model.py
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── main.rs
│   │       └── yolo_classes.rs
│   └── yolov8n.torchscript
├── scripts/
│   ├── install_libtorch.sh
│   └── install_openvino.sh
├── tflite-birds_v1-image/
│   ├── README.md
│   ├── lite-model_aiy_vision_classifier_birds_V1_3.tflite
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       └── main.rs
│   └── wasmedge-wasinn-example-tflite-bird-image.wasm
├── wasmedge-chatTTS/
│   ├── .gitignore
│   ├── Cargo.toml
│   ├── README.md
│   ├── assets/
│   │   └── demo.webm
│   ├── src/
│   │   └── main.rs
│   └── wasmedge-chattts.wasm
├── wasmedge-ggml/
│   ├── README.md
│   ├── basic/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-basic.wasm
│   ├── chatml/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-chatml.wasm
│   ├── command-r/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-command-r.wasm
│   ├── embedding/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llama-embedding.wasm
│   ├── gemma/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-gemma.wasm
│   ├── gemma-3/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   ├── base64.rs
│   │   │   └── main.rs
│   │   ├── wasmedge-ggml-gemma-3-base64.wasm
│   │   └── wasmedge-ggml-gemma-3.wasm
│   ├── gemma-4/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-gemma-4.wasm
│   ├── grammar/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-grammar.wasm
│   ├── json-schema/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-json-schema.wasm
│   ├── llama/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llama.wasm
│   ├── llama-stream/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llama-stream.wasm
│   ├── llava/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llava.wasm
│   ├── llava-base64-stream/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llava-base64-stream.wasm
│   ├── multimodel/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-multimodel.wasm
│   ├── nnrpc/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-nnrpc.wasm
│   ├── qwen/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       └── main.rs
│   ├── qwen2vl/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-qwen2vl.wasm
│   ├── test/
│   │   ├── model-not-found/
│   │   │   ├── Cargo.toml
│   │   │   ├── README.md
│   │   │   ├── src/
│   │   │   │   └── main.rs
│   │   │   └── wasmedge-ggml-model-not-found.wasm
│   │   ├── phi-3/
│   │   │   ├── Cargo.toml
│   │   │   ├── README.md
│   │   │   ├── src/
│   │   │   │   └── main.rs
│   │   │   └── wasmedge-ggml-phi-3.wasm
│   │   ├── set-input-twice/
│   │   │   ├── Cargo.toml
│   │   │   ├── README.md
│   │   │   ├── src/
│   │   │   │   └── main.rs
│   │   │   └── wasmedge-ggml-set-input-twice.wasm
│   │   └── unload/
│   │       ├── Cargo.toml
│   │       ├── README.md
│   │       ├── src/
│   │       │   └── main.rs
│   │       └── wasmedge-ggml-unload.wasm
│   ├── tts/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-tts.wasm
│   └── whisper/
│       ├── Cargo.toml
│       ├── README.md
│       ├── src/
│       │   └── main.rs
│       └── whisper-basic.wasm
├── wasmedge-mlx/
│   ├── llama/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       └── main.rs
│   ├── vlm/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── decode.py
│   │   ├── encode.py
│   │   └── src/
│   │       └── main.rs
│   └── whisper/
│       ├── Cargo.toml
│       ├── README.md
│       └── src/
│           └── main.rs
├── wasmedge-piper/
│   ├── Cargo.toml
│   ├── README.md
│   ├── config.schema.json
│   ├── dependencies.d2
│   ├── json_input.schema.json
│   └── src/
│       └── main.rs
├── wasmedge-tf-llama/
│   ├── README.md
│   └── rust/
│       ├── Cargo.toml
│       └── src/
│           └── main.rs
├── wasmedge-tf-mobilenet_v2/
│   ├── README.md
│   ├── imagenet_slim_labels.txt
│   ├── mobilenet_v2_1.4_224_frozen.pb
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       └── main.rs
│   └── wasmedge-tf-example-mobilenet.wasm
└── wasmedge-tf-mtcnn/
    ├── README.md
    ├── mtcnn.pb
    ├── rust/
    │   ├── Cargo.toml
    │   └── src/
    │       └── main.rs
    └── wasmedge-tf-example-mtcnn.wasm

================================================
FILE CONTENTS
================================================

================================================
FILE: .github/workflows/build_openvino_mobilenet.yml
================================================
# Builds and runs both OpenVINO MobileNet examples (raw-tensor input and
# image input) against a released WasmEdge with the WASI-NN OpenVINO plugin.
name: OpenVINO Mobilenet Example

on:
  # Daily run at 00:00 UTC.
  schedule:
    - cron: "0 0 * * *"
  push:
    branches: [master]
    paths:
      - ".github/workflows/build_openvino_mobilenet.yml"
      - "openvino-mobilenet-raw/**"
      - "openvino-mobilenet-image/**"
      - "scripts/install_openvino.sh"
  pull_request:
    branches: [master]
    paths:
      - ".github/workflows/build_openvino_mobilenet.yml"
      - "openvino-mobilenet-raw/**"
      - "openvino-mobilenet-image/**"
      - "scripts/install_openvino.sh"
  merge_group:

env:
  CARGO_TERM_COLOR: always

jobs:
  build_openvino_examples:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust: [1.84]
    container:
      image: wasmedge/wasmedge:ubuntu-build-clang
    steps:
      - name: Checkout Wasi-NN examples
        # checkout@v3 runs on a deprecated Node.js runtime; use v4, which the
        # other workflows in this repository already use.
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install Rust-stable
        uses: dtolnay/rust-toolchain@stable
        with:
          toolchain: ${{ matrix.rust }}
          target: wasm32-wasip1

      - name: Install dependencies
        run: |
          apt update
          apt install -y libtbbmalloc2

      - name: Install OpenVINO
        working-directory: scripts
        run: |
          bash install_openvino.sh

      - name: Install WasmEdge with Wasi-NN OpenVINO plugin
        env:
          CMAKE_BUILD_TYPE: "Release"
          VERSION: "0.14.1"
        run: |
          curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION -p /usr/local --plugins wasi_nn-openvino

      - name: Build and run openvino-mobilenet-raw
        working-directory: openvino-mobilenet-raw
        run: |
          bash download_mobilenet.sh
          cd rust
          cargo build --target wasm32-wasip1 --release
          cd ..
          wasmedge --dir .:. ./rust/target/wasm32-wasip1/release/wasmedge-wasinn-example-mobilenet.wasm mobilenet.xml mobilenet.bin tensor-1x224x224x3-f32.bgr

      - name: Build and run openvino-mobilenet-image
        working-directory: openvino-mobilenet-image
        run: |
          bash download_mobilenet.sh
          cd rust
          cargo build --target wasm32-wasip1 --release
          cd ..
          wasmedge --dir .:. ./rust/target/wasm32-wasip1/release/wasmedge-wasinn-example-mobilenet-image.wasm mobilenet.xml mobilenet.bin input.jpg

================================================
FILE: .github/workflows/build_openvino_road_seg_adas.yml
================================================
# Builds and runs the OpenVINO road-segmentation ADAS example against a
# released WasmEdge with the WASI-NN OpenVINO plugin.
name: OpenVINO Road Segmentation ADAS Example

on:
  # Daily run at 00:00 UTC.
  schedule:
    - cron: "0 0 * * *"
  push:
    branches: [master]
    paths:
      - ".github/workflows/build_openvino_road_seg_adas.yml"
      - "openvino-road-segmentation-adas/**"
      - "scripts/install_openvino.sh"
  pull_request:
    branches: [master]
    paths:
      - ".github/workflows/build_openvino_road_seg_adas.yml"
      - "openvino-road-segmentation-adas/**"
      - "scripts/install_openvino.sh"
  merge_group:

env:
  CARGO_TERM_COLOR: always

jobs:
  build_openvino_examples:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust: [1.84]
    container:
      image: wasmedge/wasmedge:ubuntu-build-clang
    steps:
      - name: Checkout Wasi-NN examples
        # checkout@v3 runs on a deprecated Node.js runtime; use v4, which the
        # other workflows in this repository already use.
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install Rust-stable
        uses: dtolnay/rust-toolchain@stable
        with:
          toolchain: ${{ matrix.rust }}
          target: wasm32-wasip1

      - name: Install dependencies
        run: |
          apt update
          apt install -y libtbbmalloc2

      - name: Install OpenVINO
        working-directory: scripts
        run: |
          bash install_openvino.sh

      - name: Install WasmEdge with Wasi-NN OpenVINO plugin
        env:
          CMAKE_BUILD_TYPE: "Release"
          VERSION: "0.14.1"
        run: |
          curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION -p /usr/local --plugins wasi_nn-openvino

      - name: Build and run openvino-road-segmentation-adas
        working-directory: openvino-road-segmentation-adas
        run: |
          cd openvino-road-seg-adas
          cargo build --target=wasm32-wasip1 --release
          cp target/wasm32-wasip1/release/openvino-road-seg-adas.wasm ..
          cd ..
          wasmedge --dir .:. openvino-road-seg-adas.wasm ./model/road-segmentation-adas-0001.xml ./model/road-segmentation-adas-0001.bin ./image/empty_road_mapillary.jpg

================================================
FILE: .github/workflows/build_pytorch_yolo.yml
================================================
# Builds and runs the PyTorch YOLOv8 detection example against a released
# WasmEdge with the WASI-NN PyTorch plugin.
name: Pytorch Yolo Detection

on:
  # Daily run at 00:00 UTC.
  schedule:
    - cron: "0 0 * * *"
  push:
    branches: ['*']
    paths:
      - ".github/workflows/build_pytorch_yolo.yml"
      - "pytorch-yolo-image/**"
      - "scripts/install_libtorch.sh"
  pull_request:
    branches: ['*']
    paths:
      - ".github/workflows/build_pytorch_yolo.yml"
      - "pytorch-yolo-image/**"
      - "scripts/install_libtorch.sh"
  merge_group:

env:
  CARGO_TERM_COLOR: always

jobs:
  build_pytorch_examples:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        rust: [1.84]
    container:
      image: wasmedge/wasmedge:ubuntu-build-clang
    steps:
      - name: Checkout Wasi-NN examples
        # checkout@v3 runs on a deprecated Node.js runtime; use v4, which the
        # other workflows in this repository already use.
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Install Rust-stable
        uses: dtolnay/rust-toolchain@stable
        with:
          toolchain: ${{ matrix.rust }}
          target: wasm32-wasip1

      - name: Install LibTorch
        working-directory: scripts
        run: |
          set -e
          bash install_libtorch.sh
          # Put the libtorch shared objects on the default loader path so the
          # wasi_nn-pytorch plugin can find them in later steps.
          cp ./libtorch/lib/* /lib/

      - name: Install WasmEdge
        env:
          CMAKE_BUILD_TYPE: "Release"
          VERSION: "0.13.4"
        run: |
          curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION --plugins wasi_nn-pytorch -p /usr/local

      - name: Build and run pytorch-yolo-detection
        run: |
          cd pytorch-yolo-image
          cd rust
          cargo build --target=wasm32-wasip1 --release
          cp target/wasm32-wasip1/release/wasmedge-wasinn-example-yolo-image.wasm ..
          cd ..
          wasmedge --dir .:. wasmedge-wasinn-example-yolo-image.wasm ./yolov8n.torchscript ./input.jpg

================================================
FILE: .github/workflows/chatTTS.yml
================================================
# Builds WasmEdge from source with the WASI-NN ChatTTS backend, runs the
# wasmedge-chatTTS example, and checks that a WAV file was produced.
name: ChatTTS example

on:
  schedule:
    # Daily run at 00:00 UTC.
    - cron: "0 0 * * *"
  push:
    paths:
      - ".github/workflows/chatTTS.yml"
      - "wasmedge-chatTTS/**"
  pull_request:
    paths:
      - ".github/workflows/chatTTS.yml"
      - "wasmedge-chatTTS/**"
  merge_group:
jobs:
  build:
    runs-on: ubuntu-22.04
    steps:
      - name: Install Dependencies for building WasmEdge
        run: |
          sudo apt-get -y update
          sudo apt-get -y install wget git curl software-properties-common build-essential python3 python3-dev python3-pip ninja-build
          # Pinned version of the Python chattts package used by the backend.
          pip install chattts==0.1.1

      - name: Install Rust target for wasm
        run: |
          rustup target add wasm32-wasip1

      # WasmEdge is built from source because the ChatTTS plugin must be
      # enabled at configure time (WASMEDGE_PLUGIN_WASI_NN_BACKEND=ChatTTS).
      - name: Checkout WasmEdge
        uses: actions/checkout@v4
        with:
          repository: WasmEdge/WasmEdge
          path: WasmEdge
      - name: Build WasmEdge with WASI-NN ChatTTS plugin
        run: |
          cmake -GNinja -Bbuild -DCMAKE_BUILD_TYPE=Release -DWASMEDGE_USE_LLVM=OFF -DWASMEDGE_PLUGIN_WASI_NN_BACKEND=ChatTTS
          cmake --build build
        working-directory: WasmEdge

      - name: Checkout WasmEdge-WASINN-examples
        uses: actions/checkout@v4
        with:
          path: WasmEdge-WASINN-examples

      - name: Build wasm
        run: cargo build --target wasm32-wasip1 --release
        working-directory: WasmEdge-WASINN-examples/wasmedge-chatTTS

      # Run the example with the freshly built wasmedge binary and plugin dir.
      - name: Execute
        run: WASMEDGE_PLUGIN_PATH=WasmEdge/build/plugins/wasi_nn WasmEdge/build/tools/wasmedge/wasmedge --dir .:. WasmEdge-WASINN-examples/wasmedge-chatTTS/target/wasm32-wasip1/release/wasmedge-chattts.wasm

      # The example is expected to write output1.wav; `file` confirms the
      # result is 24 kHz mono WAVE audio.
      - name: Verify output
        run: test "$(file --brief output1.wav)" == 'RIFF (little-endian) data, WAVE audio, mono 24000 Hz'


================================================
FILE: .github/workflows/llama.yml
================================================
# Runs the wasmedge-ggml example suite. Each matrix `job` entry downloads a
# GGUF model, builds the example to wasm32-wasip1, and executes it under
# WasmEdge with the wasi_nn-ggml plugin.
name: ggml llama2 examples

on:
  schedule:
    # Daily run at 00:00 UTC.
    - cron: "0 0 * * *"
  workflow_dispatch:
    inputs:
      logLevel:
        description: 'Log level'
        required: true
        default: 'info'
  push:
    branches: [ '*' ]
    paths:
      - ".github/workflows/llama.yml"
      - "wasmedge-ggml/**"
  pull_request:
    branches: [ '*' ]
    paths:
      - ".github/workflows/llama.yml"
      - "wasmedge-ggml/**"
  merge_group:

jobs:
  build:
    strategy:
      matrix:
        runner: [ubuntu-latest]
        wasmedge: ["0.14.1"]
        plugin: [wasi_nn-ggml]
        # Each `job` entry below is one example; its `run` script is executed
        # verbatim by the "${{ matrix.job.name }}" step at the bottom.
        job:
          - name: Gemma 2B
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/gemma
              curl -LO https://huggingface.co/second-state/Gemma-2b-it-GGUF/resolve/main/gemma-2b-it-Q5_K_M.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --env n_gpu_layers="$NGL" \
                --nn-preload default:GGML:AUTO:gemma-2b-it-Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-gemma.wasm \
                default \
                '<start_of_turn>user Where is the capital of Japan? <end_of_turn><start_of_turn>model'

          - name: Llama3 8B
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/llama
              curl -LO https://huggingface.co/QuantFactory/Meta-Llama-3-8B-Instruct-GGUF/resolve/main/Meta-Llama-3-8B-Instruct.Q5_K_M.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --env n_gpu_layers="$NGL" \
                --env llama3=true \
                --nn-preload default:GGML:AUTO:Meta-Llama-3-8B-Instruct.Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-llama.wasm \
                default \
                $"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe.  Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you do not know the answer to a question, please do not share false information.<|eot_id|>\n<|start_header_id|>user<|end_header_id|>\n\nWhat's the capital of Japan?<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|>\n\n"

          - name: Llama3 8B (Streaming)
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/llama-stream
              curl -LO https://huggingface.co/QuantFactory/Meta-Llama-3-8B-Instruct-GGUF/resolve/main/Meta-Llama-3-8B-Instruct.Q5_K_M.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --env n_gpu_layers="$NGL" \
                --env llama3=true \
                --nn-preload default:GGML:AUTO:Meta-Llama-3-8B-Instruct.Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-llama-stream.wasm \
                default \
                $"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe.  Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you do not know the answer to a question, please do not share false information.<|eot_id|>\n<|start_header_id|>user<|end_header_id|>\n\nWhat's the capital of Japan?<|eot_id|>\n<|start_header_id|>assistant<|end_header_id|>\n\n"

          - name: Embedding Example (All-MiniLM)
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/embedding
              curl -LO https://huggingface.co/second-state/All-MiniLM-L6-v2-Embedding-GGUF/resolve/main/all-MiniLM-L6-v2-ggml-model-f16.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --nn-preload default:GGML:AUTO:all-MiniLM-L6-v2-ggml-model-f16.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-llama-embedding.wasm \
                default \
                'hello world'

          - name: RPC Example
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/nnrpc
              curl -LO https://huggingface.co/second-state/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_M.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --env n_gpu_layers="$NGL" \
                --nn-preload default:GGML:AUTO:llama-2-7b-chat.Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-nnrpc.wasm \
                default \
                $'[INST] <<SYS>>\nYou are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe.  Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you do not know the answer to a question, please do not share false information.\n<</SYS>>\nWhat is the capital of Japan?[/INST]'

          - name: Set Input Twice
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/test/set-input-twice
              curl -LO https://huggingface.co/second-state/Gemma-2b-it-GGUF/resolve/main/gemma-2b-it-Q5_K_M.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --env n_gpu_layers="$NGL" \
                --nn-preload default:GGML:AUTO:gemma-2b-it-Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-set-input-twice.wasm \
                default \
                '<start_of_turn>user Where is the capital of Japan? <end_of_turn><start_of_turn>model'

          # Negative test: the preloaded model path does not exist.
          - name: Model Not Found
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/test/model-not-found
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --nn-preload default:GGML:AUTO:model-not-found.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-model-not-found.wasm \
                default

          - name: Unload
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/test/unload
              curl -LO https://huggingface.co/second-state/Gemma-2b-it-GGUF/resolve/main/gemma-2b-it-Q5_K_M.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --nn-preload default:GGML:AUTO:gemma-2b-it-Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-unload.wasm \
                default \
                '<start_of_turn>user Where is the capital of Japan? <end_of_turn><start_of_turn>model'

          - name: JSON Schema
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/json-schema
              curl -LO https://huggingface.co/TheBloke/Llama-2-7b-Chat-GGUF/resolve/main/llama-2-7b-chat.Q5_K_M.gguf
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --env n_gpu_layers="$NGL" \
                --nn-preload default:GGML:AUTO:llama-2-7b-chat.Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-json-schema.wasm \
                default \
                $'[INST] <<SYS>>\nYou are a helpful, respectful and honest assistant. Always output JSON format string.\n<</SYS>>\nGive me a JSON array of Apple products.[/INST]'

          - name: Gemma-3 Vision
            shell: bash
            run: |
              test -f ~/.wasmedge/env && source ~/.wasmedge/env
              cd wasmedge-ggml/gemma-3
              curl -LO https://huggingface.co/second-state/gemma-3-4b-it-GGUF/resolve/main/gemma-3-4b-it-Q5_K_M.gguf
              curl -LO https://huggingface.co/second-state/gemma-3-4b-it-GGUF/resolve/main/gemma-3-4b-it-mmproj-f16.gguf
              curl -LO https://llava-vl.github.io/static/images/monalisa.jpg
              cargo build --target wasm32-wasip1 --release
              time wasmedge --dir .:. \
                --env n_gpu_layers="$NGL" \
                --env image=monalisa.jpg \
                --env mmproj=gemma-3-4b-it-mmproj-f16.gguf \
                --nn-preload default:GGML:AUTO:gemma-3-4b-it-Q5_K_M.gguf \
                target/wasm32-wasip1/release/wasmedge-ggml-gemma-3.wasm \
                default \
                $'<start_of_turn>user\n<start_of_image><image><end_of_image>Describe this image<end_of_turn>\n<start_of_turn>model\n'

          # Build-only entries: compile the examples without executing them.
          - name: Build llama-stream
            run: |
              cd wasmedge-ggml/llama-stream
              cargo build --target wasm32-wasip1 --release

          - name: Build llava-base64-stream
            run: |
              cd wasmedge-ggml/llava-base64-stream
              cargo build --target wasm32-wasip1 --release

    name: ${{ matrix.runner == 'ubuntu-latest' && 'ubuntu:20.04' || matrix.runner }} - ${{ matrix.job.name }} - ${{ matrix.wasmedge }} - ${{ matrix.plugin }}
    runs-on: ${{ matrix.runner }}
    # set image to `ubuntu:20.04` if runner is `ubuntu-latest`
    container: ${{ matrix.runner == 'ubuntu-latest' && fromJSON('{"image":"ubuntu:20.04"}') || null }}
    steps:
    - uses: actions/checkout@v4

    - if: ${{ matrix.runner == 'ubuntu-latest' }}
      name: Install apt-get packages
      run: |
        ACCEPT_EULA=Y apt-get update
        ACCEPT_EULA=Y apt-get upgrade -y
        apt-get install -y wget git curl software-properties-common build-essential
      env:
        DEBIAN_FRONTEND: noninteractive

    - name: Install Rust target for wasm
      uses: dtolnay/rust-toolchain@stable
      with:
        target: wasm32-wasip1

    - name: Install WasmEdge + WASI-NN + GGML
      run: |
        curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v ${{ matrix.wasmedge }} --plugins ${{ matrix.plugin }}

    # NGL = number of GPU layers; defaults to 0 when the matrix entry does
    # not define `ngl`, so CPU-only runners work unchanged.
    - name: Set environment variable
      run: echo "NGL=${{ matrix.ngl || 0 }}" >> $GITHUB_ENV

    - name: ${{ matrix.job.name }}
      run: ${{ matrix.job.run }}
      shell: bash

================================================
FILE: .github/workflows/piper.yml
================================================
# Builds WasmEdge from source with the WASI-NN Piper (text-to-speech)
# backend, runs the wasmedge-piper example, and verifies the WAV output.
name: Piper Example

on:
  schedule:
    # Daily run at 00:00 UTC.
    - cron: "0 0 * * *"
  push:
    paths:
      - ".github/workflows/piper.yml"
      - "wasmedge-piper/**"
  pull_request:
    paths:
      - ".github/workflows/piper.yml"
      - "wasmedge-piper/**"
  merge_group:

jobs:
  build:
    runs-on: ubuntu-22.04
    steps:
      - name: Install Dependencies for building WasmEdge
        run: |
          sudo apt-get update
          sudo apt-get install ninja-build

      # WasmEdge is built from source because the Piper plugin must be
      # enabled at configure time (WASMEDGE_PLUGIN_WASI_NN_BACKEND=Piper).
      - name: Checkout WasmEdge
        uses: actions/checkout@v4
        with:
          repository: WasmEdge/WasmEdge
          path: WasmEdge

      # The Piper backend depends on ONNX Runtime; the install script ships
      # with the WasmEdge source tree.
      - name: Install ONNX Runtime
        run: sudo bash utils/wasi-nn/install-onnxruntime.sh
        working-directory: WasmEdge

      - name: Build WasmEdge with WASI-NN Piper plugin
        run: |
          cmake -GNinja -Bbuild -DCMAKE_BUILD_TYPE=Release -DWASMEDGE_USE_LLVM=OFF -DWASMEDGE_PLUGIN_WASI_NN_BACKEND=Piper
          cmake --build build
        working-directory: WasmEdge

      - name: Install Rust target for wasm
        run: rustup target add wasm32-wasip1

      - name: Checkout WasmEdge-WASINN-examples
        uses: actions/checkout@v4
        with:
          path: WasmEdge-WASINN-examples

      - name: Build wasm
        run: cargo build --target wasm32-wasip1 --release
        working-directory: WasmEdge-WASINN-examples/wasmedge-piper

      # Voice model + its JSON config for the en_US "lessac" medium voice.
      - name: Download model
        run: curl -LO https://huggingface.co/rhasspy/piper-voices/resolve/main/en/en_US/lessac/medium/en_US-lessac-medium.onnx

      - name: Download config
        run: curl -LO https://huggingface.co/rhasspy/piper-voices/resolve/main/en/en_US/lessac/medium/en_US-lessac-medium.onnx.json

      # Extract only the espeak-ng-data directory from the piper release
      # tarball (phonemization data needed at runtime).
      - name: Download espeak-ng-data
        run: |
          curl -LO https://github.com/rhasspy/piper/releases/download/2023.11.14-2/piper_linux_x86_64.tar.gz
          tar -xzf piper_linux_x86_64.tar.gz piper/espeak-ng-data --strip-components=1
          rm piper_linux_x86_64.tar.gz

      - name: Execute
        run: WASMEDGE_PLUGIN_PATH=WasmEdge/build/plugins/wasi_nn WasmEdge/build/tools/wasmedge/wasmedge --dir .:. WasmEdge-WASINN-examples/wasmedge-piper/target/wasm32-wasip1/release/wasmedge-piper.wasm

      # The example writes welcome.wav; `file` confirms it is 16-bit mono
      # 22050 Hz PCM WAVE audio.
      - name: Verify output
        run: test "$(file --brief welcome.wav)" == 'RIFF (little-endian) data, WAVE audio, Microsoft PCM, 16 bit, mono 22050 Hz'

================================================
FILE: .github/workflows/pytorch.yml
================================================
# Builds and runs the PyTorch MobileNet examples (anonymous and named model)
# against a released WasmEdge with the WASI-NN PyTorch plugin.
name: PyTorch examples

on:
  # Daily run at 00:00 UTC.
  schedule:
    - cron: "0 0 * * *"
  workflow_dispatch:
    inputs:
      logLevel:
        description: 'Log level'
        required: true
        default: 'info'
  push:
    branches: [ '*' ]
    paths:
      - ".github/workflows/pytorch.yml"
      - "pytorch-mobilenet-image/**"
  pull_request:
    branches: [ '*' ]
    paths:
      - ".github/workflows/pytorch.yml"
      - "pytorch-mobilenet-image/**"
  merge_group:

jobs:
  build:

    runs-on: ubuntu-latest
    container:
      image: ubuntu:20.04

    steps:
    - uses: actions/checkout@v4

    - name: Install apt-get packages
      run: |
        ACCEPT_EULA=Y apt-get update
        ACCEPT_EULA=Y apt-get upgrade -y
        apt-get install -y wget git curl software-properties-common build-essential unzip
      env:
        DEBIAN_FRONTEND: noninteractive

    - name: Install Rust target for wasm
      uses: dtolnay/rust-toolchain@stable
      with:
        target: wasm32-wasip1

    - name: Install WasmEdge + WASI-NN + PyTorch
      run: |
        VERSION=0.13.4
        curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION --plugins wasi_nn-pytorch -p /usr/local
        export PYTORCH_VERSION="1.8.2"
        # For the Ubuntu 20.04 or above, use the libtorch with cxx11 abi.
        export PYTORCH_ABI="libtorch-cxx11-abi"
        curl -s -L -O --remote-name-all https://download.pytorch.org/libtorch/lts/1.8/cpu/${PYTORCH_ABI}-shared-with-deps-${PYTORCH_VERSION}%2Bcpu.zip
        unzip -q "${PYTORCH_ABI}-shared-with-deps-${PYTORCH_VERSION}%2Bcpu.zip"
        rm -f "${PYTORCH_ABI}-shared-with-deps-${PYTORCH_VERSION}%2Bcpu.zip"
        # NOTE: exported variables do not survive across steps, so
        # LD_LIBRARY_PATH is exported in the step that actually needs it.

    - name: Example
      run: |
        # libtorch was unpacked into ./libtorch by the previous step; make its
        # shared objects visible to the wasi_nn-pytorch plugin.
        export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:$(pwd)/libtorch/lib
        cd pytorch-mobilenet-image/rust
        cargo build --target wasm32-wasip1 --release
        cd ..
        # AOT-compile both example wasm files before running them.
        wasmedge compile rust/target/wasm32-wasip1/release/wasmedge-wasinn-example-mobilenet-image.wasm wasmedge-wasinn-example-mobilenet-image-aot.wasm
        wasmedge compile rust/target/wasm32-wasip1/release/wasmedge-wasinn-example-mobilenet-image-named-model.wasm wasmedge-wasinn-example-mobilenet-image-named-model-aot.wasm
        echo "Run without named model"
        wasmedge --dir .:. wasmedge-wasinn-example-mobilenet-image-aot.wasm mobilenet.pt input.jpg
        echo "Run with named model"
        wasmedge --dir .:. --nn-preload demo:PyTorch:CPU:mobilenet.pt wasmedge-wasinn-example-mobilenet-image-named-model-aot.wasm demo input.jpg

================================================
FILE: .github/workflows/tflite.yml
================================================
# CI workflow for the TensorFlow-Lite WASI-NN example: builds the Rust guest
# to wasm32-wasip1, AOT-compiles it, and classifies a bird image with the
# bundled TFLite model.
name: TFlite examples

on:
  # Nightly run at 00:00 UTC.
  schedule:
    - cron: "0 0 * * *"
  # Manual trigger; logLevel is informational only (not read by any step).
  workflow_dispatch:
    inputs:
      logLevel:
        description: 'Log level'
        required: true
        default: 'info'
  # Run only when this workflow or the example it covers changes.
  push:
    branches: [ '*' ]
    paths:
      - ".github/workflows/tflite.yml"
      - "tflite-birds_v1-image/**"
  pull_request:
    branches: [ '*' ]
    paths:
      - ".github/workflows/tflite.yml"
      - "tflite-birds_v1-image/**"
  merge_group:

jobs:
  build:

    # Steps execute inside an Ubuntu 20.04 container on the hosted runner.
    runs-on: ubuntu-latest
    container:
      image: ubuntu:20.04

    steps:
    - uses: actions/checkout@v4

    - name: Install apt-get packages
      run: |
        ACCEPT_EULA=Y apt-get update
        ACCEPT_EULA=Y apt-get upgrade -y
        apt-get install -y wget git curl software-properties-common build-essential
      env:
        # Suppress interactive prompts (tzdata etc.) during apt installs.
        DEBIAN_FRONTEND: noninteractive

    - name: Install Rust target for wasm
      uses: dtolnay/rust-toolchain@stable
      with:
        target: wasm32-wasip1

    - name: Install WasmEdge + WASI-NN + TFLite
      run: |
        VERSION=0.13.4
        TFVERSION=2.12.0
        # The TFLite shared libraries must be on the loader path before the
        # wasi_nn-tensorflowlite plugin is installed/used.
        curl -s -L -O --remote-name-all https://github.com/second-state/WasmEdge-tensorflow-deps/releases/download/TF-2.12.0-CC/WasmEdge-tensorflow-deps-TFLite-TF-$TFVERSION-CC-manylinux2014_x86_64.tar.gz
        tar -zxf WasmEdge-tensorflow-deps-TFLite-TF-$TFVERSION-CC-manylinux2014_x86_64.tar.gz
        rm -f WasmEdge-tensorflow-deps-TFLite-TF-$TFVERSION-CC-manylinux2014_x86_64.tar.gz
        mv libtensorflowlite_c.so /usr/local/lib
        mv libtensorflowlite_flex.so /usr/local/lib
        curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION --plugins wasi_nn-tensorflowlite -p /usr/local

    - name: Example
      run: |
        cd tflite-birds_v1-image/rust
        cargo build --target wasm32-wasip1 --release
        cd ..
        # AOT-compile into the example directory (same basename, different
        # directory from the cargo build output), then run inference.
        wasmedge compile rust/target/wasm32-wasip1/release/wasmedge-wasinn-example-tflite-bird-image.wasm wasmedge-wasinn-example-tflite-bird-image.wasm
        wasmedge --dir .:. wasmedge-wasinn-example-tflite-bird-image.wasm lite-model_aiy_vision_classifier_birds_V1_3.tflite bird.jpg


================================================
FILE: .gitignore
================================================
# Build artifacts and downloaded model weights (GGUF).
**/build
**/target
**/*.gguf

# MobileNet model files fetched by download_mobilenet.sh.
openvino-mobilenet-image/mobilenet.bin
openvino-mobilenet-image/mobilenet.xml

openvino-mobilenet-raw/mobilenet.bin
openvino-mobilenet-raw/mobilenet.xml
openvino-mobilenet-raw/tensor-1x224x224x3-f32.bgr


# YOLO model files downloaded/exported for the PyTorch example.
pytorch-yolo-image/*.pt
pytorch-yolo-image/*.onnx

# Llama-2 GGML weights downloaded for the llama example.
wasmedge-ggml-llama/llama-2-7b-chat.ggmlv3.q4_0.bin

# Editor/OS noise; Cargo.lock is not committed for these example crates.
.DS_Store
Cargo.lock


================================================
FILE: LICENSE
================================================

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!)  The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.


--- LLVM Exceptions to the Apache 2.0 License ----

As an exception, if, as a result of your compiling your source code, portions
of this Software are embedded into an Object form of such source code, you
may redistribute such embedded portions in such Object form without complying
with the conditions of Sections 4(a), 4(b) and 4(d) of the License.

In addition, if you combine or link compiled forms of this Software with
software that is licensed under the GPLv2 ("Combined Software") and if a
court of competent jurisdiction determines that the patent provision (Section
3), the indemnity provision (Section 9) or other Section of the License
conflicts with the conditions of the GPLv2, you may retroactively and
prospectively choose to deem waived or otherwise exclude such Section(s) of
the License, but only in their entirety and only with respect to the Combined
Software.



================================================
FILE: README.md
================================================
<div align="center">
  <h1>WasmEdge WASI-NN Examples</h1>

  <p><strong>High-level bindings for writing wasi-nn applications</strong></p>

  <p>
    <a href="https://github.com/second-state/WasmEdge-WASINN-examples/actions/workflows/llama.yml/">
      <img src="https://github.com/second-state/WasmEdge-WASINN-examples/actions/workflows/llama.yml/badge.svg" alt="CI status - llama"/>
    </a>
    <a href="https://github.com/second-state/WasmEdge-WASINN-examples/actions/workflows/pytorch.yml/">
      <img src="https://github.com/second-state/WasmEdge-WASINN-examples/actions/workflows/pytorch.yml/badge.svg" alt="CI status - pytorch"/>
    </a>
    <a href="https://github.com/second-state/WasmEdge-WASINN-examples/actions/workflows/tflite.yml/">
      <img src="https://github.com/second-state/WasmEdge-WASINN-examples/actions/workflows/tflite.yml/badge.svg" alt="CI status - tflite"/>
    </a>
  </p>
</div>

### Introduction

This project provides examples of the high-level [wasi-nn] bindings and WasmEdge-TensorFlow plug-ins in the Rust programming language. Developers can refer to this project to write their machine learning applications in a high-level language using the bindings, compile them to WebAssembly, and run them with a WebAssembly runtime that supports the [wasi-nn] proposal, such as [WasmEdge].

### Prerequisites

#### OpenVINO Installation

Developers should install [OpenVINO] before building and running WasmEdge with wasi-nn and the examples.
For this project, we use version `2023.0.0`. Please refer to the [WasmEdge Docs](https://wasmedge.org/docs/contribute/source/plugin/wasi_nn) and the [OpenVINO™ installation guide](https://docs.openvino.ai/2023.0/openvino_docs_install_guides_installing_openvino_apt.html) (2023) for details.

```bash
wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
echo "deb https://apt.repos.intel.com/openvino/2023 ubuntu20 main" | sudo tee /etc/apt/sources.list.d/intel-openvino-2023.list
sudo apt update
sudo apt-get -y install openvino
ldconfig
```
[OpenVINO]: https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/overview.html

#### Rust Installation

For building the WASM files from rust source, please refer to the [Rust Official Site](https://www.rust-lang.org/tools/install) for the Rust installation.
After the installation, developers should add the `wasm32-wasip1` target.

```bash
rustup target add wasm32-wasip1
```

#### Download the `wasi-nn` Rust Crate

In Rust, download the [crate from crates.io](https://crates.io/crates/wasi-nn) by adding `wasi-nn = "0.4.0"` as a Cargo dependency.

For using WasmEdge-TensorFlow plug-ins, please download the [crate from crates.io](https://crates.io/crates/wasmedge_tensorflow_interface) by adding `wasmedge_tensorflow_interface = "0.3.0"` as a Cargo dependency.

#### WasmEdge Installation

You can refer to [here to install WasmEdge](https://wasmedge.org/docs/start/install#install).

For the examples with different wasi-nn backends or using the WasmEdge-Tensorflow plug-ins, please install with plug-ins and their dependencies:

- [wasi-nn plug-in with OpenVINO backend](https://wasmedge.org/docs/start/install#wasi-nn-plug-in-with-openvino-backend)
- [wasi-nn plug-in with PyTorch backend](https://wasmedge.org/docs/start/install#wasi-nn-plug-in-with-pytorch-backend)
- [wasi-nn plug-in with TensorFlow-Lite backend](https://wasmedge.org/docs/start/install#wasi-nn-plug-in-with-tensorflow-lite-backend)
- [WasmEdge-Image plug-in](https://wasmedge.org/docs/start/install#wasmedge-image-plug-in)
- [WasmEdge-TensorFlow plug-in](https://wasmedge.org/docs/start/install#wasmedge-tensorflow-plug-in)
- [WasmEdge-TensorFlow-Lite plug-in](https://wasmedge.org/docs/start/install#wasmedge-tensorflow-lite-plug-in)

### Examples

[Mobilenet](mobilenet)

### Related Links

- [WASI]
- [wasi-nn]
- [WasmEdge]
- [WasmEdge-TensorFlow rust interface](https://crates.io/crates/wasmedge_tensorflow_interface)
- [wasi-nn-guest](https://github.com/radu-matei/wasi-nn-guest)

[WasmEdge]: https://wasmedge.org/
[wasi-nn]: https://github.com/WebAssembly/wasi-nn
[WASI]: https://github.com/WebAssembly/WASI

### License

This project is licensed under the Apache 2.0 license. See [LICENSE](LICENSE) for more details.

### Contribution

Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this project by you, as defined in the Apache-2.0 license, shall be licensed as above, without any additional terms or conditions.


================================================
FILE: openvino-mobilenet-image/README.md
================================================
# Mobilenet example with WasmEdge WASI-NN OpenVINO plugin

This example demonstrates how to use WasmEdge WASI-NN OpenVINO plugin to perform an inference task with Mobilenet model.

## Set up the environment

- Install `rustup` and `Rust`

  Go to the [official Rust webpage](https://www.rust-lang.org/tools/install) and follow the instructions to install `rustup` and `Rust`.

  > It is recommended to use Rust 1.68 or above in the stable channel.

  Then, add `wasm32-wasip1` target to the Rustup toolchain:

  ```bash
  rustup target add wasm32-wasip1
  ```

- Clone the example repo

  ```bash
  git clone https://github.com/second-state/WasmEdge-WASINN-examples.git
  ```

- Install OpenVINO

Please refer to [WasmEdge Docs](https://wasmedge.org/docs/contribute/source/plugin/wasi_nn) and [OpenVINO™](https://docs.openvino.ai/2023.0/openvino_docs_install_guides_installing_openvino_apt.html)(2023) for the installation process.

  ```bash
  bash WasmEdge-WASINN-examples/scripts/install_openvino.sh
  ldconfig
  ```

- Install WasmEdge with Wasi-NN OpenVINO plugin

  ```bash
  export CMAKE_BUILD_TYPE=Release
  export VERSION=0.13.2

  curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION -p /usr/local --plugins wasi_nn-openvino
  ldconfig
  ```

## Build and run `openvino-mobilenet-image` example

- Download `MobileNet` model file

  ```bash
  cd openvino-mobilenet-image
  bash download_mobilenet.sh
  ```

- Build and run the example

  ```bash
  cd rust
  cargo build --target wasm32-wasip1 --release
  cd ..

  wasmedge --dir .:. ./rust/target/wasm32-wasip1/release/wasmedge-wasinn-example-mobilenet-image.wasm mobilenet.xml mobilenet.bin input.jpg
  ```

  If the commands above run successfully, you will get the output:
  
  ```bash
  Read graph XML, size in bytes: 143525
  Read graph weights, size in bytes: 13956476
  Loaded graph into wasi-nn with ID: 0
  Created wasi-nn execution context with ID: 0
  Read input tensor, size in bytes: 602112
  Executed graph inference
     1.) [954](0.9789)banana
     2.) [940](0.0074)spaghetti squash
     3.) [951](0.0014)lemon
     4.) [969](0.0005)eggnog
     5.) [942](0.0005)butternut squash
  ```


================================================
FILE: openvino-mobilenet-image/download_mobilenet.sh
================================================
#!/bin/bash
# Download the MobileNet model fixtures (topology XML + weights BIN) used by
# the openvino-mobilenet-image example.
#
# Usage: download_mobilenet.sh [TODIR]
#   TODIR - destination directory (defaults to the current directory).

set -eu

# Pinned fixture location from the openvino-rs test suite.
FIXTURE=https://github.com/intel/openvino-rs/raw/v0.3.3/crates/openvino/tests/fixtures/mobilenet
# Default to "." so that, when no argument is given, the existence checks
# below look in the working directory instead of the filesystem root
# (the unquoted/empty $TODIR previously made them test "/mobilenet.bin").
TODIR=${1:-.}

# Fetch each file only if it is not already present; --no-clobber also
# guards against overwriting an existing copy.
if [ ! -f "$TODIR/mobilenet.bin" ]; then
    wget --no-clobber --directory-prefix="$TODIR" "$FIXTURE/mobilenet.bin"
fi
if [ ! -f "$TODIR/mobilenet.xml" ]; then
    wget --no-clobber --directory-prefix="$TODIR" "$FIXTURE/mobilenet.xml"
fi


================================================
FILE: openvino-mobilenet-image/rust/Cargo.toml
================================================
# Build manifest for the OpenVINO MobileNet image-classification example.
[package]
name = "wasmedge-wasinn-example-mobilenet-image"
version = "0.1.0"
authors = ["Second-State"]
readme = "README.md"
edition = "2021"
publish = false

[dependencies]
# Image decoding for preparing the input tensor; default features disabled
# and codecs listed explicitly to keep the wasm build lean.
image = { version = "0.23.14", default-features = false, features = ["gif", "jpeg", "ico", "png", "pnm", "tga", "tiff", "webp", "bmp", "hdr", "dxt", "dds", "farbfeld"]  }
# High-level Rust bindings for the WASI-NN proposal.
wasi-nn = { version = "0.6.0" }

# Empty workspace table: build this crate standalone even though it is
# nested inside a larger repository checkout.
[workspace]


================================================
FILE: openvino-mobilenet-image/rust/src/imagenet_classes.rs
================================================
/**
 * @license
 * Copyright 2019 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */

/* The code in this file is adapted from https://github.com/tensorflow/tfjs-models/blob/master/mobilenet/src/imagenet_classes.ts */

pub const IMAGENET_CLASSES: [&str; 1000] = [
    "tench, Tinca tinca",
    "goldfish, Carassius auratus",
    "great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias",
    "tiger shark, Galeocerdo cuvieri",
    "hammerhead, hammerhead shark",
    "electric ray, crampfish, numbfish, torpedo",
    "stingray",
    "cock",
    "hen",
    "ostrich, Struthio camelus",
    "brambling, Fringilla montifringilla",
    "goldfinch, Carduelis carduelis",
    "house finch, linnet, Carpodacus mexicanus",
    "junco, snowbird",
    "indigo bunting, indigo finch, indigo bird, Passerina cyanea",
    "robin, American robin, Turdus migratorius",
    "bulbul",
    "jay",
    "magpie",
    "chickadee",
    "water ouzel, dipper",
    "kite",
    "bald eagle, American eagle, Haliaeetus leucocephalus",
    "vulture",
    "great grey owl, great gray owl, Strix nebulosa",
    "European fire salamander, Salamandra salamandra",
    "common newt, Triturus vulgaris",
    "eft",
    "spotted salamander, Ambystoma maculatum",
    "axolotl, mud puppy, Ambystoma mexicanum",
    "bullfrog, Rana catesbeiana",
    "tree frog, tree-frog",
    "tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui",
    "loggerhead, loggerhead turtle, Caretta caretta",
    "leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea",
    "mud turtle",
    "terrapin",
    "box turtle, box tortoise",
    "banded gecko",
    "common iguana, iguana, Iguana iguana",
    "American chameleon, anole, Anolis carolinensis",
    "whiptail, whiptail lizard",
    "agama",
    "frilled lizard, Chlamydosaurus kingi",
    "alligator lizard",
    "Gila monster, Heloderma suspectum",
    "green lizard, Lacerta viridis",
    "African chameleon, Chamaeleo chamaeleon",
    "Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis",
    "African crocodile, Nile crocodile, Crocodylus niloticus",
    "American alligator, Alligator mississipiensis",
    "triceratops",
    "thunder snake, worm snake, Carphophis amoenus",
    "ringneck snake, ring-necked snake, ring snake",
    "hognose snake, puff adder, sand viper",
    "green snake, grass snake",
    "king snake, kingsnake",
    "garter snake, grass snake",
    "water snake",
    "vine snake",
    "night snake, Hypsiglena torquata",
    "boa constrictor, Constrictor constrictor",
    "rock python, rock snake, Python sebae",
    "Indian cobra, Naja naja",
    "green mamba",
    "sea snake",
    "horned viper, cerastes, sand viper, horned asp, Cerastes cornutus",
    "diamondback, diamondback rattlesnake, Crotalus adamanteus",
    "sidewinder, horned rattlesnake, Crotalus cerastes",
    "trilobite",
    "harvestman, daddy longlegs, Phalangium opilio",
    "scorpion",
    "black and gold garden spider, Argiope aurantia",
    "barn spider, Araneus cavaticus",
    "garden spider, Aranea diademata",
    "black widow, Latrodectus mactans",
    "tarantula",
    "wolf spider, hunting spider",
    "tick",
    "centipede",
    "black grouse",
    "ptarmigan",
    "ruffed grouse, partridge, Bonasa umbellus",
    "prairie chicken, prairie grouse, prairie fowl",
    "peacock",
    "quail",
    "partridge",
    "African grey, African gray, Psittacus erithacus",
    "macaw",
    "sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita",
    "lorikeet",
    "coucal",
    "bee eater",
    "hornbill",
    "hummingbird",
    "jacamar",
    "toucan",
    "drake",
    "red-breasted merganser, Mergus serrator",
    "goose",
    "black swan, Cygnus atratus",
    "tusker",
    "echidna, spiny anteater, anteater",
    "platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus",
    "wallaby, brush kangaroo",
    "koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus",
    "wombat",
    "jelly fish",
    "sea anemone, anemone",
    "brain coral",
    "flatworm, platyhelminth",
    "nematode, nematode worm, roundworm",
    "conch",
    "snail",
    "slug",
    "sea slug, nudibranch",
    "chiton, coat-of-mail shell, sea cradle, polyplacophore",
    "chambered nautilus, pearly nautilus, nautilus",
    "Dungeness crab, Cancer magister",
    "rock crab, Cancer irroratus",
    "fiddler crab",
    "king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica",
    "American lobster, Northern lobster, Maine lobster, Homarus americanus",
    "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish",
    "crayfish, crawfish, crawdad, crawdaddy",
    "hermit crab",
    "isopod",
    "white stork, Ciconia ciconia",
    "black stork, Ciconia nigra",
    "spoonbill",
    "flamingo",
    "little blue heron, Egretta caerulea",
    "American egret, great white heron, Egretta albus",
    "bittern",
    "crane",
    "limpkin, Aramus pictus",
    "European gallinule, Porphyrio porphyrio",
    "American coot, marsh hen, mud hen, water hen, Fulica americana",
    "bustard",
    "ruddy turnstone, Arenaria interpres",
    "red-backed sandpiper, dunlin, Erolia alpina",
    "redshank, Tringa totanus",
    "dowitcher",
    "oystercatcher, oyster catcher",
    "pelican",
    "king penguin, Aptenodytes patagonica",
    "albatross, mollymawk",
    "grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus",
    "killer whale, killer, orca, grampus, sea wolf, Orcinus orca",
    "dugong, Dugong dugon",
    "sea lion",
    "Chihuahua",
    "Japanese spaniel",
    "Maltese dog, Maltese terrier, Maltese",
    "Pekinese, Pekingese, Peke",
    "Shih-Tzu",
    "Blenheim spaniel",
    "papillon",
    "toy terrier",
    "Rhodesian ridgeback",
    "Afghan hound, Afghan",
    "basset, basset hound",
    "beagle",
    "bloodhound, sleuthhound",
    "bluetick",
    "black-and-tan coonhound",
    "Walker hound, Walker foxhound",
    "English foxhound",
    "redbone",
    "borzoi, Russian wolfhound",
    "Irish wolfhound",
    "Italian greyhound",
    "whippet",
    "Ibizan hound, Ibizan Podenco",
    "Norwegian elkhound, elkhound",
    "otterhound, otter hound",
    "Saluki, gazelle hound",
    "Scottish deerhound, deerhound",
    "Weimaraner",
    "Staffordshire bullterrier, Staffordshire bull terrier",
    "American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier",
    "Bedlington terrier",
    "Border terrier",
    "Kerry blue terrier",
    "Irish terrier",
    "Norfolk terrier",
    "Norwich terrier",
    "Yorkshire terrier",
    "wire-haired fox terrier",
    "Lakeland terrier",
    "Sealyham terrier, Sealyham",
    "Airedale, Airedale terrier",
    "cairn, cairn terrier",
    "Australian terrier",
    "Dandie Dinmont, Dandie Dinmont terrier",
    "Boston bull, Boston terrier",
    "miniature schnauzer",
    "giant schnauzer",
    "standard schnauzer",
    "Scotch terrier, Scottish terrier, Scottie",
    "Tibetan terrier, chrysanthemum dog",
    "silky terrier, Sydney silky",
    "soft-coated wheaten terrier",
    "West Highland white terrier",
    "Lhasa, Lhasa apso",
    "flat-coated retriever",
    "curly-coated retriever",
    "golden retriever",
    "Labrador retriever",
    "Chesapeake Bay retriever",
    "German short-haired pointer",
    "vizsla, Hungarian pointer",
    "English setter",
    "Irish setter, red setter",
    "Gordon setter",
    "Brittany spaniel",
    "clumber, clumber spaniel",
    "English springer, English springer spaniel",
    "Welsh springer spaniel",
    "cocker spaniel, English cocker spaniel, cocker",
    "Sussex spaniel",
    "Irish water spaniel",
    "kuvasz",
    "schipperke",
    "groenendael",
    "malinois",
    "briard",
    "kelpie",
    "komondor",
    "Old English sheepdog, bobtail",
    "Shetland sheepdog, Shetland sheep dog, Shetland",
    "collie",
    "Border collie",
    "Bouvier des Flandres, Bouviers des Flandres",
    "Rottweiler",
    "German shepherd, German shepherd dog, German police dog, alsatian",
    "Doberman, Doberman pinscher",
    "miniature pinscher",
    "Greater Swiss Mountain dog",
    "Bernese mountain dog",
    "Appenzeller",
    "EntleBucher",
    "boxer",
    "bull mastiff",
    "Tibetan mastiff",
    "French bulldog",
    "Great Dane",
    "Saint Bernard, St Bernard",
    "Eskimo dog, husky",
    "malamute, malemute, Alaskan malamute",
    "Siberian husky",
    "dalmatian, coach dog, carriage dog",
    "affenpinscher, monkey pinscher, monkey dog",
    "basenji",
    "pug, pug-dog",
    "Leonberg",
    "Newfoundland, Newfoundland dog",
    "Great Pyrenees",
    "Samoyed, Samoyede",
    "Pomeranian",
    "chow, chow chow",
    "keeshond",
    "Brabancon griffon",
    "Pembroke, Pembroke Welsh corgi",
    "Cardigan, Cardigan Welsh corgi",
    "toy poodle",
    "miniature poodle",
    "standard poodle",
    "Mexican hairless",
    "timber wolf, grey wolf, gray wolf, Canis lupus",
    "white wolf, Arctic wolf, Canis lupus tundrarum",
    "red wolf, maned wolf, Canis rufus, Canis niger",
    "coyote, prairie wolf, brush wolf, Canis latrans",
    "dingo, warrigal, warragal, Canis dingo",
    "dhole, Cuon alpinus",
    "African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus",
    "hyena, hyaena",
    "red fox, Vulpes vulpes",
    "kit fox, Vulpes macrotis",
    "Arctic fox, white fox, Alopex lagopus",
    "grey fox, gray fox, Urocyon cinereoargenteus",
    "tabby, tabby cat",
    "tiger cat",
    "Persian cat",
    "Siamese cat, Siamese",
    "Egyptian cat",
    "cougar, puma, catamount, mountain lion, painter, panther, Felis concolor",
    "lynx, catamount",
    "leopard, Panthera pardus",
    "snow leopard, ounce, Panthera uncia",
    "jaguar, panther, Panthera onca, Felis onca",
    "lion, king of beasts, Panthera leo",
    "tiger, Panthera tigris",
    "cheetah, chetah, Acinonyx jubatus",
    "brown bear, bruin, Ursus arctos",
    "American black bear, black bear, Ursus americanus, Euarctos americanus",
    "ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus",
    "sloth bear, Melursus ursinus, Ursus ursinus",
    "mongoose",
    "meerkat, mierkat",
    "tiger beetle",
    "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle",
    "ground beetle, carabid beetle",
    "long-horned beetle, longicorn, longicorn beetle",
    "leaf beetle, chrysomelid",
    "dung beetle",
    "rhinoceros beetle",
    "weevil",
    "fly",
    "bee",
    "ant, emmet, pismire",
    "grasshopper, hopper",
    "cricket",
    "walking stick, walkingstick, stick insect",
    "cockroach, roach",
    "mantis, mantid",
    "cicada, cicala",
    "leafhopper",
    "lacewing, lacewing fly",
    "dragonfly, darning needle, devil\"s darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk",
    "damselfly",
    "admiral",
    "ringlet, ringlet butterfly",
    "monarch, monarch butterfly, milkweed butterfly, Danaus plexippus",
    "cabbage butterfly",
    "sulphur butterfly, sulfur butterfly",
    "lycaenid, lycaenid butterfly",
    "starfish, sea star",
    "sea urchin",
    "sea cucumber, holothurian",
    "wood rabbit, cottontail, cottontail rabbit",
    "hare",
    "Angora, Angora rabbit",
    "hamster",
    "porcupine, hedgehog",
    "fox squirrel, eastern fox squirrel, Sciurus niger",
    "marmot",
    "beaver",
    "guinea pig, Cavia cobaya",
    "sorrel",
    "zebra",
    "hog, pig, grunter, squealer, Sus scrofa",
    "wild boar, boar, Sus scrofa",
    "warthog",
    "hippopotamus, hippo, river horse, Hippopotamus amphibius",
    "ox",
    "water buffalo, water ox, Asiatic buffalo, Bubalus bubalis",
    "bison",
    "ram, tup",
    "bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis",
    "ibex, Capra ibex",
    "hartebeest",
    "impala, Aepyceros melampus",
    "gazelle",
    "Arabian camel, dromedary, Camelus dromedarius",
    "llama",
    "weasel",
    "mink",
    "polecat, fitch, foulmart, foumart, Mustela putorius",
    "black-footed ferret, ferret, Mustela nigripes",
    "otter",
    "skunk, polecat, wood pussy",
    "badger",
    "armadillo",
    "three-toed sloth, ai, Bradypus tridactylus",
    "orangutan, orang, orangutang, Pongo pygmaeus",
    "gorilla, Gorilla gorilla",
    "chimpanzee, chimp, Pan troglodytes",
    "gibbon, Hylobates lar",
    "siamang, Hylobates syndactylus, Symphalangus syndactylus",
    "guenon, guenon monkey",
    "patas, hussar monkey, Erythrocebus patas",
    "baboon",
    "macaque",
    "langur",
    "colobus, colobus monkey",
    "proboscis monkey, Nasalis larvatus",
    "marmoset",
    "capuchin, ringtail, Cebus capucinus",
    "howler monkey, howler",
    "titi, titi monkey",
    "spider monkey, Ateles geoffroyi",
    "squirrel monkey, Saimiri sciureus",
    "Madagascar cat, ring-tailed lemur, Lemur catta",
    "indri, indris, Indri indri, Indri brevicaudatus",
    "Indian elephant, Elephas maximus",
    "African elephant, Loxodonta africana",
    "lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens",
    "giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca",
    "barracouta, snoek",
    "eel",
    "coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch",
    "rock beauty, Holocanthus tricolor",
    "anemone fish",
    "sturgeon",
    "gar, garfish, garpike, billfish, Lepisosteus osseus",
    "lionfish",
    "puffer, pufferfish, blowfish, globefish",
    "abacus",
    "abaya",
    "academic gown, academic robe, judge\"s robe",
    "accordion, piano accordion, squeeze box",
    "acoustic guitar",
    "aircraft carrier, carrier, flattop, attack aircraft carrier",
    "airliner",
    "airship, dirigible",
    "altar",
    "ambulance",
    "amphibian, amphibious vehicle",
    "analog clock",
    "apiary, bee house",
    "apron",
    "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin",
    "assault rifle, assault gun",
    "backpack, back pack, knapsack, packsack, rucksack, haversack",
    "bakery, bakeshop, bakehouse",
    "balance beam, beam",
    "balloon",
    "ballpoint, ballpoint pen, ballpen, Biro",
    "Band Aid",
    "banjo",
    "bannister, banister, balustrade, balusters, handrail",
    "barbell",
    "barber chair",
    "barbershop",
    "barn",
    "barometer",
    "barrel, cask",
    "barrow, garden cart, lawn cart, wheelbarrow",
    "baseball",
    "basketball",
    "bassinet",
    "bassoon",
    "bathing cap, swimming cap",
    "bath towel",
    "bathtub, bathing tub, bath, tub",
    "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon",
    "beacon, lighthouse, beacon light, pharos",
    "beaker",
    "bearskin, busby, shako",
    "beer bottle",
    "beer glass",
    "bell cote, bell cot",
    "bib",
    "bicycle-built-for-two, tandem bicycle, tandem",
    "bikini, two-piece",
    "binder, ring-binder",
    "binoculars, field glasses, opera glasses",
    "birdhouse",
    "boathouse",
    "bobsled, bobsleigh, bob",
    "bolo tie, bolo, bola tie, bola",
    "bonnet, poke bonnet",
    "bookcase",
    "bookshop, bookstore, bookstall",
    "bottlecap",
    "bow",
    "bow tie, bow-tie, bowtie",
    "brass, memorial tablet, plaque",
    "brassiere, bra, bandeau",
    "breakwater, groin, groyne, mole, bulwark, seawall, jetty",
    "breastplate, aegis, egis",
    "broom",
    "bucket, pail",
    "buckle",
    "bulletproof vest",
    "bullet train, bullet",
    "butcher shop, meat market",
    "cab, hack, taxi, taxicab",
    "caldron, cauldron",
    "candle, taper, wax light",
    "cannon",
    "canoe",
    "can opener, tin opener",
    "cardigan",
    "car mirror",
    "carousel, carrousel, merry-go-round, roundabout, whirligig",
    "carpenter\"s kit, tool kit",
    "carton",
    "car wheel",
    "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM",
    "cassette",
    "cassette player",
    "castle",
    "catamaran",
    "CD player",
    "cello, violoncello",
    "cellular telephone, cellular phone, cellphone, cell, mobile phone",
    "chain",
    "chainlink fence",
    "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour",
    "chain saw, chainsaw",
    "chest",
    "chiffonier, commode",
    "chime, bell, gong",
    "china cabinet, china closet",
    "Christmas stocking",
    "church, church building",
    "cinema, movie theater, movie theatre, movie house, picture palace",
    "cleaver, meat cleaver, chopper",
    "cliff dwelling",
    "cloak",
    "clog, geta, patten, sabot",
    "cocktail shaker",
    "coffee mug",
    "coffeepot",
    "coil, spiral, volute, whorl, helix",
    "combination lock",
    "computer keyboard, keypad",
    "confectionery, confectionary, candy store",
    "container ship, containership, container vessel",
    "convertible",
    "corkscrew, bottle screw",
    "cornet, horn, trumpet, trump",
    "cowboy boot",
    "cowboy hat, ten-gallon hat",
    "cradle",
    "crane",
    "crash helmet",
    "crate",
    "crib, cot",
    "Crock Pot",
    "croquet ball",
    "crutch",
    "cuirass",
    "dam, dike, dyke",
    "desk",
    "desktop computer",
    "dial telephone, dial phone",
    "diaper, nappy, napkin",
    "digital clock",
    "digital watch",
    "dining table, board",
    "dishrag, dishcloth",
    "dishwasher, dish washer, dishwashing machine",
    "disk brake, disc brake",
    "dock, dockage, docking facility",
    "dogsled, dog sled, dog sleigh",
    "dome",
    "doormat, welcome mat",
    "drilling platform, offshore rig",
    "drum, membranophone, tympan",
    "drumstick",
    "dumbbell",
    "Dutch oven",
    "electric fan, blower",
    "electric guitar",
    "electric locomotive",
    "entertainment center",
    "envelope",
    "espresso maker",
    "face powder",
    "feather boa, boa",
    "file, file cabinet, filing cabinet",
    "fireboat",
    "fire engine, fire truck",
    "fire screen, fireguard",
    "flagpole, flagstaff",
    "flute, transverse flute",
    "folding chair",
    "football helmet",
    "forklift",
    "fountain",
    "fountain pen",
    "four-poster",
    "freight car",
    "French horn, horn",
    "frying pan, frypan, skillet",
    "fur coat",
    "garbage truck, dustcart",
    "gasmask, respirator, gas helmet",
    "gas pump, gasoline pump, petrol pump, island dispenser",
    "goblet",
    "go-kart",
    "golf ball",
    "golfcart, golf cart",
    "gondola",
    "gong, tam-tam",
    "gown",
    "grand piano, grand",
    "greenhouse, nursery, glasshouse",
    "grille, radiator grille",
    "grocery store, grocery, food market, market",
    "guillotine",
    "hair slide",
    "hair spray",
    "half track",
    "hammer",
    "hamper",
    "hand blower, blow dryer, blow drier, hair dryer, hair drier",
    "hand-held computer, hand-held microcomputer",
    "handkerchief, hankie, hanky, hankey",
    "hard disc, hard disk, fixed disk",
    "harmonica, mouth organ, harp, mouth harp",
    "harp",
    "harvester, reaper",
    "hatchet",
    "holster",
    "home theater, home theatre",
    "honeycomb",
    "hook, claw",
    "hoopskirt, crinoline",
    "horizontal bar, high bar",
    "horse cart, horse-cart",
    "hourglass",
    "iPod",
    "iron, smoothing iron",
    "jack-o\"-lantern",
    "jean, blue jean, denim",
    "jeep, landrover",
    "jersey, T-shirt, tee shirt",
    "jigsaw puzzle",
    "jinrikisha, ricksha, rickshaw",
    "joystick",
    "kimono",
    "knee pad",
    "knot",
    "lab coat, laboratory coat",
    "ladle",
    "lampshade, lamp shade",
    "laptop, laptop computer",
    "lawn mower, mower",
    "lens cap, lens cover",
    "letter opener, paper knife, paperknife",
    "library",
    "lifeboat",
    "lighter, light, igniter, ignitor",
    "limousine, limo",
    "liner, ocean liner",
    "lipstick, lip rouge",
    "Loafer",
    "lotion",
    "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system",
    "loupe, jeweler\"s loupe",
    "lumbermill, sawmill",
    "magnetic compass",
    "mailbag, postbag",
    "mailbox, letter box",
    "maillot",
    "maillot, tank suit",
    "manhole cover",
    "maraca",
    "marimba, xylophone",
    "mask",
    "matchstick",
    "maypole",
    "maze, labyrinth",
    "measuring cup",
    "medicine chest, medicine cabinet",
    "megalith, megalithic structure",
    "microphone, mike",
    "microwave, microwave oven",
    "military uniform",
    "milk can",
    "minibus",
    "miniskirt, mini",
    "minivan",
    "missile",
    "mitten",
    "mixing bowl",
    "mobile home, manufactured home",
    "Model T",
    "modem",
    "monastery",
    "monitor",
    "moped",
    "mortar",
    "mortarboard",
    "mosque",
    "mosquito net",
    "motor scooter, scooter",
    "mountain bike, all-terrain bike, off-roader",
    "mountain tent",
    "mouse, computer mouse",
    "mousetrap",
    "moving van",
    "muzzle",
    "nail",
    "neck brace",
    "necklace",
    "nipple",
    "notebook, notebook computer",
    "obelisk",
    "oboe, hautboy, hautbois",
    "ocarina, sweet potato",
    "odometer, hodometer, mileometer, milometer",
    "oil filter",
    "organ, pipe organ",
    "oscilloscope, scope, cathode-ray oscilloscope, CRO",
    "overskirt",
    "oxcart",
    "oxygen mask",
    "packet",
    "paddle, boat paddle",
    "paddlewheel, paddle wheel",
    "padlock",
    "paintbrush",
    "pajama, pyjama, pj\"s, jammies",
    "palace",
    "panpipe, pandean pipe, syrinx",
    "paper towel",
    "parachute, chute",
    "parallel bars, bars",
    "park bench",
    "parking meter",
    "passenger car, coach, carriage",
    "patio, terrace",
    "pay-phone, pay-station",
    "pedestal, plinth, footstall",
    "pencil box, pencil case",
    "pencil sharpener",
    "perfume, essence",
    "Petri dish",
    "photocopier",
    "pick, plectrum, plectron",
    "pickelhaube",
    "picket fence, paling",
    "pickup, pickup truck",
    "pier",
    "piggy bank, penny bank",
    "pill bottle",
    "pillow",
    "ping-pong ball",
    "pinwheel",
    "pirate, pirate ship",
    "pitcher, ewer",
    "plane, carpenter\"s plane, woodworking plane",
    "planetarium",
    "plastic bag",
    "plate rack",
    "plow, plough",
    "plunger, plumber\"s helper",
    "Polaroid camera, Polaroid Land camera",
    "pole",
    "police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria",
    "poncho",
    "pool table, billiard table, snooker table",
    "pop bottle, soda bottle",
    "pot, flowerpot",
    "potter\"s wheel",
    "power drill",
    "prayer rug, prayer mat",
    "printer",
    "prison, prison house",
    "projectile, missile",
    "projector",
    "puck, hockey puck",
    "punching bag, punch bag, punching ball, punchball",
    "purse",
    "quill, quill pen",
    "quilt, comforter, comfort, puff",
    "racer, race car, racing car",
    "racket, racquet",
    "radiator",
    "radio, wireless",
    "radio telescope, radio reflector",
    "rain barrel",
    "recreational vehicle, RV, R.V.",
    "reel",
    "reflex camera",
    "refrigerator, icebox",
    "remote control, remote",
    "restaurant, eating house, eating place, eatery",
    "revolver, six-gun, six-shooter",
    "rifle",
    "rocking chair, rocker",
    "rotisserie",
    "rubber eraser, rubber, pencil eraser",
    "rugby ball",
    "rule, ruler",
    "running shoe",
    "safe",
    "safety pin",
    "saltshaker, salt shaker",
    "sandal",
    "sarong",
    "sax, saxophone",
    "scabbard",
    "scale, weighing machine",
    "school bus",
    "schooner",
    "scoreboard",
    "screen, CRT screen",
    "screw",
    "screwdriver",
    "seat belt, seatbelt",
    "sewing machine",
    "shield, buckler",
    "shoe shop, shoe-shop, shoe store",
    "shoji",
    "shopping basket",
    "shopping cart",
    "shovel",
    "shower cap",
    "shower curtain",
    "ski",
    "ski mask",
    "sleeping bag",
    "slide rule, slipstick",
    "sliding door",
    "slot, one-armed bandit",
    "snorkel",
    "snowmobile",
    "snowplow, snowplough",
    "soap dispenser",
    "soccer ball",
    "sock",
    "solar dish, solar collector, solar furnace",
    "sombrero",
    "soup bowl",
    "space bar",
    "space heater",
    "space shuttle",
    "spatula",
    "speedboat",
    "spider web, spider\"s web",
    "spindle",
    "sports car, sport car",
    "spotlight, spot",
    "stage",
    "steam locomotive",
    "steel arch bridge",
    "steel drum",
    "stethoscope",
    "stole",
    "stone wall",
    "stopwatch, stop watch",
    "stove",
    "strainer",
    "streetcar, tram, tramcar, trolley, trolley car",
    "stretcher",
    "studio couch, day bed",
    "stupa, tope",
    "submarine, pigboat, sub, U-boat",
    "suit, suit of clothes",
    "sundial",
    "sunglass",
    "sunglasses, dark glasses, shades",
    "sunscreen, sunblock, sun blocker",
    "suspension bridge",
    "swab, swob, mop",
    "sweatshirt",
    "swimming trunks, bathing trunks",
    "swing",
    "switch, electric switch, electrical switch",
    "syringe",
    "table lamp",
    "tank, army tank, armored combat vehicle, armoured combat vehicle",
    "tape player",
    "teapot",
    "teddy, teddy bear",
    "television, television system",
    "tennis ball",
    "thatch, thatched roof",
    "theater curtain, theatre curtain",
    "thimble",
    "thresher, thrasher, threshing machine",
    "throne",
    "tile roof",
    "toaster",
    "tobacco shop, tobacconist shop, tobacconist",
    "toilet seat",
    "torch",
    "totem pole",
    "tow truck, tow car, wrecker",
    "toyshop",
    "tractor",
    "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi",
    "tray",
    "trench coat",
    "tricycle, trike, velocipede",
    "trimaran",
    "tripod",
    "triumphal arch",
    "trolleybus, trolley coach, trackless trolley",
    "trombone",
    "tub, vat",
    "turnstile",
    "typewriter keyboard",
    "umbrella",
    "unicycle, monocycle",
    "upright, upright piano",
    "vacuum, vacuum cleaner",
    "vase",
    "vault",
    "velvet",
    "vending machine",
    "vestment",
    "viaduct",
    "violin, fiddle",
    "volleyball",
    "waffle iron",
    "wall clock",
    "wallet, billfold, notecase, pocketbook",
    "wardrobe, closet, press",
    "warplane, military plane",
    "washbasin, handbasin, washbowl, lavabo, wash-hand basin",
    "washer, automatic washer, washing machine",
    "water bottle",
    "water jug",
    "water tower",
    "whiskey jug",
    "whistle",
    "wig",
    "window screen",
    "window shade",
    "Windsor tie",
    "wine bottle",
    "wing",
    "wok",
    "wooden spoon",
    "wool, woolen, woollen",
    "worm fence, snake fence, snake-rail fence, Virginia fence",
    "wreck",
    "yawl",
    "yurt",
    "web site, website, internet site, site",
    "comic book",
    "crossword puzzle, crossword",
    "street sign",
    "traffic light, traffic signal, stoplight",
    "book jacket, dust cover, dust jacket, dust wrapper",
    "menu",
    "plate",
    "guacamole",
    "consomme",
    "hot pot, hotpot",
    "trifle",
    "ice cream, icecream",
    "ice lolly, lolly, lollipop, popsicle",
    "French loaf",
    "bagel, beigel",
    "pretzel",
    "cheeseburger",
    "hotdog, hot dog, red hot",
    "mashed potato",
    "head cabbage",
    "broccoli",
    "cauliflower",
    "zucchini, courgette",
    "spaghetti squash",
    "acorn squash",
    "butternut squash",
    "cucumber, cuke",
    "artichoke, globe artichoke",
    "bell pepper",
    "cardoon",
    "mushroom",
    "Granny Smith",
    "strawberry",
    "orange",
    "lemon",
    "fig",
    "pineapple, ananas",
    "banana",
    "jackfruit, jak, jack",
    "custard apple",
    "pomegranate",
    "hay",
    "carbonara",
    "chocolate sauce, chocolate syrup",
    "dough",
    "meat loaf, meatloaf",
    "pizza, pizza pie",
    "potpie",
    "burrito",
    "red wine",
    "espresso",
    "cup",
    "eggnog",
    "alp",
    "bubble",
    "cliff, drop, drop-off",
    "coral reef",
    "geyser",
    "lakeside, lakeshore",
    "promontory, headland, head, foreland",
    "sandbar, sand bar",
    "seashore, coast, seacoast, sea-coast",
    "valley, vale",
    "volcano",
    "ballplayer, baseball player",
    "groom, bridegroom",
    "scuba diver",
    "rapeseed",
    "daisy",
    "yellow lady\"s slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum",
    "corn",
    "acorn",
    "hip, rose hip, rosehip",
    "buckeye, horse chestnut, conker",
    "coral fungus",
    "agaric",
    "gyromitra",
    "stinkhorn, carrion fungus",
    "earthstar",
    "hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa",
    "bolete",
    "ear, spike, capitulum",
    "toilet tissue, toilet paper, bathroom tissue"
];


================================================
FILE: openvino-mobilenet-image/rust/src/main.rs
================================================
use image::io::Reader;
use image::DynamicImage;
use std::env;
use wasi_nn;
mod imagenet_classes;

use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding, TensorType};

/// Entry point: load an OpenVINO MobileNet model, run inference on one
/// image, and print the top-5 predicted ImageNet classes.
///
/// Expected CLI arguments: `<model.xml> <model.bin> <image file>`.
pub fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args: Vec<String> = env::args().collect();
    // Fail with a clear usage message instead of panicking on a missing
    // argument via out-of-bounds indexing.
    if args.len() < 4 {
        return Err(format!(
            "usage: {} <model.xml> <model.bin> <image file>",
            args.first().map(String::as_str).unwrap_or("main")
        )
        .into());
    }
    let model_xml_path: &str = &args[1];
    let model_bin_path: &str = &args[2];
    let image_name: &str = &args[3];

    // Build the graph on the CPU from the model description (XML) and
    // weights (BIN) files.
    print!("Load graph ...");
    let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
        .build_from_files([model_xml_path, model_bin_path])?;
    println!("done");

    print!("Init execution context ...");
    let mut context = graph.init_execution_context()?;
    println!("done");

    // MobileNet takes a 1x3x224x224 FP32 tensor; `image_to_tensor`
    // produces the channel-first BGR bytes.
    print!("Set input tensor ...");
    let input_dims = vec![1, 3, 224, 224];
    let tensor_data = image_to_tensor(image_name.to_string(), 224, 224);
    context.set_input(0, TensorType::F32, &input_dims, tensor_data)?;
    println!("done");

    print!("Perform graph inference ...");
    context.compute()?;
    println!("done");

    print!("Retrieve the output ...");
    // Copy the output into a buffer of 1001 probabilities (the model's
    // class count, including the background class at index 0).
    let mut output_buffer = vec![0f32; 1001];
    let size_in_bytes = context.get_output(0, &mut output_buffer)?;
    println!("done");
    println!("The size of the output buffer is {} bytes", size_in_bytes);

    // Print the five highest-probability classes. `take(5)` cannot index
    // out of bounds even if fewer results come back.
    let results = sort_results(&output_buffer);
    for (rank, result) in results.iter().take(5).enumerate() {
        println!(
            "   {}.) [{}]({:.4}){}",
            rank + 1,
            result.0,
            result.1,
            imagenet_classes::IMAGENET_CLASSES[result.0]
        );
    }

    Ok(())
}

// Sort the buffer of probabilities in descending order. The graph places
// the probability of each class at that class's buffer index, with the
// background class at index 0. We skip the background entry so that the
// re-enumerated index lines up with the caller's class-name table
// (buffer[i + 1] becomes class id i), then sort by probability.
fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
    let mut results: Vec<InferenceResult> = buffer
        .iter()
        .skip(1)
        .enumerate()
        .map(|(class_id, &prob)| InferenceResult(class_id, prob))
        .collect();
    // `total_cmp` is a total order over f32, so the sort cannot panic
    // even if the model emits NaN (unlike `partial_cmp(..).unwrap()`).
    results.sort_by(|a, b| b.1.total_cmp(&a.1));
    results
}

// Open the image at `path`, resize it to `height` x `width`, convert it to
// BGR, widen each pixel to FP32, and return the result laid out as
// [3, height, width] (channel-first) in native-endian bytes.
fn image_to_tensor(path: String, height: u32, width: u32) -> Vec<u8> {
    let pixels = Reader::open(path).unwrap().decode().unwrap();
    let dyn_img: DynamicImage = pixels.resize_exact(width, height, image::imageops::Triangle);
    let bgr_img = dyn_img.to_bgr8();
    // Interleaved pixel data in [height, width, 3] (HWC) order.
    let raw_u8_arr: &[u8] = &bgr_img.as_raw()[..];

    // Transpose from [height, width, 3] to [3, height, width].
    let mut transposed: Vec<u8> = vec![0; raw_u8_arr.len()];
    for ch in 0..3 {
        for y in 0..height {
            for x in 0..width {
                // Bug fix: the row offset must be scaled by the row length
                // (`width`), not `height`. The original `y * height + x`
                // only worked because the inputs happen to be square.
                let loc = y * width + x;
                transposed[(ch * width * height + loc) as usize] =
                    raw_u8_arr[(loc * 3 + ch) as usize];
            }
        }
    }

    // Widen each u8 pixel value to f32 and serialize to native-endian
    // bytes, as expected by the F32 input tensor.
    let mut u8_f32_arr: Vec<u8> = Vec::with_capacity(transposed.len() * 4);
    for &pixel in &transposed {
        u8_f32_arr.extend_from_slice(&(pixel as f32).to_ne_bytes());
    }
    u8_f32_arr
}

// A wrapper for class ID and match probabilities.
// Field 0 is the zero-based class index (into the caller's class-name
// table); field 1 is the model's probability score for that class.
#[derive(Debug, PartialEq)]
struct InferenceResult(usize, f32);


================================================
FILE: openvino-mobilenet-raw/README.md
================================================
# Mobilenet example with WasmEdge WASI-NN OpenVINO plugin

This example demonstrates how to use the WasmEdge WASI-NN OpenVINO plugin to perform an inference task with the MobileNet model.

## Set up the environment

- Install `rustup` and `Rust`

  Go to the [official Rust webpage](https://www.rust-lang.org/tools/install) and follow the instructions to install `rustup` and `Rust`.

  > It is recommended to use Rust 1.68 or above in the stable channel.

  Then, add `wasm32-wasip1` target to the Rustup toolchain:

  ```bash
  rustup target add wasm32-wasip1
  ```

- Clone the example repo

  ```bash
  git clone https://github.com/second-state/WasmEdge-WASINN-examples.git
  ```

- Install OpenVINO

Please refer to [WasmEdge Docs](https://wasmedge.org/docs/contribute/source/plugin/wasi_nn) and [OpenVINO™](https://docs.openvino.ai/2023.0/openvino_docs_install_guides_installing_openvino_apt.html)(2023) for the installation process.

  ```bash
  bash WasmEdge-WASINN-examples/scripts/install_openvino.sh
  ldconfig
  ```

- Install WasmEdge with Wasi-NN OpenVINO plugin

  ```bash
  export CMAKE_BUILD_TYPE=Release
  export VERSION=0.13.2

  curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION -p /usr/local --plugins wasi_nn-openvino
  ldconfig
  ```

## Build and run `openvino-mobilenet-raw` example

- Download `MobileNet` model file

  ```bash
  cd openvino-mobilenet-raw
  bash download_mobilenet.sh
  ```

- Build and run the example

  ```bash
  cd rust
  cargo build --target wasm32-wasip1 --release
  cd ..

  wasmedge --dir .:. ./rust/target/wasm32-wasip1/release/wasmedge-wasinn-example-mobilenet.wasm mobilenet.xml mobilenet.bin tensor-1x224x224x3-f32.bgr
  ```

  If the commands above run successfully, you will get the output:

  ```bash
  Load graph ...done
  Init execution context ...done
  Set input tensor ...done
  Perform graph inference ...done
  Retrieve the output ...done
  The size of the output buffer is 4004 bytes
     1.) [963](0.7113)pizza, pizza pie
     2.) [762](0.0707)restaurant, eating house, eating place, eatery
     3.) [909](0.0364)wok
     4.) [926](0.0155)hot pot, hotpot
     5.) [567](0.0153)frying pan, frypan, skillet
  ```


================================================
FILE: openvino-mobilenet-raw/download_mobilenet.sh
================================================
# Download the MobileNet fixture files (model XML/weights and a sample input tensor)
# into the directory given as the first argument; default to the current directory
# when no argument is supplied (the original unquoted, empty $TODIR made the
# existence checks test paths like /mobilenet.bin).
FIXTURE=https://github.com/intel/openvino-rs/raw/v0.3.3/crates/openvino/tests/fixtures/mobilenet
TODIR=${1:-.}

# --no-clobber already refuses to overwrite, but the explicit existence check also
# skips the network round-trip entirely when the file is present.
for file in mobilenet.bin mobilenet.xml tensor-1x224x224x3-f32.bgr; do
    if [ ! -f "$TODIR/$file" ]; then
        wget --no-clobber --directory-prefix="$TODIR" "$FIXTURE/$file"
    fi
done


================================================
FILE: openvino-mobilenet-raw/rust/Cargo.toml
================================================
[package]
name = "wasmedge-wasinn-example-mobilenet"
version = "0.1.0"
authors = ["Second-State"]
readme = "README.md"
edition = "2021"
publish = false

[dependencies]
wasi-nn = { version = "0.6.0" }

[workspace]


================================================
FILE: openvino-mobilenet-raw/rust/src/imagenet_classes.rs
================================================
/**
 * @license
 * Copyright 2019 Google LLC. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * =============================================================================
 */

/* The code in this file is adapted from https://github.com/tensorflow/tfjs-models/blob/master/mobilenet/src/imagenet_classes.ts */

pub const IMAGENET_CLASSES: [&str; 1000] = [
    "tench, Tinca tinca",
    "goldfish, Carassius auratus",
    "great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias",
    "tiger shark, Galeocerdo cuvieri",
    "hammerhead, hammerhead shark",
    "electric ray, crampfish, numbfish, torpedo",
    "stingray",
    "cock",
    "hen",
    "ostrich, Struthio camelus",
    "brambling, Fringilla montifringilla",
    "goldfinch, Carduelis carduelis",
    "house finch, linnet, Carpodacus mexicanus",
    "junco, snowbird",
    "indigo bunting, indigo finch, indigo bird, Passerina cyanea",
    "robin, American robin, Turdus migratorius",
    "bulbul",
    "jay",
    "magpie",
    "chickadee",
    "water ouzel, dipper",
    "kite",
    "bald eagle, American eagle, Haliaeetus leucocephalus",
    "vulture",
    "great grey owl, great gray owl, Strix nebulosa",
    "European fire salamander, Salamandra salamandra",
    "common newt, Triturus vulgaris",
    "eft",
    "spotted salamander, Ambystoma maculatum",
    "axolotl, mud puppy, Ambystoma mexicanum",
    "bullfrog, Rana catesbeiana",
    "tree frog, tree-frog",
    "tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui",
    "loggerhead, loggerhead turtle, Caretta caretta",
    "leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea",
    "mud turtle",
    "terrapin",
    "box turtle, box tortoise",
    "banded gecko",
    "common iguana, iguana, Iguana iguana",
    "American chameleon, anole, Anolis carolinensis",
    "whiptail, whiptail lizard",
    "agama",
    "frilled lizard, Chlamydosaurus kingi",
    "alligator lizard",
    "Gila monster, Heloderma suspectum",
    "green lizard, Lacerta viridis",
    "African chameleon, Chamaeleo chamaeleon",
    "Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis",
    "African crocodile, Nile crocodile, Crocodylus niloticus",
    "American alligator, Alligator mississipiensis",
    "triceratops",
    "thunder snake, worm snake, Carphophis amoenus",
    "ringneck snake, ring-necked snake, ring snake",
    "hognose snake, puff adder, sand viper",
    "green snake, grass snake",
    "king snake, kingsnake",
    "garter snake, grass snake",
    "water snake",
    "vine snake",
    "night snake, Hypsiglena torquata",
    "boa constrictor, Constrictor constrictor",
    "rock python, rock snake, Python sebae",
    "Indian cobra, Naja naja",
    "green mamba",
    "sea snake",
    "horned viper, cerastes, sand viper, horned asp, Cerastes cornutus",
    "diamondback, diamondback rattlesnake, Crotalus adamanteus",
    "sidewinder, horned rattlesnake, Crotalus cerastes",
    "trilobite",
    "harvestman, daddy longlegs, Phalangium opilio",
    "scorpion",
    "black and gold garden spider, Argiope aurantia",
    "barn spider, Araneus cavaticus",
    "garden spider, Aranea diademata",
    "black widow, Latrodectus mactans",
    "tarantula",
    "wolf spider, hunting spider",
    "tick",
    "centipede",
    "black grouse",
    "ptarmigan",
    "ruffed grouse, partridge, Bonasa umbellus",
    "prairie chicken, prairie grouse, prairie fowl",
    "peacock",
    "quail",
    "partridge",
    "African grey, African gray, Psittacus erithacus",
    "macaw",
    "sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita",
    "lorikeet",
    "coucal",
    "bee eater",
    "hornbill",
    "hummingbird",
    "jacamar",
    "toucan",
    "drake",
    "red-breasted merganser, Mergus serrator",
    "goose",
    "black swan, Cygnus atratus",
    "tusker",
    "echidna, spiny anteater, anteater",
    "platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus",
    "wallaby, brush kangaroo",
    "koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus",
    "wombat",
    "jelly fish",
    "sea anemone, anemone",
    "brain coral",
    "flatworm, platyhelminth",
    "nematode, nematode worm, roundworm",
    "conch",
    "snail",
    "slug",
    "sea slug, nudibranch",
    "chiton, coat-of-mail shell, sea cradle, polyplacophore",
    "chambered nautilus, pearly nautilus, nautilus",
    "Dungeness crab, Cancer magister",
    "rock crab, Cancer irroratus",
    "fiddler crab",
    "king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica",
    "American lobster, Northern lobster, Maine lobster, Homarus americanus",
    "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish",
    "crayfish, crawfish, crawdad, crawdaddy",
    "hermit crab",
    "isopod",
    "white stork, Ciconia ciconia",
    "black stork, Ciconia nigra",
    "spoonbill",
    "flamingo",
    "little blue heron, Egretta caerulea",
    "American egret, great white heron, Egretta albus",
    "bittern",
    "crane",
    "limpkin, Aramus pictus",
    "European gallinule, Porphyrio porphyrio",
    "American coot, marsh hen, mud hen, water hen, Fulica americana",
    "bustard",
    "ruddy turnstone, Arenaria interpres",
    "red-backed sandpiper, dunlin, Erolia alpina",
    "redshank, Tringa totanus",
    "dowitcher",
    "oystercatcher, oyster catcher",
    "pelican",
    "king penguin, Aptenodytes patagonica",
    "albatross, mollymawk",
    "grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus",
    "killer whale, killer, orca, grampus, sea wolf, Orcinus orca",
    "dugong, Dugong dugon",
    "sea lion",
    "Chihuahua",
    "Japanese spaniel",
    "Maltese dog, Maltese terrier, Maltese",
    "Pekinese, Pekingese, Peke",
    "Shih-Tzu",
    "Blenheim spaniel",
    "papillon",
    "toy terrier",
    "Rhodesian ridgeback",
    "Afghan hound, Afghan",
    "basset, basset hound",
    "beagle",
    "bloodhound, sleuthhound",
    "bluetick",
    "black-and-tan coonhound",
    "Walker hound, Walker foxhound",
    "English foxhound",
    "redbone",
    "borzoi, Russian wolfhound",
    "Irish wolfhound",
    "Italian greyhound",
    "whippet",
    "Ibizan hound, Ibizan Podenco",
    "Norwegian elkhound, elkhound",
    "otterhound, otter hound",
    "Saluki, gazelle hound",
    "Scottish deerhound, deerhound",
    "Weimaraner",
    "Staffordshire bullterrier, Staffordshire bull terrier",
    "American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier",
    "Bedlington terrier",
    "Border terrier",
    "Kerry blue terrier",
    "Irish terrier",
    "Norfolk terrier",
    "Norwich terrier",
    "Yorkshire terrier",
    "wire-haired fox terrier",
    "Lakeland terrier",
    "Sealyham terrier, Sealyham",
    "Airedale, Airedale terrier",
    "cairn, cairn terrier",
    "Australian terrier",
    "Dandie Dinmont, Dandie Dinmont terrier",
    "Boston bull, Boston terrier",
    "miniature schnauzer",
    "giant schnauzer",
    "standard schnauzer",
    "Scotch terrier, Scottish terrier, Scottie",
    "Tibetan terrier, chrysanthemum dog",
    "silky terrier, Sydney silky",
    "soft-coated wheaten terrier",
    "West Highland white terrier",
    "Lhasa, Lhasa apso",
    "flat-coated retriever",
    "curly-coated retriever",
    "golden retriever",
    "Labrador retriever",
    "Chesapeake Bay retriever",
    "German short-haired pointer",
    "vizsla, Hungarian pointer",
    "English setter",
    "Irish setter, red setter",
    "Gordon setter",
    "Brittany spaniel",
    "clumber, clumber spaniel",
    "English springer, English springer spaniel",
    "Welsh springer spaniel",
    "cocker spaniel, English cocker spaniel, cocker",
    "Sussex spaniel",
    "Irish water spaniel",
    "kuvasz",
    "schipperke",
    "groenendael",
    "malinois",
    "briard",
    "kelpie",
    "komondor",
    "Old English sheepdog, bobtail",
    "Shetland sheepdog, Shetland sheep dog, Shetland",
    "collie",
    "Border collie",
    "Bouvier des Flandres, Bouviers des Flandres",
    "Rottweiler",
    "German shepherd, German shepherd dog, German police dog, alsatian",
    "Doberman, Doberman pinscher",
    "miniature pinscher",
    "Greater Swiss Mountain dog",
    "Bernese mountain dog",
    "Appenzeller",
    "EntleBucher",
    "boxer",
    "bull mastiff",
    "Tibetan mastiff",
    "French bulldog",
    "Great Dane",
    "Saint Bernard, St Bernard",
    "Eskimo dog, husky",
    "malamute, malemute, Alaskan malamute",
    "Siberian husky",
    "dalmatian, coach dog, carriage dog",
    "affenpinscher, monkey pinscher, monkey dog",
    "basenji",
    "pug, pug-dog",
    "Leonberg",
    "Newfoundland, Newfoundland dog",
    "Great Pyrenees",
    "Samoyed, Samoyede",
    "Pomeranian",
    "chow, chow chow",
    "keeshond",
    "Brabancon griffon",
    "Pembroke, Pembroke Welsh corgi",
    "Cardigan, Cardigan Welsh corgi",
    "toy poodle",
    "miniature poodle",
    "standard poodle",
    "Mexican hairless",
    "timber wolf, grey wolf, gray wolf, Canis lupus",
    "white wolf, Arctic wolf, Canis lupus tundrarum",
    "red wolf, maned wolf, Canis rufus, Canis niger",
    "coyote, prairie wolf, brush wolf, Canis latrans",
    "dingo, warrigal, warragal, Canis dingo",
    "dhole, Cuon alpinus",
    "African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus",
    "hyena, hyaena",
    "red fox, Vulpes vulpes",
    "kit fox, Vulpes macrotis",
    "Arctic fox, white fox, Alopex lagopus",
    "grey fox, gray fox, Urocyon cinereoargenteus",
    "tabby, tabby cat",
    "tiger cat",
    "Persian cat",
    "Siamese cat, Siamese",
    "Egyptian cat",
    "cougar, puma, catamount, mountain lion, painter, panther, Felis concolor",
    "lynx, catamount",
    "leopard, Panthera pardus",
    "snow leopard, ounce, Panthera uncia",
    "jaguar, panther, Panthera onca, Felis onca",
    "lion, king of beasts, Panthera leo",
    "tiger, Panthera tigris",
    "cheetah, chetah, Acinonyx jubatus",
    "brown bear, bruin, Ursus arctos",
    "American black bear, black bear, Ursus americanus, Euarctos americanus",
    "ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus",
    "sloth bear, Melursus ursinus, Ursus ursinus",
    "mongoose",
    "meerkat, mierkat",
    "tiger beetle",
    "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle",
    "ground beetle, carabid beetle",
    "long-horned beetle, longicorn, longicorn beetle",
    "leaf beetle, chrysomelid",
    "dung beetle",
    "rhinoceros beetle",
    "weevil",
    "fly",
    "bee",
    "ant, emmet, pismire",
    "grasshopper, hopper",
    "cricket",
    "walking stick, walkingstick, stick insect",
    "cockroach, roach",
    "mantis, mantid",
    "cicada, cicala",
    "leafhopper",
    "lacewing, lacewing fly",
    "dragonfly, darning needle, devil\"s darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk",
    "damselfly",
    "admiral",
    "ringlet, ringlet butterfly",
    "monarch, monarch butterfly, milkweed butterfly, Danaus plexippus",
    "cabbage butterfly",
    "sulphur butterfly, sulfur butterfly",
    "lycaenid, lycaenid butterfly",
    "starfish, sea star",
    "sea urchin",
    "sea cucumber, holothurian",
    "wood rabbit, cottontail, cottontail rabbit",
    "hare",
    "Angora, Angora rabbit",
    "hamster",
    "porcupine, hedgehog",
    "fox squirrel, eastern fox squirrel, Sciurus niger",
    "marmot",
    "beaver",
    "guinea pig, Cavia cobaya",
    "sorrel",
    "zebra",
    "hog, pig, grunter, squealer, Sus scrofa",
    "wild boar, boar, Sus scrofa",
    "warthog",
    "hippopotamus, hippo, river horse, Hippopotamus amphibius",
    "ox",
    "water buffalo, water ox, Asiatic buffalo, Bubalus bubalis",
    "bison",
    "ram, tup",
    "bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis",
    "ibex, Capra ibex",
    "hartebeest",
    "impala, Aepyceros melampus",
    "gazelle",
    "Arabian camel, dromedary, Camelus dromedarius",
    "llama",
    "weasel",
    "mink",
    "polecat, fitch, foulmart, foumart, Mustela putorius",
    "black-footed ferret, ferret, Mustela nigripes",
    "otter",
    "skunk, polecat, wood pussy",
    "badger",
    "armadillo",
    "three-toed sloth, ai, Bradypus tridactylus",
    "orangutan, orang, orangutang, Pongo pygmaeus",
    "gorilla, Gorilla gorilla",
    "chimpanzee, chimp, Pan troglodytes",
    "gibbon, Hylobates lar",
    "siamang, Hylobates syndactylus, Symphalangus syndactylus",
    "guenon, guenon monkey",
    "patas, hussar monkey, Erythrocebus patas",
    "baboon",
    "macaque",
    "langur",
    "colobus, colobus monkey",
    "proboscis monkey, Nasalis larvatus",
    "marmoset",
    "capuchin, ringtail, Cebus capucinus",
    "howler monkey, howler",
    "titi, titi monkey",
    "spider monkey, Ateles geoffroyi",
    "squirrel monkey, Saimiri sciureus",
    "Madagascar cat, ring-tailed lemur, Lemur catta",
    "indri, indris, Indri indri, Indri brevicaudatus",
    "Indian elephant, Elephas maximus",
    "African elephant, Loxodonta africana",
    "lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens",
    "giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca",
    "barracouta, snoek",
    "eel",
    "coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch",
    "rock beauty, Holocanthus tricolor",
    "anemone fish",
    "sturgeon",
    "gar, garfish, garpike, billfish, Lepisosteus osseus",
    "lionfish",
    "puffer, pufferfish, blowfish, globefish",
    "abacus",
    "abaya",
    "academic gown, academic robe, judge\"s robe",
    "accordion, piano accordion, squeeze box",
    "acoustic guitar",
    "aircraft carrier, carrier, flattop, attack aircraft carrier",
    "airliner",
    "airship, dirigible",
    "altar",
    "ambulance",
    "amphibian, amphibious vehicle",
    "analog clock",
    "apiary, bee house",
    "apron",
    "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin",
    "assault rifle, assault gun",
    "backpack, back pack, knapsack, packsack, rucksack, haversack",
    "bakery, bakeshop, bakehouse",
    "balance beam, beam",
    "balloon",
    "ballpoint, ballpoint pen, ballpen, Biro",
    "Band Aid",
    "banjo",
    "bannister, banister, balustrade, balusters, handrail",
    "barbell",
    "barber chair",
    "barbershop",
    "barn",
    "barometer",
    "barrel, cask",
    "barrow, garden cart, lawn cart, wheelbarrow",
    "baseball",
    "basketball",
    "bassinet",
    "bassoon",
    "bathing cap, swimming cap",
    "bath towel",
    "bathtub, bathing tub, bath, tub",
    "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon",
    "beacon, lighthouse, beacon light, pharos",
    "beaker",
    "bearskin, busby, shako",
    "beer bottle",
    "beer glass",
    "bell cote, bell cot",
    "bib",
    "bicycle-built-for-two, tandem bicycle, tandem",
    "bikini, two-piece",
    "binder, ring-binder",
    "binoculars, field glasses, opera glasses",
    "birdhouse",
    "boathouse",
    "bobsled, bobsleigh, bob",
    "bolo tie, bolo, bola tie, bola",
    "bonnet, poke bonnet",
    "bookcase",
    "bookshop, bookstore, bookstall",
    "bottlecap",
    "bow",
    "bow tie, bow-tie, bowtie",
    "brass, memorial tablet, plaque",
    "brassiere, bra, bandeau",
    "breakwater, groin, groyne, mole, bulwark, seawall, jetty",
    "breastplate, aegis, egis",
    "broom",
    "bucket, pail",
    "buckle",
    "bulletproof vest",
    "bullet train, bullet",
    "butcher shop, meat market",
    "cab, hack, taxi, taxicab",
    "caldron, cauldron",
    "candle, taper, wax light",
    "cannon",
    "canoe",
    "can opener, tin opener",
    "cardigan",
    "car mirror",
    "carousel, carrousel, merry-go-round, roundabout, whirligig",
    "carpenter\"s kit, tool kit",
    "carton",
    "car wheel",
    "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM",
    "cassette",
    "cassette player",
    "castle",
    "catamaran",
    "CD player",
    "cello, violoncello",
    "cellular telephone, cellular phone, cellphone, cell, mobile phone",
    "chain",
    "chainlink fence",
    "chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour",
    "chain saw, chainsaw",
    "chest",
    "chiffonier, commode",
    "chime, bell, gong",
    "china cabinet, china closet",
    "Christmas stocking",
    "church, church building",
    "cinema, movie theater, movie theatre, movie house, picture palace",
    "cleaver, meat cleaver, chopper",
    "cliff dwelling",
    "cloak",
    "clog, geta, patten, sabot",
    "cocktail shaker",
    "coffee mug",
    "coffeepot",
    "coil, spiral, volute, whorl, helix",
    "combination lock",
    "computer keyboard, keypad",
    "confectionery, confectionary, candy store",
    "container ship, containership, container vessel",
    "convertible",
    "corkscrew, bottle screw",
    "cornet, horn, trumpet, trump",
    "cowboy boot",
    "cowboy hat, ten-gallon hat",
    "cradle",
    "crane",
    "crash helmet",
    "crate",
    "crib, cot",
    "Crock Pot",
    "croquet ball",
    "crutch",
    "cuirass",
    "dam, dike, dyke",
    "desk",
    "desktop computer",
    "dial telephone, dial phone",
    "diaper, nappy, napkin",
    "digital clock",
    "digital watch",
    "dining table, board",
    "dishrag, dishcloth",
    "dishwasher, dish washer, dishwashing machine",
    "disk brake, disc brake",
    "dock, dockage, docking facility",
    "dogsled, dog sled, dog sleigh",
    "dome",
    "doormat, welcome mat",
    "drilling platform, offshore rig",
    "drum, membranophone, tympan",
    "drumstick",
    "dumbbell",
    "Dutch oven",
    "electric fan, blower",
    "electric guitar",
    "electric locomotive",
    "entertainment center",
    "envelope",
    "espresso maker",
    "face powder",
    "feather boa, boa",
    "file, file cabinet, filing cabinet",
    "fireboat",
    "fire engine, fire truck",
    "fire screen, fireguard",
    "flagpole, flagstaff",
    "flute, transverse flute",
    "folding chair",
    "football helmet",
    "forklift",
    "fountain",
    "fountain pen",
    "four-poster",
    "freight car",
    "French horn, horn",
    "frying pan, frypan, skillet",
    "fur coat",
    "garbage truck, dustcart",
    "gasmask, respirator, gas helmet",
    "gas pump, gasoline pump, petrol pump, island dispenser",
    "goblet",
    "go-kart",
    "golf ball",
    "golfcart, golf cart",
    "gondola",
    "gong, tam-tam",
    "gown",
    "grand piano, grand",
    "greenhouse, nursery, glasshouse",
    "grille, radiator grille",
    "grocery store, grocery, food market, market",
    "guillotine",
    "hair slide",
    "hair spray",
    "half track",
    "hammer",
    "hamper",
    "hand blower, blow dryer, blow drier, hair dryer, hair drier",
    "hand-held computer, hand-held microcomputer",
    "handkerchief, hankie, hanky, hankey",
    "hard disc, hard disk, fixed disk",
    "harmonica, mouth organ, harp, mouth harp",
    "harp",
    "harvester, reaper",
    "hatchet",
    "holster",
    "home theater, home theatre",
    "honeycomb",
    "hook, claw",
    "hoopskirt, crinoline",
    "horizontal bar, high bar",
    "horse cart, horse-cart",
    "hourglass",
    "iPod",
    "iron, smoothing iron",
    "jack-o\"-lantern",
    "jean, blue jean, denim",
    "jeep, landrover",
    "jersey, T-shirt, tee shirt",
    "jigsaw puzzle",
    "jinrikisha, ricksha, rickshaw",
    "joystick",
    "kimono",
    "knee pad",
    "knot",
    "lab coat, laboratory coat",
    "ladle",
    "lampshade, lamp shade",
    "laptop, laptop computer",
    "lawn mower, mower",
    "lens cap, lens cover",
    "letter opener, paper knife, paperknife",
    "library",
    "lifeboat",
    "lighter, light, igniter, ignitor",
    "limousine, limo",
    "liner, ocean liner",
    "lipstick, lip rouge",
    "Loafer",
    "lotion",
    "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system",
    "loupe, jeweler\"s loupe",
    "lumbermill, sawmill",
    "magnetic compass",
    "mailbag, postbag",
    "mailbox, letter box",
    "maillot",
    "maillot, tank suit",
    "manhole cover",
    "maraca",
    "marimba, xylophone",
    "mask",
    "matchstick",
    "maypole",
    "maze, labyrinth",
    "measuring cup",
    "medicine chest, medicine cabinet",
    "megalith, megalithic structure",
    "microphone, mike",
    "microwave, microwave oven",
    "military uniform",
    "milk can",
    "minibus",
    "miniskirt, mini",
    "minivan",
    "missile",
    "mitten",
    "mixing bowl",
    "mobile home, manufactured home",
    "Model T",
    "modem",
    "monastery",
    "monitor",
    "moped",
    "mortar",
    "mortarboard",
    "mosque",
    "mosquito net",
    "motor scooter, scooter",
    "mountain bike, all-terrain bike, off-roader",
    "mountain tent",
    "mouse, computer mouse",
    "mousetrap",
    "moving van",
    "muzzle",
    "nail",
    "neck brace",
    "necklace",
    "nipple",
    "notebook, notebook computer",
    "obelisk",
    "oboe, hautboy, hautbois",
    "ocarina, sweet potato",
    "odometer, hodometer, mileometer, milometer",
    "oil filter",
    "organ, pipe organ",
    "oscilloscope, scope, cathode-ray oscilloscope, CRO",
    "overskirt",
    "oxcart",
    "oxygen mask",
    "packet",
    "paddle, boat paddle",
    "paddlewheel, paddle wheel",
    "padlock",
    "paintbrush",
    "pajama, pyjama, pj\"s, jammies",
    "palace",
    "panpipe, pandean pipe, syrinx",
    "paper towel",
    "parachute, chute",
    "parallel bars, bars",
    "park bench",
    "parking meter",
    "passenger car, coach, carriage",
    "patio, terrace",
    "pay-phone, pay-station",
    "pedestal, plinth, footstall",
    "pencil box, pencil case",
    "pencil sharpener",
    "perfume, essence",
    "Petri dish",
    "photocopier",
    "pick, plectrum, plectron",
    "pickelhaube",
    "picket fence, paling",
    "pickup, pickup truck",
    "pier",
    "piggy bank, penny bank",
    "pill bottle",
    "pillow",
    "ping-pong ball",
    "pinwheel",
    "pirate, pirate ship",
    "pitcher, ewer",
    "plane, carpenter\"s plane, woodworking plane",
    "planetarium",
    "plastic bag",
    "plate rack",
    "plow, plough",
    "plunger, plumber\"s helper",
    "Polaroid camera, Polaroid Land camera",
    "pole",
    "police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria",
    "poncho",
    "pool table, billiard table, snooker table",
    "pop bottle, soda bottle",
    "pot, flowerpot",
    "potter\"s wheel",
    "power drill",
    "prayer rug, prayer mat",
    "printer",
    "prison, prison house",
    "projectile, missile",
    "projector",
    "puck, hockey puck",
    "punching bag, punch bag, punching ball, punchball",
    "purse",
    "quill, quill pen",
    "quilt, comforter, comfort, puff",
    "racer, race car, racing car",
    "racket, racquet",
    "radiator",
    "radio, wireless",
    "radio telescope, radio reflector",
    "rain barrel",
    "recreational vehicle, RV, R.V.",
    "reel",
    "reflex camera",
    "refrigerator, icebox",
    "remote control, remote",
    "restaurant, eating house, eating place, eatery",
    "revolver, six-gun, six-shooter",
    "rifle",
    "rocking chair, rocker",
    "rotisserie",
    "rubber eraser, rubber, pencil eraser",
    "rugby ball",
    "rule, ruler",
    "running shoe",
    "safe",
    "safety pin",
    "saltshaker, salt shaker",
    "sandal",
    "sarong",
    "sax, saxophone",
    "scabbard",
    "scale, weighing machine",
    "school bus",
    "schooner",
    "scoreboard",
    "screen, CRT screen",
    "screw",
    "screwdriver",
    "seat belt, seatbelt",
    "sewing machine",
    "shield, buckler",
    "shoe shop, shoe-shop, shoe store",
    "shoji",
    "shopping basket",
    "shopping cart",
    "shovel",
    "shower cap",
    "shower curtain",
    "ski",
    "ski mask",
    "sleeping bag",
    "slide rule, slipstick",
    "sliding door",
    "slot, one-armed bandit",
    "snorkel",
    "snowmobile",
    "snowplow, snowplough",
    "soap dispenser",
    "soccer ball",
    "sock",
    "solar dish, solar collector, solar furnace",
    "sombrero",
    "soup bowl",
    "space bar",
    "space heater",
    "space shuttle",
    "spatula",
    "speedboat",
    "spider web, spider\"s web",
    "spindle",
    "sports car, sport car",
    "spotlight, spot",
    "stage",
    "steam locomotive",
    "steel arch bridge",
    "steel drum",
    "stethoscope",
    "stole",
    "stone wall",
    "stopwatch, stop watch",
    "stove",
    "strainer",
    "streetcar, tram, tramcar, trolley, trolley car",
    "stretcher",
    "studio couch, day bed",
    "stupa, tope",
    "submarine, pigboat, sub, U-boat",
    "suit, suit of clothes",
    "sundial",
    "sunglass",
    "sunglasses, dark glasses, shades",
    "sunscreen, sunblock, sun blocker",
    "suspension bridge",
    "swab, swob, mop",
    "sweatshirt",
    "swimming trunks, bathing trunks",
    "swing",
    "switch, electric switch, electrical switch",
    "syringe",
    "table lamp",
    "tank, army tank, armored combat vehicle, armoured combat vehicle",
    "tape player",
    "teapot",
    "teddy, teddy bear",
    "television, television system",
    "tennis ball",
    "thatch, thatched roof",
    "theater curtain, theatre curtain",
    "thimble",
    "thresher, thrasher, threshing machine",
    "throne",
    "tile roof",
    "toaster",
    "tobacco shop, tobacconist shop, tobacconist",
    "toilet seat",
    "torch",
    "totem pole",
    "tow truck, tow car, wrecker",
    "toyshop",
    "tractor",
    "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi",
    "tray",
    "trench coat",
    "tricycle, trike, velocipede",
    "trimaran",
    "tripod",
    "triumphal arch",
    "trolleybus, trolley coach, trackless trolley",
    "trombone",
    "tub, vat",
    "turnstile",
    "typewriter keyboard",
    "umbrella",
    "unicycle, monocycle",
    "upright, upright piano",
    "vacuum, vacuum cleaner",
    "vase",
    "vault",
    "velvet",
    "vending machine",
    "vestment",
    "viaduct",
    "violin, fiddle",
    "volleyball",
    "waffle iron",
    "wall clock",
    "wallet, billfold, notecase, pocketbook",
    "wardrobe, closet, press",
    "warplane, military plane",
    "washbasin, handbasin, washbowl, lavabo, wash-hand basin",
    "washer, automatic washer, washing machine",
    "water bottle",
    "water jug",
    "water tower",
    "whiskey jug",
    "whistle",
    "wig",
    "window screen",
    "window shade",
    "Windsor tie",
    "wine bottle",
    "wing",
    "wok",
    "wooden spoon",
    "wool, woolen, woollen",
    "worm fence, snake fence, snake-rail fence, Virginia fence",
    "wreck",
    "yawl",
    "yurt",
    "web site, website, internet site, site",
    "comic book",
    "crossword puzzle, crossword",
    "street sign",
    "traffic light, traffic signal, stoplight",
    "book jacket, dust cover, dust jacket, dust wrapper",
    "menu",
    "plate",
    "guacamole",
    "consomme",
    "hot pot, hotpot",
    "trifle",
    "ice cream, icecream",
    "ice lolly, lolly, lollipop, popsicle",
    "French loaf",
    "bagel, beigel",
    "pretzel",
    "cheeseburger",
    "hotdog, hot dog, red hot",
    "mashed potato",
    "head cabbage",
    "broccoli",
    "cauliflower",
    "zucchini, courgette",
    "spaghetti squash",
    "acorn squash",
    "butternut squash",
    "cucumber, cuke",
    "artichoke, globe artichoke",
    "bell pepper",
    "cardoon",
    "mushroom",
    "Granny Smith",
    "strawberry",
    "orange",
    "lemon",
    "fig",
    "pineapple, ananas",
    "banana",
    "jackfruit, jak, jack",
    "custard apple",
    "pomegranate",
    "hay",
    "carbonara",
    "chocolate sauce, chocolate syrup",
    "dough",
    "meat loaf, meatloaf",
    "pizza, pizza pie",
    "potpie",
    "burrito",
    "red wine",
    "espresso",
    "cup",
    "eggnog",
    "alp",
    "bubble",
    "cliff, drop, drop-off",
    "coral reef",
    "geyser",
    "lakeside, lakeshore",
    "promontory, headland, head, foreland",
    "sandbar, sand bar",
    "seashore, coast, seacoast, sea-coast",
    "valley, vale",
    "volcano",
    "ballplayer, baseball player",
    "groom, bridegroom",
    "scuba diver",
    "rapeseed",
    "daisy",
    "yellow lady\"s slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum",
    "corn",
    "acorn",
    "hip, rose hip, rosehip",
    "buckeye, horse chestnut, conker",
    "coral fungus",
    "agaric",
    "gyromitra",
    "stinkhorn, carrion fungus",
    "earthstar",
    "hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa",
    "bolete",
    "ear, spike, capitulum",
    "toilet tissue, toilet paper, bathroom tissue"
];


================================================
FILE: openvino-mobilenet-raw/rust/src/main.rs
================================================
use std::env;
use std::fs;
use wasi_nn;
mod imagenet_classes;

use wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding, TensorType};

/// Runs an OpenVINO MobileNet classification through the WASI-NN API.
///
/// Expects three CLI arguments: the model's `.xml` path, the model's `.bin`
/// path, and the path of a raw input tensor file (1x3x224x224, f32).
/// Prints the top-5 classes with their probabilities.
///
/// Returns an error (instead of panicking) when arguments are missing or
/// any WASI-NN / filesystem call fails.
pub fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args: Vec<String> = env::args().collect();
    // Validate up front so a missing argument yields a usage message
    // rather than an index-out-of-bounds panic.
    if args.len() < 4 {
        return Err(format!(
            "usage: {} <model.xml> <model.bin> <input.tensor>",
            args.first().map(String::as_str).unwrap_or("mobilenet")
        )
        .into());
    }
    let model_xml_path: &str = &args[1];
    let model_bin_path: &str = &args[2];
    let tensor_name: &str = &args[3];

    print!("Load graph ...");
    let graph = GraphBuilder::new(GraphEncoding::Openvino, ExecutionTarget::CPU)
        .build_from_files([model_xml_path, model_bin_path])?;
    println!("done");

    print!("Init execution context ...");
    let mut context = graph.init_execution_context()?;
    println!("done");

    // Load a tensor that precisely matches the graph's input tensor shape
    // (NCHW: 1x3x224x224, f32).
    print!("Set input tensor ...");
    let input_dims = vec![1, 3, 224, 224];
    // Propagate read failures via `?` for consistency with the other
    // fallible calls in this function (previously this was an `unwrap()`).
    let tensor_data = fs::read(tensor_name)?;
    context.set_input(0, TensorType::F32, &input_dims, tensor_data)?;
    println!("done");

    print!("Perform graph inference ...");
    context.compute()?;
    println!("done");

    print!("Retrieve the output ...");
    // Copy the output into a buffer. The MobileNet graph emits 1001
    // probabilities (background class + 1000 ImageNet classes).
    let mut output_buffer = vec![0f32; 1001];
    let size_in_bytes = context.get_output(0, &mut output_buffer)?;
    println!("done");
    println!("The size of the output buffer is {} bytes", size_in_bytes);

    // Print the top-5 predictions.
    let results = sort_results(&output_buffer);
    for (rank, result) in results.iter().take(5).enumerate() {
        println!(
            "   {}.) [{}]({:.4}){}",
            rank + 1,
            result.0,
            result.1,
            imagenet_classes::IMAGENET_CLASSES[result.0]
        );
    }

    Ok(())
}

// Sort the buffer of probabilities in descending order. The graph places the
// match probability for each class at the index for that class, offset by one
// for the background class (e.g. the probability of class 42 is at
// buffer[43]). Here we convert to a wrapping InferenceResult and sort.
fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
    // Skip the first output (background class) so that after `enumerate`
    // index `c` maps directly to ImageNet class `c`.
    let mut results: Vec<InferenceResult> = buffer
        .iter()
        .skip(1)
        .enumerate()
        .map(|(class, prob)| InferenceResult(class, *prob))
        .collect();
    // `f32::total_cmp` is a total order (NaN included), so the sort cannot
    // panic the way `partial_cmp(..).unwrap()` would if a score were NaN.
    // For ordinary (non-NaN) probabilities the ordering is identical.
    results.sort_by(|a, b| b.1.total_cmp(&a.1));
    results
}

// A wrapper for class ID and match probability.
#[derive(Debug, PartialEq)]
struct InferenceResult(usize, f32);

================================================
FILE: openvino-road-segmentation-adas/README.md
================================================

# OpenVINO Road Segmentation ADAS Example on WasmEdge Runtime

## Overview

In this example, we'll use the `WasmEdge wasi-nn interfaces` to demonstrate a popular task used in CV-based `Advanced Driver Assist Systems (ADAS)`: road segmentation.

|                                                                                                                             |                                                                                                                             |
| --------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- |
| <img src="https://user-images.githubusercontent.com/36741649/127848003-9e45c8da-2e43-48ac-803f-9f51a8e9ea89.jpg" width=300> | <img src="https://user-images.githubusercontent.com/36741649/127847882-6305d483-f2ce-4c2f-a3b5-8573d1522d15.png" width=300> |

The model files and the images used for this demonstration are from the [Intel openvino_notebooks repo](https://github.com/openvinotoolkit/openvino_notebooks/blob/main/notebooks/003-hello-segmentation/README.md) on Github.

## Set up the environment

- Install `rustup` and `Rust`

  Go to the [official Rust webpage](https://www.rust-lang.org/tools/install) and follow the instructions to install `rustup` and `Rust`.

  > It is recommended to use Rust 1.68 or above in the stable channel.

  Then, add `wasm32-wasip1` target to the Rustup toolchain:

  ```bash
  rustup target add wasm32-wasip1
  ```

- Clone the example repo

  ```bash
  git clone https://github.com/second-state/WasmEdge-WASINN-examples.git
  ```

- Install OpenVINO

Please refer to [WasmEdge Docs](https://wasmedge.org/docs/contribute/source/plugin/wasi_nn) and [OpenVINO™](https://docs.openvino.ai/2023.0/openvino_docs_install_guides_installing_openvino_apt.html)(2023) for the installation process.

  ```bash
  bash WasmEdge-WASINN-examples/scripts/install_openvino.sh
  ldconfig
  ```

- Install WasmEdge with Wasi-NN OpenVINO plugin

  ```bash
  export CMAKE_BUILD_TYPE=Release
  export VERSION=0.13.2

  curl -sSf https://raw.githubusercontent.com/WasmEdge/WasmEdge/master/utils/install.sh | bash -s -- -v $VERSION -p /usr/local --plugins wasi_nn-openvino
  ldconfig
  ```

## Build and run `openvino-road-segmentation-adas` example

- Go to the `openvino-road-segmentation-adas` example directory (the `road-segmentation-adas-0001` model files are included in its `model/` directory)

  ```bash
  cd openvino-road-segmentation-adas
  ```

- Build and run the example

  ```bash
  cd openvino-road-seg-adas
  cargo build --target wasm32-wasip1 --release
  cd ..

  wasmedge --dir .:. ./openvino-road-seg-adas/target/wasm32-wasip1/release/openvino-road-seg-adas.wasm \
  ./model/road-segmentation-adas-0001.xml \
  ./model/road-segmentation-adas-0001.bin \
  ./image/empty_road_mapillary.jpg
  ```

  If the commands run successfully, an output tensor will be generated and saved to `wasinn-openvino-inference-output-1x4x512x896xf32.tensor`.

## Visualize the inference result

To visualize the input image and the inference output tensor, you can use the `visualize_inference_result.ipynb` file. You may have to modify some file paths in the file, if the files used in the notebook are dumped in different directories. The following picture shows the inference result of road segmentation task.

![road segmentation result](image/segmentation_result.png)


================================================
FILE: openvino-road-segmentation-adas/model/road-segmentation-adas-0001.xml
================================================
<?xml version="1.0" ?>
<net name="road-segmentation-adas-0001" version="10">
	<layers>
		<layer id="0" name="data" type="Parameter" version="opset1">
			<data element_type="f32" shape="1, 3, 512, 896"/>
			<output>
				<port id="0" names="data" precision="FP32">
					<dim>1</dim>
					<dim>3</dim>
					<dim>512</dim>
					<dim>896</dim>
				</port>
			</output>
		</layer>
		<layer id="1" name="373" type="Const" version="opset1">
			<data element_type="f32" offset="0" shape="16, 3, 3, 3" size="1728"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>3</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="2" name="L0000_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="2, 2"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>3</dim>
					<dim>512</dim>
					<dim>896</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>3</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="3" name="L0000_Conv2d_BN/Dims5753" type="Const" version="opset1">
			<data element_type="f32" offset="1728" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="4" name="L0000_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0000_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="5" name="L0001_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0001_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="6" name="555" type="Const" version="opset1">
			<data element_type="f32" offset="1792" shape="8, 16, 1, 1" size="512"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="7" name="L0002_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="8" name="L0002_Conv2d_BN/Dims6479" type="Const" version="opset1">
			<data element_type="f32" offset="2304" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="9" name="L0002_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0002_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="10" name="L0003_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0003_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="11" name="335" type="Const" version="opset1">
			<data element_type="f32" offset="2336" shape="8, 1, 1, 3, 3" size="288"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="12" name="L0004_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="13" name="L0004_Conv2d_BN/Dims6323" type="Const" version="opset1">
			<data element_type="f32" offset="2624" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="14" name="L0004_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0004_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="15" name="L0005_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0005_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="16" name="397" type="Const" version="opset1">
			<data element_type="f32" offset="2656" shape="16, 8, 1, 1" size="512"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="17" name="L0006_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="18" name="L0006_Conv2d_BN/Dims5963" type="Const" version="opset1">
			<data element_type="f32" offset="3168" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="19" name="L0006_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0006_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="20" name="L0007_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0007_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="21" name="L0008_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0008_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="22" name="351" type="Const" version="opset1">
			<data element_type="f32" offset="3232" shape="8, 16, 1, 1" size="512"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="23" name="L0009_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="24" name="L0009_Conv2d_BN/Dims5945" type="Const" version="opset1">
			<data element_type="f32" offset="3744" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="25" name="L0009_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0009_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="26" name="L0010_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0010_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="27" name="531" type="Const" version="opset1">
			<data element_type="f32" offset="3776" shape="8, 1, 1, 3, 3" size="288"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="28" name="L0011_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="29" name="L0011_Conv2d_BN/Dims6257" type="Const" version="opset1">
			<data element_type="f32" offset="4064" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="30" name="L0011_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0011_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="31" name="L0012_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0012_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="32" name="435" type="Const" version="opset1">
			<data element_type="f32" offset="4096" shape="24, 8, 1, 1" size="768"/>
			<output>
				<port id="0" precision="FP32">
					<dim>24</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="33" name="L0013_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>24</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="34" name="L0013_Conv2d_BN/Dims5789" type="Const" version="opset1">
			<data element_type="f32" offset="4864" shape="1, 24, 1, 1" size="96"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="35" name="L0013_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0013_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="36" name="491" type="Const" version="opset1">
			<data element_type="f32" offset="4960" shape="24, 16, 1, 1" size="1536"/>
			<output>
				<port id="0" precision="FP32">
					<dim>24</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="37" name="L0014_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>24</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="38" name="L0014_Conv2d_BN/Dims6437" type="Const" version="opset1">
			<data element_type="f32" offset="6496" shape="1, 24, 1, 1" size="96"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="39" name="L0014_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0014_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="40" name="L0015_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0015_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="41" name="L0016_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0016_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="42" name="329" type="Const" version="opset1">
			<data element_type="f32" offset="6592" shape="8, 24, 1, 1" size="768"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="43" name="L0017_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="44" name="L0017_Conv2d_BN/Dims6119" type="Const" version="opset1">
			<data element_type="f32" offset="7360" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="45" name="L0017_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0017_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="46" name="L0018_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0018_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="47" name="551" type="Const" version="opset1">
			<data element_type="f32" offset="7392" shape="8, 1, 1, 3, 3" size="288"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="48" name="L0019_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="49" name="L0019_Conv2d_BN/Dims6473" type="Const" version="opset1">
			<data element_type="f32" offset="7680" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="50" name="L0019_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0019_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="51" name="L0020_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0020_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="52" name="379" type="Const" version="opset1">
			<data element_type="f32" offset="7712" shape="24, 8, 1, 1" size="768"/>
			<output>
				<port id="0" precision="FP32">
					<dim>24</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="53" name="L0021_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>24</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="54" name="L0021_Conv2d_BN/Dims6341" type="Const" version="opset1">
			<data element_type="f32" offset="8480" shape="1, 24, 1, 1" size="96"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="55" name="L0021_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0021_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="56" name="L0022_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0022_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="57" name="L0023_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0023_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="58" name="535" type="Const" version="opset1">
			<data element_type="f32" offset="8576" shape="8, 24, 1, 1" size="768"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="59" name="L0024_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="60" name="L0024_Conv2d_BN/Dims6269" type="Const" version="opset1">
			<data element_type="f32" offset="9344" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="61" name="L0024_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0024_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="62" name="L0025_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0025_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="63" name="557" type="Const" version="opset1">
			<data element_type="f32" offset="9376" shape="8, 1, 1, 3, 3" size="288"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="64" name="L0026_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="65" name="L0026_Conv2d_BN/Dims5873" type="Const" version="opset1">
			<data element_type="f32" offset="9664" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="66" name="L0026_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0026_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="67" name="L0027_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0027_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="68" name="571" type="Const" version="opset1">
			<data element_type="f32" offset="9696" shape="32, 8, 1, 1" size="1024"/>
			<output>
				<port id="0" precision="FP32">
					<dim>32</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="69" name="L0028_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>32</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="70" name="L0028_Conv2d_BN/Dims6095" type="Const" version="opset1">
			<data element_type="f32" offset="10720" shape="1, 32, 1, 1" size="128"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="71" name="L0028_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0028_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="72" name="449" type="Const" version="opset1">
			<data element_type="f32" offset="10848" shape="32, 24, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>32</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="73" name="L0029_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>24</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>32</dim>
					<dim>24</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="74" name="L0029_Conv2d_BN/Dims5807" type="Const" version="opset1">
			<data element_type="f32" offset="13920" shape="1, 32, 1, 1" size="128"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="75" name="L0029_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0029_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="76" name="L0030_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0030_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="77" name="L0031_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0031_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="78" name="597" type="Const" version="opset1">
			<data element_type="f32" offset="14048" shape="8, 32, 1, 1" size="1024"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="79" name="L0032_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="80" name="L0032_Conv2d_BN/Dims5927" type="Const" version="opset1">
			<data element_type="f32" offset="15072" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="81" name="L0032_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0032_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="82" name="L0033_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0033_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="83" name="489" type="Const" version="opset1">
			<data element_type="f32" offset="15104" shape="8, 1, 1, 3, 3" size="288"/>
			<output>
				<port id="0" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="84" name="L0034_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="85" name="L0034_Conv2d_BN/Dims6221" type="Const" version="opset1">
			<data element_type="f32" offset="15392" shape="1, 8, 1, 1" size="32"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="86" name="L0034_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0034_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="87" name="L0035_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0035_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="88" name="399" type="Const" version="opset1">
			<data element_type="f32" offset="15424" shape="32, 8, 1, 1" size="1024"/>
			<output>
				<port id="0" precision="FP32">
					<dim>32</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="89" name="L0036_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>8</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>32</dim>
					<dim>8</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="90" name="L0036_Conv2d_BN/Dims5969" type="Const" version="opset1">
			<data element_type="f32" offset="16448" shape="1, 32, 1, 1" size="128"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="91" name="L0036_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0036_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="92" name="L0037_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0037_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="93" name="L0038_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0038_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</output>
		</layer>
		<layer id="94" name="347" type="Const" version="opset1">
			<data element_type="f32" offset="16576" shape="16, 32, 2, 2" size="8192"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>32</dim>
					<dim>2</dim>
					<dim>2</dim>
				</port>
			</output>
		</layer>
		<layer id="95" name="L0039_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="2, 2"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>32</dim>
					<dim>2</dim>
					<dim>2</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="96" name="L0039_Conv2d_BN/Dims5699" type="Const" version="opset1">
			<data element_type="f32" offset="24768" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="97" name="L0039_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0039_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="98" name="L0040_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0040_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="99" name="325" type="Const" version="opset1">
			<data element_type="f32" offset="24832" shape="16, 1, 1, 3, 3" size="576"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="100" name="L0041_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="101" name="L0041_Conv2d_BN/Dims5933" type="Const" version="opset1">
			<data element_type="f32" offset="25408" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="102" name="L0041_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0041_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="103" name="L0042_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0042_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="104" name="433" type="Const" version="opset1">
			<data element_type="f32" offset="25472" shape="48, 16, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="105" name="L0043_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="106" name="L0043_Conv2d_BN/Dims6185" type="Const" version="opset1">
			<data element_type="f32" offset="28544" shape="1, 48, 1, 1" size="192"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="107" name="L0043_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0043_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="108" name="L0044_MaxPool2d" type="MaxPool" version="opset1">
			<data auto_pad="explicit" kernel="2, 2" pads_begin="0, 0" pads_end="0, 0" rounding_type="ceil" strides="2, 2"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>256</dim>
					<dim>448</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0044_MaxPool2d" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="109" name="533" type="Const" version="opset1">
			<data element_type="f32" offset="28736" shape="48, 32, 1, 1" size="6144"/>
			<output>
				<port id="0" precision="FP32">
					<dim>48</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="110" name="L0045_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>32</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>48</dim>
					<dim>32</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="111" name="L0045_Conv2d_BN/Dims6263" type="Const" version="opset1">
			<data element_type="f32" offset="34880" shape="1, 48, 1, 1" size="192"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="112" name="L0045_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0045_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="113" name="L0046_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0046_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="114" name="L0047_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0047_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="115" name="505" type="Const" version="opset1">
			<data element_type="f32" offset="35072" shape="16, 48, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="116" name="L0048_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="117" name="L0048_Conv2d_BN/Dims6053" type="Const" version="opset1">
			<data element_type="f32" offset="38144" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="118" name="L0048_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0048_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="119" name="L0049_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0049_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="120" name="559" type="Const" version="opset1">
			<data element_type="f32" offset="38208" shape="16, 1, 1, 3, 3" size="576"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="121" name="L0050_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="122" name="L0050_Conv2d_BN/Dims5879" type="Const" version="opset1">
			<data element_type="f32" offset="38784" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="123" name="L0050_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0050_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="124" name="L0051_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0051_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="125" name="527" type="Const" version="opset1">
			<data element_type="f32" offset="38848" shape="48, 16, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="126" name="L0052_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="127" name="L0052_Conv2d_BN/Dims6077" type="Const" version="opset1">
			<data element_type="f32" offset="41920" shape="1, 48, 1, 1" size="192"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="128" name="L0052_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0052_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="129" name="L0053_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0053_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="130" name="L0054_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0054_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="131" name="407" type="Const" version="opset1">
			<data element_type="f32" offset="42112" shape="16, 48, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="132" name="L0055_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="133" name="L0055_Conv2d_BN/Dims6359" type="Const" version="opset1">
			<data element_type="f32" offset="45184" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="134" name="L0055_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0055_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="135" name="L0056_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0056_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="136" name="385" type="Const" version="opset1">
			<data element_type="f32" offset="45248" shape="16, 1, 1, 3, 3" size="576"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="137" name="L0057_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="138" name="L0057_Conv2d_BN/Dims5765" type="Const" version="opset1">
			<data element_type="f32" offset="45824" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="139" name="L0057_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0057_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="140" name="L0058_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0058_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="141" name="451" type="Const" version="opset1">
			<data element_type="f32" offset="45888" shape="48, 16, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="142" name="L0059_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="143" name="L0059_Conv2d_BN/Dims5813" type="Const" version="opset1">
			<data element_type="f32" offset="48960" shape="1, 48, 1, 1" size="192"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="144" name="L0059_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0059_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="145" name="L0060_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0060_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="146" name="L0061_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0061_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="147" name="443" type="Const" version="opset1">
			<data element_type="f32" offset="49152" shape="16, 48, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="148" name="L0062_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="149" name="L0062_Conv2d_BN/Dims6389" type="Const" version="opset1">
			<data element_type="f32" offset="52224" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="150" name="L0062_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0062_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="151" name="L0063_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0063_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="152" name="375" type="Const" version="opset1">
			<data element_type="f32" offset="52288" shape="16, 1, 1, 3, 3" size="576"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="153" name="L0064_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="2, 2" pads_begin="2, 2" pads_end="2, 2" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="154" name="L0064_Conv2d_BN/Dims5759" type="Const" version="opset1">
			<data element_type="f32" offset="52864" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="155" name="L0064_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0064_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="156" name="L0065_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0065_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="157" name="363" type="Const" version="opset1">
			<data element_type="f32" offset="52928" shape="48, 16, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="158" name="L0066_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="159" name="L0066_Conv2d_BN/Dims5723" type="Const" version="opset1">
			<data element_type="f32" offset="56000" shape="1, 48, 1, 1" size="192"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="160" name="L0066_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0066_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="161" name="L0067_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0067_AddBackward1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="162" name="L0068_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0068_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="163" name="439" type="Const" version="opset1">
			<data element_type="f32" offset="56192" shape="16, 48, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="164" name="L0069_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="165" name="L0069_Conv2d_BN/Dims5801" type="Const" version="opset1">
			<data element_type="f32" offset="59264" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="166" name="L0069_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0069_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="167" name="L0070_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0070_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="168" name="501" type="Const" version="opset1">
			<data element_type="f32" offset="59328" shape="16, 1, 1, 3, 3" size="576"/>
			<output>
				<port id="0" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</output>
		</layer>
		<layer id="169" name="L0071_Conv2d_BN/WithoutBiases" type="GroupConvolution" version="opset1">
			<data auto_pad="explicit" dilations="3, 3" pads_begin="3, 3" pads_end="3, 3" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
					<dim>3</dim>
					<dim>3</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="170" name="L0071_Conv2d_BN/Dims6047" type="Const" version="opset1">
			<data element_type="f32" offset="59904" shape="1, 16, 1, 1" size="64"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="171" name="L0071_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0071_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="172" name="L0072_ReLU" type="ReLU" version="opset1">
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<output>
				<port id="1" names="L0072_ReLU" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="173" name="517" type="Const" version="opset1">
			<data element_type="f32" offset="59968" shape="48, 16, 1, 1" size="3072"/>
			<output>
				<port id="0" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="174" name="L0073_Conv2d_BN/WithoutBiases" type="Convolution" version="opset1">
			<data auto_pad="explicit" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" strides="1, 1"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>16</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>48</dim>
					<dim>16</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="175" name="L0073_Conv2d_BN/Dims6245" type="Const" version="opset1">
			<data element_type="f32" offset="63040" shape="1, 48, 1, 1" size="192"/>
			<output>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</output>
		</layer>
		<layer id="176" name="L0073_Conv2d_BN" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>1</dim>
					<dim>1</dim>
				</port>
			</input>
			<output>
				<port id="2" names="L0073_Conv2d_BN" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</output>
		</layer>
		<layer id="177" name="L0074_AddBackward1" type="Add" version="opset1">
			<data auto_broadcast="numpy"/>
			<input>
				<port id="0" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
				<port id="1" precision="FP32">
					<dim>1</dim>
					<dim>48</dim>
					<dim>128</dim>
					<dim>224</dim>
				</port>
			</input>
			<outpu
Download .txt
gitextract_91qb3w5n/

├── .github/
│   └── workflows/
│       ├── build_openvino_mobilenet.yml
│       ├── build_openvino_road_seg_adas.yml
│       ├── build_pytorch_yolo.yml
│       ├── chatTTS.yml
│       ├── llama.yml
│       ├── piper.yml
│       ├── pytorch.yml
│       └── tflite.yml
├── .gitignore
├── LICENSE
├── README.md
├── openvino-mobilenet-image/
│   ├── README.md
│   ├── download_mobilenet.sh
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       └── main.rs
│   └── wasmedge-wasinn-example-mobilenet-image.wasm
├── openvino-mobilenet-raw/
│   ├── README.md
│   ├── download_mobilenet.sh
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       └── main.rs
│   └── wasmedge-wasinn-example-mobilenet.wasm
├── openvino-road-segmentation-adas/
│   ├── README.md
│   ├── model/
│   │   └── road-segmentation-adas-0001.xml
│   ├── openvino-road-seg-adas/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       └── main.rs
│   ├── tensor/
│   │   ├── wasinn-openvino-inference-input-512x896x3xf32-bgr.tensor
│   │   └── wasinn-openvino-inference-output-1x4x512x896xf32.tensor
│   └── visualize_inference_result.ipynb
├── openvinogenai-raw/
│   ├── README.md
│   └── rust/
│       ├── Cargo.toml
│       └── src/
│           └── main.rs
├── pytorch-mobilenet-image/
│   ├── README.md
│   ├── gen_mobilenet_model.py
│   ├── gen_tensor.py
│   ├── image-1x3x224x224.rgb
│   ├── mobilenet.pt
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       ├── main.rs
│   │       └── named_model.rs
│   ├── wasmedge-wasinn-example-mobilenet-image-named-model.wasm
│   └── wasmedge-wasinn-example-mobilenet-image.wasm
├── pytorch-yolo-image/
│   ├── README.md
│   ├── get_model.py
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── main.rs
│   │       └── yolo_classes.rs
│   └── yolov8n.torchscript
├── scripts/
│   ├── install_libtorch.sh
│   └── install_openvino.sh
├── tflite-birds_v1-image/
│   ├── README.md
│   ├── lite-model_aiy_vision_classifier_birds_V1_3.tflite
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       ├── imagenet_classes.rs
│   │       └── main.rs
│   └── wasmedge-wasinn-example-tflite-bird-image.wasm
├── wasmedge-chatTTS/
│   ├── .gitignore
│   ├── Cargo.toml
│   ├── README.md
│   ├── assets/
│   │   └── demo.webm
│   ├── src/
│   │   └── main.rs
│   └── wasmedge-chattts.wasm
├── wasmedge-ggml/
│   ├── README.md
│   ├── basic/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-basic.wasm
│   ├── chatml/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-chatml.wasm
│   ├── command-r/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-command-r.wasm
│   ├── embedding/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llama-embedding.wasm
│   ├── gemma/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-gemma.wasm
│   ├── gemma-3/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   ├── base64.rs
│   │   │   └── main.rs
│   │   ├── wasmedge-ggml-gemma-3-base64.wasm
│   │   └── wasmedge-ggml-gemma-3.wasm
│   ├── gemma-4/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-gemma-4.wasm
│   ├── grammar/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-grammar.wasm
│   ├── json-schema/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-json-schema.wasm
│   ├── llama/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llama.wasm
│   ├── llama-stream/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llama-stream.wasm
│   ├── llava/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llava.wasm
│   ├── llava-base64-stream/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-llava-base64-stream.wasm
│   ├── multimodel/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-multimodel.wasm
│   ├── nnrpc/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-nnrpc.wasm
│   ├── qwen/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       └── main.rs
│   ├── qwen2vl/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-qwen2vl.wasm
│   ├── test/
│   │   ├── model-not-found/
│   │   │   ├── Cargo.toml
│   │   │   ├── README.md
│   │   │   ├── src/
│   │   │   │   └── main.rs
│   │   │   └── wasmedge-ggml-model-not-found.wasm
│   │   ├── phi-3/
│   │   │   ├── Cargo.toml
│   │   │   ├── README.md
│   │   │   ├── src/
│   │   │   │   └── main.rs
│   │   │   └── wasmedge-ggml-phi-3.wasm
│   │   ├── set-input-twice/
│   │   │   ├── Cargo.toml
│   │   │   ├── README.md
│   │   │   ├── src/
│   │   │   │   └── main.rs
│   │   │   └── wasmedge-ggml-set-input-twice.wasm
│   │   └── unload/
│   │       ├── Cargo.toml
│   │       ├── README.md
│   │       ├── src/
│   │       │   └── main.rs
│   │       └── wasmedge-ggml-unload.wasm
│   ├── tts/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── src/
│   │   │   └── main.rs
│   │   └── wasmedge-ggml-tts.wasm
│   └── whisper/
│       ├── Cargo.toml
│       ├── README.md
│       ├── src/
│       │   └── main.rs
│       └── whisper-basic.wasm
├── wasmedge-mlx/
│   ├── llama/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src/
│   │       └── main.rs
│   ├── vlm/
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── decode.py
│   │   ├── encode.py
│   │   └── src/
│   │       └── main.rs
│   └── whisper/
│       ├── Cargo.toml
│       ├── README.md
│       └── src/
│           └── main.rs
├── wasmedge-piper/
│   ├── Cargo.toml
│   ├── README.md
│   ├── config.schema.json
│   ├── dependencies.d2
│   ├── json_input.schema.json
│   └── src/
│       └── main.rs
├── wasmedge-tf-llama/
│   ├── README.md
│   └── rust/
│       ├── Cargo.toml
│       └── src/
│           └── main.rs
├── wasmedge-tf-mobilenet_v2/
│   ├── README.md
│   ├── imagenet_slim_labels.txt
│   ├── mobilenet_v2_1.4_224_frozen.pb
│   ├── rust/
│   │   ├── Cargo.toml
│   │   └── src/
│   │       └── main.rs
│   └── wasmedge-tf-example-mobilenet.wasm
└── wasmedge-tf-mtcnn/
    ├── README.md
    ├── mtcnn.pb
    ├── rust/
    │   ├── Cargo.toml
    │   └── src/
    │       └── main.rs
    └── wasmedge-tf-example-mtcnn.wasm
Download .txt
SYMBOL INDEX (240 symbols across 47 files)

FILE: openvino-mobilenet-image/rust/src/imagenet_classes.rs
  constant IMAGENET_CLASSES (line 20) | pub const IMAGENET_CLASSES: [&str; 1000] = [

FILE: openvino-mobilenet-image/rust/src/main.rs
  function main (line 9) | pub fn main() -> Result<(), Box<dyn std::error::Error>> {
  function sort_results (line 58) | fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
  function image_to_tensor (line 71) | fn image_to_tensor(path: String, height: u32, width: u32) -> Vec<u8> {
  type InferenceResult (line 108) | struct InferenceResult(usize, f32);

FILE: openvino-mobilenet-raw/rust/src/imagenet_classes.rs
  constant IMAGENET_CLASSES (line 20) | pub const IMAGENET_CLASSES: [&str; 1000] = [

FILE: openvino-mobilenet-raw/rust/src/main.rs
  function main (line 8) | pub fn main() -> Result<(), Box<dyn std::error::Error>> {
  function sort_results (line 59) | fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
  type InferenceResult (line 72) | struct InferenceResult(usize, f32);

FILE: openvino-road-segmentation-adas/openvino-road-seg-adas/src/main.rs
  function main (line 5) | fn main() -> Result<(), Box<dyn std::error::Error>> {
  function dump (line 47) | fn dump<T>(
  function image_to_tensor (line 70) | fn image_to_tensor(path: String, height: u32, width: u32) -> Vec<u8> {

FILE: openvinogenai-raw/rust/src/main.rs
  function main (line 6) | pub fn main() -> Result<(), Box<dyn std::error::Error>> {

FILE: pytorch-mobilenet-image/rust/src/imagenet_classes.rs
  constant IMAGENET_CLASSES (line 20) | pub const IMAGENET_CLASSES: [&str; 1000] = [

FILE: pytorch-mobilenet-image/rust/src/main.rs
  function main (line 8) | pub fn main() {
  function sort_results (line 52) | fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
  function image_to_tensor (line 64) | fn image_to_tensor(path: String, height: u32, width: u32) -> Vec<u8> {
  type InferenceResult (line 96) | struct InferenceResult(usize, f32);

FILE: pytorch-mobilenet-image/rust/src/named_model.rs
  function main (line 8) | pub fn main() {
  function sort_results (line 52) | fn sort_results(buffer: &[f32]) -> Vec<InferenceResult> {
  function image_to_tensor (line 64) | fn image_to_tensor(path: String, height: u32, width: u32) -> Vec<u8> {
  type InferenceResult (line 96) | struct InferenceResult(usize, f32);

FILE: pytorch-yolo-image/rust/src/main.rs
  function main (line 11) | pub fn main() {
  constant SIZE (line 74) | const SIZE: usize = 640;
  constant SIZE_U32 (line 75) | const SIZE_U32: u32 = 640;
  type Channel (line 76) | type Channel = Vec<Vec<f32>>;
  type ResizeScale (line 78) | struct ResizeScale(pub f32);
  function pre_process_image (line 80) | fn pre_process_image(path: String) -> ([Channel; 3], ResizeScale) {
  function post_process_results (line 131) | fn post_process_results(
  function transpose (line 185) | fn transpose<T>(v: Vec<Vec<T>>) -> Vec<Vec<T>> {
  type InferenceResult (line 204) | struct InferenceResult {

FILE: pytorch-yolo-image/rust/src/yolo_classes.rs
  constant YOLO_CLASSES (line 1) | pub const YOLO_CLASSES: [&str; 80] = [

FILE: tflite-birds_v1-image/rust/src/imagenet_classes.rs
  constant AIY_BIRDS_V1 (line 20) | pub const AIY_BIRDS_V1: [&str; 965] = [

FILE: tflite-birds_v1-image/rust/src/main.rs
  function main (line 9) | pub fn main() -> Result<(), Box<dyn Error>> {
  function sort_results (line 54) | fn sort_results(buffer: &[u8]) -> Vec<InferenceResult> {
  function image_to_tensor (line 66) | fn image_to_tensor(path: String, height: u32, width: u32) -> Vec<u8> {
  type InferenceResult (line 77) | struct InferenceResult(usize, u8);

FILE: wasmedge-chatTTS/src/main.rs
  function get_data_from_context (line 7) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function main (line 17) | fn main() {

FILE: wasmedge-ggml/basic/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 52) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 57) | fn set_metadata_to_context(
  function get_data_from_context (line 64) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 76) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 81) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 85) | fn main() {

FILE: wasmedge-ggml/chatml/src/main.rs
  function read_input (line 9) | fn read_input() -> String {
  function get_options_from_env (line 21) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 42) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 47) | fn set_metadata_to_context(
  function get_data_from_context (line 54) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 66) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 71) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 75) | fn main() {

FILE: wasmedge-ggml/command-r/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 41) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 46) | fn set_metadata_to_context(
  function get_data_from_context (line 53) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 65) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 69) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 73) | fn main() {

FILE: wasmedge-ggml/embedding/src/main.rs
  function read_input (line 9) | fn read_input() -> String {
  function get_options_from_env (line 21) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 39) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 44) | fn set_metadata_to_context(
  function get_data_from_context (line 51) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 62) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 66) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function get_embd_from_context (line 70) | fn get_embd_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 74) | fn main() {

FILE: wasmedge-ggml/gemma-3/src/base64.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> HashMap<&'static str, Value> {
  function set_data_to_context (line 60) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 64) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 76) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 80) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 84) | fn main() {

FILE: wasmedge-ggml/gemma-3/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> HashMap<&'static str, Value> {
  function set_data_to_context (line 66) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 70) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 82) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 86) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 90) | fn main() {

FILE: wasmedge-ggml/gemma-4/src/main.rs
  constant MULTIMODAL_IMAGE_MARKER (line 10) | const MULTIMODAL_IMAGE_MARKER: &str = "<image>";
  function read_input (line 12) | fn read_input() -> String {
  function get_options_from_env (line 24) | fn get_options_from_env() -> HashMap<&'static str, Value> {
  function set_data_to_context (line 73) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 77) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 89) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 93) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function get_system_prompt_from_env (line 97) | fn get_system_prompt_from_env() -> String {
  function get_enable_thinking_from_env (line 101) | fn get_enable_thinking_from_env() -> bool {
  function build_gemma4_system_turn (line 108) | fn build_gemma4_system_turn(system_prompt: &str, enable_thinking: bool) ...
  function build_gemma4_user_turn (line 116) | fn build_gemma4_user_turn(user_content: &str) -> String {
  function build_user_content (line 120) | fn build_user_content(user_input: &str, multimodal_enabled: bool) -> Str...
  function strip_gemma4_thoughts (line 128) | fn strip_gemma4_thoughts(text: &str) -> String {
  function strip_gemma4_turn_suffix (line 159) | fn strip_gemma4_turn_suffix(text: &str) -> String {
  function parse_gemma4_output (line 167) | fn parse_gemma4_output(text: &str) -> (String, String) {
  function main (line 179) | fn main() {

FILE: wasmedge-ggml/gemma/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 47) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 52) | fn set_metadata_to_context(
  function get_data_from_context (line 59) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 71) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 76) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 80) | fn main() {

FILE: wasmedge-ggml/grammar/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 45) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 50) | fn set_metadata_to_context(
  function get_data_from_context (line 57) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 69) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 73) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  constant JSON_GRAMMAR (line 77) | const JSON_GRAMMAR: &str = r#"
  function main (line 99) | fn main() {

FILE: wasmedge-ggml/json-schema/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 48) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 53) | fn set_metadata_to_context(
  function get_data_from_context (line 60) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 72) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 76) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  constant JSON_SCHEMA (line 80) | const JSON_SCHEMA: &str = r#"
  function main (line 111) | fn main() {

FILE: wasmedge-ggml/llama-stream/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 53) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 58) | fn set_metadata_to_context(
  function get_data_from_context (line 65) | fn get_data_from_context(context: &GraphExecutionContext, index: usize, ...
  function get_output_from_context (line 84) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_single_output_from_context (line 88) | fn get_single_output_from_context(context: &GraphExecutionContext) -> St...
  function get_metadata_from_context (line 93) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 97) | fn main() {

FILE: wasmedge-ggml/llama/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 51) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 56) | fn set_metadata_to_context(
  function get_data_from_context (line 63) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 75) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 79) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 83) | fn main() {

FILE: wasmedge-ggml/llava-base64-stream/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> HashMap<&'static str, Value> {
  function set_data_to_context (line 52) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 56) | fn get_data_from_context(context: &GraphExecutionContext, index: usize, ...
  function get_output_from_context (line 74) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_single_output_from_context (line 78) | fn get_single_output_from_context(context: &GraphExecutionContext) -> St...
  function main (line 82) | fn main() {

FILE: wasmedge-ggml/llava/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> HashMap<&'static str, Value> {
  function set_data_to_context (line 58) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 62) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 74) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 78) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 82) | fn main() {

FILE: wasmedge-ggml/multimodel/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 56) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 61) | fn set_metadata_to_context(
  function get_data_from_context (line 68) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 80) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 85) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 89) | fn main() {

FILE: wasmedge-ggml/nnrpc/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 41) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function set_metadata_to_context (line 46) | fn set_metadata_to_context(
  function get_data_from_context (line 53) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 65) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 70) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 74) | fn main() {

FILE: wasmedge-ggml/qwen/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> Value {
  function set_data_to_context (line 41) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 45) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 59) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function main (line 63) | fn main() {

FILE: wasmedge-ggml/qwen2vl/src/main.rs
  function read_input (line 10) | fn read_input() -> String {
  function get_options_from_env (line 22) | fn get_options_from_env() -> HashMap<&'static str, Value> {
  function set_data_to_context (line 58) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 62) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 74) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function get_metadata_from_context (line 78) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 82) | fn main() {

FILE: wasmedge-ggml/test/model-not-found/src/main.rs
  function main (line 4) | fn main() {

FILE: wasmedge-ggml/test/phi-3/src/main.rs
  function get_options_from_env (line 9) | fn get_options_from_env() -> Value {
  function get_data_from_context (line 27) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 39) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function main (line 43) | fn main() {

FILE: wasmedge-ggml/test/set-input-twice/src/main.rs
  function get_options_from_env (line 8) | fn get_options_from_env() -> Value {
  function get_data_from_context (line 27) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_metadata_from_context (line 39) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 43) | fn main() {

FILE: wasmedge-ggml/test/unload/src/main.rs
  function get_options_from_env (line 9) | fn get_options_from_env() -> Value {
  function get_data_from_context (line 27) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 39) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function main (line 43) | fn main() {

FILE: wasmedge-ggml/tts/src/main.rs
  function get_options_from_env (line 11) | fn get_options_from_env() -> HashMap<&'static str, Value> {
  function set_data_to_context (line 82) | fn set_data_to_context(context: &mut GraphExecutionContext, data: Vec<u8...
  function get_data_from_context (line 86) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_metadata_from_context (line 98) | fn get_metadata_from_context(context: &GraphExecutionContext) -> Value {
  function main (line 103) | fn main() {

FILE: wasmedge-ggml/whisper/src/main.rs
  function main (line 6) | pub fn main() -> Result<(), Box<dyn Error>> {

FILE: wasmedge-mlx/llama/src/main.rs
  function get_data_from_context (line 6) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 18) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function main (line 21) | fn main() {

FILE: wasmedge-mlx/vlm/decode.py
  function _remove_space (line 6) | def _remove_space(x):
  class Detokenizer (line 12) | class Detokenizer():
    method __init__ (line 13) | def __init__(self, tokenizer, trim_space=True):
    method add_token (line 27) | def add_token(self, token):
  function decode (line 40) | def decode(token: list, model_path: str, **kwargs):

FILE: wasmedge-mlx/vlm/encode.py
  function encode (line 8) | def encode(processor, image, prompts):

FILE: wasmedge-mlx/vlm/src/main.rs
  function get_data_from_context (line 14) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 26) | fn get_output_from_context(context: &GraphExecutionContext) -> NDTensorI...
  function read_json (line 30) | fn read_json(path: &str) -> io::Result<Value> {
  function main (line 38) | fn main() {

FILE: wasmedge-mlx/whisper/src/main.rs
  function get_data_from_context (line 10) | fn get_data_from_context(context: &GraphExecutionContext, index: usize) ...
  function get_output_from_context (line 22) | fn get_output_from_context(context: &GraphExecutionContext) -> String {
  function read_json (line 26) | fn read_json(path: &str) -> io::Result<Value> {
  function main (line 33) | fn main() {

FILE: wasmedge-piper/src/main.rs
  function main (line 1) | fn main() {

FILE: wasmedge-tf-llama/rust/src/main.rs
  type ChatbotError (line 14) | pub enum ChatbotError {
    method from (line 26) | fn from(error: wasi_nn::Error) -> Self {
  type Result (line 31) | type Result<T> = std::result::Result<T, ChatbotError>;
  type Tokenizer (line 34) | struct Tokenizer {
    method new (line 41) | fn new(initial_vocab: Vec<(&str, i32)>) -> Self {
    method tokenize (line 67) | fn tokenize(&mut self, input: &str) -> Vec<i32> {
    method tokenize_with_fixed_length (line 82) | fn tokenize_with_fixed_length(&mut self, input: &str, max_length: usiz...
    method detokenize (line 94) | fn detokenize(&self, tokens: &[i32]) -> String {
  function load_model (line 109) | fn load_model(model_path: &str) -> Result<wasi_nn::Graph> {
  function init_context (line 118) | fn init_context(graph: wasi_nn::Graph) -> Result<wasi_nn::GraphExecution...
  function main (line 122) | fn main() -> Result<()> {
  function run_inference (line 217) | fn run_inference(ctx: &wasi_nn::GraphExecutionContext) -> Result<()> {
  function get_model_output (line 222) | fn get_model_output(

FILE: wasmedge-tf-mobilenet_v2/rust/src/main.rs
  function main (line 6) | fn main() {

FILE: wasmedge-tf-mtcnn/rust/src/main.rs
  function main (line 10) | fn main() {
Condensed preview — 189 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (3,199K chars).
[
  {
    "path": ".github/workflows/build_openvino_mobilenet.yml",
    "chars": 2358,
    "preview": "name: OpenVINO Mobilenet Example\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  push:\n    branches: [master]\n    paths:\n    "
  },
  {
    "path": ".github/workflows/build_openvino_road_seg_adas.yml",
    "chars": 2009,
    "preview": "name: OpenVINO Road Segmentation ADAS Example\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  push:\n    branches: [master]\n  "
  },
  {
    "path": ".github/workflows/build_pytorch_yolo.yml",
    "chars": 1742,
    "preview": "name: Pytorch Yolo Detection\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  push:\n    branches: ['*']\n    paths:\n      - \".g"
  },
  {
    "path": ".github/workflows/chatTTS.yml",
    "chars": 1770,
    "preview": "name: ChatTTS example\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  push:\n    paths:\n      - \".github/workflows/chatTTS.yml"
  },
  {
    "path": ".github/workflows/llama.yml",
    "chars": 11304,
    "preview": "name: ggml llama2 examples\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  workflow_dispatch:\n    inputs:\n      logLevel:\n   "
  },
  {
    "path": ".github/workflows/piper.yml",
    "chars": 2361,
    "preview": "name: Piper Example\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  push:\n    paths:\n      - \".github/workflows/piper.yml\"\n  "
  },
  {
    "path": ".github/workflows/pytorch.yml",
    "chars": 2622,
    "preview": "name: PyTorch examples\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  workflow_dispatch:\n    inputs:\n      logLevel:\n       "
  },
  {
    "path": ".github/workflows/tflite.yml",
    "chars": 2158,
    "preview": "name: TFlite examples\n\non:\n  schedule:\n    - cron: \"0 0 * * *\"\n  workflow_dispatch:\n    inputs:\n      logLevel:\n        "
  },
  {
    "path": ".gitignore",
    "chars": 360,
    "preview": "**/build\n**/target\n**/*.gguf\n\nopenvino-mobilenet-image/mobilenet.bin\nopenvino-mobilenet-image/mobilenet.xml\n\nopenvino-mo"
  },
  {
    "path": "LICENSE",
    "chars": 12243,
    "preview": "\n                                 Apache License\n                           Version 2.0, January 2004\n                  "
  },
  {
    "path": "README.md",
    "chars": 4522,
    "preview": "<div align=\"center\">\n  <h1>WasmEdge WASI-NN Examples</h1>\n\n  <p><strong>High-level bindings for writing wasi-nn applicat"
  },
  {
    "path": "openvino-mobilenet-image/README.md",
    "chars": 2213,
    "preview": "# Mobilenet example with WasmEdge WASI-NN OpenVINO plugin\n\nThis example demonstrates how to use WasmEdge WASI-NN OpenVIN"
  },
  {
    "path": "openvino-mobilenet-image/download_mobilenet.sh",
    "chars": 333,
    "preview": "FIXTURE=https://github.com/intel/openvino-rs/raw/v0.3.3/crates/openvino/tests/fixtures/mobilenet\nTODIR=$1\n\nif [ ! -f $TO"
  },
  {
    "path": "openvino-mobilenet-image/rust/Cargo.toml",
    "chars": 390,
    "preview": "[package]\nname = \"wasmedge-wasinn-example-mobilenet-image\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"README"
  },
  {
    "path": "openvino-mobilenet-image/rust/src/imagenet_classes.rs",
    "chars": 29571,
    "preview": "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (th"
  },
  {
    "path": "openvino-mobilenet-image/rust/src/main.rs",
    "chars": 3705,
    "preview": "use image::io::Reader;\nuse image::DynamicImage;\nuse std::env;\nuse wasi_nn;\nmod imagenet_classes;\n\nuse wasi_nn::{Executio"
  },
  {
    "path": "openvino-mobilenet-raw/README.md",
    "chars": 2271,
    "preview": "# Mobilenet example with WasmEdge WASI-NN OpenVINO plugin\n\nThis example demonstrates how to use WasmEdge WASI-NN OpenVIN"
  },
  {
    "path": "openvino-mobilenet-raw/download_mobilenet.sh",
    "chars": 472,
    "preview": "FIXTURE=https://github.com/intel/openvino-rs/raw/v0.3.3/crates/openvino/tests/fixtures/mobilenet\nTODIR=$1\n\nif [ ! -f $TO"
  },
  {
    "path": "openvino-mobilenet-raw/rust/Cargo.toml",
    "chars": 213,
    "preview": "[package]\nname = \"wasmedge-wasinn-example-mobilenet\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"README.md\"\ne"
  },
  {
    "path": "openvino-mobilenet-raw/rust/src/imagenet_classes.rs",
    "chars": 29571,
    "preview": "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (th"
  },
  {
    "path": "openvino-mobilenet-raw/rust/src/main.rs",
    "chars": 2357,
    "preview": "use std::env;\nuse std::fs;\nuse wasi_nn;\nmod imagenet_classes;\n\nuse wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncodin"
  },
  {
    "path": "openvino-road-segmentation-adas/README.md",
    "chars": 3428,
    "preview": "\n# OpenVINO Road Segmentation ADAS Example on WasmEdge Runtime\n\n## Overview\n\nIn this example, we'll use `WasmEdge wasi-n"
  },
  {
    "path": "openvino-road-segmentation-adas/model/road-segmentation-adas-0001.xml",
    "chars": 401509,
    "preview": "<?xml version=\"1.0\" ?>\n<net name=\"road-segmentation-adas-0001\" version=\"10\">\n\t<layers>\n\t\t<layer id=\"0\" name=\"data\" type="
  },
  {
    "path": "openvino-road-segmentation-adas/openvino-road-seg-adas/Cargo.toml",
    "chars": 325,
    "preview": "[package]\nauthors = [\"Sam Liu <sam@secondstate.io>\"]\nedition = \"2021\"\nname = \"openvino-road-seg-adas\"\nversion = \"0.2.0\"\n"
  },
  {
    "path": "openvino-road-segmentation-adas/openvino-road-seg-adas/src/main.rs",
    "chars": 3076,
    "preview": "use image::{io::Reader, DynamicImage};\nuse std::env;\nuse wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding, TensorT"
  },
  {
    "path": "openvino-road-segmentation-adas/visualize_inference_result.ipynb",
    "chars": 381327,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"code\",\n   \"execution_count\": 29,\n   \"metadata\": {},\n   \"outputs\": [],\n   \"source\": [\n"
  },
  {
    "path": "openvinogenai-raw/README.md",
    "chars": 3230,
    "preview": "# Deepseek example with WasmEdge WASI-NN OpenVINO GenAI plugin\n\nThis example demonstrates how to use WasmEdge WASI-NN Op"
  },
  {
    "path": "openvinogenai-raw/rust/Cargo.toml",
    "chars": 297,
    "preview": "[package]\nname = \"openvinogenai-deepseek-raw\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"README.md\"\nedition "
  },
  {
    "path": "openvinogenai-raw/rust/src/main.rs",
    "chars": 1560,
    "preview": "use std::env;\nuse std::io::{self, Write};\nuse wasmedge_wasi_nn;\nuse wasmedge_wasi_nn::{ExecutionTarget, GraphBuilder, Gr"
  },
  {
    "path": "pytorch-mobilenet-image/README.md",
    "chars": 3290,
    "preview": "# Mobilenet Example For WASI-NN with PyTorch Backend\n\nThis package is a high-level Rust bindings for [wasi-nn] example o"
  },
  {
    "path": "pytorch-mobilenet-image/gen_mobilenet_model.py",
    "chars": 501,
    "preview": "import os\nimport torch\nfrom torch import jit\n\nwith torch.no_grad():\n    fake_input = torch.rand(1, 3, 224, 224)\n    mode"
  },
  {
    "path": "pytorch-mobilenet-image/gen_tensor.py",
    "chars": 697,
    "preview": "import sys\nimport struct\nfrom PIL import Image\nfrom torchvision import transforms\n\nif __name__ == '__main__':\n    input_"
  },
  {
    "path": "pytorch-mobilenet-image/rust/Cargo.toml",
    "chars": 567,
    "preview": "[package]\nname = \"wasmedge-wasinn-example-mobilenet-image\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"README"
  },
  {
    "path": "pytorch-mobilenet-image/rust/src/imagenet_classes.rs",
    "chars": 29571,
    "preview": "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (th"
  },
  {
    "path": "pytorch-mobilenet-image/rust/src/main.rs",
    "chars": 3470,
    "preview": "use image;\nuse std::env;\nuse std::fs::File;\nuse std::io::Read;\nuse wasi_nn;\nmod imagenet_classes;\n\npub fn main() {\n    l"
  },
  {
    "path": "pytorch-mobilenet-image/rust/src/named_model.rs",
    "chars": 3462,
    "preview": "use image;\nuse std::env;\nuse std::fs::File;\nuse std::io::Read;\nuse wasi_nn;\nmod imagenet_classes;\n\npub fn main() {\n    l"
  },
  {
    "path": "pytorch-yolo-image/README.md",
    "chars": 1125,
    "preview": "# YOLOV8 detection Example For WASI-NN with PyTorch Backend\n\nCode for a working example of object detection using the yo"
  },
  {
    "path": "pytorch-yolo-image/get_model.py",
    "chars": 302,
    "preview": "# requires ultralytics to be installed\nfrom ultralytics import YOLO\n\n# Load a model, saving it to disk\n# Note there are "
  },
  {
    "path": "pytorch-yolo-image/rust/Cargo.toml",
    "chars": 522,
    "preview": "[package]\nname = \"wasmedge-wasinn-example-yolo-image\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"README.md\"\n"
  },
  {
    "path": "pytorch-yolo-image/rust/src/main.rs",
    "chars": 6467,
    "preview": "use image::{self, GenericImage, RgbImage};\nuse std::env;\nuse std::fs::File;\nuse std::io::Read;\nuse wasi_nn;\n\nuse crate::"
  },
  {
    "path": "pytorch-yolo-image/rust/src/yolo_classes.rs",
    "chars": 1227,
    "preview": "pub const YOLO_CLASSES: [&str; 80] = [\n    \"person\",\n    \"bicycle\",\n    \"car\",\n    \"motorbike\",\n    \"aeroplane\",\n    \"bu"
  },
  {
    "path": "scripts/install_libtorch.sh",
    "chars": 797,
    "preview": "#!/usr/bin/env bash\n# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception\n# SPDX-FileCopyrightText: 2019-2022 Second"
  },
  {
    "path": "scripts/install_openvino.sh",
    "chars": 471,
    "preview": "#!/usr/bin/env bash\nset -e\necho \"Installing OpenVINO with version 2024.2.0\"\nKEY_FILE=GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB\nw"
  },
  {
    "path": "tflite-birds_v1-image/README.md",
    "chars": 1863,
    "preview": "# Mobilenet Example For WASI-NN with Tensorflow Lite Backend\n\nThis package is a high-level Rust bindings for [wasi-nn] e"
  },
  {
    "path": "tflite-birds_v1-image/rust/Cargo.toml",
    "chars": 378,
    "preview": "[package]\nname = \"wasmedge-wasinn-example-tflite-bird-image\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"READ"
  },
  {
    "path": "tflite-birds_v1-image/rust/src/imagenet_classes.rs",
    "chars": 26261,
    "preview": "/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (th"
  },
  {
    "path": "tflite-birds_v1-image/rust/src/main.rs",
    "chars": 2794,
    "preview": "use image::io::Reader;\nuse image::DynamicImage;\nuse std::env;\nuse std::error::Error;\nuse std::fs;\nuse wasi_nn::{Executio"
  },
  {
    "path": "wasmedge-chatTTS/.gitignore",
    "chars": 12,
    "preview": "asset\nconfig"
  },
  {
    "path": "wasmedge-chatTTS/Cargo.toml",
    "chars": 218,
    "preview": "[package]\nname = \"wasmedge-chattts\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wasi-"
  },
  {
    "path": "wasmedge-chatTTS/README.md",
    "chars": 2236,
    "preview": "# ChatTTS example with WasmEdge WASI-NN ChatTTS plugin\nThis example demonstrates how to use the WasmEdge WASI-NN ChatTTS"
  },
  {
    "path": "wasmedge-chatTTS/src/main.rs",
    "chars": 2112,
    "preview": "use hound;\nuse serde_json::json;\nuse wasmedge_wasi_nn::{\n    self, ExecutionTarget, GraphBuilder, GraphEncoding, GraphEx"
  },
  {
    "path": "wasmedge-ggml/README.md",
    "chars": 10927,
    "preview": "# Llama Example For WASI-NN with GGML Backend\n\n[See it in action!](https://x.com/juntao/status/1705588244602114303)\n\n## "
  },
  {
    "path": "wasmedge-ggml/basic/Cargo.toml",
    "chars": 136,
    "preview": "[package]\nname = \"wasmedge-ggml-basic\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wa"
  },
  {
    "path": "wasmedge-ggml/basic/README.md",
    "chars": 3691,
    "preview": "# Basic Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.md) f"
  },
  {
    "path": "wasmedge-ggml/basic/src/main.rs",
    "chars": 5657,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/chatml/Cargo.toml",
    "chars": 137,
    "preview": "[package]\nname = \"wasmedge-ggml-chatml\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-w"
  },
  {
    "path": "wasmedge-ggml/chatml/README.md",
    "chars": 1692,
    "preview": "# `chatml`\n\n## Execute\n\n````console\n$ wasmedge --dir .:. \\\n  --nn-preload default:GGML:AUTO:mixtral-8x7b-instruct-v0.1.Q"
  },
  {
    "path": "wasmedge-ggml/chatml/src/main.rs",
    "chars": 6939,
    "preview": "use serde_json::{json, Value};\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, Error, Executi"
  },
  {
    "path": "wasmedge-ggml/command-r/Cargo.toml",
    "chars": 140,
    "preview": "[package]\nname = \"wasmedge-ggml-command-r\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedg"
  },
  {
    "path": "wasmedge-ggml/command-r/README.md",
    "chars": 1412,
    "preview": "# Command-R Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.m"
  },
  {
    "path": "wasmedge-ggml/command-r/src/main.rs",
    "chars": 9784,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/embedding/Cargo.toml",
    "chars": 146,
    "preview": "[package]\nname = \"wasmedge-ggml-llama-embedding\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nw"
  },
  {
    "path": "wasmedge-ggml/embedding/README.md",
    "chars": 1392,
    "preview": "# Embedding Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.m"
  },
  {
    "path": "wasmedge-ggml/embedding/src/main.rs",
    "chars": 6475,
    "preview": "use serde_json::{json, Value};\nuse std::env;\nuse std::io::{self};\nuse wasmedge_wasi_nn::{\n    self, BackendError, Error,"
  },
  {
    "path": "wasmedge-ggml/gemma/Cargo.toml",
    "chars": 136,
    "preview": "[package]\nname = \"wasmedge-ggml-gemma\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wa"
  },
  {
    "path": "wasmedge-ggml/gemma/README.md",
    "chars": 2544,
    "preview": "# `gemma`\n\n## Get Model\n\n```console\nwget https://huggingface.co/second-state/Gemma-2b-it-GGUF/resolve/main/gemma-2b-it-Q"
  },
  {
    "path": "wasmedge-ggml/gemma/src/main.rs",
    "chars": 7127,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/gemma-3/Cargo.toml",
    "chars": 269,
    "preview": "[package]\nname = \"wasmedge-ggml-gemma-3\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-"
  },
  {
    "path": "wasmedge-ggml/gemma-3/README.md",
    "chars": 5480,
    "preview": "# Gemma-3 Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.md)"
  },
  {
    "path": "wasmedge-ggml/gemma-3/src/base64.rs",
    "chars": 729766,
    "preview": "use serde_json::Value;\nuse std::collections::HashMap;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, Backe"
  },
  {
    "path": "wasmedge-ggml/gemma-3/src/main.rs",
    "chars": 7212,
    "preview": "use serde_json::Value;\nuse std::collections::HashMap;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, Backe"
  },
  {
    "path": "wasmedge-ggml/gemma-4/Cargo.toml",
    "chars": 198,
    "preview": "[package]\nname = \"wasmedge-ggml-gemma-4\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-"
  },
  {
    "path": "wasmedge-ggml/gemma-4/README.md",
    "chars": 2673,
    "preview": "# Gemma-4 Example For WASI-NN With GGML Backend\n## Requirements\n\n- WasmEdge with the WASI-NN GGML plugin installed\n- Rus"
  },
  {
    "path": "wasmedge-ggml/gemma-4/src/main.rs",
    "chars": 9744,
    "preview": "use serde_json::Value;\nuse std::collections::HashMap;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, Backe"
  },
  {
    "path": "wasmedge-ggml/grammar/Cargo.toml",
    "chars": 138,
    "preview": "[package]\nname = \"wasmedge-ggml-grammar\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-"
  },
  {
    "path": "wasmedge-ggml/grammar/README.md",
    "chars": 1505,
    "preview": "# Grammar Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.md)"
  },
  {
    "path": "wasmedge-ggml/grammar/src/main.rs",
    "chars": 6479,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/json-schema/Cargo.toml",
    "chars": 142,
    "preview": "[package]\nname = \"wasmedge-ggml-json-schema\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasme"
  },
  {
    "path": "wasmedge-ggml/json-schema/README.md",
    "chars": 1672,
    "preview": "# JSON Schema Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README"
  },
  {
    "path": "wasmedge-ggml/json-schema/src/main.rs",
    "chars": 6689,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/llama/Cargo.toml",
    "chars": 136,
    "preview": "[package]\nname = \"wasmedge-ggml-llama\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wa"
  },
  {
    "path": "wasmedge-ggml/llama/README.md",
    "chars": 1681,
    "preview": "# `llama`\n\n## Execute - llama 3\n\n### Model Download Link\n\n```console\nwget https://huggingface.co/QuantFactory/Meta-Llama"
  },
  {
    "path": "wasmedge-ggml/llama/src/main.rs",
    "chars": 7863,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/llama-stream/Cargo.toml",
    "chars": 143,
    "preview": "[package]\nname = \"wasmedge-ggml-llama-stream\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasm"
  },
  {
    "path": "wasmedge-ggml/llama-stream/README.md",
    "chars": 1702,
    "preview": "# `llama-stream`\n\n## Execute - llama 3\n\n### Model Download Link\n\n```console\nwget https://huggingface.co/QuantFactory/Met"
  },
  {
    "path": "wasmedge-ggml/llama-stream/src/main.rs",
    "chars": 10655,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io::{self, Write};\nuse wasmedge_wasi_nn::{\n    self,"
  },
  {
    "path": "wasmedge-ggml/llava/Cargo.toml",
    "chars": 136,
    "preview": "[package]\nname = \"wasmedge-ggml-llava\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wa"
  },
  {
    "path": "wasmedge-ggml/llava/README.md",
    "chars": 4085,
    "preview": "# Llava Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.md) f"
  },
  {
    "path": "wasmedge-ggml/llava/src/main.rs",
    "chars": 6940,
    "preview": "use serde_json::Value;\nuse std::collections::HashMap;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, Backe"
  },
  {
    "path": "wasmedge-ggml/llava-base64-stream/Cargo.toml",
    "chars": 150,
    "preview": "[package]\nname = \"wasmedge-ggml-llava-base64-stream\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1."
  },
  {
    "path": "wasmedge-ggml/llava-base64-stream/README.md",
    "chars": 1438,
    "preview": "# Llava Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.md) f"
  },
  {
    "path": "wasmedge-ggml/llava-base64-stream/src/main.rs",
    "chars": 1086763,
    "preview": "use serde_json::Value;\nuse std::collections::HashMap;\nuse std::env;\nuse std::io::{self, Write};\nuse wasmedge_wasi_nn::{\n"
  },
  {
    "path": "wasmedge-ggml/multimodel/Cargo.toml",
    "chars": 141,
    "preview": "[package]\nname = \"wasmedge-ggml-multimodel\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmed"
  },
  {
    "path": "wasmedge-ggml/multimodel/README.md",
    "chars": 2626,
    "preview": "\n# Multiple Models Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../R"
  },
  {
    "path": "wasmedge-ggml/multimodel/src/main.rs",
    "chars": 6797,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/nnrpc/Cargo.toml",
    "chars": 136,
    "preview": "[package]\nname = \"wasmedge-ggml-nnrpc\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wa"
  },
  {
    "path": "wasmedge-ggml/nnrpc/README.md",
    "chars": 1399,
    "preview": "# RPC Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.md) for"
  },
  {
    "path": "wasmedge-ggml/nnrpc/src/main.rs",
    "chars": 6682,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/qwen/Cargo.toml",
    "chars": 233,
    "preview": "[package]\nname = \"wasmedge-ggml-qwen\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https"
  },
  {
    "path": "wasmedge-ggml/qwen/README.md",
    "chars": 959,
    "preview": "# `通义千问`\n\n## Execute - Tong Yi Qwen\n\n### Model Download Link\n\n```console\nwget https://huggingface.co/Qwen/Qwen1.5-0.5B-C"
  },
  {
    "path": "wasmedge-ggml/qwen/src/main.rs",
    "chars": 4330,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, BackendError, "
  },
  {
    "path": "wasmedge-ggml/qwen2vl/Cargo.toml",
    "chars": 138,
    "preview": "[package]\nname = \"wasmedge-ggml-qwen2vl\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-"
  },
  {
    "path": "wasmedge-ggml/qwen2vl/README.md",
    "chars": 1585,
    "preview": "# Qwen-2VL Example For WASI-NN with GGML Backend\n\n> [!NOTE]\n> Please refer to the [wasmedge-ggml/README.md](../README.md"
  },
  {
    "path": "wasmedge-ggml/qwen2vl/src/main.rs",
    "chars": 7122,
    "preview": "use serde_json::Value;\nuse std::collections::HashMap;\nuse std::env;\nuse std::io;\nuse wasmedge_wasi_nn::{\n    self, Backe"
  },
  {
    "path": "wasmedge-ggml/test/model-not-found/Cargo.toml",
    "chars": 146,
    "preview": "[package]\nname = \"wasmedge-ggml-model-not-found\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nw"
  },
  {
    "path": "wasmedge-ggml/test/model-not-found/README.md",
    "chars": 249,
    "preview": "# `model-not-found`\n\nEnsure that we get the `ModelNotFound` error when the model does not exist.\n\n## Execute\n\n```console"
  },
  {
    "path": "wasmedge-ggml/test/model-not-found/src/main.rs",
    "chars": 694,
    "preview": "use std::env;\nuse wasmedge_wasi_nn::{self, BackendError, Error, ExecutionTarget, GraphBuilder, GraphEncoding};\n\nfn main("
  },
  {
    "path": "wasmedge-ggml/test/phi-3/Cargo.toml",
    "chars": 136,
    "preview": "[package]\nname = \"wasmedge-ggml-phi-3\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wa"
  },
  {
    "path": "wasmedge-ggml/test/phi-3/README.md",
    "chars": 349,
    "preview": "# `phi-3-mini`\n\nEnsure that we can use the `phi-3-mini` model.\n\n## Execute\n\n```console\n$ curl -LO https://huggingface.co"
  },
  {
    "path": "wasmedge-ggml/test/phi-3/src/main.rs",
    "chars": 3440,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse wasmedge_wasi_nn::{\n    self, BackendError, Error, Execut"
  },
  {
    "path": "wasmedge-ggml/test/set-input-twice/Cargo.toml",
    "chars": 146,
    "preview": "[package]\nname = \"wasmedge-ggml-set-input-twice\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nw"
  },
  {
    "path": "wasmedge-ggml/test/set-input-twice/README.md",
    "chars": 365,
    "preview": "# `set-input-twice`\n\nEnsure that we get the same result from executing `set_input` twice.\n\n## Execute\n\n```console\n$ curl"
  },
  {
    "path": "wasmedge-ggml/test/set-input-twice/src/main.rs",
    "chars": 3986,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse wasmedge_wasi_nn::{\n    self, ExecutionTarget, GraphBuild"
  },
  {
    "path": "wasmedge-ggml/test/unload/Cargo.toml",
    "chars": 137,
    "preview": "[package]\nname = \"wasmedge-ggml-unload\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-w"
  },
  {
    "path": "wasmedge-ggml/test/unload/README.md",
    "chars": 358,
    "preview": "# `unload`\n\nEnsure that we can unload and reload the graph multiple times without errors.\n\n## Execute\n\n```console\n$ curl"
  },
  {
    "path": "wasmedge-ggml/test/unload/src/main.rs",
    "chars": 3765,
    "preview": "use serde_json::json;\nuse serde_json::Value;\nuse std::env;\nuse wasmedge_wasi_nn::{\n    self, BackendError, Error, Execut"
  },
  {
    "path": "wasmedge-ggml/tts/Cargo.toml",
    "chars": 134,
    "preview": "[package]\nname = \"wasmedge-ggml-tts\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wasi"
  },
  {
    "path": "wasmedge-ggml/tts/README.md",
    "chars": 1016,
    "preview": "# `tts` - Text-to-Speech Example\n\n## Model Download\n\n```console\nwget https://huggingface.co/second-state/OuteTTS-0.2-500"
  },
  {
    "path": "wasmedge-ggml/tts/src/main.rs",
    "chars": 5977,
    "preview": "use serde_json::Value;\nuse std::collections::HashMap;\nuse std::env;\nuse std::fs::File;\nuse std::io::Write;\nuse wasmedge_"
  },
  {
    "path": "wasmedge-ggml/whisper/Cargo.toml",
    "chars": 111,
    "preview": "[package]\nname = \"whisper-basic\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nwasmedge-wasi-nn = \"0.8.0\"\n"
  },
  {
    "path": "wasmedge-ggml/whisper/README.md",
    "chars": 2273,
    "preview": "# Basic Example For WASI-NN with Whisper Backend\n\nThis example is for a basic audio recognition with WASI-NN whisper bac"
  },
  {
    "path": "wasmedge-ggml/whisper/src/main.rs",
    "chars": 1184,
    "preview": "use std::env;\nuse std::error::Error;\nuse std::fs;\nuse wasmedge_wasi_nn::{ExecutionTarget, GraphBuilder, GraphEncoding, T"
  },
  {
    "path": "wasmedge-mlx/llama/Cargo.toml",
    "chars": 129,
    "preview": "[package]\nname = \"wasmedge-mlx\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wasi-nn ="
  },
  {
    "path": "wasmedge-mlx/llama/README.md",
    "chars": 3083,
    "preview": "# MLX example with WasmEdge WASI-NN MLX plugin\n\nThis example demonstrates using WasmEdge WASI-NN MLX plugin to perform a"
  },
  {
    "path": "wasmedge-mlx/llama/src/main.rs",
    "chars": 1818,
    "preview": "use serde_json::json;\nuse std::env;\nuse wasmedge_wasi_nn::{\n    self, ExecutionTarget, GraphBuilder, GraphEncoding, Grap"
  },
  {
    "path": "wasmedge-mlx/vlm/Cargo.toml",
    "chars": 250,
    "preview": "[package]\nname = \"wasmedge-vlm\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wasi-nn ="
  },
  {
    "path": "wasmedge-mlx/vlm/README.md",
    "chars": 2481,
    "preview": "# VLM example with WasmEdge WASI-NN MLX plugin\n\nThis example demonstrates using WasmEdge WASI-NN MLX plugin to perform a"
  },
  {
    "path": "wasmedge-mlx/vlm/decode.py",
    "chars": 1527,
    "preview": "from transformers import AutoProcessor\nimport mlx.core as mx\nimport sys\n\n\ndef _remove_space(x):\n    if x and x[0] == \" \""
  },
  {
    "path": "wasmedge-mlx/vlm/encode.py",
    "chars": 1641,
    "preview": "\nfrom transformers import AutoProcessor\nimport mlx.core as mx\nfrom PIL import Image, ImageOps\nimport sys\n\n\ndef encode(pr"
  },
  {
    "path": "wasmedge-mlx/vlm/src/main.rs",
    "chars": 3853,
    "preview": "use rust_processor::auto::processing_auto::AutoProcessor;\nuse rust_processor::gemma3::detokenizer::decode;\nuse rust_proc"
  },
  {
    "path": "wasmedge-mlx/whisper/Cargo.toml",
    "chars": 133,
    "preview": "[package]\nname = \"wasmedge-whisper\"\nversion = \"0.1.0\"\nedition = \"2024\"\n\n[dependencies]\nserde_json = \"1.0\"\nwasmedge-wasi-"
  },
  {
    "path": "wasmedge-mlx/whisper/README.md",
    "chars": 1830,
    "preview": "# Whisper example with WasmEdge WASI-NN MLX plugin\n\nThis example demonstrates using WasmEdge WASI-NN MLX plugin to perfo"
  },
  {
    "path": "wasmedge-mlx/whisper/src/main.rs",
    "chars": 1932,
    "preview": "use std::env;\nuse wasmedge_wasi_nn::{\n    self, ExecutionTarget, GraphBuilder, GraphEncoding, GraphExecutionContext, Ten"
  },
  {
    "path": "wasmedge-piper/Cargo.toml",
    "chars": 233,
    "preview": "[package]\nname = \"wasmedge-piper\"\nversion = \"0.1.0\"\nedition = \"2021\"\n\n# See more keys and their definitions at https://d"
  },
  {
    "path": "wasmedge-piper/README.md",
    "chars": 5086,
    "preview": "# Text to speech example with WasmEdge WASI-NN Piper plugin\n\nThis example demonstrates how to use WasmEdge WASI-NN Piper"
  },
  {
    "path": "wasmedge-piper/config.schema.json",
    "chars": 2423,
    "preview": "{\n    \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n    \"properties\": {\n        \"model\": {\n            \"descript"
  },
  {
    "path": "wasmedge-piper/dependencies.d2",
    "chars": 181,
    "preview": "direction: right\nWasmEdge WASI-NN Piper plugin -> piper\npiper -> piper-phonemize\npiper -> espeak-ng\npiper -> onnxruntime"
  },
  {
    "path": "wasmedge-piper/json_input.schema.json",
    "chars": 531,
    "preview": "{\n    \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n    \"properties\": {\n        \"text\": {\n            \"descripti"
  },
  {
    "path": "wasmedge-piper/src/main.rs",
    "chars": 1244,
    "preview": "fn main() {\n    // create graph with the config\n    let config = serde_json::json!({\n        \"model\": \"en_US-lessac-medi"
  },
  {
    "path": "wasmedge-tf-llama/README.md",
    "chars": 1728,
    "preview": "# Llama Example For WasmEdge-Tensorflow plug-in\n\nThis package is a high-level Rust bindings for [WasmEdge-TensorFlow plu"
  },
  {
    "path": "wasmedge-tf-llama/rust/Cargo.toml",
    "chars": 360,
    "preview": "[package]\nname = \"wasmedge-tf-example-llama\"\nversion = \"0.1.0\"\nauthors = [\"victoryang00\"]\nreadme = \"README.md\"\nedition ="
  },
  {
    "path": "wasmedge-tf-llama/rust/src/main.rs",
    "chars": 6798,
    "preview": "#![feature(str_as_str)]\nuse bytemuck::{cast_slice, cast_slice_mut};\nuse std::collections::HashMap;\nuse std::env;\nuse std"
  },
  {
    "path": "wasmedge-tf-mobilenet_v2/README.md",
    "chars": 1788,
    "preview": "# Mobilenet Example For WasmEdge-Tensorflow plug-in\n\nThis package is a high-level Rust bindings for [WasmEdge-TensorFlow"
  },
  {
    "path": "wasmedge-tf-mobilenet_v2/imagenet_slim_labels.txt",
    "chars": 10480,
    "preview": "dummy\ntench\ngoldfish\ngreat white shark\ntiger shark\nhammerhead\nelectric ray\nstingray\ncock\nhen\nostrich\nbrambling\ngoldfinch"
  },
  {
    "path": "wasmedge-tf-mobilenet_v2/rust/Cargo.toml",
    "chars": 217,
    "preview": "[package]\nname = \"wasmedge-tf-example-mobilenet\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"README.md\"\nediti"
  },
  {
    "path": "wasmedge-tf-mobilenet_v2/rust/src/main.rs",
    "chars": 1873,
    "preview": "use std::env;\nuse std::fs::File;\nuse std::io::Read;\nuse wasmedge_tensorflow_interface;\n\nfn main() {\n    let args: Vec<St"
  },
  {
    "path": "wasmedge-tf-mtcnn/README.md",
    "chars": 1548,
    "preview": "# Mobilenet Example For WasmEdge-Tensorflow plug-in\n\nThis package is a high-level Rust bindings for [WasmEdge-TensorFlow"
  },
  {
    "path": "wasmedge-tf-mtcnn/rust/Cargo.toml",
    "chars": 326,
    "preview": "[package]\nname = \"wasmedge-tf-example-mtcnn\"\nversion = \"0.1.0\"\nauthors = [\"Second-State\"]\nreadme = \"README.md\"\nedition ="
  },
  {
    "path": "wasmedge-tf-mtcnn/rust/src/main.rs",
    "chars": 2248,
    "preview": "use std::env;\nuse std::fs::File;\nuse std::io::Read;\nuse wasmedge_tensorflow_interface::TFSession;\n\nuse image::{GenericIm"
  }
]

// ... and 40 more files (download for full content)

About this extraction

This page contains the full source code of the second-state/WasmEdge-WASINN-examples GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 189 files (52.4 MB, approximately 774.1k tokens) and a symbol index of 240 extracted functions, classes, methods, constants, and types. Use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repository-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!